diff --git a/dist/cli.js b/dist/cli.js index 5974ea7..e0ecbd1 100644 --- a/dist/cli.js +++ b/dist/cli.js @@ -10,18 +10,18 @@ var promises = require('fs/promises'); var process$2 = require('process'); var readline = require('readline'); var require$$0$3 = require('events'); -var require$$1$1 = require('child_process'); -var require$$1$2 = require('util'); +var require$$1$2 = require('child_process'); +var require$$1$3 = require('util'); var http$2 = require('http'); -var https$2 = require('https'); +var require$$1$4 = require('https'); var zlib$1 = require('zlib'); var Stream = require('stream'); var require$$0$4 = require('net'); -var require$$1$3 = require('tls'); +var require$$1$5 = require('tls'); var Url = require('url'); var require$$0$5 = require('punycode'); var require$$0$6 = require('tty'); -var assert$6 = require('assert'); +var assert$a = require('assert'); var require$$0$7 = require('buffer'); var require$$3$1 = require('dns'); var module$2 = require('module'); @@ -3883,7 +3883,7 @@ var name$1 = "dotenv"; var version$6 = "16.4.5"; var description$2 = "Loads environment variables from .env file"; var main$1 = "lib/main.js"; -var types$2 = "lib/main.d.ts"; +var types$4 = "lib/main.d.ts"; var exports$1 = { ".": { types: "./lib/main.d.ts", @@ -3947,7 +3947,7 @@ var require$$4 = { version: version$6, description: description$2, main: main$1, - types: types$2, + types: types$4, exports: exports$1, scripts: scripts$1, repository: repository, @@ -3963,7 +3963,7 @@ var require$$4 = { const fs$1 = require$$0$1; const path$1 = path$3; const os$1 = require$$0$2; -const crypto$2 = require$$5; +const crypto$4 = require$$5; const packageJson$1 = require$$4; const version$5 = packageJson$1.version; @@ -3971,7 +3971,7 @@ const version$5 = packageJson$1.version; const LINE = /(?:^|^)\s*(?:export\s+)?([\w.-]+)(?:\s*=\s*?|:\s+?)(\s*'(?:\\'|[^'])*'|\s*"(?:\\"|[^"])*"|\s*`(?:\\`|[^`])*`|[^#\r\n]+)?\s*(?:#.*)?(?:$|$)/mg; // Parse src into an Object -function parse$2 (src) { 
+function parse$9 (src) { const obj = {}; // Convert buffer to string @@ -4059,7 +4059,7 @@ function _warn (message) { console.log(`[dotenv@${version$5}][WARN] ${message}`); } -function _debug (message) { +function _debug$1 (message) { console.log(`[dotenv@${version$5}][DEBUG] ${message}`); } @@ -4173,7 +4173,7 @@ function configDotenv (options) { encoding = options.encoding; } else { if (debug) { - _debug('No encoding is specified. UTF-8 is used by default'); + _debug$1('No encoding is specified. UTF-8 is used by default'); } } @@ -4201,7 +4201,7 @@ function configDotenv (options) { DotenvModule.populate(parsedAll, parsed, options); } catch (e) { if (debug) { - _debug(`Failed to load ${path} ${e.message}`); + _debug$1(`Failed to load ${path} ${e.message}`); } lastError = e; } @@ -4240,7 +4240,7 @@ function config (options) { return DotenvModule._configVault(options) } -function decrypt$1 (encrypted, keyStr) { +function decrypt$2 (encrypted, keyStr) { const key = Buffer.from(keyStr.slice(-64), 'hex'); let ciphertext = Buffer.from(encrypted, 'base64'); @@ -4249,7 +4249,7 @@ function decrypt$1 (encrypted, keyStr) { ciphertext = ciphertext.subarray(12, -16); try { - const aesgcm = crypto$2.createDecipheriv('aes-256-gcm', key, nonce); + const aesgcm = crypto$4.createDecipheriv('aes-256-gcm', key, nonce); aesgcm.setAuthTag(authTag); return `${aesgcm.update(ciphertext)}${aesgcm.final()}` } catch (error) { @@ -4291,9 +4291,9 @@ function populate$1 (processEnv, parsed, options = {}) { if (debug) { if (override === true) { - _debug(`"${key}" is already defined and WAS overwritten`); + _debug$1(`"${key}" is already defined and WAS overwritten`); } else { - _debug(`"${key}" is already defined and was NOT overwritten`); + _debug$1(`"${key}" is already defined and was NOT overwritten`); } } } else { @@ -4307,8 +4307,8 @@ const DotenvModule = { _configVault, _parseVault, config, - decrypt: decrypt$1, - parse: parse$2, + decrypt: decrypt$2, + parse: parse$9, populate: populate$1 
}; @@ -4349,11 +4349,11 @@ if (process.env.DOTENV_CONFIG_DOTENV_KEY != null) { var envOptions = options; -const re = /^dotenv_config_(encoding|path|debug|override|DOTENV_KEY)=(.+)$/; +const re$3 = /^dotenv_config_(encoding|path|debug|override|DOTENV_KEY)=(.+)$/; var cliOptions = function optionMatcher (args) { return args.reduce(function (acc, cur) { - const matches = cur.match(re); + const matches = cur.match(re$3); if (matches) { acc[matches[1]] = matches[2]; } @@ -5464,7 +5464,7 @@ function suggestSimilar$1(word, candidates) { suggestSimilar$2.suggestSimilar = suggestSimilar$1; const EventEmitter$1 = require$$0$3.EventEmitter; -const childProcess = require$$1$1; +const childProcess = require$$1$2; const path = path$3; const fs = require$$0$1; const process$1 = process$2; @@ -7804,31 +7804,31 @@ const { let messages = []; let level = 0; -const debug$7 = (msg, min) => { +const debug$9 = (msg, min) => { if (level >= min) { messages.push(msg); } }; -debug$7.WARN = 1; -debug$7.INFO = 2; -debug$7.DEBUG = 3; +debug$9.WARN = 1; +debug$9.INFO = 2; +debug$9.DEBUG = 3; -debug$7.reset = () => { +debug$9.reset = () => { messages = []; }; -debug$7.setDebugLevel = (v) => { +debug$9.setDebugLevel = (v) => { level = v; }; -debug$7.warn = (msg) => debug$7(msg, debug$7.WARN); -debug$7.info = (msg) => debug$7(msg, debug$7.INFO); -debug$7.debug = (msg) => debug$7(msg, debug$7.DEBUG); +debug$9.warn = (msg) => debug$9(msg, debug$9.WARN); +debug$9.info = (msg) => debug$9(msg, debug$9.INFO); +debug$9.debug = (msg) => debug$9(msg, debug$9.DEBUG); -debug$7.debugMessages = () => messages; +debug$9.debugMessages = () => messages; -var debug_1$4 = debug$7; +var debug_1$5 = debug$9; var stringWidth$2 = {exports: {}}; @@ -8279,7 +8279,7 @@ function hyperlink(url, text) { return [OSC, '8', SEP, SEP, url || text, BEL, text, OSC, '8', SEP, SEP, BEL].join(''); } -var utils$i = { +var utils$o = { strlen: strlen, repeat: repeat, pad: pad, @@ -8884,7 +8884,7 @@ THE SOFTWARE. 
colors.themes = {}; - var util = require$$1$2; + var util = require$$1$3; var ansiStyles = colors.styles = stylesExports; var defineProps = Object.defineProperties; var newLineRegex = new RegExp(/[\r\n]+/g); @@ -9084,8 +9084,8 @@ function requireSafe () { return safe.exports; } -const { info, debug: debug$6 } = debug_1$4; -const utils$h = utils$i; +const { info, debug: debug$8 } = debug_1$5; +const utils$n = utils$o; let Cell$1 = class Cell { /** @@ -9154,7 +9154,7 @@ let Cell$1 = class Cell { this.fixedWidth = tableOptions.colWidths[this.x]; this.lines = this.computeLines(tableOptions); - this.desiredWidth = utils$h.strlen(this.content) + this.paddingLeft + this.paddingRight; + this.desiredWidth = utils$n.strlen(this.content) + this.paddingLeft + this.paddingRight; this.desiredHeight = this.lines.length; } @@ -9172,15 +9172,15 @@ let Cell$1 = class Cell { } const { wrapOnWordBoundary: tableWrapOnWordBoundary = true } = tableOptions; const { wrapOnWordBoundary = tableWrapOnWordBoundary } = this.options; - return this.wrapLines(utils$h.wordWrap(this.fixedWidth, this.content, wrapOnWordBoundary)); + return this.wrapLines(utils$n.wordWrap(this.fixedWidth, this.content, wrapOnWordBoundary)); } return this.wrapLines(this.content.split('\n')); } wrapLines(computedLines) { - const lines = utils$h.colorizeLines(computedLines); + const lines = utils$n.colorizeLines(computedLines); if (this.href) { - return lines.map((line) => utils$h.hyperlink(this.href, line)); + return lines.map((line) => utils$n.hyperlink(this.href, line)); } return lines; } @@ -9219,7 +9219,7 @@ let Cell$1 = class Cell { draw(lineNum, spanningCell) { if (lineNum == 'top') return this.drawTop(this.drawRight); if (lineNum == 'bottom') return this.drawBottom(this.drawRight); - let content = utils$h.truncate(this.content, 10, this.truncate); + let content = utils$n.truncate(this.content, 10, this.truncate); if (!lineNum) { info(`${this.y}-${this.x}: ${this.rowSpan - lineNum}x${this.colSpan} Cell 
${content}`); } @@ -9253,11 +9253,11 @@ let Cell$1 = class Cell { //TODO: cells should always exist - some tests don't fill it in though this.widths.forEach(function (width, index) { content.push(this._topLeftChar(index)); - content.push(utils$h.repeat(this.chars[this.y == 0 ? 'top' : 'mid'], width)); + content.push(utils$n.repeat(this.chars[this.y == 0 ? 'top' : 'mid'], width)); }, this); } else { content.push(this._topLeftChar(0)); - content.push(utils$h.repeat(this.chars[this.y == 0 ? 'top' : 'mid'], this.width)); + content.push(utils$n.repeat(this.chars[this.y == 0 ? 'top' : 'mid'], this.width)); } if (drawRight) { content.push(this.chars[this.y == 0 ? 'topRight' : 'rightMid']); @@ -9334,14 +9334,14 @@ let Cell$1 = class Cell { left = this.chars['rightMid']; } } - let leftPadding = utils$h.repeat(' ', this.paddingLeft); + let leftPadding = utils$n.repeat(' ', this.paddingLeft); let right = drawRight ? this.chars['right'] : ''; - let rightPadding = utils$h.repeat(' ', this.paddingRight); + let rightPadding = utils$n.repeat(' ', this.paddingRight); let line = this.lines[lineNum]; let len = this.width - (this.paddingLeft + this.paddingRight); if (forceTruncationSymbol) line += this.truncate || '…'; - let content = utils$h.truncate(line, len, this.truncate); - content = utils$h.pad(content, len, ' ', this.hAlign); + let content = utils$n.truncate(line, len, this.truncate); + content = utils$n.pad(content, len, ' ', this.hAlign); content = leftPadding + content + rightPadding; return this.stylizeLine(left, content, right); } @@ -9362,7 +9362,7 @@ let Cell$1 = class Cell { */ drawBottom(drawRight) { let left = this.chars[this.x == 0 ? 'bottomLeft' : 'bottomMid']; - let content = utils$h.repeat(this.chars.bottom, this.width); + let content = utils$n.repeat(this.chars.bottom, this.width); let right = drawRight ? 
this.chars['bottomRight'] : ''; return this.wrapWithStyleColors('border', left + content + right); } @@ -9385,7 +9385,7 @@ let Cell$1 = class Cell { } } let right = drawRight ? this.chars['right'] : ''; - let content = utils$h.repeat(' ', this.width); + let content = utils$n.repeat(' ', this.width); return this.stylizeLine(left, content, right); } }; @@ -9400,7 +9400,7 @@ let ColSpanCell$1 = class ColSpanCell { draw(lineNum) { if (typeof lineNum === 'number') { - debug$6(`${this.y}-${this.x}: 1x1 ColSpanCell`); + debug$8(`${this.y}-${this.x}: 1x1 ColSpanCell`); } return ''; } @@ -9435,7 +9435,7 @@ let RowSpanCell$1 = class RowSpanCell { if (lineNum == 'bottom') { return this.originalCell.draw('bottom'); } - debug$6(`${this.y}-${this.x}: 1x${this.colSpan} RowSpanCell for ${this.originalCell.content}`); + debug$8(`${this.y}-${this.x}: 1x${this.colSpan} RowSpanCell for ${this.originalCell.content}`); return this.originalCell.draw(this.offset + 1 + lineNum); } @@ -9494,7 +9494,7 @@ cell.exports.RowSpanCell = RowSpanCell$1; var cellExports = cell.exports; -const { warn, debug: debug$5 } = debug_1$4; +const { warn, debug: debug$7 } = debug_1$5; const Cell = cellExports; const { ColSpanCell, RowSpanCell } = Cell; @@ -9623,7 +9623,7 @@ const { ColSpanCell, RowSpanCell } = Cell; function fillInTable(table) { let h_max = maxHeight(table); let w_max = maxWidth(table); - debug$5(`Max rows: ${h_max}; Max cols: ${w_max}`); + debug$7(`Max rows: ${h_max}; Max cols: ${w_max}`); for (let y = 0; y < h_max; y++) { for (let x = 0; x < w_max; x++) { if (!conflictExists(table, x, y)) { @@ -9751,15 +9751,15 @@ function makeComputeWidths(colSpan, desiredWidth, x, forcedMin) { var layoutManagerExports = layoutManager.exports; -const debug$4 = debug_1$4; -const utils$g = utils$i; +const debug$6 = debug_1$5; +const utils$m = utils$o; const tableLayout = layoutManagerExports; let Table$1 = class Table extends Array { constructor(opts) { super(); - const options = utils$g.mergeOptions(opts); + 
const options = utils$m.mergeOptions(opts); Object.defineProperty(this, 'options', { value: options, enumerable: options.debug, @@ -9768,21 +9768,21 @@ let Table$1 = class Table extends Array { if (options.debug) { switch (typeof options.debug) { case 'boolean': - debug$4.setDebugLevel(debug$4.WARN); + debug$6.setDebugLevel(debug$6.WARN); break; case 'number': - debug$4.setDebugLevel(options.debug); + debug$6.setDebugLevel(options.debug); break; case 'string': - debug$4.setDebugLevel(parseInt(options.debug, 10)); + debug$6.setDebugLevel(parseInt(options.debug, 10)); break; default: - debug$4.setDebugLevel(debug$4.WARN); - debug$4.warn(`Debug option is expected to be boolean, number, or string. Received a ${typeof options.debug}`); + debug$6.setDebugLevel(debug$6.WARN); + debug$6.warn(`Debug option is expected to be boolean, number, or string. Received a ${typeof options.debug}`); } Object.defineProperty(this, 'messages', { get() { - return debug$4.debugMessages(); + return debug$6.debugMessages(); }, }); } @@ -9845,7 +9845,7 @@ let Table$1 = class Table extends Array { } }; -Table$1.reset = () => debug$4.reset(); +Table$1.reset = () => debug$6.reset(); function doDraw(row, lineNum, result) { let line = []; @@ -9862,7 +9862,7 @@ var cliTable3 = table; var Table = /*@__PURE__*/getDefaultExportFromCjs(cliTable3); -var lib$4 = {exports: {}}; +var lib$5 = {exports: {}}; var extendStringPrototype = {exports: {}}; @@ -9995,9 +9995,9 @@ var extendStringPrototypeExports = extendStringPrototype.exports; // // extendStringPrototypeExports(); -} (lib$4)); +} (lib$5)); -var libExports = lib$4.exports; +var libExports = lib$5.exports; var colors = /*@__PURE__*/getDefaultExportFromCjs(libExports); function commonjsRequire(path) { @@ -15816,14 +15816,14 @@ function stringify(value) { * } * } */ -function isError(error, code) { +function isError$1(error, code) { return (error && error.code === code); } /** * Returns true if %%error%% is a [[CallExceptionError]. 
*/ function isCallException(error) { - return isError(error, "CALL_EXCEPTION"); + return isError$1(error, "CALL_EXCEPTION"); } /** * Returns a new Error configured to the format ethers emits errors, with @@ -15889,7 +15889,7 @@ function makeError(message, code, info) { * * @see [[api:makeError]] */ -function assert$5(check, message, code, info) { +function assert$9(check, message, code, info) { if (!check) { throw makeError(message, code, info); } @@ -15902,7 +15902,7 @@ function assert$5(check, message, code, info) { * any further code does not need additional compile-time checks. */ function assertArgument(check, message, name, value) { - assert$5(check, message, "INVALID_ARGUMENT", { argument: name, value: value }); + assert$9(check, message, "INVALID_ARGUMENT", { argument: name, value: value }); } function assertArgumentCount(count, expectedCount, message) { if (message == null) { @@ -15911,11 +15911,11 @@ function assertArgumentCount(count, expectedCount, message) { if (message) { message = ": " + message; } - assert$5(count >= expectedCount, "missing arguemnt" + message, "MISSING_ARGUMENT", { + assert$9(count >= expectedCount, "missing arguemnt" + message, "MISSING_ARGUMENT", { count: count, expectedCount: expectedCount }); - assert$5(count <= expectedCount, "too many arguments" + message, "UNEXPECTED_ARGUMENT", { + assert$9(count <= expectedCount, "too many arguments" + message, "UNEXPECTED_ARGUMENT", { count: count, expectedCount: expectedCount }); @@ -15947,7 +15947,7 @@ const _normalizeForms = ["NFD", "NFC", "NFKD", "NFKC"].reduce((accum, form) => { * Throws if the normalization %%form%% is not supported. 
*/ function assertNormalize(form) { - assert$5(_normalizeForms.indexOf(form) >= 0, "platform missing String.prototype.normalize", "UNSUPPORTED_OPERATION", { + assert$9(_normalizeForms.indexOf(form) >= 0, "platform missing String.prototype.normalize", "UNSUPPORTED_OPERATION", { operation: "String.prototype.normalize", info: { form } }); } @@ -15967,7 +15967,7 @@ function assertPrivate(givenGuard, guard, className) { method += "."; operation += " " + className; } - assert$5(false, `private constructor; use ${method}from* methods`, "UNSUPPORTED_OPERATION", { + assert$9(false, `private constructor; use ${method}from* methods`, "UNSUPPORTED_OPERATION", { operation }); } @@ -16024,7 +16024,7 @@ function getBytesCopy(value, name) { * %%value%% is a valid [[DataHexString]] of %%length%% (if a //number//) * bytes of data (e.g. ``0x1234`` is 2 bytes). */ -function isHexString$1(value, length) { +function isHexString$3(value, length) { if (typeof (value) !== "string" || !value.match(/^0x[0-9A-Fa-f]*$/)) { return false; } @@ -16041,7 +16041,7 @@ function isHexString$1(value, length) { * data (i.e. a valid [[DataHexString]] or a Uint8Array). */ function isBytesLike(value) { - return (isHexString$1(value, true) || (value instanceof Uint8Array)); + return (isHexString$3(value, true) || (value instanceof Uint8Array)); } const HexCharacters = "0123456789abcdef"; /** @@ -16067,7 +16067,7 @@ function concat$3(datas) { * Returns the length of %%data%%, in bytes. 
*/ function dataLength(data) { - if (isHexString$1(data, true)) { + if (isHexString$3(data, true)) { return (data.length - 2) / 2; } return getBytes(data).length; @@ -16081,7 +16081,7 @@ function dataLength(data) { function dataSlice(data, start, end) { const bytes = getBytes(data); if (end != null && end > bytes.length) { - assert$5(false, "cannot slice beyond data bounds", "BUFFER_OVERRUN", { + assert$9(false, "cannot slice beyond data bounds", "BUFFER_OVERRUN", { buffer: bytes, length: bytes.length, offset: end }); } @@ -16089,7 +16089,7 @@ function dataSlice(data, start, end) { } function zeroPad(data, length, left) { const bytes = getBytes(data); - assert$5(length >= bytes.length, "padding exceeds data length", "BUFFER_OVERRUN", { + assert$9(length >= bytes.length, "padding exceeds data length", "BUFFER_OVERRUN", { buffer: new Uint8Array(bytes), length: length, offset: length + 1 @@ -16150,7 +16150,7 @@ const maxValue = 0x1fffffffffffff; function fromTwos(_value, _width) { const value = getUint(_value, "value"); const width = BigInt(getNumber(_width, "width")); - assert$5((value >> width) === BN_0$a, "overflow", "NUMERIC_FAULT", { + assert$9((value >> width) === BN_0$a, "overflow", "NUMERIC_FAULT", { operation: "fromTwos", fault: "overflow", value: _value }); // Top bit set; treat as a negative value @@ -16172,14 +16172,14 @@ function toTwos(_value, _width) { const limit = (BN_1$4 << (width - BN_1$4)); if (value < BN_0$a) { value = -value; - assert$5(value <= limit, "too low", "NUMERIC_FAULT", { + assert$9(value <= limit, "too low", "NUMERIC_FAULT", { operation: "toTwos", fault: "overflow", value: _value }); const mask = (BN_1$4 << width) - BN_1$4; return ((~value) & mask) + BN_1$4; } else { - assert$5(value < limit, "too high", "NUMERIC_FAULT", { + assert$9(value < limit, "too high", "NUMERIC_FAULT", { operation: "toTwos", fault: "overflow", value: _value }); } @@ -16188,7 +16188,7 @@ function toTwos(_value, _width) { /** * Mask %%value%% with a bitmask of 
%%bits%% ones. */ -function mask(_value, _bits) { +function mask$1(_value, _bits) { const value = getUint(_value, "value"); const bits = BigInt(getNumber(_bits, "bits")); return value & ((BN_1$4 << bits) - BN_1$4); @@ -16226,7 +16226,7 @@ function getBigInt(value, name) { */ function getUint(value, name) { const result = getBigInt(value, name); - assert$5(result >= BN_0$a, "unsigned value cannot be negative", "NUMERIC_FAULT", { + assert$9(result >= BN_0$a, "unsigned value cannot be negative", "NUMERIC_FAULT", { fault: "overflow", operation: "getUint", value }); return result; @@ -16295,7 +16295,7 @@ function toBeHex(_value, _width) { } else { const width = getNumber(_width, "width"); - assert$5(width * 2 >= result.length, `value exceeds width (${width} bytes)`, "NUMERIC_FAULT", { + assert$9(width * 2 >= result.length, `value exceeds width (${width} bytes)`, "NUMERIC_FAULT", { operation: "toBeHex", fault: "overflow", value: _value @@ -16711,11 +16711,11 @@ function toUtf8String(bytes, onError) { function createGetUrl(options) { async function getUrl(req, signal) { const protocol = req.url.split(":")[0].toLowerCase(); - assert$5(protocol === "http" || protocol === "https", `unsupported protocol ${protocol}`, "UNSUPPORTED_OPERATION", { + assert$9(protocol === "http" || protocol === "https", `unsupported protocol ${protocol}`, "UNSUPPORTED_OPERATION", { info: { protocol }, operation: "request" }); - assert$5(protocol === "https" || !req.credentials || req.allowInsecureAuthentication, "insecure authorized connections unsupported", "UNSUPPORTED_OPERATION", { + assert$9(protocol === "https" || !req.credentials || req.allowInsecureAuthentication, "insecure authorized connections unsupported", "UNSUPPORTED_OPERATION", { operation: "request" }); const method = req.method; @@ -16726,7 +16726,7 @@ function createGetUrl(options) { reqOptions.agent = options.agent; } } - const request = ((protocol === "http") ? 
http$2 : https$2).request(req.url, reqOptions); + const request = ((protocol === "http") ? http$2 : require$$1$4).request(req.url, reqOptions); request.setTimeout(req.timeout); const body = req.body; if (body) { @@ -16873,14 +16873,14 @@ class FetchCancelSignal { }); } addListener(listener) { - assert$5(!this.#cancelled, "singal already cancelled", "UNSUPPORTED_OPERATION", { + assert$9(!this.#cancelled, "singal already cancelled", "UNSUPPORTED_OPERATION", { operation: "fetchCancelSignal.addCancelListener" }); this.#listeners.push(listener); } get cancelled() { return this.#cancelled; } checkSignal() { - assert$5(!this.cancelled, "cancelled", "CANCELLED", {}); + assert$9(!this.cancelled, "cancelled", "CANCELLED", {}); } } // Check the signal, throwing if it is cancelled @@ -17202,7 +17202,7 @@ class FetchRequest { if (attempt >= this.#throttle.maxAttempts) { return _response.makeServerError("exceeded maximum retry limit"); } - assert$5(getTime$1() <= expires, "timeout", "TIMEOUT", { + assert$9(getTime$1() <= expires, "timeout", "TIMEOUT", { operation: "request.send", reason: "timeout", request: _request }); if (delay > 0) { @@ -17283,7 +17283,7 @@ class FetchRequest { * Resolves to the response by sending the request. */ send() { - assert$5(this.#signal == null, "request already sent", "UNSUPPORTED_OPERATION", { operation: "fetchRequest.send" }); + assert$9(this.#signal == null, "request already sent", "UNSUPPORTED_OPERATION", { operation: "fetchRequest.send" }); this.#signal = new FetchCancelSignal(this); return this.#send(0, getTime$1() + this.timeout, 0, this, new FetchResponse(0, "", {}, null, this)); } @@ -17292,7 +17292,7 @@ class FetchRequest { * error to be rejected from the [[send]]. 
*/ cancel() { - assert$5(this.#signal != null, "request has not been sent", "UNSUPPORTED_OPERATION", { operation: "fetchRequest.cancel" }); + assert$9(this.#signal != null, "request has not been sent", "UNSUPPORTED_OPERATION", { operation: "fetchRequest.cancel" }); const signal = fetchSignals.get(this); if (!signal) { throw new Error("missing signal; should not happen"); @@ -17311,7 +17311,7 @@ class FetchRequest { // - non-GET requests // - downgrading the security (e.g. https => http) // - to non-HTTP (or non-HTTPS) protocols [this could be relaxed?] - assert$5(this.method === "GET" && (current !== "https" || target !== "http") && location.match(/^https?:/), `unsupported redirect`, "UNSUPPORTED_OPERATION", { + assert$9(this.method === "GET" && (current !== "https" || target !== "http") && location.match(/^https?:/), `unsupported redirect`, "UNSUPPORTED_OPERATION", { operation: `redirect(${this.method} ${JSON.stringify(this.url)} => ${JSON.stringify(location)})` }); // Create a copy of this request, with a new URL @@ -17484,7 +17484,7 @@ class FetchResponse { return (this.#body == null) ? 
"" : toUtf8String(this.#body); } catch (error) { - assert$5(false, "response body is not valid UTF-8 data", "UNSUPPORTED_OPERATION", { + assert$9(false, "response body is not valid UTF-8 data", "UNSUPPORTED_OPERATION", { operation: "bodyText", info: { response: this } }); } @@ -17500,7 +17500,7 @@ class FetchResponse { return JSON.parse(this.bodyText); } catch (error) { - assert$5(false, "response body is not valid JSON", "UNSUPPORTED_OPERATION", { + assert$9(false, "response body is not valid JSON", "UNSUPPORTED_OPERATION", { operation: "bodyJson", info: { response: this } }); } @@ -17610,7 +17610,7 @@ class FetchResponse { } } catch (e) { } - assert$5(false, message, "SERVER_ERROR", { + assert$9(false, message, "SERVER_ERROR", { request: (this.request || "unknown request"), response: this, error, info: { requestUrl, responseBody, @@ -17661,19 +17661,19 @@ function checkValue(val, format, safeOp) { const width = BigInt(format.width); if (format.signed) { const limit = (BN_1$3 << (width - BN_1$3)); - assert$5(safeOp == null || (val >= -limit && val < limit), "overflow", "NUMERIC_FAULT", { + assert$9(safeOp == null || (val >= -limit && val < limit), "overflow", "NUMERIC_FAULT", { operation: safeOp, fault: "overflow", value: val }); if (val > BN_0$8) { - val = fromTwos(mask(val, width), width); + val = fromTwos(mask$1(val, width), width); } else { - val = -fromTwos(mask(-val, width), width); + val = -fromTwos(mask$1(-val, width), width); } } else { const limit = (BN_1$3 << width); - assert$5(safeOp == null || (val >= 0 && val < limit), "overflow", "NUMERIC_FAULT", { + assert$9(safeOp == null || (val >= 0 && val < limit), "overflow", "NUMERIC_FAULT", { operation: safeOp, fault: "overflow", value: val }); val = (((val % limit) + limit) % limit) & (limit - BN_1$3); @@ -17913,13 +17913,13 @@ class FixedNumber { mulSignal(other) { this.#checkFormat(other); const value = this.#val * other.#val; - assert$5((value % this.#tens) === BN_0$8, "precision lost during signalling 
mul", "NUMERIC_FAULT", { + assert$9((value % this.#tens) === BN_0$8, "precision lost during signalling mul", "NUMERIC_FAULT", { operation: "mulSignal", fault: "underflow", value: this }); return this.#checkValue(value / this.#tens, "mulSignal"); } #div(o, safeOp) { - assert$5(o.#val !== BN_0$8, "division by zero", "NUMERIC_FAULT", { + assert$9(o.#val !== BN_0$8, "division by zero", "NUMERIC_FAULT", { operation: "div", fault: "divide-by-zero", value: this }); this.#checkFormat(o); @@ -17943,12 +17943,12 @@ class FixedNumber { * (precision loss) occurs. */ divSignal(other) { - assert$5(other.#val !== BN_0$8, "division by zero", "NUMERIC_FAULT", { + assert$9(other.#val !== BN_0$8, "division by zero", "NUMERIC_FAULT", { operation: "div", fault: "divide-by-zero", value: this }); this.#checkFormat(other); const value = (this.#val * this.#tens); - assert$5((value % other.#val) === BN_0$8, "precision lost during signalling div", "NUMERIC_FAULT", { + assert$9((value % other.#val) === BN_0$8, "precision lost during signalling div", "NUMERIC_FAULT", { operation: "divSignal", fault: "underflow", value: this }); return this.#checkValue(value / other.#val, "divSignal"); @@ -18091,7 +18091,7 @@ class FixedNumber { const delta = decimals - format.decimals; if (delta > 0) { const tens = getTens(delta); - assert$5((value % tens) === BN_0$8, "value loses precision for format", "NUMERIC_FAULT", { + assert$9((value % tens) === BN_0$8, "value loses precision for format", "NUMERIC_FAULT", { operation: "fromValue", fault: "underflow", value: _value }); value /= tens; @@ -18118,7 +18118,7 @@ class FixedNumber { decimal += Zeros$1; } // Check precision is safe - assert$5(decimal.substring(format.decimals).match(/^0*$/), "too many decimals for format", "NUMERIC_FAULT", { + assert$9(decimal.substring(format.decimals).match(/^0*$/), "too many decimals for format", "NUMERIC_FAULT", { operation: "fromString", fault: "underflow", value: _value }); // Remove extra padding @@ -18167,22 +18167,22 @@ 
function unarrayifyInteger(data, offset, length) { function _decodeChildren(data, offset, childOffset, length) { const result = []; while (childOffset < offset + 1 + length) { - const decoded = _decode(data, childOffset); + const decoded = _decode$1(data, childOffset); result.push(decoded.result); childOffset += decoded.consumed; - assert$5(childOffset <= offset + 1 + length, "child data too short", "BUFFER_OVERRUN", { + assert$9(childOffset <= offset + 1 + length, "child data too short", "BUFFER_OVERRUN", { buffer: data, length, offset }); } return { consumed: (1 + length), result: result }; } // returns { consumed: number, result: Object } -function _decode(data, offset) { - assert$5(data.length !== 0, "data too short", "BUFFER_OVERRUN", { +function _decode$1(data, offset) { + assert$9(data.length !== 0, "data too short", "BUFFER_OVERRUN", { buffer: data, length: 0, offset: 1 }); const checkOffset = (offset) => { - assert$5(offset <= data.length, "data short segment too short", "BUFFER_OVERRUN", { + assert$9(offset <= data.length, "data short segment too short", "BUFFER_OVERRUN", { buffer: data, length: data.length, offset }); }; @@ -18220,7 +18220,7 @@ function _decode(data, offset) { */ function decodeRlp(_data) { const data = getBytes(_data, "data"); - const decoded = _decode(data, 0); + const decoded = _decode$1(data, 0); assertArgument(decoded.consumed === data.length, "unexpected junk after rlp payload", "data", _data); return decoded.result; } @@ -18515,7 +18515,7 @@ class Result extends Array { */ toObject(deep) { return this.#names.reduce((accum, name, index) => { - assert$5(name != null, "value at index ${ index } unnamed", "UNSUPPORTED_OPERATION", { + assert$9(name != null, "value at index ${ index } unnamed", "UNSUPPORTED_OPERATION", { operation: "toObject()" }); // Add values for names that don't conflict @@ -18621,7 +18621,7 @@ class Result extends Array { } function getValue$1(value) { let bytes = toBeArray(value); - assert$5(bytes.length <= 
WordSize, "value out-of-bounds", "BUFFER_OVERRUN", { buffer: bytes, length: WordSize, offset: bytes.length }); + assert$9(bytes.length <= WordSize, "value out-of-bounds", "BUFFER_OVERRUN", { buffer: bytes, length: WordSize, offset: bytes.length }); if (bytes.length !== WordSize) { bytes = getBytesCopy(concat$3([Padding.slice(bytes.length % WordSize), bytes])); } @@ -18732,7 +18732,7 @@ class Reader { } this.#bytesRead += count; // Check for excessive inflation (see: #4537) - assert$5(this.#maxInflation < 1 || this.#bytesRead <= this.#maxInflation * this.dataLength, `compressed ABI data exceeds inflation ratio of ${this.#maxInflation} ( see: https:/\/github.com/ethers-io/ethers.js/issues/4537 )`, "BUFFER_OVERRUN", { + assert$9(this.#maxInflation < 1 || this.#bytesRead <= this.#maxInflation * this.dataLength, `compressed ABI data exceeds inflation ratio of ${this.#maxInflation} ( see: https:/\/github.com/ethers-io/ethers.js/issues/4537 )`, "BUFFER_OVERRUN", { buffer: getBytesCopy(this.#data), offset: this.#offset, length: count, info: { bytesRead: this.#bytesRead, @@ -18747,7 +18747,7 @@ class Reader { alignedLength = length; } else { - assert$5(false, "data out-of-bounds", "BUFFER_OVERRUN", { + assert$9(false, "data out-of-bounds", "BUFFER_OVERRUN", { buffer: getBytesCopy(this.#data), length: this.#data.length, offset: this.#offset + alignedLength @@ -18824,61 +18824,61 @@ computeHmac.register = function (func) { }; Object.freeze(computeHmac); -function number(n) { +function number$4(n) { if (!Number.isSafeInteger(n) || n < 0) throw new Error(`Wrong positive integer: ${n}`); } -function bytes(b, ...lengths) { +function bytes$4(b, ...lengths) { if (!(b instanceof Uint8Array)) throw new Error('Expected Uint8Array'); if (lengths.length > 0 && !lengths.includes(b.length)) throw new Error(`Expected Uint8Array of length ${lengths}, not of length=${b.length}`); } -function hash(hash) { +function hash$2(hash) { if (typeof hash !== 'function' || typeof hash.create !== 
'function') throw new Error('Hash should be wrapped by utils.wrapConstructor'); - number(hash.outputLen); - number(hash.blockLen); + number$4(hash.outputLen); + number$4(hash.blockLen); } -function exists(instance, checkFinished = true) { +function exists$2(instance, checkFinished = true) { if (instance.destroyed) throw new Error('Hash instance has been destroyed'); if (checkFinished && instance.finished) throw new Error('Hash#digest() has already been called'); } -function output(out, instance) { - bytes(out); +function output$2(out, instance) { + bytes$4(out); const min = instance.outputLen; if (out.length < min) { throw new Error(`digestInto() expects output buffer of length at least ${min}`); } } -const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); -const _32n = /* @__PURE__ */ BigInt(32); +const U32_MASK64$2 = /* @__PURE__ */ BigInt(2 ** 32 - 1); +const _32n$2 = /* @__PURE__ */ BigInt(32); // We are not using BigUint64Array, because they are extremely slow as per 2022 -function fromBig(n, le = false) { +function fromBig$2(n, le = false) { if (le) - return { h: Number(n & U32_MASK64), l: Number((n >> _32n) & U32_MASK64) }; - return { h: Number((n >> _32n) & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 }; + return { h: Number(n & U32_MASK64$2), l: Number((n >> _32n$2) & U32_MASK64$2) }; + return { h: Number((n >> _32n$2) & U32_MASK64$2) | 0, l: Number(n & U32_MASK64$2) | 0 }; } -function split$1(lst, le = false) { +function split$3(lst, le = false) { let Ah = new Uint32Array(lst.length); let Al = new Uint32Array(lst.length); for (let i = 0; i < lst.length; i++) { - const { h, l } = fromBig(lst[i], le); + const { h, l } = fromBig$2(lst[i], le); [Ah[i], Al[i]] = [h, l]; } return [Ah, Al]; } // Left rotate for Shift in [1, 32) -const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s)); -const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s)); +const rotlSH$2 = (h, l, s) => (h << s) | (l >>> (32 - s)); +const rotlSL$2 = (h, l, s) => (l << s) | (h >>> (32 - s)); 
// Left rotate for Shift in (32, 64), NOTE: 32 is special case. -const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); -const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); +const rotlBH$2 = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); +const rotlBL$2 = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); -const crypto$1 = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined; +const crypto$3 = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined; /*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. @@ -18918,7 +18918,7 @@ async function asyncLoop(iters, tick, cb) { /** * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) */ -function utf8ToBytes$1(str) { +function utf8ToBytes$3(str) { if (typeof str !== 'string') throw new Error(`utf8ToBytes expected string, got ${typeof str}`); return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 @@ -18928,9 +18928,9 @@ function utf8ToBytes$1(str) { * Warning: when Uint8Array is passed, it would NOT get copied. * Keep in mind for future mutable operations. */ -function toBytes(data) { +function toBytes$1(data) { if (typeof data === 'string') - data = utf8ToBytes$1(data); + data = utf8ToBytes$3(data); if (!u8a$1(data)) throw new Error(`expected Uint8Array, got ${typeof data}`); return data; @@ -18938,7 +18938,7 @@ function toBytes(data) { /** * Copies several Uint8Arrays into one. 
*/ -function concatBytes$1(...arrays) { +function concatBytes$5(...arrays) { const r = new Uint8Array(arrays.reduce((sum, a) => sum + a.length, 0)); let pad = 0; // walk through each item, ensure they have proper type arrays.forEach((a) => { @@ -18964,7 +18964,7 @@ function checkOpts(defaults, opts) { return merged; } function wrapConstructor(hashCons) { - const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); + const hashC = (msg) => hashCons().update(toBytes$1(msg)).digest(); const tmp = hashCons(); hashC.outputLen = tmp.outputLen; hashC.blockLen = tmp.blockLen; @@ -18975,8 +18975,8 @@ function wrapConstructor(hashCons) { * Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS. */ function randomBytes$2(bytesLength = 32) { - if (crypto$1 && typeof crypto$1.getRandomValues === 'function') { - return crypto$1.getRandomValues(new Uint8Array(bytesLength)); + if (crypto$3 && typeof crypto$3.getRandomValues === 'function') { + return crypto$3.getRandomValues(new Uint8Array(bytesLength)); } throw new Error('crypto.getRandomValues must be defined'); } @@ -18984,34 +18984,34 @@ function randomBytes$2(bytesLength = 32) { // SHA3 (keccak) is based on a new design: basically, the internal state is bigger than output size. // It's called a sponge function. 
// Various per round constants calculations -const [SHA3_PI, SHA3_ROTL, _SHA3_IOTA] = [[], [], []]; -const _0n$4 = /* @__PURE__ */ BigInt(0); -const _1n$5 = /* @__PURE__ */ BigInt(1); -const _2n$3 = /* @__PURE__ */ BigInt(2); -const _7n = /* @__PURE__ */ BigInt(7); -const _256n = /* @__PURE__ */ BigInt(256); -const _0x71n = /* @__PURE__ */ BigInt(0x71); -for (let round = 0, R = _1n$5, x = 1, y = 0; round < 24; round++) { +const [SHA3_PI$2, SHA3_ROTL$2, _SHA3_IOTA$2] = [[], [], []]; +const _0n$9 = /* @__PURE__ */ BigInt(0); +const _1n$a = /* @__PURE__ */ BigInt(1); +const _2n$7 = /* @__PURE__ */ BigInt(2); +const _7n$2 = /* @__PURE__ */ BigInt(7); +const _256n$2 = /* @__PURE__ */ BigInt(256); +const _0x71n$2 = /* @__PURE__ */ BigInt(0x71); +for (let round = 0, R = _1n$a, x = 1, y = 0; round < 24; round++) { // Pi [x, y] = [y, (2 * x + 3 * y) % 5]; - SHA3_PI.push(2 * (5 * y + x)); + SHA3_PI$2.push(2 * (5 * y + x)); // Rotational - SHA3_ROTL.push((((round + 1) * (round + 2)) / 2) % 64); + SHA3_ROTL$2.push((((round + 1) * (round + 2)) / 2) % 64); // Iota - let t = _0n$4; + let t = _0n$9; for (let j = 0; j < 7; j++) { - R = ((R << _1n$5) ^ ((R >> _7n) * _0x71n)) % _256n; - if (R & _2n$3) - t ^= _1n$5 << ((_1n$5 << /* @__PURE__ */ BigInt(j)) - _1n$5); + R = ((R << _1n$a) ^ ((R >> _7n$2) * _0x71n$2)) % _256n$2; + if (R & _2n$7) + t ^= _1n$a << ((_1n$a << /* @__PURE__ */ BigInt(j)) - _1n$a); } - _SHA3_IOTA.push(t); + _SHA3_IOTA$2.push(t); } -const [SHA3_IOTA_H, SHA3_IOTA_L] = /* @__PURE__ */ split$1(_SHA3_IOTA, true); +const [SHA3_IOTA_H$2, SHA3_IOTA_L$2] = /* @__PURE__ */ split$3(_SHA3_IOTA$2, true); // Left rotation (without 0, 32, 64) -const rotlH = (h, l, s) => (s > 32 ? rotlBH(h, l, s) : rotlSH(h, l, s)); -const rotlL = (h, l, s) => (s > 32 ? rotlBL(h, l, s) : rotlSL(h, l, s)); +const rotlH$2 = (h, l, s) => (s > 32 ? rotlBH$2(h, l, s) : rotlSH$2(h, l, s)); +const rotlL$2 = (h, l, s) => (s > 32 ? 
rotlBL$2(h, l, s) : rotlSL$2(h, l, s)); // Same as keccakf1600, but allows to skip some rounds -function keccakP(s, rounds = 24) { +function keccakP$2(s, rounds = 24) { const B = new Uint32Array(5 * 2); // NOTE: all indices are x2 since we store state as u32 instead of u64 (bigints to slow in js) for (let round = 24 - rounds; round < 24; round++) { @@ -19023,8 +19023,8 @@ function keccakP(s, rounds = 24) { const idx0 = (x + 2) % 10; const B0 = B[idx0]; const B1 = B[idx0 + 1]; - const Th = rotlH(B0, B1, 1) ^ B[idx1]; - const Tl = rotlL(B0, B1, 1) ^ B[idx1 + 1]; + const Th = rotlH$2(B0, B1, 1) ^ B[idx1]; + const Tl = rotlL$2(B0, B1, 1) ^ B[idx1 + 1]; for (let y = 0; y < 50; y += 10) { s[x + y] ^= Th; s[x + y + 1] ^= Tl; @@ -19034,10 +19034,10 @@ function keccakP(s, rounds = 24) { let curH = s[2]; let curL = s[3]; for (let t = 0; t < 24; t++) { - const shift = SHA3_ROTL[t]; - const Th = rotlH(curH, curL, shift); - const Tl = rotlL(curH, curL, shift); - const PI = SHA3_PI[t]; + const shift = SHA3_ROTL$2[t]; + const Th = rotlH$2(curH, curL, shift); + const Tl = rotlL$2(curH, curL, shift); + const PI = SHA3_PI$2[t]; curH = s[PI]; curL = s[PI + 1]; s[PI] = Th; @@ -19051,12 +19051,12 @@ function keccakP(s, rounds = 24) { s[y + x] ^= ~B[(x + 2) % 10] & B[(x + 4) % 10]; } // Iota (ι) - s[0] ^= SHA3_IOTA_H[round]; - s[1] ^= SHA3_IOTA_L[round]; + s[0] ^= SHA3_IOTA_H$2[round]; + s[1] ^= SHA3_IOTA_L$2[round]; } B.fill(0); } -class Keccak extends Hash { +let Keccak$2 = class Keccak extends Hash { // NOTE: we accept arguments in bytes instead of bits here. constructor(blockLen, suffix, outputLen, enableXOF = false, rounds = 24) { super(); @@ -19070,7 +19070,7 @@ class Keccak extends Hash { this.finished = false; this.destroyed = false; // Can be passed from user as dkLen - number(outputLen); + number$4(outputLen); // 1600 = 5x5 matrix of 64bit. 
1600 bits === 200 bytes if (0 >= this.blockLen || this.blockLen >= 200) throw new Error('Sha3 supports only keccak-f1600 function'); @@ -19078,14 +19078,14 @@ class Keccak extends Hash { this.state32 = u32$1(this.state); } keccak() { - keccakP(this.state32, this.rounds); + keccakP$2(this.state32, this.rounds); this.posOut = 0; this.pos = 0; } update(data) { - exists(this); + exists$2(this); const { blockLen, state } = this; - data = toBytes(data); + data = toBytes$1(data); const len = data.length; for (let pos = 0; pos < len;) { const take = Math.min(blockLen - this.pos, len - pos); @@ -19109,8 +19109,8 @@ class Keccak extends Hash { this.keccak(); } writeInto(out) { - exists(this, false); - bytes(out); + exists$2(this, false); + bytes$4(out); this.finish(); const bufferOut = this.state; const { blockLen } = this; @@ -19131,11 +19131,11 @@ class Keccak extends Hash { return this.writeInto(out); } xof(bytes) { - number(bytes); + number$4(bytes); return this.xofInto(new Uint8Array(bytes)); } digestInto(out) { - output(out, this); + output$2(out, this); if (this.finished) throw new Error('digest() was already called'); this.writeInto(out); @@ -19164,13 +19164,13 @@ class Keccak extends Hash { to.destroyed = this.destroyed; return to; } -} -const gen = (suffix, blockLen, outputLen) => wrapConstructor(() => new Keccak(blockLen, suffix, outputLen)); +}; +const gen$2 = (suffix, blockLen, outputLen) => wrapConstructor(() => new Keccak$2(blockLen, suffix, outputLen)); /** * keccak-256 hash function. Different from SHA3-256. 
* @param message - that would be hashed */ -const keccak_256 = /* @__PURE__ */ gen(0x01, 136, 256 / 8); +const keccak_256 = /* @__PURE__ */ gen$2(0x01, 136, 256 / 8); /** * Cryptographic hashing functions @@ -19219,7 +19219,7 @@ keccak256$1.register = function (func) { Object.freeze(keccak256$1); // Polyfill for Safari 14 -function setBigUint64(view, byteOffset, value, isLE) { +function setBigUint64$1(view, byteOffset, value, isLE) { if (typeof view.setBigUint64 === 'function') return view.setBigUint64(byteOffset, value, isLE); const _32n = BigInt(32); @@ -19232,7 +19232,7 @@ function setBigUint64(view, byteOffset, value, isLE) { view.setUint32(byteOffset + l, wl, isLE); } // Base SHA2 class (RFC 6234) -class SHA2 extends Hash { +let SHA2$1 = class SHA2 extends Hash { constructor(blockLen, outputLen, padOffset, isLE) { super(); this.blockLen = blockLen; @@ -19247,9 +19247,9 @@ class SHA2 extends Hash { this.view = createView(this.buffer); } update(data) { - exists(this); + exists$2(this); const { view, buffer, blockLen } = this; - data = toBytes(data); + data = toBytes$1(data); const len = data.length; for (let pos = 0; pos < len;) { const take = Math.min(blockLen - this.pos, len - pos); @@ -19273,8 +19273,8 @@ class SHA2 extends Hash { return this; } digestInto(out) { - exists(this); - output(out, this); + exists$2(this); + output$2(out, this); this.finished = true; // Padding // We can avoid allocation of buffer for padding completely if it @@ -19295,7 +19295,7 @@ class SHA2 extends Hash { // Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that // You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen. // So we just write lowest 64 bits of that value. 
- setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE); + setBigUint64$1(view, blockLen - 8, BigInt(this.length * 8), isLE); this.process(view, 0); const oview = createView(out); const len = this.outputLen; @@ -19328,7 +19328,7 @@ class SHA2 extends Hash { to.buffer.set(buffer); return to; } -} +}; // https://homes.esat.kuleuven.be/~bosselae/ripemd160.html // https://homes.esat.kuleuven.be/~bosselae/ripemd160/pdf/AB-9601/AB-9601.pdf @@ -19372,7 +19372,7 @@ function f(group, x, y, z) { } // Temporary buffer, not used to store anything between runs const BUF = /* @__PURE__ */ new Uint32Array(16); -class RIPEMD160 extends SHA2 { +class RIPEMD160 extends SHA2$1 { constructor() { super(64, 20, 8, true); this.h0 = 0x67452301 | 0; @@ -19551,13 +19551,13 @@ Object.freeze(randomBytes$1); // SHA2-256 need to try 2^128 hashes to execute birthday attack. // BTC network is doing 2^67 hashes/sec as per early 2023. // Choice: a ? b : c -const Chi = (a, b, c) => (a & b) ^ (~a & c); +const Chi$1 = (a, b, c) => (a & b) ^ (~a & c); // Majority function, true if any two inpust is true -const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c); +const Maj$1 = (a, b, c) => (a & b) ^ (a & c) ^ (b & c); // Round constants: // first 32 bits of the fractional parts of the cube roots of the first 64 primes 2..311) // prettier-ignore -const SHA256_K = /* @__PURE__ */ new Uint32Array([ +const SHA256_K$1 = /* @__PURE__ */ new Uint32Array([ 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, @@ -19569,25 +19569,25 @@ const SHA256_K = /* @__PURE__ */ new Uint32Array([ ]); // Initial state (first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19): // prettier-ignore -const IV = /* @__PURE__ */ new Uint32Array([ +const IV$1 = /* 
@__PURE__ */ new Uint32Array([ 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 ]); // Temporary buffer, not used to store anything between runs // Named this way because it matches specification. -const SHA256_W = /* @__PURE__ */ new Uint32Array(64); -class SHA256 extends SHA2 { +const SHA256_W$1 = /* @__PURE__ */ new Uint32Array(64); +let SHA256$1 = class SHA256 extends SHA2$1 { constructor() { super(64, 32, 8, false); // We cannot use array here since array allows indexing by variable // which means optimizer/compiler cannot use registers. - this.A = IV[0] | 0; - this.B = IV[1] | 0; - this.C = IV[2] | 0; - this.D = IV[3] | 0; - this.E = IV[4] | 0; - this.F = IV[5] | 0; - this.G = IV[6] | 0; - this.H = IV[7] | 0; + this.A = IV$1[0] | 0; + this.B = IV$1[1] | 0; + this.C = IV$1[2] | 0; + this.D = IV$1[3] | 0; + this.E = IV$1[4] | 0; + this.F = IV$1[5] | 0; + this.G = IV$1[6] | 0; + this.H = IV$1[7] | 0; } get() { const { A, B, C, D, E, F, G, H } = this; @@ -19607,21 +19607,21 @@ class SHA256 extends SHA2 { process(view, offset) { // Extend the first 16 words into the remaining 48 words w[16..63] of the message schedule array for (let i = 0; i < 16; i++, offset += 4) - SHA256_W[i] = view.getUint32(offset, false); + SHA256_W$1[i] = view.getUint32(offset, false); for (let i = 16; i < 64; i++) { - const W15 = SHA256_W[i - 15]; - const W2 = SHA256_W[i - 2]; + const W15 = SHA256_W$1[i - 15]; + const W2 = SHA256_W$1[i - 2]; const s0 = rotr(W15, 7) ^ rotr(W15, 18) ^ (W15 >>> 3); const s1 = rotr(W2, 17) ^ rotr(W2, 19) ^ (W2 >>> 10); - SHA256_W[i] = (s1 + SHA256_W[i - 7] + s0 + SHA256_W[i - 16]) | 0; + SHA256_W$1[i] = (s1 + SHA256_W$1[i - 7] + s0 + SHA256_W$1[i - 16]) | 0; } // Compression function main loop, 64 rounds let { A, B, C, D, E, F, G, H } = this; for (let i = 0; i < 64; i++) { const sigma1 = rotr(E, 6) ^ rotr(E, 11) ^ rotr(E, 25); - const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0; + const T1 = (H 
+ sigma1 + Chi$1(E, F, G) + SHA256_K$1[i] + SHA256_W$1[i]) | 0; const sigma0 = rotr(A, 2) ^ rotr(A, 13) ^ rotr(A, 22); - const T2 = (sigma0 + Maj(A, B, C)) | 0; + const T2 = (sigma0 + Maj$1(A, B, C)) | 0; H = G; G = F; F = E; @@ -19643,28 +19643,28 @@ class SHA256 extends SHA2 { this.set(A, B, C, D, E, F, G, H); } roundClean() { - SHA256_W.fill(0); + SHA256_W$1.fill(0); } destroy() { this.set(0, 0, 0, 0, 0, 0, 0, 0); this.buffer.fill(0); } -} +}; /** * SHA2-256 hash function * @param message - data that would be hashed */ -const sha256$1 = /* @__PURE__ */ wrapConstructor(() => new SHA256()); +const sha256$2 = /* @__PURE__ */ wrapConstructor(() => new SHA256$1()); // HMAC (RFC 2104) class HMAC extends Hash { - constructor(hash$1, _key) { + constructor(hash, _key) { super(); this.finished = false; this.destroyed = false; - hash(hash$1); - const key = toBytes(_key); - this.iHash = hash$1.create(); + hash$2(hash); + const key = toBytes$1(_key); + this.iHash = hash.create(); if (typeof this.iHash.update !== 'function') throw new Error('Expected instance of class which extends utils.Hash'); this.blockLen = this.iHash.blockLen; @@ -19672,12 +19672,12 @@ class HMAC extends Hash { const blockLen = this.blockLen; const pad = new Uint8Array(blockLen); // blockLen can be bigger than outputLen - pad.set(key.length > blockLen ? hash$1.create().update(key).digest() : key); + pad.set(key.length > blockLen ? 
hash.create().update(key).digest() : key); for (let i = 0; i < pad.length; i++) pad[i] ^= 0x36; this.iHash.update(pad); // By doing update (processing of first block) of outer hash here we can re-use it between multiple calls via clone - this.oHash = hash$1.create(); + this.oHash = hash.create(); // Undo internal XOR && apply outer XOR for (let i = 0; i < pad.length; i++) pad[i] ^= 0x36 ^ 0x5c; @@ -19685,13 +19685,13 @@ class HMAC extends Hash { pad.fill(0); } update(buf) { - exists(this); + exists$2(this); this.iHash.update(buf); return this; } digestInto(out) { - exists(this); - bytes(out, this.outputLen); + exists$2(this); + bytes$4(out, this.outputLen); this.finished = true; this.iHash.digestInto(out); this.oHash.update(out); @@ -19728,25 +19728,25 @@ class HMAC extends Hash { * @param key - message key * @param message - message data */ -const hmac = (hash, key, message) => new HMAC(hash, key).update(message).digest(); -hmac.create = (hash, key) => new HMAC(hash, key); +const hmac$1 = (hash, key, message) => new HMAC(hash, key).update(message).digest(); +hmac$1.create = (hash, key) => new HMAC(hash, key); // Common prologue and epilogue for sync/async functions -function pbkdf2Init(hash$1, _password, _salt, _opts) { - hash(hash$1); +function pbkdf2Init(hash, _password, _salt, _opts) { + hash$2(hash); const opts = checkOpts({ dkLen: 32, asyncTick: 10 }, _opts); const { c, dkLen, asyncTick } = opts; - number(c); - number(dkLen); - number(asyncTick); + number$4(c); + number$4(dkLen); + number$4(asyncTick); if (c < 1) throw new Error('PBKDF2: iterations (c) should be >= 1'); - const password = toBytes(_password); - const salt = toBytes(_salt); + const password = toBytes$1(_password); + const salt = toBytes$1(_salt); // DK = PBKDF2(PRF, Password, Salt, c, dkLen); const DK = new Uint8Array(dkLen); // U1 = PRF(Password, Salt + INT_32_BE(i)) - const PRF = hmac.create(hash$1, password); + const PRF = hmac$1.create(hash, password); const PRFSalt = 
PRF._cloneInto().update(salt); return { c, dkLen, asyncTick, DK, PRF, PRFSalt }; } @@ -19885,12 +19885,12 @@ function scryptInit(password, salt, _opts) { maxmem: 1024 ** 3 + 1024, }, _opts); const { N, r, p, dkLen, asyncTick, maxmem, onProgress } = opts; - number(N); - number(r); - number(p); - number(dkLen); - number(asyncTick); - number(maxmem); + number$4(N); + number$4(r); + number$4(p); + number$4(dkLen); + number$4(asyncTick); + number$4(maxmem); if (onProgress !== undefined && typeof onProgress !== 'function') throw new Error('progressCb should be function'); const blockSize = 128 * r; @@ -19912,7 +19912,7 @@ function scryptInit(password, salt, _opts) { } // [B0...Bp−1] ← PBKDF2HMAC-SHA256(Passphrase, Salt, 1, blockSize*ParallelizationFactor) // Since it has only one iteration there is no reason to use async variant - const B = pbkdf2(sha256$1, password, salt, { c: 1, dkLen: blockSize * p }); + const B = pbkdf2(sha256$2, password, salt, { c: 1, dkLen: blockSize * p }); const B32 = u32$1(B); // Re-used between parallel iterations. 
Array(iterations) of B const V = u32$1(new Uint8Array(blockSize * N)); @@ -19933,7 +19933,7 @@ function scryptInit(password, salt, _opts) { return { N, r, p, dkLen, blockSize32, V, B32, B, tmp, blockMixCb, asyncTick }; } function scryptOutput(password, dkLen, B, V, tmp) { - const res = pbkdf2(sha256$1, password, B, { c: 1, dkLen }); + const res = pbkdf2(sha256$2, password, B, { c: 1, dkLen }); B.fill(0); V.fill(0); tmp.fill(0); @@ -20123,49 +20123,49 @@ let locked256 = false; * //_result: * */ -function sha256(_data) { +function sha256$1(_data) { const data = getBytes(_data, "data"); return hexlify(__sha256(data)); } -sha256._ = _sha256; -sha256.lock = function () { locked256 = true; }; -sha256.register = function (func) { +sha256$1._ = _sha256; +sha256$1.lock = function () { locked256 = true; }; +sha256$1.register = function (func) { if (locked256) { throw new Error("sha256 is locked"); } __sha256 = func; }; -Object.freeze(sha256); -Object.freeze(sha256); +Object.freeze(sha256$1); +Object.freeze(sha256$1); /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // 100 lines of code in the file are duplicated from noble-hashes (utils). // This is OK: `abstract` directory does not use noble-hashes. // User may opt-in into using different hashing library. This way, noble-hashes // won't be included into their bundle. 
-const _0n$3 = BigInt(0); -const _1n$4 = BigInt(1); -const _2n$2 = BigInt(2); +const _0n$8 = BigInt(0); +const _1n$9 = BigInt(1); +const _2n$6 = BigInt(2); const u8a = (a) => a instanceof Uint8Array; -const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); +const hexes$1 = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); /** * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' */ -function bytesToHex$1(bytes) { +function bytesToHex$4(bytes) { if (!u8a(bytes)) throw new Error('Uint8Array expected'); // pre-caching improves the speed 6x let hex = ''; for (let i = 0; i < bytes.length; i++) { - hex += hexes[bytes[i]]; + hex += hexes$1[bytes[i]]; } return hex; } -function numberToHexUnpadded(num) { +function numberToHexUnpadded$1(num) { const hex = num.toString(16); return hex.length & 1 ? `0${hex}` : hex; } -function hexToNumber(hex) { +function hexToNumber$2(hex) { if (typeof hex !== 'string') throw new Error('hex string expected, got ' + typeof hex); // Big Endian @@ -20174,7 +20174,7 @@ function hexToNumber(hex) { /** * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) */ -function hexToBytes(hex) { +function hexToBytes$4(hex) { if (typeof hex !== 'string') throw new Error('hex string expected, got ' + typeof hex); const len = hex.length; @@ -20192,23 +20192,23 @@ function hexToBytes(hex) { return array; } // BE: Big Endian, LE: Little Endian -function bytesToNumberBE(bytes) { - return hexToNumber(bytesToHex$1(bytes)); +function bytesToNumberBE$1(bytes) { + return hexToNumber$2(bytesToHex$4(bytes)); } -function bytesToNumberLE(bytes) { +function bytesToNumberLE$1(bytes) { if (!u8a(bytes)) throw new Error('Uint8Array expected'); - return hexToNumber(bytesToHex$1(Uint8Array.from(bytes).reverse())); + return hexToNumber$2(bytesToHex$4(Uint8Array.from(bytes).reverse())); } -function numberToBytesBE(n, len) { - return 
hexToBytes(n.toString(16).padStart(len * 2, '0')); +function numberToBytesBE$1(n, len) { + return hexToBytes$4(n.toString(16).padStart(len * 2, '0')); } -function numberToBytesLE(n, len) { - return numberToBytesBE(n, len).reverse(); +function numberToBytesLE$1(n, len) { + return numberToBytesBE$1(n, len).reverse(); } // Unpadded, rarely used -function numberToVarBytesBE(n) { - return hexToBytes(numberToHexUnpadded(n)); +function numberToVarBytesBE$1(n) { + return hexToBytes$4(numberToHexUnpadded$1(n)); } /** * Takes hex string or Uint8Array, converts to Uint8Array. @@ -20219,11 +20219,11 @@ function numberToVarBytesBE(n) { * @param expectedLength optional, will compare to result array's length * @returns */ -function ensureBytes(title, hex, expectedLength) { +function ensureBytes$1(title, hex, expectedLength) { let res; if (typeof hex === 'string') { try { - res = hexToBytes(hex); + res = hexToBytes$4(hex); } catch (e) { throw new Error(`${title} must be valid hex string, got "${hex}". Cause: ${e}`); @@ -20245,7 +20245,7 @@ function ensureBytes(title, hex, expectedLength) { /** * Copies several Uint8Arrays into one. 
*/ -function concatBytes(...arrays) { +function concatBytes$4(...arrays) { const r = new Uint8Array(arrays.reduce((sum, a) => sum + a.length, 0)); let pad = 0; // walk through each item, ensure they have proper type arrays.forEach((a) => { @@ -20256,7 +20256,7 @@ function concatBytes(...arrays) { }); return r; } -function equalBytes(b1, b2) { +function equalBytes$1(b1, b2) { // We don't care about timing attacks here if (b1.length !== b2.length) return false; @@ -20268,7 +20268,7 @@ function equalBytes(b1, b2) { /** * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) */ -function utf8ToBytes(str) { +function utf8ToBytes$2(str) { if (typeof str !== 'string') throw new Error(`utf8ToBytes expected string, got ${typeof str}`); return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 @@ -20278,9 +20278,9 @@ function utf8ToBytes(str) { * Calculates amount of bits in a bigint. * Same as `n.toString(2).length` */ -function bitLen(n) { +function bitLen$1(n) { let len; - for (len = 0; n > _0n$3; n >>= _1n$4, len += 1) + for (len = 0; n > _0n$8; n >>= _1n$9, len += 1) ; return len; } @@ -20289,23 +20289,23 @@ function bitLen(n) { * NOTE: first bit position is 0 (same as arrays) * Same as `!!+Array.from(n.toString(2)).reverse()[pos]` */ -function bitGet(n, pos) { - return (n >> BigInt(pos)) & _1n$4; +function bitGet$1(n, pos) { + return (n >> BigInt(pos)) & _1n$9; } /** * Sets single bit at position. */ -const bitSet = (n, pos, value) => { - return n | ((value ? _1n$4 : _0n$3) << BigInt(pos)); +const bitSet$1 = (n, pos, value) => { + return n | ((value ? _1n$9 : _0n$8) << BigInt(pos)); }; /** * Calculate mask for N bits. Not using ** operator with bigints because of old engines. 
* Same as BigInt(`0b${Array(i).fill('1').join('')}`) */ -const bitMask = (n) => (_2n$2 << BigInt(n - 1)) - _1n$4; +const bitMask$1 = (n) => (_2n$6 << BigInt(n - 1)) - _1n$9; // DRBG -const u8n = (data) => new Uint8Array(data); // creates Uint8Array -const u8fr = (arr) => Uint8Array.from(arr); // another shortcut +const u8n$1 = (data) => new Uint8Array(data); // creates Uint8Array +const u8fr$1 = (arr) => Uint8Array.from(arr); // another shortcut /** * Minimal HMAC-DRBG from NIST 800-90 for RFC6979 sigs. * @returns function that will call DRBG until 2nd arg returns something meaningful @@ -20313,7 +20313,7 @@ const u8fr = (arr) => Uint8Array.from(arr); // another shortcut * const drbg = createHmacDRBG(32, 32, hmac); * drbg(seed, bytesToKey); // bytesToKey must return Key or undefined */ -function createHmacDrbg(hashLen, qByteLen, hmacFn) { +function createHmacDrbg$1(hashLen, qByteLen, hmacFn) { if (typeof hashLen !== 'number' || hashLen < 2) throw new Error('hashLen must be a number'); if (typeof qByteLen !== 'number' || qByteLen < 2) @@ -20321,8 +20321,8 @@ function createHmacDrbg(hashLen, qByteLen, hmacFn) { if (typeof hmacFn !== 'function') throw new Error('hmacFn must be a function'); // Step B, Step C: set hashLen to 8*ceil(hlen/8) - let v = u8n(hashLen); // Minimal non-full-spec HMAC-DRBG from NIST 800-90 for RFC6979 sigs. - let k = u8n(hashLen); // Steps B and C of RFC6979 3.2: set hashLen, in our case always same + let v = u8n$1(hashLen); // Minimal non-full-spec HMAC-DRBG from NIST 800-90 for RFC6979 sigs. + let k = u8n$1(hashLen); // Steps B and C of RFC6979 3.2: set hashLen, in our case always same let i = 0; // Iterations counter, will throw when over 1000 const reset = () => { v.fill(1); @@ -20330,13 +20330,13 @@ function createHmacDrbg(hashLen, qByteLen, hmacFn) { i = 0; }; const h = (...b) => hmacFn(k, v, ...b); // hmac(k)(v, ...values) - const reseed = (seed = u8n()) => { + const reseed = (seed = u8n$1()) => { // HMAC-DRBG reseed() function. 
Steps D-G - k = h(u8fr([0x00]), seed); // k = hmac(k || v || 0x00 || seed) + k = h(u8fr$1([0x00]), seed); // k = hmac(k || v || 0x00 || seed) v = h(); // v = hmac(k || v) if (seed.length === 0) return; - k = h(u8fr([0x01]), seed); // k = hmac(k || v || 0x01 || seed) + k = h(u8fr$1([0x01]), seed); // k = hmac(k || v || 0x01 || seed) v = h(); // v = hmac(k || v) }; const gen = () => { @@ -20351,7 +20351,7 @@ function createHmacDrbg(hashLen, qByteLen, hmacFn) { out.push(sl); len += v.length; } - return concatBytes(...out); + return concatBytes$4(...out); }; const genUntil = (seed, pred) => { reset(); @@ -20365,7 +20365,7 @@ function createHmacDrbg(hashLen, qByteLen, hmacFn) { return genUntil; } // Validating curves and fields -const validatorFns = { +const validatorFns$1 = { bigint: (val) => typeof val === 'bigint', function: (val) => typeof val === 'function', boolean: (val) => typeof val === 'boolean', @@ -20377,9 +20377,9 @@ const validatorFns = { hash: (val) => typeof val === 'function' && Number.isSafeInteger(val.outputLen), }; // type Record = { [P in K]: T; } -function validateObject(object, validators, optValidators = {}) { +function validateObject$1(object, validators, optValidators = {}) { const checkField = (fieldName, type, isOptional) => { - const checkVal = validatorFns[type]; + const checkVal = validatorFns$1[type]; if (typeof checkVal !== 'function') throw new Error(`Invalid validator "${type}", expected function`); const val = object[fieldName]; @@ -20406,39 +20406,39 @@ function validateObject(object, validators, optValidators = {}) { var ut = /*#__PURE__*/Object.freeze({ __proto__: null, - bitGet: bitGet, - bitLen: bitLen, - bitMask: bitMask, - bitSet: bitSet, - bytesToHex: bytesToHex$1, - bytesToNumberBE: bytesToNumberBE, - bytesToNumberLE: bytesToNumberLE, - concatBytes: concatBytes, - createHmacDrbg: createHmacDrbg, - ensureBytes: ensureBytes, - equalBytes: equalBytes, - hexToBytes: hexToBytes, - hexToNumber: hexToNumber, - numberToBytesBE: 
numberToBytesBE, - numberToBytesLE: numberToBytesLE, - numberToHexUnpadded: numberToHexUnpadded, - numberToVarBytesBE: numberToVarBytesBE, - utf8ToBytes: utf8ToBytes, - validateObject: validateObject + bitGet: bitGet$1, + bitLen: bitLen$1, + bitMask: bitMask$1, + bitSet: bitSet$1, + bytesToHex: bytesToHex$4, + bytesToNumberBE: bytesToNumberBE$1, + bytesToNumberLE: bytesToNumberLE$1, + concatBytes: concatBytes$4, + createHmacDrbg: createHmacDrbg$1, + ensureBytes: ensureBytes$1, + equalBytes: equalBytes$1, + hexToBytes: hexToBytes$4, + hexToNumber: hexToNumber$2, + numberToBytesBE: numberToBytesBE$1, + numberToBytesLE: numberToBytesLE$1, + numberToHexUnpadded: numberToHexUnpadded$1, + numberToVarBytesBE: numberToVarBytesBE$1, + utf8ToBytes: utf8ToBytes$2, + validateObject: validateObject$1 }); /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // Utilities for modular arithmetics and finite fields // prettier-ignore -const _0n$2 = BigInt(0), _1n$3 = BigInt(1), _2n$1 = BigInt(2), _3n$1 = BigInt(3); +const _0n$7 = BigInt(0), _1n$8 = BigInt(1), _2n$5 = BigInt(2), _3n$2 = BigInt(3); // prettier-ignore -const _4n = BigInt(4), _5n = BigInt(5), _8n = BigInt(8); +const _4n$1 = BigInt(4), _5n$1 = BigInt(5), _8n$1 = BigInt(8); // prettier-ignore BigInt(9); BigInt(16); // Calculates a modulo b -function mod$3(a, b) { +function mod$4(a, b) { const result = a % b; - return result >= _0n$2 ? result : b + result; + return result >= _0n$7 ? result : b + result; } /** * Efficiently raise num to power and do modular division. 
@@ -20447,41 +20447,41 @@ function mod$3(a, b) { * pow(2n, 6n, 11n) // 64n % 11n == 9n */ // TODO: use field version && remove -function pow$3(num, power, modulo) { - if (modulo <= _0n$2 || power < _0n$2) +function pow$4(num, power, modulo) { + if (modulo <= _0n$7 || power < _0n$7) throw new Error('Expected power/modulo > 0'); - if (modulo === _1n$3) - return _0n$2; - let res = _1n$3; - while (power > _0n$2) { - if (power & _1n$3) + if (modulo === _1n$8) + return _0n$7; + let res = _1n$8; + while (power > _0n$7) { + if (power & _1n$8) res = (res * num) % modulo; num = (num * num) % modulo; - power >>= _1n$3; + power >>= _1n$8; } return res; } // Does x ^ (2 ^ power) mod p. pow2(30, 4) == 30 ^ (2 ^ 4) -function pow2(x, power, modulo) { +function pow2$1(x, power, modulo) { let res = x; - while (power-- > _0n$2) { + while (power-- > _0n$7) { res *= res; res %= modulo; } return res; } // Inverses number over modulo -function invert(number, modulo) { - if (number === _0n$2 || modulo <= _0n$2) { +function invert$1(number, modulo) { + if (number === _0n$7 || modulo <= _0n$7) { throw new Error(`invert: expected positive integers, got n=${number} mod=${modulo}`); } // Euclidean GCD https://brilliant.org/wiki/extended-euclidean-algorithm/ // Fermat's little theorem "CT-like" version inv(n) = n^(m-2) mod m is 30x slower. - let a = mod$3(number, modulo); + let a = mod$4(number, modulo); let b = modulo; // prettier-ignore - let x = _0n$2, u = _1n$3; - while (a !== _0n$2) { + let x = _0n$7, u = _1n$8; + while (a !== _0n$7) { // JIT applies optimization if those two lines follow each other const q = b / a; const r = b % a; @@ -20490,9 +20490,9 @@ function invert(number, modulo) { b = a, a = r, x = u, u = m; } const gcd = b; - if (gcd !== _1n$3) + if (gcd !== _1n$8) throw new Error('invert: does not exist'); - return mod$3(x, modulo); + return mod$4(x, modulo); } /** * Tonelli-Shanks square root search algorithm. 
@@ -20502,24 +20502,24 @@ function invert(number, modulo) { * @param P field order * @returns function that takes field Fp (created from P) and number n */ -function tonelliShanks(P) { +function tonelliShanks$1(P) { // Legendre constant: used to calculate Legendre symbol (a | p), // which denotes the value of a^((p-1)/2) (mod p). // (a | p) ≡ 1 if a is a square (mod p) // (a | p) ≡ -1 if a is not a square (mod p) // (a | p) ≡ 0 if a ≡ 0 (mod p) - const legendreC = (P - _1n$3) / _2n$1; + const legendreC = (P - _1n$8) / _2n$5; let Q, S, Z; // Step 1: By factoring out powers of 2 from p - 1, // find q and s such that p - 1 = q*(2^s) with q odd - for (Q = P - _1n$3, S = 0; Q % _2n$1 === _0n$2; Q /= _2n$1, S++) + for (Q = P - _1n$8, S = 0; Q % _2n$5 === _0n$7; Q /= _2n$5, S++) ; // Step 2: Select a non-square z such that (z | p) ≡ -1 and set c ≡ zq - for (Z = _2n$1; Z < P && pow$3(Z, legendreC, P) !== P - _1n$3; Z++) + for (Z = _2n$5; Z < P && pow$4(Z, legendreC, P) !== P - _1n$8; Z++) ; // Fast-path if (S === 1) { - const p1div4 = (P + _1n$3) / _4n; + const p1div4 = (P + _1n$8) / _4n$1; return function tonelliFast(Fp, n) { const root = Fp.pow(n, p1div4); if (!Fp.eql(Fp.sqr(root), n)) @@ -20528,7 +20528,7 @@ function tonelliShanks(P) { }; } // Slow-path - const Q1div2 = (Q + _1n$3) / _2n$1; + const Q1div2 = (Q + _1n$8) / _2n$5; return function tonelliSlow(Fp, n) { // Step 0: Check that n is indeed a square: (n | p) should not be ≡ -1 if (Fp.pow(n, legendreC) === Fp.neg(Fp.ONE)) @@ -20549,7 +20549,7 @@ function tonelliShanks(P) { t2 = Fp.sqr(t2); // t2 *= t2 } // NOTE: r-m-1 can be bigger than 32, need to convert to bigint before shift, otherwise there will be overflow - const ge = Fp.pow(g, _1n$3 << BigInt(r - m - 1)); // ge = 2^(r-m-1) + const ge = Fp.pow(g, _1n$8 << BigInt(r - m - 1)); // ge = 2^(r-m-1) g = Fp.sqr(ge); // g = ge * ge x = Fp.mul(x, ge); // x *= ge b = Fp.mul(b, g); // b *= g @@ -20558,17 +20558,17 @@ function tonelliShanks(P) { return x; }; } -function 
FpSqrt(P) { +function FpSqrt$1(P) { // NOTE: different algorithms can give different roots, it is up to user to decide which one they want. // For example there is FpSqrtOdd/FpSqrtEven to choice root based on oddness (used for hash-to-curve). // P ≡ 3 (mod 4) // √n = n^((P+1)/4) - if (P % _4n === _3n$1) { + if (P % _4n$1 === _3n$2) { // Not all roots possible! // const ORDER = // 0x1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaabn; // const NUM = 72057594037927816n; - const p1div4 = (P + _1n$3) / _4n; + const p1div4 = (P + _1n$8) / _4n$1; return function sqrt3mod4(Fp, n) { const root = Fp.pow(n, p1div4); // Throw if root**2 != n @@ -20578,13 +20578,13 @@ function FpSqrt(P) { }; } // Atkin algorithm for q ≡ 5 (mod 8), https://eprint.iacr.org/2012/685.pdf (page 10) - if (P % _8n === _5n) { - const c1 = (P - _5n) / _8n; + if (P % _8n$1 === _5n$1) { + const c1 = (P - _5n$1) / _8n$1; return function sqrt5mod8(Fp, n) { - const n2 = Fp.mul(n, _2n$1); + const n2 = Fp.mul(n, _2n$5); const v = Fp.pow(n2, c1); const nv = Fp.mul(n, v); - const i = Fp.mul(Fp.mul(nv, _2n$1), v); + const i = Fp.mul(Fp.mul(nv, _2n$5), v); const root = Fp.mul(nv, Fp.sub(i, Fp.ONE)); if (!Fp.eql(Fp.sqr(root), n)) throw new Error('Cannot find square root'); @@ -20592,48 +20592,48 @@ function FpSqrt(P) { }; } // Other cases: Tonelli-Shanks algorithm - return tonelliShanks(P); + return tonelliShanks$1(P); } // prettier-ignore -const FIELD_FIELDS = [ +const FIELD_FIELDS$1 = [ 'create', 'isValid', 'is0', 'neg', 'inv', 'sqrt', 'sqr', 'eql', 'add', 'sub', 'mul', 'pow', 'div', 'addN', 'subN', 'mulN', 'sqrN' ]; -function validateField(field) { +function validateField$1(field) { const initial = { ORDER: 'bigint', MASK: 'bigint', BYTES: 'isSafeInteger', BITS: 'isSafeInteger', }; - const opts = FIELD_FIELDS.reduce((map, val) => { + const opts = FIELD_FIELDS$1.reduce((map, val) => { map[val] = 'function'; return map; }, initial); - return validateObject(field, 
opts); + return validateObject$1(field, opts); } // Generic field functions /** * Same as `pow` but for Fp: non-constant-time. * Unsafe in some contexts: uses ladder, so can expose bigint bits. */ -function FpPow(f, num, power) { +function FpPow$1(f, num, power) { // Should have same speed as pow for bigints // TODO: benchmark! - if (power < _0n$2) + if (power < _0n$7) throw new Error('Expected power > 0'); - if (power === _0n$2) + if (power === _0n$7) return f.ONE; - if (power === _1n$3) + if (power === _1n$8) return num; let p = f.ONE; let d = num; - while (power > _0n$2) { - if (power & _1n$3) + while (power > _0n$7) { + if (power & _1n$8) p = f.mul(p, d); d = f.sqr(d); - power >>= _1n$3; + power >>= _1n$8; } return p; } @@ -20641,7 +20641,7 @@ function FpPow(f, num, power) { * Efficiently invert an array of Field elements. * `inv(0)` will return `undefined` here: make sure to throw an error. */ -function FpInvertBatch(f, nums) { +function FpInvertBatch$1(f, nums) { const tmp = new Array(nums.length); // Walk from first to last, multiply them by each other MOD p const lastMultiplied = nums.reduce((acc, num, i) => { @@ -20662,7 +20662,7 @@ function FpInvertBatch(f, nums) { return tmp; } // CURVE.n lengths -function nLength(n, nBitLength) { +function nLength$1(n, nBitLength) { // Bit size, byte size of CURVE.n const _nBitLength = nBitLength !== undefined ? 
nBitLength : n.toString(2).length; const nByteLength = Math.ceil(_nBitLength / 8); @@ -20680,52 +20680,52 @@ function nLength(n, nBitLength) { * @param isLE (def: false) if encoding / decoding should be in little-endian * @param redef optional faster redefinitions of sqrt and other methods */ -function Field(ORDER, bitLen, isLE = false, redef = {}) { - if (ORDER <= _0n$2) +function Field$1(ORDER, bitLen, isLE = false, redef = {}) { + if (ORDER <= _0n$7) throw new Error(`Expected Field ORDER > 0, got ${ORDER}`); - const { nBitLength: BITS, nByteLength: BYTES } = nLength(ORDER, bitLen); + const { nBitLength: BITS, nByteLength: BYTES } = nLength$1(ORDER, bitLen); if (BYTES > 2048) throw new Error('Field lengths over 2048 bytes are not supported'); - const sqrtP = FpSqrt(ORDER); + const sqrtP = FpSqrt$1(ORDER); const f = Object.freeze({ ORDER, BITS, BYTES, - MASK: bitMask(BITS), - ZERO: _0n$2, - ONE: _1n$3, - create: (num) => mod$3(num, ORDER), + MASK: bitMask$1(BITS), + ZERO: _0n$7, + ONE: _1n$8, + create: (num) => mod$4(num, ORDER), isValid: (num) => { if (typeof num !== 'bigint') throw new Error(`Invalid field element: expected bigint, got ${typeof num}`); - return _0n$2 <= num && num < ORDER; // 0 is valid element, but it's not invertible + return _0n$7 <= num && num < ORDER; // 0 is valid element, but it's not invertible }, - is0: (num) => num === _0n$2, - isOdd: (num) => (num & _1n$3) === _1n$3, - neg: (num) => mod$3(-num, ORDER), + is0: (num) => num === _0n$7, + isOdd: (num) => (num & _1n$8) === _1n$8, + neg: (num) => mod$4(-num, ORDER), eql: (lhs, rhs) => lhs === rhs, - sqr: (num) => mod$3(num * num, ORDER), - add: (lhs, rhs) => mod$3(lhs + rhs, ORDER), - sub: (lhs, rhs) => mod$3(lhs - rhs, ORDER), - mul: (lhs, rhs) => mod$3(lhs * rhs, ORDER), - pow: (num, power) => FpPow(f, num, power), - div: (lhs, rhs) => mod$3(lhs * invert(rhs, ORDER), ORDER), + sqr: (num) => mod$4(num * num, ORDER), + add: (lhs, rhs) => mod$4(lhs + rhs, ORDER), + sub: (lhs, rhs) => 
mod$4(lhs - rhs, ORDER), + mul: (lhs, rhs) => mod$4(lhs * rhs, ORDER), + pow: (num, power) => FpPow$1(f, num, power), + div: (lhs, rhs) => mod$4(lhs * invert$1(rhs, ORDER), ORDER), // Same as above, but doesn't normalize sqrN: (num) => num * num, addN: (lhs, rhs) => lhs + rhs, subN: (lhs, rhs) => lhs - rhs, mulN: (lhs, rhs) => lhs * rhs, - inv: (num) => invert(num, ORDER), + inv: (num) => invert$1(num, ORDER), sqrt: redef.sqrt || ((n) => sqrtP(f, n)), - invertBatch: (lst) => FpInvertBatch(f, lst), + invertBatch: (lst) => FpInvertBatch$1(f, lst), // TODO: do we really need constant cmov? // We don't have const-time bigints anyway, so probably will be not very useful cmov: (a, b, c) => (c ? b : a), - toBytes: (num) => (isLE ? numberToBytesLE(num, BYTES) : numberToBytesBE(num, BYTES)), + toBytes: (num) => (isLE ? numberToBytesLE$1(num, BYTES) : numberToBytesBE$1(num, BYTES)), fromBytes: (bytes) => { if (bytes.length !== BYTES) throw new Error(`Fp.fromBytes: expected ${BYTES}, got ${bytes.length}`); - return isLE ? bytesToNumberLE(bytes) : bytesToNumberBE(bytes); + return isLE ? 
bytesToNumberLE$1(bytes) : bytesToNumberBE$1(bytes); }, }); return Object.freeze(f); @@ -20736,7 +20736,7 @@ function Field(ORDER, bitLen, isLE = false, redef = {}) { * @param fieldOrder number of field elements, usually CURVE.n * @returns byte length of field */ -function getFieldBytesLength(fieldOrder) { +function getFieldBytesLength$1(fieldOrder) { if (typeof fieldOrder !== 'bigint') throw new Error('field order must be bigint'); const bitLength = fieldOrder.toString(2).length; @@ -20749,8 +20749,8 @@ function getFieldBytesLength(fieldOrder) { * @param fieldOrder number of field elements, usually CURVE.n * @returns byte length of target hash */ -function getMinHashLength(fieldOrder) { - const length = getFieldBytesLength(fieldOrder); +function getMinHashLength$1(fieldOrder) { + const length = getFieldBytesLength$1(fieldOrder); return length + Math.ceil(length / 2); } /** @@ -20766,23 +20766,23 @@ function getMinHashLength(fieldOrder) { * @param isLE interpret hash bytes as LE num * @returns valid private scalar */ -function mapHashToField(key, fieldOrder, isLE = false) { +function mapHashToField$1(key, fieldOrder, isLE = false) { const len = key.length; - const fieldLen = getFieldBytesLength(fieldOrder); - const minLen = getMinHashLength(fieldOrder); + const fieldLen = getFieldBytesLength$1(fieldOrder); + const minLen = getMinHashLength$1(fieldOrder); // No small numbers: need to understand bias story. No huge numbers: easier to detect JS timings. if (len < 16 || len < minLen || len > 1024) throw new Error(`expected ${minLen}-1024 bytes of input, got ${len}`); - const num = isLE ? bytesToNumberBE(key) : bytesToNumberLE(key); + const num = isLE ? bytesToNumberBE$1(key) : bytesToNumberLE$1(key); // `mod(x, 11)` can sometimes produce 0. `mod(x, 10) + 1` is the same, but no 0 - const reduced = mod$3(num, fieldOrder - _1n$3) + _1n$3; - return isLE ? 
numberToBytesLE(reduced, fieldLen) : numberToBytesBE(reduced, fieldLen); + const reduced = mod$4(num, fieldOrder - _1n$8) + _1n$8; + return isLE ? numberToBytesLE$1(reduced, fieldLen) : numberToBytesBE$1(reduced, fieldLen); } /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // Abelian group utilities -const _0n$1 = BigInt(0); -const _1n$2 = BigInt(1); +const _0n$6 = BigInt(0); +const _1n$7 = BigInt(1); // Elliptic curve multiplication of Point by scalar. Fragile. // Scalars should always be less than curve order: this should be checked inside of a curve itself. // Creates precomputation tables for fast multiplication: @@ -20794,7 +20794,7 @@ const _1n$2 = BigInt(1); // - wNAF reduces table size: 2x less memory + 2x faster generation, but 10% slower multiplication // TODO: Research returning 2d JS array of windows, instead of a single window. This would allow // windows to be in different memory locations -function wNAF(c, bits) { +function wNAF$1(c, bits) { const constTimeNegate = (condition, item) => { const neg = item.negate(); return condition ? neg : item; @@ -20810,11 +20810,11 @@ function wNAF(c, bits) { unsafeLadder(elm, n) { let p = c.ZERO; let d = elm; - while (n > _0n$1) { - if (n & _1n$2) + while (n > _0n$6) { + if (n & _1n$7) p = p.add(d); d = d.double(); - n >>= _1n$2; + n >>= _1n$7; } return p; }, @@ -20871,7 +20871,7 @@ function wNAF(c, bits) { // +224 => 256 - 32 if (wbits > windowSize) { wbits -= maxNumber; - n += _1n$2; + n += _1n$7; } // This code was first written with assumption that 'f' and 'p' will never be infinity point: // since each addition is multiplied by 2 ** W, it cannot cancel each other. 
However, @@ -20914,9 +20914,9 @@ function wNAF(c, bits) { }, }; } -function validateBasic(curve) { - validateField(curve.Fp); - validateObject(curve, { +function validateBasic$1(curve) { + validateField$1(curve.Fp); + validateObject$1(curve, { n: 'bigint', h: 'bigint', Gx: 'field', @@ -20927,7 +20927,7 @@ function validateBasic(curve) { }); // Set defaults return Object.freeze({ - ...nLength(curve.n, curve.nBitLength), + ...nLength$1(curve.n, curve.nBitLength), ...curve, ...{ p: curve.Fp.ORDER }, }); @@ -20936,8 +20936,8 @@ function validateBasic(curve) { /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // Short Weierstrass curve. The formula is: y² = x³ + ax + b function validatePointOpts(curve) { - const opts = validateBasic(curve); - validateObject(opts, { + const opts = validateBasic$1(curve); + validateObject$1(opts, { a: 'field', b: 'field', }, { @@ -21024,14 +21024,14 @@ const DER = { }; // Be friendly to bad ECMAScript parsers by not using bigint literals // prettier-ignore -const _0n = BigInt(0), _1n$1 = BigInt(1); BigInt(2); const _3n = BigInt(3); BigInt(4); +const _0n$5 = BigInt(0), _1n$6 = BigInt(1); BigInt(2); const _3n$1 = BigInt(3); BigInt(4); function weierstrassPoints(opts) { const CURVE = validatePointOpts(opts); const { Fp } = CURVE; // All curves has same field / group length as for now, but they can differ const toBytes = CURVE.toBytes || ((_c, point, _isCompressed) => { const a = point.toAffine(); - return concatBytes(Uint8Array.from([0x04]), Fp.toBytes(a.x), Fp.toBytes(a.y)); + return concatBytes$4(Uint8Array.from([0x04]), Fp.toBytes(a.x), Fp.toBytes(a.y)); }); const fromBytes = CURVE.fromBytes || ((bytes) => { @@ -21060,7 +21060,7 @@ function weierstrassPoints(opts) { throw new Error('bad generator point: equation left != right'); // Valid group elements reside in range 1..n-1 function isWithinCurveOrder(num) { - return typeof num === 'bigint' && _0n < num && num < CURVE.n; + return typeof num === 'bigint' && _0n$5 < 
num && num < CURVE.n; } function assertGE(num) { if (!isWithinCurveOrder(num)) @@ -21072,7 +21072,7 @@ function weierstrassPoints(opts) { const { allowedPrivateKeyLengths: lengths, nByteLength, wrapPrivateKey, n } = CURVE; if (lengths && typeof key !== 'bigint') { if (key instanceof Uint8Array) - key = bytesToHex$1(key); + key = bytesToHex$4(key); // Normalize to hex string, pad. E.g. P521 would norm 130-132 char hex to 132-char bytes if (typeof key !== 'string' || !lengths.includes(key.length)) throw new Error('Invalid key'); @@ -21083,13 +21083,13 @@ function weierstrassPoints(opts) { num = typeof key === 'bigint' ? key - : bytesToNumberBE(ensureBytes('private key', key, nByteLength)); + : bytesToNumberBE$1(ensureBytes$1('private key', key, nByteLength)); } catch (error) { throw new Error(`private key must be ${nByteLength} bytes, hex or bigint, not ${typeof key}`); } if (wrapPrivateKey) - num = mod$3(num, n); // disabled by default, enabled for BLS + num = mod$4(num, n); // disabled by default, enabled for BLS assertGE(num); // num in range [1..N-1] return num; } @@ -21150,7 +21150,7 @@ function weierstrassPoints(opts) { * @param hex short/long ECDSA hex */ static fromHex(hex) { - const P = Point.fromAffine(fromBytes(ensureBytes('pointHex', hex))); + const P = Point.fromAffine(fromBytes(ensureBytes$1('pointHex', hex))); P.assertValidity(); return P; } @@ -21214,7 +21214,7 @@ function weierstrassPoints(opts) { // Cost: 8M + 3S + 3*a + 2*b3 + 15add. 
double() { const { a, b } = CURVE; - const b3 = Fp.mul(b, _3n); + const b3 = Fp.mul(b, _3n$1); const { px: X1, py: Y1, pz: Z1 } = this; let X3 = Fp.ZERO, Y3 = Fp.ZERO, Z3 = Fp.ZERO; // prettier-ignore let t0 = Fp.mul(X1, X1); // step 1 @@ -21260,7 +21260,7 @@ function weierstrassPoints(opts) { const { px: X2, py: Y2, pz: Z2 } = other; let X3 = Fp.ZERO, Y3 = Fp.ZERO, Z3 = Fp.ZERO; // prettier-ignore const a = CURVE.a; - const b3 = Fp.mul(CURVE.b, _3n); + const b3 = Fp.mul(CURVE.b, _3n$1); let t0 = Fp.mul(X1, X2); // step 1 let t1 = Fp.mul(Y1, Y2); let t2 = Fp.mul(Z1, Z2); @@ -21322,10 +21322,10 @@ function weierstrassPoints(opts) { */ multiplyUnsafe(n) { const I = Point.ZERO; - if (n === _0n) + if (n === _0n$5) return I; assertGE(n); // Will throw on 0 - if (n === _1n$1) + if (n === _1n$6) return this; const { endo } = CURVE; if (!endo) @@ -21335,14 +21335,14 @@ function weierstrassPoints(opts) { let k1p = I; let k2p = I; let d = this; - while (k1 > _0n || k2 > _0n) { - if (k1 & _1n$1) + while (k1 > _0n$5 || k2 > _0n$5) { + if (k1 & _1n$6) k1p = k1p.add(d); - if (k2 & _1n$1) + if (k2 & _1n$6) k2p = k2p.add(d); d = d.double(); - k1 >>= _1n$1; - k2 >>= _1n$1; + k1 >>= _1n$6; + k2 >>= _1n$6; } if (k1neg) k1p = k1p.negate(); @@ -21392,7 +21392,7 @@ function weierstrassPoints(opts) { multiplyAndAddUnsafe(Q, a, b) { const G = Point.BASE; // No Strauss-Shamir trick: we have 10% faster G precomputes const mul = (P, a // Select faster multiply() method - ) => (a === _0n || a === _1n$1 || !P.equals(G) ? P.multiplyUnsafe(a) : P.multiply(a)); + ) => (a === _0n$5 || a === _1n$6 || !P.equals(G) ? P.multiplyUnsafe(a) : P.multiply(a)); const sum = mul(this, a).add(mul(Q, b)); return sum.is0() ? 
undefined : sum; } @@ -21417,7 +21417,7 @@ function weierstrassPoints(opts) { } isTorsionFree() { const { h: cofactor, isTorsionFree } = CURVE; - if (cofactor === _1n$1) + if (cofactor === _1n$6) return true; // No subgroups, always torsion-free if (isTorsionFree) return isTorsionFree(Point, this); @@ -21425,7 +21425,7 @@ function weierstrassPoints(opts) { } clearCofactor() { const { h: cofactor, clearCofactor } = CURVE; - if (cofactor === _1n$1) + if (cofactor === _1n$6) return this; // Fast-path if (clearCofactor) return clearCofactor(Point, this); @@ -21436,13 +21436,13 @@ function weierstrassPoints(opts) { return toBytes(Point, this, isCompressed); } toHex(isCompressed = true) { - return bytesToHex$1(this.toRawBytes(isCompressed)); + return bytesToHex$4(this.toRawBytes(isCompressed)); } } Point.BASE = new Point(CURVE.Gx, CURVE.Gy, Fp.ONE); Point.ZERO = new Point(Fp.ZERO, Fp.ONE, Fp.ZERO); const _bits = CURVE.nBitLength; - const wnaf = wNAF(Point, CURVE.endo ? Math.ceil(_bits / 2) : _bits); + const wnaf = wNAF$1(Point, CURVE.endo ? Math.ceil(_bits / 2) : _bits); // Validate if generator point is on curve return { CURVE, @@ -21453,8 +21453,8 @@ function weierstrassPoints(opts) { }; } function validateOpts(curve) { - const opts = validateBasic(curve); - validateObject(opts, { + const opts = validateBasic$1(curve); + validateObject$1(opts, { hash: 'hash', hmac: 'function', randomBytes: 'function', @@ -21465,26 +21465,26 @@ function validateOpts(curve) { }); return Object.freeze({ lowS: true, ...opts }); } -function weierstrass(curveDef) { +function weierstrass$1(curveDef) { const CURVE = validateOpts(curveDef); const { Fp, n: CURVE_ORDER } = CURVE; const compressedLen = Fp.BYTES + 1; // e.g. 33 for 32 const uncompressedLen = 2 * Fp.BYTES + 1; // e.g. 
65 for 32 function isValidFieldElement(num) { - return _0n < num && num < Fp.ORDER; // 0 is banned since it's not invertible FE + return _0n$5 < num && num < Fp.ORDER; // 0 is banned since it's not invertible FE } function modN(a) { - return mod$3(a, CURVE_ORDER); + return mod$4(a, CURVE_ORDER); } function invN(a) { - return invert(a, CURVE_ORDER); + return invert$1(a, CURVE_ORDER); } const { ProjectivePoint: Point, normPrivateKeyToScalar, weierstrassEquation, isWithinCurveOrder, } = weierstrassPoints({ ...CURVE, toBytes(_c, point, isCompressed) { const a = point.toAffine(); const x = Fp.toBytes(a.x); - const cat = concatBytes; + const cat = concatBytes$4; if (isCompressed) { return cat(Uint8Array.from([point.hasEvenY() ? 0x02 : 0x03]), x); } @@ -21498,12 +21498,12 @@ function weierstrass(curveDef) { const tail = bytes.subarray(1); // this.assertValidity() is done inside of fromHex if (len === compressedLen && (head === 0x02 || head === 0x03)) { - const x = bytesToNumberBE(tail); + const x = bytesToNumberBE$1(tail); if (!isValidFieldElement(x)) throw new Error('Point is not on curve'); const y2 = weierstrassEquation(x); // y² = x³ + ax + b let y = Fp.sqrt(y2); // y = y² ^ (p+1)/4 - const isYOdd = (y & _1n$1) === _1n$1; + const isYOdd = (y & _1n$6) === _1n$6; // ECDSA const isHeadOdd = (head & 1) === 1; if (isHeadOdd !== isYOdd) @@ -21520,16 +21520,16 @@ function weierstrass(curveDef) { } }, }); - const numToNByteStr = (num) => bytesToHex$1(numberToBytesBE(num, CURVE.nByteLength)); + const numToNByteStr = (num) => bytesToHex$4(numberToBytesBE$1(num, CURVE.nByteLength)); function isBiggerThanHalfOrder(number) { - const HALF = CURVE_ORDER >> _1n$1; + const HALF = CURVE_ORDER >> _1n$6; return number > HALF; } function normalizeS(s) { return isBiggerThanHalfOrder(s) ? 
modN(-s) : s; } // slice bytes num - const slcNum = (b, from, to) => bytesToNumberBE(b.slice(from, to)); + const slcNum = (b, from, to) => bytesToNumberBE$1(b.slice(from, to)); /** * ECDSA signature with its (r, s) properties. Supports DER & compact representations. */ @@ -21543,13 +21543,13 @@ function weierstrass(curveDef) { // pair (bytes of r, bytes of s) static fromCompact(hex) { const l = CURVE.nByteLength; - hex = ensureBytes('compactSignature', hex, l * 2); + hex = ensureBytes$1('compactSignature', hex, l * 2); return new Signature(slcNum(hex, 0, l), slcNum(hex, l, 2 * l)); } // DER encoded ECDSA signature // https://bitcoin.stackexchange.com/questions/57644/what-are-the-parts-of-a-bitcoin-transaction-input-script static fromDER(hex) { - const { r, s } = DER.toSig(ensureBytes('DER', hex)); + const { r, s } = DER.toSig(ensureBytes$1('DER', hex)); return new Signature(r, s); } assertValidity() { @@ -21564,7 +21564,7 @@ function weierstrass(curveDef) { } recoverPublicKey(msgHash) { const { r, s, recovery: rec } = this; - const h = bits2int_modN(ensureBytes('msgHash', msgHash)); // Truncate hash + const h = bits2int_modN(ensureBytes$1('msgHash', msgHash)); // Truncate hash if (rec == null || ![0, 1, 2, 3].includes(rec)) throw new Error('recovery id invalid'); const radj = rec === 2 || rec === 3 ? r + CURVE.n : r; @@ -21590,14 +21590,14 @@ function weierstrass(curveDef) { } // DER-encoded toDERRawBytes() { - return hexToBytes(this.toDERHex()); + return hexToBytes$4(this.toDERHex()); } toDERHex() { return DER.hexFromSig({ r: this.r, s: this.s }); } // padded bytes of r, then padded bytes of s toCompactRawBytes() { - return hexToBytes(this.toCompactHex()); + return hexToBytes$4(this.toCompactHex()); } toCompactHex() { return numToNByteStr(this.r) + numToNByteStr(this.s); @@ -21619,8 +21619,8 @@ function weierstrass(curveDef) { * (groupLen + ceil(groupLen / 2)) with modulo bias being negligible. 
*/ randomPrivateKey: () => { - const length = getMinHashLength(CURVE.n); - return mapHashToField(CURVE.randomBytes(length), CURVE.n); + const length = getMinHashLength$1(CURVE.n); + return mapHashToField$1(CURVE.randomBytes(length), CURVE.n); }, /** * Creates precompute table for an arbitrary EC point. Makes point "cached". @@ -21686,7 +21686,7 @@ function weierstrass(curveDef) { function (bytes) { // For curves with nBitLength % 8 !== 0: bits2octets(bits2octets(m)) !== bits2octets(m) // for some cases, since bytes.length * 8 is not actual bitLength. - const num = bytesToNumberBE(bytes); // check for == u8 done here + const num = bytesToNumberBE$1(bytes); // check for == u8 done here const delta = bytes.length * 8 - CURVE.nBitLength; // truncate to nBitLength leftmost bits return delta > 0 ? num >> BigInt(delta) : num; }; @@ -21695,17 +21695,17 @@ function weierstrass(curveDef) { return modN(bits2int(bytes)); // can't use bytesToNumberBE here }; // NOTE: pads output with zero as per spec - const ORDER_MASK = bitMask(CURVE.nBitLength); + const ORDER_MASK = bitMask$1(CURVE.nBitLength); /** * Converts to bytes. Checks if num in `[0..ORDER_MASK-1]` e.g.: `[0..2^256-1]`. */ function int2octets(num) { if (typeof num !== 'bigint') throw new Error('bigint expected'); - if (!(_0n <= num && num < ORDER_MASK)) + if (!(_0n$5 <= num && num < ORDER_MASK)) throw new Error(`bigint expected < 2^${CURVE.nBitLength}`); // works with order, can have different size than numToField! - return numberToBytesBE(num, CURVE.nByteLength); + return numberToBytesBE$1(num, CURVE.nByteLength); } // Steps A, D of RFC6979 3.2 // Creates RFC6979 seed; converts msg/privKey to numbers. 
@@ -21719,9 +21719,9 @@ function weierstrass(curveDef) { let { lowS, prehash, extraEntropy: ent } = opts; // generates low-s sigs by default if (lowS == null) lowS = true; // RFC6979 3.2: we skip step A, because we already provide hash - msgHash = ensureBytes('msgHash', msgHash); + msgHash = ensureBytes$1('msgHash', msgHash); if (prehash) - msgHash = ensureBytes('prehashed msgHash', hash(msgHash)); + msgHash = ensureBytes$1('prehashed msgHash', hash(msgHash)); // We can't later call bits2octets, since nested bits2int is broken for curves // with nBitLength % 8 !== 0. Because of that, we unwrap it here as int2octets call. // const bits2octets = (bits) => int2octets(bits2int_modN(bits)) @@ -21732,9 +21732,9 @@ function weierstrass(curveDef) { if (ent != null) { // K = HMAC_K(V || 0x00 || int2octets(x) || bits2octets(h1) || k') const e = ent === true ? randomBytes(Fp.BYTES) : ent; // generate random bytes OR pass as-is - seedArgs.push(ensureBytes('extraEntropy', e)); // check for being bytes + seedArgs.push(ensureBytes$1('extraEntropy', e)); // check for being bytes } - const seed = concatBytes(...seedArgs); // Step D of RFC6979 3.2 + const seed = concatBytes$4(...seedArgs); // Step D of RFC6979 3.2 const m = h1int; // NOTE: no need to call bits2int second time here, it is inside truncateHash! // Converts signature params into point w r/s, checks result for validity. function k2sig(kBytes) { @@ -21745,15 +21745,15 @@ function weierstrass(curveDef) { const ik = invN(k); // k^-1 mod n const q = Point.BASE.multiply(k).toAffine(); // q = Gk const r = modN(q.x); // r = q.x mod n - if (r === _0n) + if (r === _0n$5) return; // Can use scalar blinding b^-1(bm + bdr) where b ∈ [1,q−1] according to // https://tches.iacr.org/index.php/TCHES/article/view/7337/6509. 
We've decided against it: // a) dependency on CSPRNG b) 15% slowdown c) doesn't really help since bigints are not CT const s = modN(ik * modN(m + r * d)); // Not using blinding here - if (s === _0n) + if (s === _0n$5) return; - let recovery = (q.x === r ? 0 : 2) | Number(q.y & _1n$1); // recovery bit (2 or 3, when q.x > n) + let recovery = (q.x === r ? 0 : 2) | Number(q.y & _1n$6); // recovery bit (2 or 3, when q.x > n) let normS = s; if (lowS && isBiggerThanHalfOrder(s)) { normS = normalizeS(s); // if lowS was passed, ensure s is always @@ -21781,7 +21781,7 @@ function weierstrass(curveDef) { function sign(msgHash, privKey, opts = defaultSigOpts) { const { seed, k2sig } = prepSig(msgHash, privKey, opts); // Steps A, D of RFC6979 3.2. const C = CURVE; - const drbg = createHmacDrbg(C.hash.outputLen, C.nByteLength, C.hmac); + const drbg = createHmacDrbg$1(C.hash.outputLen, C.nByteLength, C.hmac); return drbg(seed, k2sig); // Steps B, C, D, E, F, G } // Enable precomputes. Slows down first publicKey computation by 20ms. @@ -21802,8 +21802,8 @@ function weierstrass(curveDef) { */ function verify(signature, msgHash, publicKey, opts = defaultVerOpts) { const sg = signature; - msgHash = ensureBytes('msgHash', msgHash); - publicKey = ensureBytes('publicKey', publicKey); + msgHash = ensureBytes$1('msgHash', msgHash); + publicKey = ensureBytes$1('publicKey', publicKey); if ('strict' in opts) throw new Error('options.strict was renamed to lowS'); const { lowS, prehash } = opts; @@ -21865,24 +21865,24 @@ function weierstrass(curveDef) { /*! 
noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // connects noble-curves to noble-hashes -function getHash(hash) { +function getHash$1(hash) { return { hash, - hmac: (key, ...msgs) => hmac(hash, key, concatBytes$1(...msgs)), + hmac: (key, ...msgs) => hmac$1(hash, key, concatBytes$5(...msgs)), randomBytes: randomBytes$2, }; } -function createCurve(curveDef, defHash) { - const create = (hash) => weierstrass({ ...curveDef, ...getHash(hash) }); +function createCurve$1(curveDef, defHash) { + const create = (hash) => weierstrass$1({ ...curveDef, ...getHash$1(hash) }); return Object.freeze({ ...create(defHash), create }); } /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ const secp256k1P = BigInt('0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f'); const secp256k1N = BigInt('0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141'); -const _1n = BigInt(1); -const _2n = BigInt(2); -const divNearest = (a, b) => (a + b / _2n) / b; +const _1n$5 = BigInt(1); +const _2n$4 = BigInt(2); +const divNearest = (a, b) => (a + b / _2n$4) / b; /** * √n = n^((p+1)/4) for fields p = 3 mod 4. We unwrap the loop and multiply bit-by-bit. 
* (P+1n/4n).toString(2) would produce bits [223x 1, 0, 22x 1, 4x 0, 11, 00] @@ -21895,24 +21895,24 @@ function sqrtMod(y) { const _23n = BigInt(23), _44n = BigInt(44), _88n = BigInt(88); const b2 = (y * y * y) % P; // x^3, 11 const b3 = (b2 * b2 * y) % P; // x^7 - const b6 = (pow2(b3, _3n, P) * b3) % P; - const b9 = (pow2(b6, _3n, P) * b3) % P; - const b11 = (pow2(b9, _2n, P) * b2) % P; - const b22 = (pow2(b11, _11n, P) * b11) % P; - const b44 = (pow2(b22, _22n, P) * b22) % P; - const b88 = (pow2(b44, _44n, P) * b44) % P; - const b176 = (pow2(b88, _88n, P) * b88) % P; - const b220 = (pow2(b176, _44n, P) * b44) % P; - const b223 = (pow2(b220, _3n, P) * b3) % P; - const t1 = (pow2(b223, _23n, P) * b22) % P; - const t2 = (pow2(t1, _6n, P) * b2) % P; - const root = pow2(t2, _2n, P); + const b6 = (pow2$1(b3, _3n, P) * b3) % P; + const b9 = (pow2$1(b6, _3n, P) * b3) % P; + const b11 = (pow2$1(b9, _2n$4, P) * b2) % P; + const b22 = (pow2$1(b11, _11n, P) * b11) % P; + const b44 = (pow2$1(b22, _22n, P) * b22) % P; + const b88 = (pow2$1(b44, _44n, P) * b44) % P; + const b176 = (pow2$1(b88, _88n, P) * b88) % P; + const b220 = (pow2$1(b176, _44n, P) * b44) % P; + const b223 = (pow2$1(b220, _3n, P) * b3) % P; + const t1 = (pow2$1(b223, _23n, P) * b22) % P; + const t2 = (pow2$1(t1, _6n, P) * b2) % P; + const root = pow2$1(t2, _2n$4, P); if (!Fp.eql(Fp.sqr(root), y)) throw new Error('Cannot find square root'); return root; } -const Fp = Field(secp256k1P, undefined, undefined, { sqrt: sqrtMod }); -const secp256k1 = createCurve({ +const Fp = Field$1(secp256k1P, undefined, undefined, { sqrt: sqrtMod }); +const secp256k1$2 = createCurve$1({ a: BigInt(0), b: BigInt(7), Fp, @@ -21933,14 +21933,14 @@ const secp256k1 = createCurve({ splitScalar: (k) => { const n = secp256k1N; const a1 = BigInt('0x3086d221a7d46bcde86c90e49284eb15'); - const b1 = -_1n * BigInt('0xe4437ed6010e88286f547fa90abfe4c3'); + const b1 = -_1n$5 * BigInt('0xe4437ed6010e88286f547fa90abfe4c3'); const a2 = 
BigInt('0x114ca50f7a8e2f3f657c1108d9d44cfd8'); const b2 = a1; const POW_2_128 = BigInt('0x100000000000000000000000000000000'); // (2n**128n).toString(16) const c1 = divNearest(b2 * k, n); const c2 = divNearest(-b1 * k, n); - let k1 = mod$3(k - c1 * a1 - c2 * a2, n); - let k2 = mod$3(-c1 * b1 - c2 * b2, n); + let k1 = mod$4(k - c1 * a1 - c2 * a2, n); + let k2 = mod$4(-c1 * b1 - c2 * b2, n); const k1neg = k1 > POW_2_128; const k2neg = k2 > POW_2_128; if (k1neg) @@ -21953,11 +21953,11 @@ const secp256k1 = createCurve({ return { k1neg, k1, k2neg, k2 }; }, }, -}, sha256$1); +}, sha256$2); // Schnorr signatures are superior to ECDSA from above. Below is Schnorr-specific BIP0340 code. // https://github.com/bitcoin/bips/blob/master/bip-0340.mediawiki BigInt(0); -secp256k1.ProjectivePoint; +secp256k1$2.ProjectivePoint; /** * A constant for the zero address. @@ -22274,7 +22274,7 @@ class Signature { return toUint256(s); } if (yParityAndS != null) { - assertError(isHexString$1(yParityAndS, 32), "invalid yParityAndS"); + assertError(isHexString$3(yParityAndS, 32), "invalid yParityAndS"); const bytes = getBytes(yParityAndS); bytes[0] &= 0x7f; return hexlify(bytes); @@ -22292,7 +22292,7 @@ class Signature { }; } if (yParityAndS != null) { - assertError(isHexString$1(yParityAndS, 32), "invalid yParityAndS"); + assertError(isHexString$3(yParityAndS, 32), "invalid yParityAndS"); return { v: ((getBytes(yParityAndS)[0] & 0x80) ? 
28 : 27) }; } if (yParity != null) { @@ -22357,7 +22357,7 @@ class SigningKey { */ sign(digest) { assertArgument(dataLength(digest) === 32, "invalid digest length", "digest", digest); - const sig = secp256k1.sign(getBytesCopy(digest), getBytesCopy(this.#privateKey), { + const sig = secp256k1$2.sign(getBytesCopy(digest), getBytesCopy(this.#privateKey), { lowS: true }); return Signature.from({ @@ -22390,7 +22390,7 @@ class SigningKey { */ computeSharedSecret(other) { const pubKey = SigningKey.computePublicKey(other); - return hexlify(secp256k1.getSharedSecret(getBytesCopy(this.#privateKey), getBytes(pubKey), false)); + return hexlify(secp256k1$2.getSharedSecret(getBytesCopy(this.#privateKey), getBytes(pubKey), false)); } /** * Compute the public key for %%key%%, optionally %%compressed%%. @@ -22421,7 +22421,7 @@ class SigningKey { let bytes = getBytes(key, "key"); // private key if (bytes.length === 32) { - const pubKey = secp256k1.getPublicKey(bytes, !!compressed); + const pubKey = secp256k1$2.getPublicKey(bytes, !!compressed); return hexlify(pubKey); } // raw public key; use uncompressed key with 0x04 prefix @@ -22431,7 +22431,7 @@ class SigningKey { pub.set(bytes, 1); bytes = pub; } - const point = secp256k1.ProjectivePoint.fromHex(bytes); + const point = secp256k1$2.ProjectivePoint.fromHex(bytes); return hexlify(point.toRawBytes(compressed)); } /** @@ -22455,7 +22455,7 @@ class SigningKey { static recoverPublicKey(digest, signature) { assertArgument(dataLength(digest) === 32, "invalid digest length", "digest", digest); const sig = Signature.from(signature); - let secpSig = secp256k1.Signature.fromCompact(getBytesCopy(concat$3([sig.r, sig.s]))); + let secpSig = secp256k1$2.Signature.fromCompact(getBytesCopy(concat$3([sig.r, sig.s]))); secpSig = secpSig.addRecoveryBit(sig.yParity); const pubKey = secpSig.recoverPublicKey(getBytesCopy(digest)); assertArgument(pubKey != null, "invalid signautre for digest", "signature", signature); @@ -22472,15 +22472,15 @@ class 
SigningKey { * addresses from parent public keys and chain codes. */ static addPoints(p0, p1, compressed) { - const pub0 = secp256k1.ProjectivePoint.fromHex(SigningKey.computePublicKey(p0).substring(2)); - const pub1 = secp256k1.ProjectivePoint.fromHex(SigningKey.computePublicKey(p1).substring(2)); + const pub0 = secp256k1$2.ProjectivePoint.fromHex(SigningKey.computePublicKey(p0).substring(2)); + const pub1 = secp256k1$2.ProjectivePoint.fromHex(SigningKey.computePublicKey(p1).substring(2)); return "0x" + pub0.add(pub1).toHex(!!compressed); } } const BN_0$6 = BigInt(0); const BN_36 = BigInt(36); -function getChecksumAddress(address) { +function getChecksumAddress$1(address) { // if (!isHexString(address, 20)) { // logger.throwArgumentError("invalid address", "address", address); // } @@ -22586,7 +22586,7 @@ function getAddress(address) { if (!address.startsWith("0x")) { address = "0x" + address; } - const result = getChecksumAddress(address); + const result = getChecksumAddress$1(address); // It is a checksummed address with a bad checksum assertArgument(!address.match(/([A-F].*[a-f])|([a-f].*[A-F])/) || result === address, "bad address checksum", "address", address); return result; @@ -22599,7 +22599,7 @@ function getAddress(address) { while (result.length < 40) { result = "0" + result; } - return getChecksumAddress("0x" + result); + return getChecksumAddress$1("0x" + result); } assertArgument(false, "invalid address", "address", address); } @@ -22659,7 +22659,7 @@ function isAddressable(value) { async function checkAddress(target, promise) { const result = await promise; if (result == null || result === "0x0000000000000000000000000000000000000000") { - assert$5(typeof (target) !== "string", "unconfigured name", "UNCONFIGURED_NAME", { value: target }); + assert$9(typeof (target) !== "string", "unconfigured name", "UNCONFIGURED_NAME", { value: target }); assertArgument(false, "invalid AddressLike value; did not resolve to a value address", "target", target); } 
return getAddress(result); @@ -22706,7 +22706,7 @@ function resolveAddress(target, resolver) { if (target.match(/^0x[0-9a-f]{40}$/i)) { return getAddress(target); } - assert$5(resolver != null, "ENS resolution requires a provider", "UNSUPPORTED_OPERATION", { operation: "resolveName" }); + assert$9(resolver != null, "ENS resolution requires a provider", "UNSUPPORTED_OPERATION", { operation: "resolveName" }); return checkAddress(target, resolver.resolveName(target)); } else if (isAddressable(target)) { @@ -23376,8 +23376,8 @@ function pack(writer, coders, values) { let unique = {}; arrayValues = coders.map((coder) => { const name = coder.localName; - assert$5(name, "cannot encode object for signature with missing names", "INVALID_ARGUMENT", { argument: "values", info: { coder }, value: values }); - assert$5(!unique[name], "cannot encode object for signature with duplicate names", "INVALID_ARGUMENT", { argument: "values", info: { coder }, value: values }); + assert$9(name, "cannot encode object for signature with missing names", "INVALID_ARGUMENT", { argument: "values", info: { coder }, value: values }); + assert$9(!unique[name], "cannot encode object for signature with duplicate names", "INVALID_ARGUMENT", { argument: "values", info: { coder }, value: values }); unique[name] = true; return values[name]; }); @@ -23430,7 +23430,7 @@ function unpack(reader, coders) { } catch (error) { // Cannot recover from this - if (isError(error, "BUFFER_OVERRUN")) { + if (isError$1(error, "BUFFER_OVERRUN")) { throw error; } value = error; @@ -23445,7 +23445,7 @@ function unpack(reader, coders) { } catch (error) { // Cannot recover from this - if (isError(error, "BUFFER_OVERRUN")) { + if (isError$1(error, "BUFFER_OVERRUN")) { throw error; } value = error; @@ -23509,7 +23509,7 @@ class ArrayCoder extends Coder { // slot requires at least 32 bytes for their value (or 32 // bytes as a link to the data). This could use a much // tighter bound, but we are erroring on the side of safety. 
- assert$5(count * WordSize <= reader.dataLength, "insufficient data length", "BUFFER_OVERRUN", { buffer: reader.bytes, offset: count * WordSize, length: reader.dataLength }); + assert$9(count * WordSize <= reader.dataLength, "insufficient data length", "BUFFER_OVERRUN", { buffer: reader.bytes, offset: count * WordSize, length: reader.dataLength }); } let coders = []; for (let i = 0; i < count; i++) { @@ -23638,21 +23638,21 @@ class NumberCoder extends Coder { encode(writer, _value) { let value = getBigInt(Typed.dereference(_value, this.type)); // Check bounds are safe for encoding - let maxUintValue = mask(BN_MAX_UINT256$1, WordSize * 8); + let maxUintValue = mask$1(BN_MAX_UINT256$1, WordSize * 8); if (this.signed) { - let bounds = mask(maxUintValue, (this.size * 8) - 1); + let bounds = mask$1(maxUintValue, (this.size * 8) - 1); if (value > bounds || value < -(bounds + BN_1$1)) { this._throwError("value out-of-bounds", _value); } value = toTwos(value, 8 * WordSize); } - else if (value < BN_0$5 || value > mask(maxUintValue, this.size * 8)) { + else if (value < BN_0$5 || value > mask$1(maxUintValue, this.size * 8)) { this._throwError("value out-of-bounds", _value); } return writer.writeValue(value); } decode(reader) { - let value = mask(reader.readValue(), this.size * 8); + let value = mask$1(reader.readValue(), this.size * 8); if (this.signed) { value = fromTwos(value, this.size * 8); } @@ -24490,10 +24490,10 @@ function should_escape(cp) { } function ens_normalize(name) { - return flatten(split(name, nfc, filter_fe0f)); + return flatten(split$2(name, nfc, filter_fe0f)); } -function split(name, nf, ef) { +function split$2(name, nf, ef) { if (!name) return []; // 20230719: empty name allowance init(); let offset = 0; @@ -24898,7 +24898,7 @@ function accessSetify(addr, storageKeys) { return { address: getAddress(addr), storageKeys: storageKeys.map((storageKey, index) => { - assertArgument(isHexString$1(storageKey, 32), "invalid slot", `storageKeys[${index}]`, 
storageKey); + assertArgument(isHexString$3(storageKey, 32), "invalid slot", `storageKeys[${index}]`, storageKey); return storageKey.toLowerCase(); }) }; @@ -24964,7 +24964,7 @@ function getVersionedHash(version, hash) { while (versioned.length < 2) { versioned = "0" + versioned; } - versioned += sha256(hash).substring(4); + versioned += sha256$1(hash).substring(4); return "0x" + versioned; } function handleAddress(value) { @@ -25007,7 +25007,7 @@ function formatAccessList(value) { function formatHashes(value, param) { assertArgument(Array.isArray(value), `invalid ${param}`, "value", value); for (let i = 0; i < value.length; i++) { - assertArgument(isHexString$1(value[i], 32), "invalid ${ param } hash", `value[${i}]`, value[i]); + assertArgument(isHexString$3(value[i], 32), "invalid ${ param } hash", `value[${i}]`, value[i]); } return value; } @@ -25248,7 +25248,7 @@ function _parseEip4844(data) { assertArgument(tx.to != null, `invalid address for transaction type: ${typeName}`, "data", data); assertArgument(Array.isArray(tx.blobVersionedHashes), "invalid blobVersionedHashes: must be an array", "data", data); for (let i = 0; i < tx.blobVersionedHashes.length; i++) { - assertArgument(isHexString$1(tx.blobVersionedHashes[i], 32), `invalid blobVersionedHash at index ${i}: must be length 32`, "data", data); + assertArgument(isHexString$3(tx.blobVersionedHashes[i], 32), `invalid blobVersionedHash at index ${i}: must be length 32`, "data", data); } // Unsigned EIP-4844 Transaction if (fields.length === 11) { @@ -25520,7 +25520,7 @@ class Transaction { assertArgument(Array.isArray(value), "blobVersionedHashes must be an Array", "value", value); value = value.slice(); for (let i = 0; i < value.length; i++) { - assertArgument(isHexString$1(value[i], 32), "invalid blobVersionedHash", `value[${i}]`, value[i]); + assertArgument(isHexString$3(value[i], 32), "invalid blobVersionedHash", `value[${i}]`, value[i]); } } this.#blobVersionedHashes = value; @@ -25569,7 +25569,7 @@ 
class Transaction { for (let i = 0; i < _blobs.length; i++) { const blob = _blobs[i]; if (isBytesLike(blob)) { - assert$5(this.#kzg, "adding a raw blob requires a KZG library", "UNSUPPORTED_OPERATION", { + assert$9(this.#kzg, "adding a raw blob requires a KZG library", "UNSUPPORTED_OPERATION", { operation: "set blobs()" }); let data = getBytes(blob); @@ -25673,7 +25673,7 @@ class Transaction { return this.signature != null; } #getSerialized(signed, sidecar) { - assert$5(!signed || this.signature != null, "cannot serialize unsigned transaction; maybe you meant .unsignedSerialized", "UNSUPPORTED_OPERATION", { operation: ".serialized" }); + assert$9(!signed || this.signature != null, "cannot serialize unsigned transaction; maybe you meant .unsignedSerialized", "UNSUPPORTED_OPERATION", { operation: ".serialized" }); const sig = signed ? this.signature : null; switch (this.inferType()) { case 0: @@ -25685,7 +25685,7 @@ class Transaction { case 3: return _serializeEip4844(this, sig, sidecar ? this.blobs : null); } - assert$5(false, "unsupported transaction type", "UNSUPPORTED_OPERATION", { operation: ".serialized" }); + assert$9(false, "unsupported transaction type", "UNSUPPORTED_OPERATION", { operation: ".serialized" }); } /** * The serialized transaction. 
@@ -25732,13 +25732,13 @@ class Transaction { // throw new Error("transaction cannot have gasPrice and maxFeePerGas"); //} if (this.maxFeePerGas != null && this.maxPriorityFeePerGas != null) { - assert$5(this.maxFeePerGas >= this.maxPriorityFeePerGas, "priorityFee cannot be more than maxFee", "BAD_DATA", { value: this }); + assert$9(this.maxFeePerGas >= this.maxPriorityFeePerGas, "priorityFee cannot be more than maxFee", "BAD_DATA", { value: this }); } //if (this.type === 2 && hasGasPrice) { // throw new Error("eip-1559 transaction cannot have gasPrice"); //} - assert$5(!hasFee || (this.type !== 0 && this.type !== 1), "transaction type cannot have maxFeePerGas or maxPriorityFeePerGas", "BAD_DATA", { value: this }); - assert$5(this.type !== 0 || !hasAccessList, "legacy transaction cannot have accessList", "BAD_DATA", { value: this }); + assert$9(!hasFee || (this.type !== 0 && this.type !== 1), "transaction type cannot have maxFeePerGas or maxPriorityFeePerGas", "BAD_DATA", { value: this }); + assert$9(this.type !== 0 || !hasAccessList, "legacy transaction cannot have accessList", "BAD_DATA", { value: this }); const types = []; // Explicit type if (this.type != null) { @@ -25861,7 +25861,7 @@ class Transaction { case 2: return Transaction.from(_parseEip1559(payload)); case 3: return Transaction.from(_parseEip4844(payload)); } - assert$5(false, "unsupported transaction type", "UNSUPPORTED_OPERATION", { operation: "from" }); + assert$9(false, "unsupported transaction type", "UNSUPPORTED_OPERATION", { operation: "from" }); } const result = new Transaction(); if (tx.type != null) { @@ -26031,7 +26031,7 @@ function getBaseEncoder(type) { const signed = (match[1] === ""); const width = parseInt(match[2]); assertArgument(width % 8 === 0 && width !== 0 && width <= 256 && match[2] === String(width), "invalid numeric width", "type", type); - const boundsUpper = mask(BN_MAX_UINT256, signed ? (width - 1) : width); + const boundsUpper = mask$1(BN_MAX_UINT256, signed ? 
(width - 1) : width); const boundsLower = signed ? ((boundsUpper + BN_1) * BN__1) : BN_0$3; return function (_value) { const value = getBigInt(_value, "value"); @@ -26389,14 +26389,14 @@ class TypedDataEncoder { // Look up all ENS names const ensCache = {}; // Do we need to look up the domain's verifyingContract? - if (domain.verifyingContract && !isHexString$1(domain.verifyingContract, 20)) { + if (domain.verifyingContract && !isHexString$3(domain.verifyingContract, 20)) { ensCache[domain.verifyingContract] = "0x"; } // We are going to use the encoder to visit all the base values const encoder = TypedDataEncoder.from(types); // Get a list of all the addresses encoder.visit(value, (type, value) => { - if (type === "address" && !isHexString$1(value, 20)) { + if (type === "address" && !isHexString$3(value, 20)) { ensCache[value] = "0x"; } return value; @@ -26806,7 +26806,7 @@ function verifyBasicType(type) { } // Make the Fragment constructors effectively private const _guard$2 = {}; -const internal$1 = Symbol.for("_ethers_internal"); +const internal$2 = Symbol.for("_ethers_internal"); const ParamTypeInternal = "_ParamTypeInternal"; const ErrorFragmentInternal = "_ErrorInternal"; const EventFragmentInternal = "_EventInternal"; @@ -26860,7 +26860,7 @@ class ParamType { */ constructor(guard, name, type, baseType, indexed, components, arrayLength, arrayChildren) { assertPrivate(guard, _guard$2, "ParamType"); - Object.defineProperty(this, internal$1, { value: ParamTypeInternal }); + Object.defineProperty(this, internal$2, { value: ParamTypeInternal }); if (components) { components = Object.freeze(components.slice()); } @@ -27160,7 +27160,7 @@ class ParamType { * Returns true if %%value%% is a **ParamType**. 
*/ static isParamType(value) { - return (value && value[internal$1] === ParamTypeInternal); + return (value && value[internal$2] === ParamTypeInternal); } } /** @@ -27223,7 +27223,7 @@ class Fragment { case "function": return FunctionFragment.from(obj); case "struct": return StructFragment.from(obj); } - assert$5(false, `unsupported type: ${obj.type}`, "UNSUPPORTED_OPERATION", { + assert$9(false, `unsupported type: ${obj.type}`, "UNSUPPORTED_OPERATION", { operation: "Fragment.from" }); } @@ -27291,7 +27291,7 @@ class ErrorFragment extends NamedFragment { */ constructor(guard, name, inputs) { super(guard, "error", name, inputs); - Object.defineProperty(this, internal$1, { value: ErrorFragmentInternal }); + Object.defineProperty(this, internal$2, { value: ErrorFragmentInternal }); } /** * The Custom Error selector. @@ -27343,7 +27343,7 @@ class ErrorFragment extends NamedFragment { * **ErrorFragment**. */ static isFragment(value) { - return (value && value[internal$1] === ErrorFragmentInternal); + return (value && value[internal$2] === ErrorFragmentInternal); } } /** @@ -27359,7 +27359,7 @@ class EventFragment extends NamedFragment { */ constructor(guard, name, inputs, anonymous) { super(guard, "event", name, inputs); - Object.defineProperty(this, internal$1, { value: EventFragmentInternal }); + Object.defineProperty(this, internal$2, { value: EventFragmentInternal }); defineProperties(this, { anonymous }); } /** @@ -27430,7 +27430,7 @@ class EventFragment extends NamedFragment { * **EventFragment**. 
*/ static isFragment(value) { - return (value && value[internal$1] === EventFragmentInternal); + return (value && value[internal$2] === EventFragmentInternal); } } /** @@ -27450,14 +27450,14 @@ class ConstructorFragment extends Fragment { */ constructor(guard, type, inputs, payable, gas) { super(guard, type, inputs); - Object.defineProperty(this, internal$1, { value: ConstructorFragmentInternal }); + Object.defineProperty(this, internal$2, { value: ConstructorFragmentInternal }); defineProperties(this, { payable, gas }); } /** * Returns a string representation of this constructor as %%format%%. */ format(format) { - assert$5(format != null && format !== "sighash", "cannot format a constructor for sighash", "UNSUPPORTED_OPERATION", { operation: "format(sighash)" }); + assert$9(format != null && format !== "sighash", "cannot format a constructor for sighash", "UNSUPPORTED_OPERATION", { operation: "format(sighash)" }); if (format === "json") { return JSON.stringify({ type: "constructor", @@ -27506,7 +27506,7 @@ class ConstructorFragment extends Fragment { * **ConstructorFragment**. */ static isFragment(value) { - return (value && value[internal$1] === ConstructorFragmentInternal); + return (value && value[internal$2] === ConstructorFragmentInternal); } } /** @@ -27519,7 +27519,7 @@ class FallbackFragment extends Fragment { payable; constructor(guard, inputs, payable) { super(guard, "fallback", inputs); - Object.defineProperty(this, internal$1, { value: FallbackFragmentInternal }); + Object.defineProperty(this, internal$2, { value: FallbackFragmentInternal }); defineProperties(this, { payable }); } /** @@ -27594,7 +27594,7 @@ class FallbackFragment extends Fragment { * **FallbackFragment**. 
*/ static isFragment(value) { - return (value && value[internal$1] === FallbackFragmentInternal); + return (value && value[internal$2] === FallbackFragmentInternal); } } /** @@ -27627,7 +27627,7 @@ class FunctionFragment extends NamedFragment { */ constructor(guard, name, stateMutability, inputs, outputs, gas) { super(guard, "function", name, inputs); - Object.defineProperty(this, internal$1, { value: FunctionFragmentInternal }); + Object.defineProperty(this, internal$2, { value: FunctionFragmentInternal }); outputs = Object.freeze(outputs.slice()); const constant = (stateMutability === "view" || stateMutability === "pure"); const payable = (stateMutability === "payable"); @@ -27738,7 +27738,7 @@ class FunctionFragment extends NamedFragment { * **FunctionFragment**. */ static isFragment(value) { - return (value && value[internal$1] === FunctionFragmentInternal); + return (value && value[internal$2] === FunctionFragmentInternal); } } /** @@ -27750,7 +27750,7 @@ class StructFragment extends NamedFragment { */ constructor(guard, name, inputs) { super(guard, "struct", name, inputs); - Object.defineProperty(this, internal$1, { value: StructFragmentInternal }); + Object.defineProperty(this, internal$2, { value: StructFragmentInternal }); } /** * Returns a string representation of this struct as %%format%%. @@ -27784,7 +27784,7 @@ class StructFragment extends NamedFragment { * **StructFragment**. 
*/ static isFragment(value) { - return (value && value[internal$1] === StructFragmentInternal); + return (value && value[internal$2] === StructFragmentInternal); } } @@ -28309,7 +28309,7 @@ class Interface { // Find a function definition by any means necessary (unless it is ambiguous) #getFunction(key, values, forceUnique) { // Selector - if (isHexString$1(key)) { + if (isHexString$3(key)) { const selector = key.toLowerCase(); for (const fragment of this.#functions.values()) { if (selector === fragment.selector) { @@ -28436,7 +28436,7 @@ class Interface { // Find an event definition by any means necessary (unless it is ambiguous) #getEvent(key, values, forceUnique) { // EventTopic - if (isHexString$1(key)) { + if (isHexString$3(key)) { const eventTopic = key.toLowerCase(); for (const fragment of this.#events.values()) { if (eventTopic === fragment.topicHash) { @@ -28546,7 +28546,7 @@ class Interface { * the ABI, this will throw. */ getError(key, values) { - if (isHexString$1(key)) { + if (isHexString$3(key)) { const selector = key.toLowerCase(); if (BuiltinErrors[selector]) { return ErrorFragment.from(BuiltinErrors[selector].signature); @@ -28743,7 +28743,7 @@ getSelector(fragment: ErrorFragment | FunctionFragment): string { } } // Call returned data with no error, but the data is junk - assert$5(false, message, "BAD_DATA", { + assert$9(false, message, "BAD_DATA", { value: hexlify(bytes), info: { method: fragment.name, signature: fragment.format() } }); @@ -28834,7 +28834,7 @@ getSelector(fragment: ErrorFragment | FunctionFragment): string { assertArgument(f, "unknown event", "eventFragment", fragment); fragment = f; } - assert$5(values.length <= fragment.inputs.length, `too many arguments for ${fragment.format()}`, "UNEXPECTED_ARGUMENT", { count: values.length, expectedCount: fragment.inputs.length }); + assert$9(values.length <= fragment.inputs.length, `too many arguments for ${fragment.format()}`, "UNEXPECTED_ARGUMENT", { count: values.length, expectedCount: 
fragment.inputs.length }); const topics = []; if (!fragment.anonymous) { topics.push(fragment.topicHash); @@ -28936,7 +28936,7 @@ getSelector(fragment: ErrorFragment | FunctionFragment): string { } if (topics != null && !fragment.anonymous) { const eventTopic = fragment.topicHash; - assertArgument(isHexString$1(topics[0], 32) && topics[0].toLowerCase() === eventTopic, "fragment/topic mismatch", "topics[0]", topics[0]); + assertArgument(isHexString$3(topics[0], 32) && topics[0].toLowerCase() === eventTopic, "fragment/topic mismatch", "topics[0]", topics[0]); topics = topics.slice(1); } const indexed = []; @@ -29363,7 +29363,7 @@ class Block { return []; } // Make sure we prefetched the transactions - assert$5(typeof (txs[0]) === "object", "transactions were not prefetched with block request", "UNSUPPORTED_OPERATION", { + assert$9(typeof (txs[0]) === "object", "transactions were not prefetched with block request", "UNSUPPORTED_OPERATION", { operation: "transactionResponses()" }); return txs; @@ -29588,7 +29588,7 @@ class Log { */ async getBlock() { const block = await this.provider.getBlock(this.blockHash); - assert$5(!!block, "failed to find transaction", "UNKNOWN_ERROR", {}); + assert$9(!!block, "failed to find transaction", "UNKNOWN_ERROR", {}); return block; } /** @@ -29596,7 +29596,7 @@ class Log { */ async getTransaction() { const tx = await this.provider.getTransaction(this.transactionHash); - assert$5(!!tx, "failed to find transaction", "UNKNOWN_ERROR", {}); + assert$9(!!tx, "failed to find transaction", "UNKNOWN_ERROR", {}); return tx; } /** @@ -29605,7 +29605,7 @@ class Log { */ async getTransactionReceipt() { const receipt = await this.provider.getTransactionReceipt(this.transactionHash); - assert$5(!!receipt, "failed to find transaction receipt", "UNKNOWN_ERROR", {}); + assert$9(!!receipt, "failed to find transaction receipt", "UNKNOWN_ERROR", {}); return receipt; } /** @@ -29856,7 +29856,7 @@ class TransactionReceipt { * @_ignore: */ 
reorderedEvent(other) { - assert$5(!other || other.isMined(), "unmined 'other' transction cannot be orphaned", "UNSUPPORTED_OPERATION", { operation: "reorderedEvent(other)" }); + assert$9(!other || other.isMined(), "unmined 'other' transction cannot be orphaned", "UNSUPPORTED_OPERATION", { operation: "reorderedEvent(other)" }); return createReorderedTransactionFilter(this, other); } } @@ -30167,7 +30167,7 @@ class TransactionResponse { else if (tx.data === "0x" && tx.from === tx.to && tx.value === BN_0$2) { reason = "cancelled"; } - assert$5(false, "transaction was replaced", "TRANSACTION_REPLACED", { + assert$9(false, "transaction was replaced", "TRANSACTION_REPLACED", { cancelled: (reason === "replaced" || reason === "cancelled"), reason, replacement: tx.replaceableTransaction(startBlock), @@ -30184,7 +30184,7 @@ class TransactionResponse { if (receipt == null || receipt.status !== 0) { return receipt; } - assert$5(false, "transaction execution reverted", "CALL_EXCEPTION", { + assert$9(false, "transaction execution reverted", "CALL_EXCEPTION", { action: "sendTransaction", data: null, reason: null, invocation: null, revert: null, transaction: { @@ -30248,7 +30248,7 @@ class TransactionResponse { } catch (error) { // We were replaced (with enough confirms); re-throw the error - if (isError(error, "TRANSACTION_REPLACED")) { + if (isError$1(error, "TRANSACTION_REPLACED")) { cancel(); reject(error); return; @@ -30321,7 +30321,7 @@ class TransactionResponse { * that evict this transaction. */ removedEvent() { - assert$5(this.isMined(), "unmined transaction canot be orphaned", "UNSUPPORTED_OPERATION", { operation: "removeEvent()" }); + assert$9(this.isMined(), "unmined transaction canot be orphaned", "UNSUPPORTED_OPERATION", { operation: "removeEvent()" }); return createRemovedTransactionFilter(this); } /** @@ -30329,8 +30329,8 @@ class TransactionResponse { * that re-order this event against %%other%%. 
*/ reorderedEvent(other) { - assert$5(this.isMined(), "unmined transaction canot be orphaned", "UNSUPPORTED_OPERATION", { operation: "removeEvent()" }); - assert$5(!other || other.isMined(), "unmined 'other' transaction canot be orphaned", "UNSUPPORTED_OPERATION", { operation: "removeEvent()" }); + assert$9(this.isMined(), "unmined transaction canot be orphaned", "UNSUPPORTED_OPERATION", { operation: "removeEvent()" }); + assert$9(!other || other.isMined(), "unmined 'other' transaction canot be orphaned", "UNSUPPORTED_OPERATION", { operation: "removeEvent()" }); return createReorderedTransactionFilter(this, other); } /** @@ -30621,7 +30621,7 @@ function getRunner(value, feature) { } return null; } -function getProvider(value) { +function getProvider$1(value) { if (value == null) { return null; } @@ -30685,7 +30685,7 @@ function buildWrappedFallback(contract) { }; const staticCall = async function (overrides) { const runner = getRunner(contract.runner, "call"); - assert$5(canCall(runner), "contract runner does not support calling", "UNSUPPORTED_OPERATION", { operation: "call" }); + assert$9(canCall(runner), "contract runner does not support calling", "UNSUPPORTED_OPERATION", { operation: "call" }); const tx = await populateTransaction(overrides); try { return await runner.call(tx); @@ -30699,16 +30699,16 @@ function buildWrappedFallback(contract) { }; const send = async function (overrides) { const runner = contract.runner; - assert$5(canSend(runner), "contract runner does not support sending transactions", "UNSUPPORTED_OPERATION", { operation: "sendTransaction" }); + assert$9(canSend(runner), "contract runner does not support sending transactions", "UNSUPPORTED_OPERATION", { operation: "sendTransaction" }); const tx = await runner.sendTransaction(await populateTransaction(overrides)); - const provider = getProvider(contract.runner); + const provider = getProvider$1(contract.runner); // @TODO: the provider can be null; make a custom dummy provider that will throw a 
// meaningful error return new ContractTransactionResponse(contract.interface, provider, tx); }; const estimateGas = async function (overrides) { const runner = getRunner(contract.runner, "estimateGas"); - assert$5(canEstimate(runner), "contract runner does not support gas estimation", "UNSUPPORTED_OPERATION", { operation: "estimateGas" }); + assert$9(canEstimate(runner), "contract runner does not support gas estimation", "UNSUPPORTED_OPERATION", { operation: "estimateGas" }); return await runner.estimateGas(await populateTransaction(overrides)); }; const method = async (overrides) => { @@ -30725,7 +30725,7 @@ function buildWrappedFallback(contract) { function buildWrappedMethod(contract, key) { const getFragment = function (...args) { const fragment = contract.interface.getFunction(key, args); - assert$5(fragment, "no matching fragment", "UNSUPPORTED_OPERATION", { + assert$9(fragment, "no matching fragment", "UNSUPPORTED_OPERATION", { operation: "fragment", info: { key, args } }); @@ -30759,21 +30759,21 @@ function buildWrappedMethod(contract, key) { }; const send = async function (...args) { const runner = contract.runner; - assert$5(canSend(runner), "contract runner does not support sending transactions", "UNSUPPORTED_OPERATION", { operation: "sendTransaction" }); + assert$9(canSend(runner), "contract runner does not support sending transactions", "UNSUPPORTED_OPERATION", { operation: "sendTransaction" }); const tx = await runner.sendTransaction(await populateTransaction(...args)); - const provider = getProvider(contract.runner); + const provider = getProvider$1(contract.runner); // @TODO: the provider can be null; make a custom dummy provider that will throw a // meaningful error return new ContractTransactionResponse(contract.interface, provider, tx); }; const estimateGas = async function (...args) { const runner = getRunner(contract.runner, "estimateGas"); - assert$5(canEstimate(runner), "contract runner does not support gas estimation", 
"UNSUPPORTED_OPERATION", { operation: "estimateGas" }); + assert$9(canEstimate(runner), "contract runner does not support gas estimation", "UNSUPPORTED_OPERATION", { operation: "estimateGas" }); return await runner.estimateGas(await populateTransaction(...args)); }; const staticCallResult = async function (...args) { const runner = getRunner(contract.runner, "call"); - assert$5(canCall(runner), "contract runner does not support calling", "UNSUPPORTED_OPERATION", { operation: "call" }); + assert$9(canCall(runner), "contract runner does not support calling", "UNSUPPORTED_OPERATION", { operation: "call" }); const tx = await populateTransaction(...args); let result = "0x"; try { @@ -30809,7 +30809,7 @@ function buildWrappedMethod(contract, key) { enumerable: true, get: () => { const fragment = contract.interface.getFunction(key); - assert$5(fragment, "no matching fragment", "UNSUPPORTED_OPERATION", { + assert$9(fragment, "no matching fragment", "UNSUPPORTED_OPERATION", { operation: "fragment", info: { key } }); @@ -30821,7 +30821,7 @@ function buildWrappedMethod(contract, key) { function buildWrappedEvent(contract, key) { const getFragment = function (...args) { const fragment = contract.interface.getEvent(key, args); - assert$5(fragment, "no matching fragment", "UNSUPPORTED_OPERATION", { + assert$9(fragment, "no matching fragment", "UNSUPPORTED_OPERATION", { operation: "fragment", info: { key, args } }); @@ -30841,7 +30841,7 @@ function buildWrappedEvent(contract, key) { enumerable: true, get: () => { const fragment = contract.interface.getEvent(key); - assert$5(fragment, "no matching fragment", "UNSUPPORTED_OPERATION", { + assert$9(fragment, "no matching fragment", "UNSUPPORTED_OPERATION", { operation: "fragment", info: { key } }); @@ -30854,13 +30854,13 @@ function buildWrappedEvent(contract, key) { // the world go boom; so we hide variables with some trickery keeping // a symbol attached to each BaseContract which its sub-class (even // via a Proxy) can reach and 
use to look up its internal values. -const internal = Symbol.for("_ethersInternal_contract"); +const internal$1 = Symbol.for("_ethersInternal_contract"); const internalValues = new WeakMap(); function setInternal(contract, values) { - internalValues.set(contract[internal], values); + internalValues.set(contract[internal$1], values); } function getInternal(contract) { - return internalValues.get(contract[internal]); + return internalValues.get(contract[internal$1]); } function isDeferred(value) { return (value && typeof (value) === "object" && ("getTopicFilter" in value) && @@ -30873,7 +30873,7 @@ async function getSubInfo(contract, event) { // events which need deconstructing. if (Array.isArray(event)) { const topicHashify = function (name) { - if (isHexString$1(name, 32)) { + if (isHexString$3(name, 32)) { return name; } const fragment = contract.interface.getEvent(name); @@ -30895,7 +30895,7 @@ async function getSubInfo(contract, event) { topics = [null]; } else if (typeof (event) === "string") { - if (isHexString$1(event, 32)) { + if (isHexString$3(event, 32)) { // Topic Hash topics = [event]; } @@ -30950,8 +30950,8 @@ async function hasSub(contract, event) { } async function getSub(contract, operation, event) { // Make sure our runner can actually subscribe to events - const provider = getProvider(contract.runner); - assert$5(provider, "contract runner does not support subscribing", "UNSUPPORTED_OPERATION", { operation }); + const provider = getProvider$1(contract.runner); + assert$9(provider, "contract runner does not support subscribing", "UNSUPPORTED_OPERATION", { operation }); const { fragment, tag, topics } = await getSubInfo(contract, event); const { addr, subs } = getInternal(contract); let sub = subs.get(tag); @@ -31067,7 +31067,7 @@ class BaseContract { /** * @_ignore: */ - [internal]; + [internal$1]; /** * The fallback or receive function if any. 
*/ @@ -31084,12 +31084,12 @@ class BaseContract { } const iface = Interface.from(abi); defineProperties(this, { target, runner, interface: iface }); - Object.defineProperty(this, internal, { value: {} }); + Object.defineProperty(this, internal$1, { value: {} }); let addrPromise; let addr = null; let deployTx = null; if (_deployTx) { - const provider = getProvider(runner); + const provider = getProvider$1(runner); // @TODO: the provider can be null; make a custom dummy provider that will throw a // meaningful error deployTx = new ContractTransactionResponse(this.interface, provider, _deployTx); @@ -31097,7 +31097,7 @@ class BaseContract { let subs = new Map(); // Resolve the target as the address if (typeof (target) === "string") { - if (isHexString$1(target)) { + if (isHexString$3(target)) { addr = target; addrPromise = Promise.resolve(target); } @@ -31141,7 +31141,7 @@ class BaseContract { return this.getEvent(prop); } catch (error) { - if (!isError(error, "INVALID_ARGUMENT") || error.argument !== "key") { + if (!isError$1(error, "INVALID_ARGUMENT") || error.argument !== "key") { throw error; } } @@ -31170,7 +31170,7 @@ class BaseContract { return target.getFunction(prop); } catch (error) { - if (!isError(error, "INVALID_ARGUMENT") || error.argument !== "key") { + if (!isError$1(error, "INVALID_ARGUMENT") || error.argument !== "key") { throw error; } } @@ -31206,8 +31206,8 @@ class BaseContract { * Return the deployed bytecode or null if no bytecode is found. 
*/ async getDeployedCode() { - const provider = getProvider(this.runner); - assert$5(provider, "runner does not support .provider", "UNSUPPORTED_OPERATION", { operation: "getDeployedCode" }); + const provider = getProvider$1(this.runner); + assert$9(provider, "runner does not support .provider", "UNSUPPORTED_OPERATION", { operation: "getDeployedCode" }); const code = await provider.getCode(await this.getAddress()); if (code === "0x") { return null; @@ -31231,8 +31231,8 @@ class BaseContract { return this; } // Make sure we can subscribe to a provider event - const provider = getProvider(this.runner); - assert$5(provider != null, "contract runner does not support .provider", "UNSUPPORTED_OPERATION", { operation: "waitForDeployment" }); + const provider = getProvider$1(this.runner); + assert$9(provider != null, "contract runner does not support .provider", "UNSUPPORTED_OPERATION", { operation: "waitForDeployment" }); return new Promise((resolve, reject) => { const checkCode = async () => { try { @@ -31318,8 +31318,8 @@ class BaseContract { const address = (addr ? 
addr : (await addrPromise)); const { fragment, topics } = await getSubInfo(this, event); const filter = { address, topics, fromBlock, toBlock }; - const provider = getProvider(this.runner); - assert$5(provider, "contract runner does not have a provider", "UNSUPPORTED_OPERATION", { operation: "queryFilter" }); + const provider = getProvider$1(this.runner); + assert$9(provider, "contract runner does not have a provider", "UNSUPPORTED_OPERATION", { operation: "queryFilter" }); return (await provider.getLogs(filter)).map((log) => { let foundFragment = fragment; if (foundFragment == null) { @@ -31565,7 +31565,7 @@ class ContractFactory { */ async deploy(...args) { const tx = await this.getDeployTransaction(...args); - assert$5(this.runner && typeof (this.runner.sendTransaction) === "function", "factory runner does not support sending transactions", "UNSUPPORTED_OPERATION", { + assert$9(this.runner && typeof (this.runner.sendTransaction) === "function", "factory runner does not support sending transactions", "UNSUPPORTED_OPERATION", { operation: "sendTransaction" }); const sentTx = await this.runner.sendTransaction(tx); @@ -31706,7 +31706,7 @@ class EnsResolver { catch (error) { // Wildcard resolvers must understand supportsInterface // and return true. - if (isError(error, "CALL_EXCEPTION")) { + if (isError$1(error, "CALL_EXCEPTION")) { return false; } // Let future attempts try again... 
@@ -31725,7 +31725,7 @@ class EnsResolver { let fragment = null; if (await this.supportsWildcard()) { fragment = iface.getFunction(funcName); - assert$5(fragment, "missing fragment", "UNKNOWN_ERROR", { + assert$9(fragment, "missing fragment", "UNKNOWN_ERROR", { info: { funcName } }); params = [ @@ -31745,7 +31745,7 @@ class EnsResolver { return result; } catch (error) { - if (!isError(error, "CALL_EXCEPTION")) { + if (!isError$1(error, "CALL_EXCEPTION")) { throw error; } } @@ -31769,7 +31769,7 @@ class EnsResolver { return result; } catch (error) { - if (isError(error, "CALL_EXCEPTION")) { + if (isError$1(error, "CALL_EXCEPTION")) { return null; } throw error; @@ -31779,7 +31779,7 @@ class EnsResolver { if (coinType >= 0 && coinType < 0x80000000) { let ethCoinType = coinType + 0x80000000; const data = await this.#fetch("addr(bytes32,uint)", [ethCoinType]); - if (isHexString$1(data, 20)) { + if (isHexString$3(data, 20)) { return getAddress(data); } } @@ -31807,7 +31807,7 @@ class EnsResolver { if (address != null) { return address; } - assert$5(false, `invalid coin data`, "UNSUPPORTED_OPERATION", { + assert$9(false, `invalid coin data`, "UNSUPPORTED_OPERATION", { operation: `getAddress(${coinType})`, info: { coinType, data } }); @@ -31847,7 +31847,7 @@ class EnsResolver { if (swarm && swarm[1].length === 64) { return `bzz:/\/${swarm[1]}`; } - assert$5(false, `invalid or unsupported content hash data`, "UNSUPPORTED_OPERATION", { + assert$9(false, `invalid or unsupported content hash data`, "UNSUPPORTED_OPERATION", { operation: "getContentHash()", info: { data } }); @@ -32016,7 +32016,7 @@ class EnsResolver { const network = await provider.getNetwork(); const ensPlugin = network.getPlugin("org.ethers.plugins.network.Ens"); // No ENS... 
- assert$5(ensPlugin, "network does not support ENS", "UNSUPPORTED_OPERATION", { + assert$9(ensPlugin, "network does not support ENS", "UNSUPPORTED_OPERATION", { operation: "getEnsAddress", info: { network } }); return ensPlugin.address; @@ -32100,7 +32100,7 @@ function arrayOf(format, allowNull) { // Requires an object which matches a fleet of other formatters // Any FormatFunc may return `undefined` to have the value omitted // from the result object. Calls preserve `this`. -function object(format, altNames) { +function object$2(format, altNames) { return ((value) => { const result = {}; for (const key in format) { @@ -32121,7 +32121,7 @@ function object(format, altNames) { } catch (error) { const message = (error instanceof Error) ? error.message : "not-an-error"; - assert$5(false, `invalid value for value.${key} (${message})`, "BAD_DATA", { value }); + assert$9(false, `invalid value for value.${key} (${message})`, "BAD_DATA", { value }); } } return result; @@ -32139,14 +32139,14 @@ function formatBoolean(value) { assertArgument(false, `invalid boolean; ${JSON.stringify(value)}`, "value", value); } function formatData(value) { - assertArgument(isHexString$1(value, true), "invalid data", "value", value); + assertArgument(isHexString$3(value, true), "invalid data", "value", value); return value; } function formatHash(value) { - assertArgument(isHexString$1(value, 32), "invalid hash", "value", value); + assertArgument(isHexString$3(value, 32), "invalid hash", "value", value); return value; } -const _formatLog = object({ +const _formatLog = object$2({ address: getAddress, blockHash: formatHash, blockNumber: getNumber, @@ -32162,7 +32162,7 @@ const _formatLog = object({ function formatLog(value) { return _formatLog(value); } -const _formatBlock = object({ +const _formatBlock = object$2({ hash: allowNull(formatHash), parentHash: formatHash, parentBeaconBlockRoot: allowNull(formatHash, null), @@ -32193,7 +32193,7 @@ function formatBlock(value) { }); return result; } 
-const _formatReceiptLog = object({ +const _formatReceiptLog = object$2({ transactionIndex: getNumber, blockNumber: getNumber, transactionHash: formatHash, @@ -32208,7 +32208,7 @@ const _formatReceiptLog = object({ function formatReceiptLog(value) { return _formatReceiptLog(value); } -const _formatTransactionReceipt = object({ +const _formatTransactionReceipt = object$2({ to: allowNull(getAddress, null), from: allowNull(getAddress, null), contractAddress: allowNull(getAddress, null), @@ -32242,7 +32242,7 @@ function formatTransactionResponse(value) { if (value.to && getBigInt(value.to) === BN_0) { value.to = "0x0000000000000000000000000000000000000000"; } - const result = object({ + const result = object$2({ hash: formatHash, // Some nodes do not return this, usually test nodes (like Ganache) index: allowNull(getNumber, undefined), @@ -32795,7 +32795,7 @@ function getGasStationPlugin(url) { return feeData; } catch (error) { - assert$5(false, `error encountered with polygon gas station (${JSON.stringify(request.url)})`, "SERVER_ERROR", { request, response, error }); + assert$9(false, `error encountered with polygon gas station (${JSON.stringify(request.url)})`, "SERVER_ERROR", { request, response, error }); } }); } @@ -33239,7 +33239,7 @@ async function getSubscription(_event, provider) { } } } - if (isHexString$1(_event, 32)) { + if (isHexString$3(_event, 32)) { const hash = _event.toLowerCase(); return { type: "transaction", tag: getTag("tx", { hash }), hash }; } @@ -33265,7 +33265,7 @@ async function getSubscription(_event, provider) { const addresses = []; const promises = []; const addAddress = (addr) => { - if (isHexString$1(addr)) { + if (isHexString$3(addr)) { addresses.push(addr); } else { @@ -33441,11 +33441,11 @@ class AbstractProvider { } catch (error) { } // 4xx indicates the result is not present; stop - assert$5(resp.statusCode < 400 || resp.statusCode >= 500, `response not found during CCIP fetch: ${errorMessage}`, "OFFCHAIN_FAULT", { reason: 
"404_MISSING_RESOURCE", transaction: tx, info: { url, errorMessage } }); + assert$9(resp.statusCode < 400 || resp.statusCode >= 500, `response not found during CCIP fetch: ${errorMessage}`, "OFFCHAIN_FAULT", { reason: "404_MISSING_RESOURCE", transaction: tx, info: { url, errorMessage } }); // 5xx indicates server issue; try the next url errorMessages.push(errorMessage); } - assert$5(false, `error encountered during CCIP fetch: ${errorMessages.map((m) => JSON.stringify(m)).join(", ")}`, "OFFCHAIN_FAULT", { + assert$9(false, `error encountered during CCIP fetch: ${errorMessages.map((m) => JSON.stringify(m)).join(", ")}`, "OFFCHAIN_FAULT", { reason: "500_SERVER_ERROR", transaction: tx, info: { urls, errorMessages } }); @@ -33489,7 +33489,7 @@ class AbstractProvider { * Sub-classes **must** override this. */ _detectNetwork() { - assert$5(false, "sub-classes must implement this", "UNSUPPORTED_OPERATION", { + assert$9(false, "sub-classes must implement this", "UNSUPPORTED_OPERATION", { operation: "_detectNetwork" }); } @@ -33500,7 +33500,7 @@ class AbstractProvider { * Sub-classes **must** override this. 
*/ async _perform(req) { - assert$5(false, `unsupported method: ${req.method}`, "UNSUPPORTED_OPERATION", { + assert$9(false, `unsupported method: ${req.method}`, "UNSUPPORTED_OPERATION", { operation: req.method, info: req }); @@ -33538,8 +33538,8 @@ class AbstractProvider { case "safe": return blockTag; } - if (isHexString$1(blockTag)) { - if (isHexString$1(blockTag, 32)) { + if (isHexString$3(blockTag)) { + if (isHexString$3(blockTag, 32)) { return blockTag; } return toQuantity(blockTag); @@ -33711,7 +33711,7 @@ class AbstractProvider { } else { // Otherwise, we do not allow changes to the underlying network - assert$5(false, `network changed: ${expected.chainId} => ${actual.chainId} `, "NETWORK_ERROR", { + assert$9(false, `network changed: ${expected.chainId} => ${actual.chainId} `, "NETWORK_ERROR", { event: "changed" }); } @@ -33769,7 +33769,7 @@ class AbstractProvider { }), "%response"); } async #call(tx, blockTag, attempt) { - assert$5(attempt < MAX_CCIP_REDIRECTS, "CCIP read exceeded maximum redirections", "OFFCHAIN_FAULT", { + assert$9(attempt < MAX_CCIP_REDIRECTS, "CCIP read exceeded maximum redirections", "OFFCHAIN_FAULT", { reason: "TOO_MANY_REDIRECTS", transaction: Object.assign({}, tx, { blockTag, enableCcipRead: true }) }); @@ -33789,12 +33789,12 @@ class AbstractProvider { ccipArgs = parseOffchainLookup(dataSlice(error.data, 4)); } catch (error) { - assert$5(false, error.message, "OFFCHAIN_FAULT", { + assert$9(false, error.message, "OFFCHAIN_FAULT", { reason: "BAD_DATA", transaction, info: { data } }); } // Check the sender of the OffchainLookup matches the transaction - assert$5(ccipArgs.sender.toLowerCase() === txSender.toLowerCase(), "CCIP Read sender mismatch", "CALL_EXCEPTION", { + assert$9(ccipArgs.sender.toLowerCase() === txSender.toLowerCase(), "CCIP Read sender mismatch", "CALL_EXCEPTION", { action: "call", data, reason: "OffchainLookup", @@ -33807,7 +33807,7 @@ class AbstractProvider { } }); const ccipResult = await 
this.ccipReadFetch(transaction, ccipArgs.calldata, ccipArgs.urls); - assert$5(ccipResult != null, "CCIP Read failed to fetch data", "OFFCHAIN_FAULT", { + assert$9(ccipResult != null, "CCIP Read failed to fetch data", "OFFCHAIN_FAULT", { reason: "FETCH_FAILED", transaction, info: { data: error.data, errorArgs: ccipArgs.errorArgs } }); const tx = { @@ -33882,7 +33882,7 @@ class AbstractProvider { } async #getBlock(block, includeTransactions) { // @TODO: Add CustomBlockPlugin check - if (isHexString$1(block, 32)) { + if (isHexString$3(block, 32)) { return await this.#perform({ method: "getBlock", blockHash: block, includeTransactions }); @@ -33959,7 +33959,7 @@ class AbstractProvider { } // ENS _getProvider(chainId) { - assert$5(false, "provider cannot connect to target network", "UNSUPPORTED_OPERATION", { + assert$9(false, "provider cannot connect to target network", "UNSUPPORTED_OPERATION", { operation: "_getProvider()" }); } @@ -34005,11 +34005,11 @@ class AbstractProvider { } catch (error) { // No data was returned from the resolver - if (isError(error, "BAD_DATA") && error.value === "0x") { + if (isError$1(error, "BAD_DATA") && error.value === "0x") { return null; } // Something reerted - if (isError(error, "CALL_EXCEPTION")) { + if (isError$1(error, "CALL_EXCEPTION")) { return null; } throw error; @@ -34057,7 +34057,7 @@ class AbstractProvider { }); } async waitForBlock(blockTag) { - assert$5(false, "not implemented yet", "NOT_IMPLEMENTED", { + assert$9(false, "not implemented yet", "NOT_IMPLEMENTED", { operation: "waitForBlock" }); } @@ -34364,7 +34364,7 @@ class AbstractProvider { if (this.#pausedState == !!dropWhilePaused) { return; } - assert$5(false, "cannot change pause type; resume first", "UNSUPPORTED_OPERATION", { + assert$9(false, "cannot change pause type; resume first", "UNSUPPORTED_OPERATION", { operation: "pause" }); } @@ -34440,14 +34440,14 @@ function bytesPad(value) { result.set(value); return result; } -const empty = new Uint8Array([]); +const 
empty$1 = new Uint8Array([]); // ABI Encodes a series of (bytes, bytes, ...) function encodeBytes(datas) { const result = []; let byteCount = 0; // Add place-holders for pointers as we add items for (let i = 0; i < datas.length; i++) { - result.push(empty); + result.push(empty$1); byteCount += 32; } for (let i = 0; i < datas.length; i++) { @@ -34466,11 +34466,11 @@ function parseOffchainLookup(data) { const result = { sender: "", urls: [], calldata: "", selector: "", extraData: "", errorArgs: [] }; - assert$5(dataLength(data) >= 5 * 32, "insufficient OffchainLookup data", "OFFCHAIN_FAULT", { + assert$9(dataLength(data) >= 5 * 32, "insufficient OffchainLookup data", "OFFCHAIN_FAULT", { reason: "insufficient OffchainLookup data" }); const sender = dataSlice(data, 0, 32); - assert$5(dataSlice(sender, 0, 12) === dataSlice(zeros, 0, 12), "corrupt OffchainLookup sender", "OFFCHAIN_FAULT", { + assert$9(dataSlice(sender, 0, 12) === dataSlice(zeros, 0, 12), "corrupt OffchainLookup sender", "OFFCHAIN_FAULT", { reason: "corrupt OffchainLookup sender" }); result.sender = dataSlice(sender, 12); @@ -34490,7 +34490,7 @@ function parseOffchainLookup(data) { result.urls = urls; } catch (error) { - assert$5(false, "corrupt OffchainLookup urls", "OFFCHAIN_FAULT", { + assert$9(false, "corrupt OffchainLookup urls", "OFFCHAIN_FAULT", { reason: "corrupt OffchainLookup urls" }); } @@ -34503,12 +34503,12 @@ function parseOffchainLookup(data) { result.calldata = calldata; } catch (error) { - assert$5(false, "corrupt OffchainLookup calldata", "OFFCHAIN_FAULT", { + assert$9(false, "corrupt OffchainLookup calldata", "OFFCHAIN_FAULT", { reason: "corrupt OffchainLookup calldata" }); } // Get the callbackSelector (bytes4) - assert$5(dataSlice(data, 100, 128) === dataSlice(zeros, 0, 28), "corrupt OffchainLookup callbaackSelector", "OFFCHAIN_FAULT", { + assert$9(dataSlice(data, 100, 128) === dataSlice(zeros, 0, 28), "corrupt OffchainLookup callbaackSelector", "OFFCHAIN_FAULT", { reason: "corrupt 
OffchainLookup callbaackSelector" }); result.selector = dataSlice(data, 96, 100); @@ -34521,7 +34521,7 @@ function parseOffchainLookup(data) { result.extraData = extraData; } catch (error) { - assert$5(false, "corrupt OffchainLookup extraData", "OFFCHAIN_FAULT", { + assert$9(false, "corrupt OffchainLookup extraData", "OFFCHAIN_FAULT", { reason: "corrupt OffchainLookup extraData" }); } @@ -34540,7 +34540,7 @@ function checkProvider(signer, operation) { if (signer.provider) { return signer.provider; } - assert$5(false, "missing provider", "UNSUPPORTED_OPERATION", { operation }); + assert$9(false, "missing provider", "UNSUPPORTED_OPERATION", { operation }); } async function populate(signer, tx) { let pop = copyRequest(tx); @@ -34620,7 +34620,7 @@ class AbstractSigner { // Explicit Legacy or EIP-2930 transaction // We need to get fee data to determine things const feeData = await provider.getFeeData(); - assert$5(feeData.gasPrice != null, "network does not support gasPrice", "UNSUPPORTED_OPERATION", { + assert$9(feeData.gasPrice != null, "network does not support gasPrice", "UNSUPPORTED_OPERATION", { operation: "getGasPrice" }); // Populate missing gasPrice @@ -34658,7 +34658,7 @@ class AbstractSigner { else if (feeData.gasPrice != null) { // Network doesn't support EIP-1559... // ...but they are trying to use EIP-1559 properties - assert$5(!hasEip1559, "network does not support EIP-1559", "UNSUPPORTED_OPERATION", { + assert$9(!hasEip1559, "network does not support EIP-1559", "UNSUPPORTED_OPERATION", { operation: "populateTransaction" }); // Populate missing fee data @@ -34671,7 +34671,7 @@ class AbstractSigner { } else { // getFeeData has failed us. 
- assert$5(false, "failed to get consistent fee data", "UNSUPPORTED_OPERATION", { + assert$9(false, "failed to get consistent fee data", "UNSUPPORTED_OPERATION", { operation: "signer.getFeeData" }); } @@ -34735,7 +34735,7 @@ class VoidSigner extends AbstractSigner { return new VoidSigner(this.address, provider); } #throwUnsupported(suffix, operation) { - assert$5(false, `VoidSigner cannot sign ${suffix}`, "UNSUPPORTED_OPERATION", { operation }); + assert$9(false, `VoidSigner cannot sign ${suffix}`, "UNSUPPORTED_OPERATION", { operation }); } async signTransaction(tx) { this.#throwUnsupported("transactions", "signTransaction"); @@ -34811,7 +34811,7 @@ class FilterIdSubscriber { filterId = await this.#filterIdPromise; } catch (error) { - if (!isError(error, "UNSUPPORTED_OPERATION") || error.operation !== "eth_newFilter") { + if (!isError$1(error, "UNSUPPORTED_OPERATION") || error.operation !== "eth_newFilter") { throw error; } } @@ -34985,7 +34985,7 @@ class JsonRpcSigner extends AbstractSigner { defineProperties(this, { address }); } connect(provider) { - assert$5(false, "cannot reconnect JsonRpcSigner", "UNSUPPORTED_OPERATION", { + assert$9(false, "cannot reconnect JsonRpcSigner", "UNSUPPORTED_OPERATION", { operation: "signer.connect" }); } @@ -35060,8 +35060,8 @@ class JsonRpcSigner extends AbstractSigner { // If the data is bad: the node returns bad transactions // If the network changed: calling again will also fail // If unsupported: likely destroyed - if (isError(error, "CANCELLED") || isError(error, "BAD_DATA") || - isError(error, "NETWORK_ERROR" )) { + if (isError$1(error, "CANCELLED") || isError$1(error, "BAD_DATA") || + isError$1(error, "NETWORK_ERROR" )) { if (error.info == null) { error.info = {}; } @@ -35070,7 +35070,7 @@ class JsonRpcSigner extends AbstractSigner { return; } // Stop-gap for misbehaving backends; see #4513 - if (isError(error, "INVALID_ARGUMENT")) { + if (isError$1(error, "INVALID_ARGUMENT")) { invalids++; if (error.info == null) { 
error.info = {}; @@ -35266,7 +35266,7 @@ class JsonRpcApiProvider extends AbstractProvider { * is detected, and if it has changed, the call will reject. */ get _network() { - assert$5(this.#network, "network is not available yet", "NETWORK_ERROR"); + assert$9(this.#network, "network is not available yet", "NETWORK_ERROR"); return this.#network; } /** @@ -35802,7 +35802,7 @@ function spelunkData(value) { return null; } // These *are* the droids we're looking for. - if (typeof (value.message) === "string" && value.message.match(/revert/i) && isHexString$1(value.data)) { + if (typeof (value.message) === "string" && value.message.match(/revert/i) && isHexString$3(value.data)) { return { message: value.message, data: value.data }; } // Spelunk further... @@ -35854,7 +35854,7 @@ function spelunkMessage(value) { var bufferUtil$1 = {exports: {}}; -var constants$4 = { +var constants$7 = { BINARY_TYPES: ['nodebuffer', 'arraybuffer', 'fragments'], EMPTY_BUFFER: Buffer.alloc(0), GUID: '258EAFA5-E914-47DA-95CA-C5AB0DC85B11', @@ -35865,7 +35865,7 @@ var constants$4 = { NOOP: () => {} }; -const { EMPTY_BUFFER: EMPTY_BUFFER$3 } = constants$4; +const { EMPTY_BUFFER: EMPTY_BUFFER$3 } = constants$7; /** * Merges an array of buffers into a new buffer. 
@@ -36050,7 +36050,7 @@ const zlib = zlib$1; const bufferUtil = bufferUtilExports; const Limiter = limiter; -const { kStatusCode: kStatusCode$2 } = constants$4; +const { kStatusCode: kStatusCode$2 } = constants$7; const TRAILER = Buffer.from([0x00, 0x00, 0xff, 0xff]); const kPerMessageDeflate = Symbol('permessage-deflate'); @@ -36691,7 +36691,7 @@ const { EMPTY_BUFFER: EMPTY_BUFFER$2, kStatusCode: kStatusCode$1, kWebSocket: kWebSocket$1 -} = constants$4; +} = constants$7; const { concat: concat$1, toArrayBuffer, unmask } = bufferUtilExports; const { isValidStatusCode: isValidStatusCode$1, isValidUTF8 } = validationExports; @@ -37304,7 +37304,7 @@ function error$j(ErrorCtor, message, prefix, statusCode, errorCode) { const { randomFillSync } = require$$5; const PerMessageDeflate$1 = permessageDeflate; -const { EMPTY_BUFFER: EMPTY_BUFFER$1 } = constants$4; +const { EMPTY_BUFFER: EMPTY_BUFFER$1 } = constants$7; const { isValidStatusCode } = validationExports; const { mask: applyMask, toBuffer: toBuffer$3 } = bufferUtilExports; @@ -37774,7 +37774,7 @@ let Sender$1 = class Sender { var sender = Sender$1; -const { kForOnEventAttribute: kForOnEventAttribute$1, kListener: kListener$1 } = constants$4; +const { kForOnEventAttribute: kForOnEventAttribute$1, kListener: kListener$1 } = constants$7; const kCode = Symbol('kCode'); const kData = Symbol('kData'); @@ -38063,7 +38063,7 @@ function push(dest, name, elem) { * @return {Object} The parsed object * @public */ -function parse$1(header) { +function parse$8(header) { const offers = Object.create(null); let params = Object.create(null); let mustUnescape = false; @@ -38239,15 +38239,15 @@ function format$4(extensions) { .join(', '); } -var extension = { format: format$4, parse: parse$1 }; +var extension = { format: format$4, parse: parse$8 }; /* eslint no-unused-vars: ["error", { "varsIgnorePattern": "^Readable$" }] */ const EventEmitter = require$$0$3; -const https$1 = https$2; +const https$1 = require$$1$4; const http$1 = 
http$2; const net$4 = require$$0$4; -const tls$3 = require$$1$3; +const tls$3 = require$$1$5; const { randomBytes, createHash } = require$$5; const { URL: URL$3 } = Url; @@ -38263,11 +38263,11 @@ const { kStatusCode, kWebSocket, NOOP -} = constants$4; +} = constants$7; const { EventTarget: { addEventListener, removeEventListener } } = eventTarget; -const { format: format$3, parse } = extension; +const { format: format$3, parse: parse$7 } = extension; const { toBuffer: toBuffer$2 } = bufferUtilExports; const readyStates = ['CONNECTING', 'OPEN', 'CLOSING', 'CLOSED']; @@ -39149,7 +39149,7 @@ function initAsClient(websocket, address, protocols, options) { let extensions; try { - extensions = parse(secWebSocketExtensions); + extensions = parse$7(secWebSocketExtensions); } catch (err) { const message = 'Invalid Sec-WebSocket-Extensions header'; abortHandshake(websocket, socket, message); @@ -39592,12 +39592,12 @@ class BaseWallet extends AbstractSigner { const populated = await TypedDataEncoder.resolveNames(domain, types, value, async (name) => { // @TODO: this should use resolveName; addresses don't // need a provider - assert$5(this.provider != null, "cannot resolve ENS names without a provider", "UNSUPPORTED_OPERATION", { + assert$9(this.provider != null, "cannot resolve ENS names without a provider", "UNSUPPORTED_OPERATION", { operation: "resolveName", info: { name } }); const address = await this.provider.resolveName(name); - assert$5(address != null, "unconfigured ENS name", "UNCONFIGURED_NAME", { + assert$9(address != null, "unconfigured ENS name", "UNCONFIGURED_NAME", { value: name }); return address; @@ -39627,7 +39627,7 @@ function unfold(words, sep) { /** * @_ignore */ -function decode(data, subs) { +function decode$1(data, subs) { // Replace all the substitutions with their expanded form for (let i = subsChrs.length - 1; i >= 0; i--) { data = data.split(subsChrs[i]).join(subs.substring(2 * i, 2 * i + 2)); @@ -39657,7 +39657,7 @@ function decode(data, subs) { 
*/ function decodeOwl(data) { assertArgument(data[0] === "0", "unsupported auwl data", "data", data); - return decode(data.substring(1 + 2 * subsChrs.length), data.substring(1, 1 + 2 * subsChrs.length)); + return decode$1(data.substring(1 + 2 * subsChrs.length), data.substring(1, 1 + 2 * subsChrs.length)); } /** @@ -39823,7 +39823,7 @@ function mnemonicToEntropy(mnemonic, wordlist) { const entropyBits = 32 * words.length / 3; const checksumBits = words.length / 3; const checksumMask = getUpperMask(checksumBits); - const checksum = getBytes(sha256(entropy.slice(0, entropyBits / 8)))[0] & checksumMask; + const checksum = getBytes(sha256$1(entropy.slice(0, entropyBits / 8)))[0] & checksumMask; assertArgument(checksum === (entropy[entropy.length - 1] & checksumMask), "invalid mnemonic checksum", "mnemonic", "[ REDACTED ]"); return hexlify(entropy.slice(0, entropyBits / 8)); } @@ -39852,7 +39852,7 @@ function entropyToMnemonic(entropy, wordlist) { } // Compute the checksum bits const checksumBits = entropy.length / 4; - const checksum = parseInt(sha256(entropy).substring(2, 4), 16) & getUpperMask(checksumBits); + const checksum = parseInt(sha256$1(entropy).substring(2, 4), 16) & getUpperMask(checksumBits); // Shift the checksum into the word indices indices[indices.length - 1] <<= checksumBits; indices[indices.length - 1] |= (checksum >> (8 - checksumBits)); @@ -40490,14 +40490,14 @@ function isKeystoreJson(json) { catch (error) { } return false; } -function decrypt(data, key, ciphertext) { +function decrypt$1(data, key, ciphertext) { const cipher = spelunk(data, "crypto.cipher:string"); if (cipher === "aes-128-ctr") { const iv = spelunk(data, "crypto.cipherparams.iv:data!"); const aesCtr = new CTR(key, iv); return hexlify(aesCtr.decrypt(ciphertext)); } - assert$5(false, "unsupported cipher", "UNSUPPORTED_OPERATION", { + assert$9(false, "unsupported cipher", "UNSUPPORTED_OPERATION", { operation: "decrypt" }); } @@ -40506,7 +40506,7 @@ function getAccount(data, _key) { 
const ciphertext = spelunk(data, "crypto.ciphertext:data!"); const computedMAC = hexlify(keccak256$1(concat$3([key.slice(16, 32), ciphertext]))).substring(2); assertArgument(computedMAC === spelunk(data, "crypto.mac:string!").toLowerCase(), "incorrect password", "password", "[ REDACTED ]"); - const privateKey = decrypt(data, key.slice(0, 16), ciphertext); + const privateKey = decrypt$1(data, key.slice(0, 16), ciphertext); const address = computeAddress(privateKey); if (data.address) { let check = data.address.toLowerCase(); @@ -40580,7 +40580,7 @@ function decryptKeystoreJsonSync(json, _password) { const key = pbkdf2$1(password, salt, count, dkLen, algorithm); return getAccount(data, key); } - assert$5(params.name === "scrypt", "cannot be reached", "UNKNOWN_ERROR", { params }); + assert$9(params.name === "scrypt", "cannot be reached", "UNKNOWN_ERROR", { params }); const { salt, N, r, p, dkLen } = params; const key = scryptSync(password, salt, N, r, p, dkLen); return getAccount(data, key); @@ -40616,7 +40616,7 @@ async function decryptKeystoreJson(json, _password, progress) { } return getAccount(data, key); } - assert$5(params.name === "scrypt", "cannot be reached", "UNKNOWN_ERROR", { params }); + assert$9(params.name === "scrypt", "cannot be reached", "UNKNOWN_ERROR", { params }); const { salt, N, r, p, dkLen } = params; const key = await scrypt(password, salt, N, r, p, dkLen, progress); return getAccount(data, key); @@ -40772,7 +40772,7 @@ function zpad(value, length) { } function encodeBase58Check(_value) { const value = getBytes(_value); - const check = dataSlice(sha256(sha256(value)), 0, 4); + const check = dataSlice(sha256$1(sha256$1(value)), 0, 4); const bytes = concat$3([value, check]); return encodeBase58(bytes); } @@ -40780,7 +40780,7 @@ const _guard = {}; function ser_I(index, chainCode, publicKey, privateKey) { const data = new Uint8Array(37); if (index & HardenedBit) { - assert$5(privateKey != null, "cannot derive child of neutered node", 
"UNSUPPORTED_OPERATION", { + assert$9(privateKey != null, "cannot derive child of neutered node", "UNSUPPORTED_OPERATION", { operation: "deriveChild" }); // Data = 0x00 || ser_256(k_par) @@ -40885,7 +40885,7 @@ class HDNodeWallet extends BaseWallet { super(signingKey, provider); assertPrivate(guard, _guard, "HDNodeWallet"); defineProperties(this, { publicKey: signingKey.compressedPublicKey }); - const fingerprint = dataSlice(ripemd160(sha256(this.publicKey)), 0, 4); + const fingerprint = dataSlice(ripemd160(sha256$1(this.publicKey)), 0, 4); defineProperties(this, { parentFingerprint, fingerprint, chainCode, path, index, depth @@ -40942,7 +40942,7 @@ class HDNodeWallet extends BaseWallet { // we should always use mainnet, and use BIP-44 to derive the network // - Mainnet: public=0x0488B21E, private=0x0488ADE4 // - Testnet: public=0x043587CF, private=0x04358394 - assert$5(this.depth < 256, "Depth too deep", "UNSUPPORTED_OPERATION", { operation: "extendedKey" }); + assert$9(this.depth < 256, "Depth too deep", "UNSUPPORTED_OPERATION", { operation: "extendedKey" }); return encodeBase58Check(concat$3([ "0x0488ADE4", zpad(this.depth, 1), this.parentFingerprint, zpad(this.index, 4), this.chainCode, @@ -41131,7 +41131,7 @@ class HDNodeVoidWallet extends VoidSigner { super(address, provider); assertPrivate(guard, _guard, "HDNodeVoidWallet"); defineProperties(this, { publicKey }); - const fingerprint = dataSlice(ripemd160(sha256(publicKey)), 0, 4); + const fingerprint = dataSlice(ripemd160(sha256$1(publicKey)), 0, 4); defineProperties(this, { publicKey, fingerprint, parentFingerprint, chainCode, path, index, depth }); @@ -41151,7 +41151,7 @@ class HDNodeVoidWallet extends VoidSigner { // we should always use mainnet, and use BIP-44 to derive the network // - Mainnet: public=0x0488B21E, private=0x0488ADE4 // - Testnet: public=0x043587CF, private=0x04358394 - assert$5(this.depth < 256, "Depth too deep", "UNSUPPORTED_OPERATION", { operation: "extendedKey" }); + 
assert$9(this.depth < 256, "Depth too deep", "UNSUPPORTED_OPERATION", { operation: "extendedKey" }); return encodeBase58Check(concat$3([ "0x0488B21E", zpad(this.depth, 1), @@ -67523,7 +67523,7 @@ var version$3 = "1.0.1-alpha"; var description$1 = "Modern Toolsets for Privacy Pools on Ethereum"; var main = "./dist/index.js"; var module$1 = "./dist/index.mjs"; -var types$1 = "./dist/index.d.ts"; +var types$3 = "./dist/index.d.ts"; var unpkg = "./dist/index.umd.js"; var jsdelivr = "./dist/index.umd.js"; var bin = { @@ -67543,8 +67543,10 @@ var scripts = { depositInvoice: "ts-node src/cli.ts depositInvoice", withdraw: "ts-node src/cli.ts withdraw", compliance: "ts-node src/cli.ts compliance", - syncEvents: "ts-node src/cli.ts syncEvents", + updateEvents: "ts-node src/cli.ts updateEvents", relayers: "ts-node src/cli.ts relayers", + createAccount: "ts-node src/cli.ts createAccount", + decryptNotes: "ts-node src/cli.ts decryptNotes", send: "ts-node src/cli.ts send", balance: "ts-node src/cli.ts balance", sign: "ts-node src/cli.ts sign", @@ -67569,11 +67571,13 @@ var files = [ "yarn.lock" ]; var dependencies$1 = { + "@metamask/eth-sig-util": "^7.0.1", "@tornado/contracts": "1.0.0", "@tornado/fixed-merkle-tree": "0.7.3", "@tornado/snarkjs": "0.1.20", "@tornado/websnark": "0.0.4", ajv: "^8.12.0", + "bloomfilter.js": "^1.0.2", "bn.js": "^5.2.1", circomlibjs: "0.1.7", "cross-fetch": "^4.0.0", @@ -67628,7 +67632,7 @@ var _package = { description: description$1, main: main, module: module$1, - types: types$1, + types: types$3, unpkg: unpkg, jsdelivr: jsdelivr, bin: bin, @@ -67657,7 +67661,7 @@ var packageJson = /*#__PURE__*/Object.freeze({ name: name, optionalDependencies: optionalDependencies, scripts: scripts, - types: types$1, + types: types$3, unpkg: unpkg, version: version$3 }); @@ -69462,7 +69466,7 @@ var publicApi = {}; var URL$2 = {exports: {}}; var conversions = {}; -var lib$3 = conversions; +var lib$4 = conversions; function sign(x) { return x < 0 ? 
-1 : 1; @@ -69649,7 +69653,7 @@ conversions["RegExp"] = function (V, opts) { return V; }; -var utils$f = {exports: {}}; +var utils$l = {exports: {}}; (function (module) { @@ -69670,9 +69674,9 @@ var utils$f = {exports: {}}; module.exports.implForWrapper = function (wrapper) { return wrapper[module.exports.implSymbol]; }; -} (utils$f)); +} (utils$l)); -var utilsExports = utils$f.exports; +var utilsExports$1 = utils$l.exports; var URLImpl = {}; @@ -69680,7 +69684,7 @@ var urlStateMachine = {exports: {}}; var tr46 = {}; -var require$$1 = [ +var require$$1$1 = [ [ [ 0, @@ -147332,14 +147336,14 @@ var require$$1 = [ ]; var punycode = require$$0$5; -var mappingTable = require$$1; +var mappingTable = require$$1$1; var PROCESSING_OPTIONS = { TRANSITIONAL: 0, NONTRANSITIONAL: 1 }; -function normalize(str) { // fix bug in v8 +function normalize$1(str) { // fix bug in v8 return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); } @@ -147436,7 +147440,7 @@ function validateLabel(label, processing_option) { var error = false; - if (normalize(label) !== label || + if (normalize$1(label) !== label || (label[3] === "-" && label[4] === "-") || label[0] === "-" || label[label.length - 1] === "-" || label.indexOf(".") !== -1 || @@ -147463,7 +147467,7 @@ function validateLabel(label, processing_option) { function processing(domain_name, useSTD3, processing_option) { var result = mapChars(domain_name, useSTD3, processing_option); - result.string = normalize(result.string); + result.string = normalize$1(result.string); var labels = result.string.split("."); for (var i = 0; i < labels.length; ++i) { @@ -149025,8 +149029,8 @@ URLImpl.implementation = class URLImpl { (function (module) { - const conversions = lib$3; - const utils = utilsExports; + const conversions = lib$4; + const utils = utilsExports$1; const Impl = URLImpl; const impl = utils.implSymbol; @@ -149925,7 +149929,7 @@ function find(map, name) { } const MAP = Symbol('map'); -class Headers { 
+let Headers$1 = class Headers { /** * Headers class * @@ -150127,17 +150131,17 @@ class Headers { [Symbol.iterator]() { return createHeadersIterator(this, 'key+value'); } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; +}; +Headers$1.prototype.entries = Headers$1.prototype[Symbol.iterator]; -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { +Object.defineProperty(Headers$1.prototype, Symbol.toStringTag, { value: 'Headers', writable: false, enumerable: false, configurable: true }); -Object.defineProperties(Headers.prototype, { +Object.defineProperties(Headers$1.prototype, { get: { enumerable: true }, forEach: { enumerable: true }, set: { enumerable: true }, @@ -150238,7 +150242,7 @@ function exportNodeCompatibleHeaders(headers) { * @return Headers */ function createHeadersLenient(obj) { - const headers = new Headers(); + const headers = new Headers$1(); for (const name of Object.keys(obj)) { if (invalidTokenRegex.test(name)) { continue; @@ -150281,7 +150285,7 @@ class Response { Body.call(this, body, opts); const status = opts.status || 200; - const headers = new Headers(opts.headers); + const headers = new Headers$1(opts.headers); if (body != null && !headers.has('Content-Type')) { const contentType = extractContentType(body); @@ -150449,7 +150453,7 @@ class Request { size: init.size || input.size || 0 }); - const headers = new Headers(init.headers || input.headers || {}); + const headers = new Headers$1(init.headers || input.headers || {}); if (inputBody != null && !headers.has('Content-Type')) { const contentType = extractContentType(inputBody); @@ -150536,7 +150540,7 @@ Object.defineProperties(Request.prototype, { */ function getNodeRequestOptions(request) { const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); + const headers = new Headers$1(request[INTERNALS$2].headers); // fetch step 1.3 if (!headers.has('Accept')) { @@ -150670,7 +150674,7 @@ function fetch$2(url, opts) 
{ const request = new Request(url, opts); const options = getNodeRequestOptions(request); - const send = (options.protocol === 'https:' ? https$2 : http$2).request; + const send = (options.protocol === 'https:' ? require$$1$4 : http$2).request; const signal = request.signal; let response = null; @@ -150816,7 +150820,7 @@ function fetch$2(url, opts) { // HTTP-redirect fetch step 6 (counter increment) // Create a new Request object. const requestOpts = { - headers: new Headers(request.headers), + headers: new Headers$1(request.headers), follow: request.follow, counter: request.counter + 1, agent: request.agent, @@ -150998,17 +151002,17 @@ fetch$2.isRedirect = function (code) { // expose Promise fetch$2.Promise = global.Promise; -var lib$2 = /*#__PURE__*/Object.freeze({ +var lib$3 = /*#__PURE__*/Object.freeze({ __proto__: null, AbortError: AbortError, FetchError: FetchError, - Headers: Headers, + Headers: Headers$1, Request: Request, Response: Response, default: fetch$2 }); -var require$$0 = /*@__PURE__*/getAugmentedNamespace(lib$2); +var require$$0 = /*@__PURE__*/getAugmentedNamespace(lib$3); (function (module, exports) { const nodeFetch = require$$0; @@ -151038,7 +151042,7 @@ var require$$0 = /*@__PURE__*/getAugmentedNamespace(lib$2); var nodePonyfillExports = nodePonyfill.exports; var crossFetch = /*@__PURE__*/getDefaultExportFromCjs(nodePonyfillExports); -var dist$3 = {}; +var dist$9 = {}; var src = {exports: {}}; @@ -151944,7 +151948,7 @@ function requireNode$1 () { hasRequiredNode$1 = 1; (function (module, exports) { const tty = require$$0$6; - const util = require$$1$2; + const util = require$$1$3; /** * This is the Node.js implementation of `debug()`. @@ -152219,11 +152223,11 @@ if (typeof process === 'undefined' || process.type === 'renderer' || process.bro var srcExports = src.exports; -var dist$2 = {}; +var dist$8 = {}; -var helpers$3 = {}; +var helpers$5 = {}; -var __createBinding$6 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { +var __createBinding$7 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { @@ -152234,22 +152238,22 @@ var __createBinding$6 = (commonjsGlobal && commonjsGlobal.__createBinding) || (O if (k2 === undefined) k2 = k; o[k2] = m[k]; })); -var __setModuleDefault$6 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) { +var __setModuleDefault$7 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); -var __importStar$6 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { +var __importStar$7 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$6(result, mod, k); - __setModuleDefault$6(result, mod); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$7(result, mod, k); + __setModuleDefault$7(result, mod); return result; }; -Object.defineProperty(helpers$3, "__esModule", { value: true }); -helpers$3.req = helpers$3.json = helpers$3.toBuffer = void 0; -const http = __importStar$6(http$2); -const https = __importStar$6(https$2); +Object.defineProperty(helpers$5, "__esModule", { value: true }); +helpers$5.req = helpers$5.json = helpers$5.toBuffer = void 0; +const http = __importStar$7(http$2); +const https = __importStar$7(require$$1$4); async function toBuffer$1(stream) { let length = 0; const chunks = []; @@ -152259,7 +152263,7 @@ async function toBuffer$1(stream) { } return 
Buffer.concat(chunks, length); } -helpers$3.toBuffer = toBuffer$1; +helpers$5.toBuffer = toBuffer$1; // eslint-disable-next-line @typescript-eslint/no-explicit-any async function json(stream) { const buf = await toBuffer$1(stream); @@ -152273,7 +152277,7 @@ async function json(stream) { throw err; } } -helpers$3.json = json; +helpers$5.json = json; function req(url, opts = {}) { const href = typeof url === 'string' ? url : url.href; const req = (href.startsWith('https:') ? https : http).request(url, opts); @@ -152286,7 +152290,7 @@ function req(url, opts = {}) { req.then = promise.then.bind(promise); return req; } -helpers$3.req = req; +helpers$5.req = req; (function (exports) { var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { @@ -152319,8 +152323,8 @@ helpers$3.req = req; exports.Agent = void 0; const net = __importStar(require$$0$4); const http = __importStar(http$2); - const https_1 = https$2; - __exportStar(helpers$3, exports); + const https_1 = require$$1$4; + __exportStar(helpers$5, exports); const INTERNAL = Symbol('AgentBaseInternalState'); class Agent extends http.Agent { constructor(opts) { @@ -152463,9 +152467,9 @@ helpers$3.req = req; } exports.Agent = Agent; -} (dist$2)); +} (dist$8)); -var __createBinding$5 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { +var __createBinding$6 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { @@ -152476,30 +152480,30 @@ var __createBinding$5 = (commonjsGlobal && commonjsGlobal.__createBinding) || (O if (k2 === undefined) k2 = k; o[k2] = m[k]; })); -var __setModuleDefault$5 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? 
(function(o, v) { +var __setModuleDefault$6 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); -var __importStar$5 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { +var __importStar$6 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$5(result, mod, k); - __setModuleDefault$5(result, mod); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$6(result, mod, k); + __setModuleDefault$6(result, mod); return result; }; var __importDefault$5 = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; -Object.defineProperty(dist$3, "__esModule", { value: true }); -var HttpProxyAgent_1 = dist$3.HttpProxyAgent = void 0; -const net$3 = __importStar$5(require$$0$4); -const tls$2 = __importStar$5(require$$1$3); -const debug_1$3 = __importDefault$5(srcExports); -const events_1 = require$$0$3; -const agent_base_1$2 = dist$2; +Object.defineProperty(dist$9, "__esModule", { value: true }); +var HttpProxyAgent_1 = dist$9.HttpProxyAgent = void 0; +const net$3 = __importStar$6(require$$0$4); +const tls$2 = __importStar$6(require$$1$5); +const debug_1$4 = __importDefault$5(srcExports); +const events_1$1 = require$$0$3; +const agent_base_1$2 = dist$8; const url_1$2 = Url; -const debug$3 = (0, debug_1$3.default)('http-proxy-agent'); +const debug$5 = (0, debug_1$4.default)('http-proxy-agent'); /** * The `HttpProxyAgent` implements an HTTP Agent subclass that connects * to the specified "HTTP proxy server" in order to proxy HTTP requests. 
@@ -152509,7 +152513,7 @@ class HttpProxyAgent extends agent_base_1$2.Agent { super(opts); this.proxy = typeof proxy === 'string' ? new url_1$2.URL(proxy) : proxy; this.proxyHeaders = opts?.headers ?? {}; - debug$3('Creating new HttpProxyAgent instance: %o', this.proxy.href); + debug$5('Creating new HttpProxyAgent instance: %o', this.proxy.href); // Trim off the brackets from IPv6 addresses const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); const port = this.proxy.port @@ -152518,7 +152522,7 @@ class HttpProxyAgent extends agent_base_1$2.Agent { ? 443 : 80; this.connectOpts = { - ...(opts ? omit$2(opts, 'headers') : null), + ...(opts ? omit$3(opts, 'headers') : null), host, port, }; @@ -152571,37 +152575,37 @@ class HttpProxyAgent extends agent_base_1$2.Agent { // to re-generate the string since we just changed the `req.path`. let first; let endOfHeaders; - debug$3('Regenerating stored HTTP header string for request'); + debug$5('Regenerating stored HTTP header string for request'); req._implicitHeader(); if (req.outputData && req.outputData.length > 0) { - debug$3('Patching connection write() output buffer with updated header'); + debug$5('Patching connection write() output buffer with updated header'); first = req.outputData[0].data; endOfHeaders = first.indexOf('\r\n\r\n') + 4; req.outputData[0].data = req._header + first.substring(endOfHeaders); - debug$3('Output buffer: %o', req.outputData[0].data); + debug$5('Output buffer: %o', req.outputData[0].data); } // Create a socket connection to the proxy server. 
let socket; if (this.proxy.protocol === 'https:') { - debug$3('Creating `tls.Socket`: %o', this.connectOpts); + debug$5('Creating `tls.Socket`: %o', this.connectOpts); socket = tls$2.connect(this.connectOpts); } else { - debug$3('Creating `net.Socket`: %o', this.connectOpts); + debug$5('Creating `net.Socket`: %o', this.connectOpts); socket = net$3.connect(this.connectOpts); } // Wait for the socket's `connect` event, so that this `callback()` // function throws instead of the `http` request machinery. This is // important for i.e. `PacProxyAgent` which determines a failed proxy // connection via the `callback()` function throwing. - await (0, events_1.once)(socket, 'connect'); + await (0, events_1$1.once)(socket, 'connect'); return socket; } } HttpProxyAgent.protocols = ['http', 'https']; -HttpProxyAgent_1 = dist$3.HttpProxyAgent = HttpProxyAgent; -function omit$2(obj, ...keys) { +HttpProxyAgent_1 = dist$9.HttpProxyAgent = HttpProxyAgent; +function omit$3(obj, ...keys) { const ret = {}; let key; for (key in obj) { @@ -152612,7 +152616,7 @@ function omit$2(obj, ...keys) { return ret; } -var dist$1 = {}; +var dist$7 = {}; var parseProxyResponse$1 = {}; @@ -152621,8 +152625,8 @@ var __importDefault$4 = (commonjsGlobal && commonjsGlobal.__importDefault) || fu }; Object.defineProperty(parseProxyResponse$1, "__esModule", { value: true }); parseProxyResponse$1.parseProxyResponse = void 0; -const debug_1$2 = __importDefault$4(srcExports); -const debug$2 = (0, debug_1$2.default)('https-proxy-agent:parse-proxy-response'); +const debug_1$3 = __importDefault$4(srcExports); +const debug$4 = (0, debug_1$3.default)('https-proxy-agent:parse-proxy-response'); function parseProxyResponse(socket) { return new Promise((resolve, reject) => { // we need to buffer any HTTP traffic that happens with the proxy before we get @@ -152645,12 +152649,12 @@ function parseProxyResponse(socket) { } function onend() { cleanup(); - debug$2('onend'); + debug$4('onend'); reject(new Error('Proxy 
connection ended before receiving CONNECT response')); } function onerror(err) { cleanup(); - debug$2('onerror %o', err); + debug$4('onerror %o', err); reject(err); } function ondata(b) { @@ -152660,7 +152664,7 @@ function parseProxyResponse(socket) { const endOfHeaders = buffered.indexOf('\r\n\r\n'); if (endOfHeaders === -1) { // keep buffering - debug$2('have not received end of HTTP headers yet...'); + debug$4('have not received end of HTTP headers yet...'); read(); return; } @@ -152698,7 +152702,7 @@ function parseProxyResponse(socket) { headers[key] = value; } } - debug$2('got proxy server response: %o %o', firstLine, headers); + debug$4('got proxy server response: %o %o', firstLine, headers); cleanup(); resolve({ connect: { @@ -152716,7 +152720,7 @@ function parseProxyResponse(socket) { } parseProxyResponse$1.parseProxyResponse = parseProxyResponse; -var __createBinding$4 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { +var __createBinding$5 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { @@ -152727,31 +152731,31 @@ var __createBinding$4 = (commonjsGlobal && commonjsGlobal.__createBinding) || (O if (k2 === undefined) k2 = k; o[k2] = m[k]; })); -var __setModuleDefault$4 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) { +var __setModuleDefault$5 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); -var __importStar$4 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { +var __importStar$5 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$4(result, mod, k); - __setModuleDefault$4(result, mod); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$5(result, mod, k); + __setModuleDefault$5(result, mod); return result; }; var __importDefault$3 = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; -Object.defineProperty(dist$1, "__esModule", { value: true }); -var HttpsProxyAgent_1 = dist$1.HttpsProxyAgent = void 0; -const net$2 = __importStar$4(require$$0$4); -const tls$1 = __importStar$4(require$$1$3); -const assert_1 = __importDefault$3(assert$6); -const debug_1$1 = __importDefault$3(srcExports); -const agent_base_1$1 = dist$2; +Object.defineProperty(dist$7, "__esModule", { value: true }); +var HttpsProxyAgent_1 = dist$7.HttpsProxyAgent = void 0; +const net$2 = __importStar$5(require$$0$4); +const tls$1 = __importStar$5(require$$1$5); +const assert_1 = __importDefault$3(assert$a); +const debug_1$2 = __importDefault$3(srcExports); +const agent_base_1$1 = dist$8; const url_1$1 = Url; const parse_proxy_response_1 = parseProxyResponse$1; -const debug$1 = (0, debug_1$1.default)('https-proxy-agent'); +const debug$3 = (0, debug_1$2.default)('https-proxy-agent'); /** * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. 
@@ -152770,7 +152774,7 @@ class HttpsProxyAgent extends agent_base_1$1.Agent { this.options = { path: undefined }; this.proxy = typeof proxy === 'string' ? new url_1$1.URL(proxy) : proxy; this.proxyHeaders = opts?.headers ?? {}; - debug$1('Creating new HttpsProxyAgent instance: %o', this.proxy.href); + debug$3('Creating new HttpsProxyAgent instance: %o', this.proxy.href); // Trim off the brackets from IPv6 addresses const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); const port = this.proxy.port @@ -152781,7 +152785,7 @@ class HttpsProxyAgent extends agent_base_1$1.Agent { this.connectOpts = { // Attempt to negotiate http/1.1 for proxy servers that support http/2 ALPNProtocols: ['http/1.1'], - ...(opts ? omit$1(opts, 'headers') : null), + ...(opts ? omit$2(opts, 'headers') : null), host, port, }; @@ -152798,7 +152802,7 @@ class HttpsProxyAgent extends agent_base_1$1.Agent { // Create a socket connection to the proxy server. let socket; if (proxy.protocol === 'https:') { - debug$1('Creating `tls.Socket`: %o', this.connectOpts); + debug$3('Creating `tls.Socket`: %o', this.connectOpts); const servername = this.connectOpts.servername || this.connectOpts.host; socket = tls$1.connect({ ...this.connectOpts, @@ -152806,7 +152810,7 @@ class HttpsProxyAgent extends agent_base_1$1.Agent { }); } else { - debug$1('Creating `net.Socket`: %o', this.connectOpts); + debug$3('Creating `net.Socket`: %o', this.connectOpts); socket = net$2.connect(this.connectOpts); } const headers = typeof this.proxyHeaders === 'function' @@ -152838,10 +152842,10 @@ class HttpsProxyAgent extends agent_base_1$1.Agent { if (opts.secureEndpoint) { // The proxy is connecting to a TLS server, so upgrade // this socket connection to a TLS connection. 
- debug$1('Upgrading socket connection to TLS'); + debug$3('Upgrading socket connection to TLS'); const servername = opts.servername || opts.host; return tls$1.connect({ - ...omit$1(opts, 'host', 'path', 'port'), + ...omit$2(opts, 'host', 'path', 'port'), socket, servername: net$2.isIP(servername) ? undefined : servername, }); @@ -152863,7 +152867,7 @@ class HttpsProxyAgent extends agent_base_1$1.Agent { fakeSocket.readable = true; // Need to wait for the "socket" event to re-play the "data" events. req.once('socket', (s) => { - debug$1('Replaying proxy buffer for failed request'); + debug$3('Replaying proxy buffer for failed request'); (0, assert_1.default)(s.listenerCount('data') > 0); // Replay the "buffered" Buffer onto the fake `socket`, since at // this point the HTTP module machinery has been hooked up for @@ -152875,11 +152879,11 @@ class HttpsProxyAgent extends agent_base_1$1.Agent { } } HttpsProxyAgent.protocols = ['http', 'https']; -HttpsProxyAgent_1 = dist$1.HttpsProxyAgent = HttpsProxyAgent; +HttpsProxyAgent_1 = dist$7.HttpsProxyAgent = HttpsProxyAgent; function resume(socket) { socket.resume(); } -function omit$1(obj, ...keys) { +function omit$2(obj, ...keys) { const ret = {}; let key; for (key in obj) { @@ -152890,7 +152894,7 @@ function omit$1(obj, ...keys) { return ret; } -var dist = {}; +var dist$6 = {}; var build$1 = {}; @@ -152898,9 +152902,9 @@ var socksclient = {}; var smartbuffer = {}; -var utils$e = {}; +var utils$k = {}; -Object.defineProperty(utils$e, "__esModule", { value: true }); +Object.defineProperty(utils$k, "__esModule", { value: true }); const buffer_1 = require$$0$7; /** * Error strings @@ -152919,7 +152923,7 @@ const ERRORS$1 = { INVALID_READ_BEYOND_BOUNDS: 'Attempted to read beyond the bounds of the managed data.', INVALID_WRITE_BEYOND_BOUNDS: 'Attempted to write beyond the bounds of the managed data.' }; -utils$e.ERRORS = ERRORS$1; +utils$k.ERRORS = ERRORS$1; /** * Checks if a given encoding is a valid Buffer encoding. 
(Throws an exception if check fails) * @@ -152930,7 +152934,7 @@ function checkEncoding(encoding) { throw new Error(ERRORS$1.INVALID_ENCODING); } } -utils$e.checkEncoding = checkEncoding; +utils$k.checkEncoding = checkEncoding; /** * Checks if a given number is a finite integer. (Throws an exception if check fails) * @@ -152939,7 +152943,7 @@ utils$e.checkEncoding = checkEncoding; function isFiniteInteger(value) { return typeof value === 'number' && isFinite(value) && isInteger$1(value); } -utils$e.isFiniteInteger = isFiniteInteger; +utils$k.isFiniteInteger = isFiniteInteger; /** * Checks if an offset/length value is valid. (Throws an exception if check fails) * @@ -152965,7 +152969,7 @@ function checkOffsetOrLengthValue(value, offset) { function checkLengthValue(length) { checkOffsetOrLengthValue(length, false); } -utils$e.checkLengthValue = checkLengthValue; +utils$k.checkLengthValue = checkLengthValue; /** * Checks if a offset value is valid. (Throws an exception if check fails) * @@ -152974,7 +152978,7 @@ utils$e.checkLengthValue = checkLengthValue; function checkOffsetValue(offset) { checkOffsetOrLengthValue(offset, true); } -utils$e.checkOffsetValue = checkOffsetValue; +utils$k.checkOffsetValue = checkOffsetValue; /** * Checks if a target offset value is out of bounds. (Throws an exception if check fails) * @@ -152986,7 +152990,7 @@ function checkTargetOffset(offset, buff) { throw new Error(ERRORS$1.INVALID_TARGET_OFFSET); } } -utils$e.checkTargetOffset = checkTargetOffset; +utils$k.checkTargetOffset = checkTargetOffset; /** * Determines whether a given number is a integer. * @param value The number to check. 
@@ -153005,10 +153009,10 @@ function bigIntAndBufferInt64Check(bufferMethod) { throw new Error(`Platform does not support Buffer.prototype.${bufferMethod}.`); } } -utils$e.bigIntAndBufferInt64Check = bigIntAndBufferInt64Check; +utils$k.bigIntAndBufferInt64Check = bigIntAndBufferInt64Check; Object.defineProperty(smartbuffer, "__esModule", { value: true }); -const utils_1 = utils$e; +const utils_1$8 = utils$k; // The default Buffer size if one is not provided. const DEFAULT_SMARTBUFFER_SIZE = 4096; // The default string encoding to use for reading/writing strings. @@ -153027,16 +153031,16 @@ class SmartBuffer { if (SmartBuffer.isSmartBufferOptions(options)) { // Checks for encoding if (options.encoding) { - utils_1.checkEncoding(options.encoding); + utils_1$8.checkEncoding(options.encoding); this._encoding = options.encoding; } // Checks for initial size length if (options.size) { - if (utils_1.isFiniteInteger(options.size) && options.size > 0) { + if (utils_1$8.isFiniteInteger(options.size) && options.size > 0) { this._buff = Buffer.allocUnsafe(options.size); } else { - throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_SIZE); + throw new Error(utils_1$8.ERRORS.INVALID_SMARTBUFFER_SIZE); } // Check for initial Buffer } @@ -153046,7 +153050,7 @@ class SmartBuffer { this.length = options.buff.length; } else { - throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_BUFFER); + throw new Error(utils_1$8.ERRORS.INVALID_SMARTBUFFER_BUFFER); } } else { @@ -153056,7 +153060,7 @@ class SmartBuffer { else { // If something was passed but it's not a SmartBufferOptions object if (typeof options !== 'undefined') { - throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_OBJECT); + throw new Error(utils_1$8.ERRORS.INVALID_SMARTBUFFER_OBJECT); } // Otherwise default to sane options this._buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE); @@ -153159,7 +153163,7 @@ class SmartBuffer { * @return { BigInt } */ readBigInt64BE(offset) { - utils_1.bigIntAndBufferInt64Check('readBigInt64BE'); + 
utils_1$8.bigIntAndBufferInt64Check('readBigInt64BE'); return this._readNumberValue(Buffer.prototype.readBigInt64BE, 8, offset); } /** @@ -153169,7 +153173,7 @@ class SmartBuffer { * @return { BigInt } */ readBigInt64LE(offset) { - utils_1.bigIntAndBufferInt64Check('readBigInt64LE'); + utils_1$8.bigIntAndBufferInt64Check('readBigInt64LE'); return this._readNumberValue(Buffer.prototype.readBigInt64LE, 8, offset); } /** @@ -153292,7 +153296,7 @@ class SmartBuffer { * @return this */ writeBigInt64BE(value, offset) { - utils_1.bigIntAndBufferInt64Check('writeBigInt64BE'); + utils_1$8.bigIntAndBufferInt64Check('writeBigInt64BE'); return this._writeNumberValue(Buffer.prototype.writeBigInt64BE, 8, value, offset); } /** @@ -153304,7 +153308,7 @@ class SmartBuffer { * @return this */ insertBigInt64BE(value, offset) { - utils_1.bigIntAndBufferInt64Check('writeBigInt64BE'); + utils_1$8.bigIntAndBufferInt64Check('writeBigInt64BE'); return this._insertNumberValue(Buffer.prototype.writeBigInt64BE, 8, value, offset); } /** @@ -153316,7 +153320,7 @@ class SmartBuffer { * @return this */ writeBigInt64LE(value, offset) { - utils_1.bigIntAndBufferInt64Check('writeBigInt64LE'); + utils_1$8.bigIntAndBufferInt64Check('writeBigInt64LE'); return this._writeNumberValue(Buffer.prototype.writeBigInt64LE, 8, value, offset); } /** @@ -153328,7 +153332,7 @@ class SmartBuffer { * @return this */ insertBigInt64LE(value, offset) { - utils_1.bigIntAndBufferInt64Check('writeBigInt64LE'); + utils_1$8.bigIntAndBufferInt64Check('writeBigInt64LE'); return this._insertNumberValue(Buffer.prototype.writeBigInt64LE, 8, value, offset); } // Unsigned Integers @@ -153384,7 +153388,7 @@ class SmartBuffer { * @return { BigInt } */ readBigUInt64BE(offset) { - utils_1.bigIntAndBufferInt64Check('readBigUInt64BE'); + utils_1$8.bigIntAndBufferInt64Check('readBigUInt64BE'); return this._readNumberValue(Buffer.prototype.readBigUInt64BE, 8, offset); } /** @@ -153394,7 +153398,7 @@ class SmartBuffer { * @return { BigInt 
} */ readBigUInt64LE(offset) { - utils_1.bigIntAndBufferInt64Check('readBigUInt64LE'); + utils_1$8.bigIntAndBufferInt64Check('readBigUInt64LE'); return this._readNumberValue(Buffer.prototype.readBigUInt64LE, 8, offset); } /** @@ -153516,7 +153520,7 @@ class SmartBuffer { * @return this */ writeBigUInt64BE(value, offset) { - utils_1.bigIntAndBufferInt64Check('writeBigUInt64BE'); + utils_1$8.bigIntAndBufferInt64Check('writeBigUInt64BE'); return this._writeNumberValue(Buffer.prototype.writeBigUInt64BE, 8, value, offset); } /** @@ -153528,7 +153532,7 @@ class SmartBuffer { * @return this */ insertBigUInt64BE(value, offset) { - utils_1.bigIntAndBufferInt64Check('writeBigUInt64BE'); + utils_1$8.bigIntAndBufferInt64Check('writeBigUInt64BE'); return this._insertNumberValue(Buffer.prototype.writeBigUInt64BE, 8, value, offset); } /** @@ -153540,7 +153544,7 @@ class SmartBuffer { * @return this */ writeBigUInt64LE(value, offset) { - utils_1.bigIntAndBufferInt64Check('writeBigUInt64LE'); + utils_1$8.bigIntAndBufferInt64Check('writeBigUInt64LE'); return this._writeNumberValue(Buffer.prototype.writeBigUInt64LE, 8, value, offset); } /** @@ -153552,7 +153556,7 @@ class SmartBuffer { * @return this */ insertBigUInt64LE(value, offset) { - utils_1.bigIntAndBufferInt64Check('writeBigUInt64LE'); + utils_1$8.bigIntAndBufferInt64Check('writeBigUInt64LE'); return this._insertNumberValue(Buffer.prototype.writeBigUInt64LE, 8, value, offset); } // Floating Point @@ -153695,7 +153699,7 @@ class SmartBuffer { let lengthVal; // Length provided if (typeof arg1 === 'number') { - utils_1.checkLengthValue(arg1); + utils_1$8.checkLengthValue(arg1); lengthVal = Math.min(arg1, this.length - this._readOffset); } else { @@ -153704,7 +153708,7 @@ class SmartBuffer { } // Check encoding if (typeof encoding !== 'undefined') { - utils_1.checkEncoding(encoding); + utils_1$8.checkEncoding(encoding); } const value = this._buff.slice(this._readOffset, this._readOffset + lengthVal).toString(encoding || 
this._encoding); this._readOffset += lengthVal; @@ -153720,7 +153724,7 @@ class SmartBuffer { * @return this */ insertString(value, offset, encoding) { - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); return this._handleString(value, true, offset, encoding); } /** @@ -153744,7 +153748,7 @@ class SmartBuffer { */ readStringNT(encoding) { if (typeof encoding !== 'undefined') { - utils_1.checkEncoding(encoding); + utils_1$8.checkEncoding(encoding); } // Set null character position to the end SmartBuffer instance. let nullPos = this.length; @@ -153771,7 +153775,7 @@ class SmartBuffer { * @return this */ insertStringNT(value, offset, encoding) { - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); // Write Values this.insertString(value, offset, encoding); this.insertUInt8(0x00, offset + value.length); @@ -153802,7 +153806,7 @@ class SmartBuffer { */ readBuffer(length) { if (typeof length !== 'undefined') { - utils_1.checkLengthValue(length); + utils_1$8.checkLengthValue(length); } const lengthVal = typeof length === 'number' ? 
length : this.length; const endPoint = Math.min(this.length, this._readOffset + lengthVal); @@ -153821,7 +153825,7 @@ class SmartBuffer { * @return this */ insertBuffer(value, offset) { - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); return this._handleBuffer(value, true, offset); } /** @@ -153865,7 +153869,7 @@ class SmartBuffer { * @return this */ insertBufferNT(value, offset) { - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); // Write Values this.insertBuffer(value, offset); this.insertUInt8(0x00, offset + value.length); @@ -153882,7 +153886,7 @@ class SmartBuffer { writeBufferNT(value, offset) { // Checks for valid numberic value; if (typeof offset !== 'undefined') { - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); } // Write Values this.writeBuffer(value, offset); @@ -153920,9 +153924,9 @@ class SmartBuffer { * @param offset { Number } - The offset value to set. */ set readOffset(offset) { - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); // Check for bounds. - utils_1.checkTargetOffset(offset, this); + utils_1$8.checkTargetOffset(offset, this); this._readOffset = offset; } /** @@ -153939,9 +153943,9 @@ class SmartBuffer { * @param offset { Number } - The offset value to set. */ set writeOffset(offset) { - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); // Check for bounds. - utils_1.checkTargetOffset(offset, this); + utils_1$8.checkTargetOffset(offset, this); this._writeOffset = offset; } /** @@ -153958,7 +153962,7 @@ class SmartBuffer { * @param encoding { BufferEncoding } The string Buffer encoding to set. */ set encoding(encoding) { - utils_1.checkEncoding(encoding); + utils_1$8.checkEncoding(encoding); this._encoding = encoding; } /** @@ -153985,7 +153989,7 @@ class SmartBuffer { toString(encoding) { const encodingVal = typeof encoding === 'string' ? encoding : this._encoding; // Check for invalid encoding. 
- utils_1.checkEncoding(encodingVal); + utils_1$8.checkEncoding(encodingVal); return this._buff.toString(encodingVal, 0, this.length); } /** @@ -154012,12 +154016,12 @@ class SmartBuffer { // Check for encoding } else if (typeof arg3 === 'string') { - utils_1.checkEncoding(arg3); + utils_1$8.checkEncoding(arg3); encodingVal = arg3; } // Check for encoding (third param) if (typeof encoding === 'string') { - utils_1.checkEncoding(encoding); + utils_1$8.checkEncoding(encoding); encodingVal = encoding; } // Calculate bytelength of string. @@ -154092,13 +154096,13 @@ class SmartBuffer { // If an offset was provided, use it. if (typeof offset !== 'undefined') { // Checks for valid numberic value; - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); // Overide with custom offset. offsetVal = offset; } // Checks if offset is below zero, or the offset+length offset is beyond the total length of the managed data. if (offsetVal < 0 || offsetVal + length > this.length) { - throw new Error(utils_1.ERRORS.INVALID_READ_BEYOND_BOUNDS); + throw new Error(utils_1$8.ERRORS.INVALID_READ_BEYOND_BOUNDS); } } /** @@ -154109,7 +154113,7 @@ class SmartBuffer { */ ensureInsertable(dataLength, offset) { // Checks for valid numberic value; - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); // Ensure there is enough internal Buffer capacity. this._ensureCapacity(this.length + dataLength); // If an offset was provided and its not the very end of the buffer, copy data into appropriate location in regards to the offset. @@ -154191,7 +154195,7 @@ class SmartBuffer { */ _insertNumberValue(func, byteSize, value, offset) { // Check for invalid offset values. - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); // Ensure there is enough internal Buffer capacity. 
(raw offset is passed) this.ensureInsertable(byteSize, offset); // Call buffer.writeXXXX(); @@ -154217,9 +154221,9 @@ class SmartBuffer { if (typeof offset === 'number') { // Check if we're writing beyond the bounds of the managed data. if (offset < 0) { - throw new Error(utils_1.ERRORS.INVALID_WRITE_BEYOND_BOUNDS); + throw new Error(utils_1$8.ERRORS.INVALID_WRITE_BEYOND_BOUNDS); } - utils_1.checkOffsetValue(offset); + utils_1$8.checkOffsetValue(offset); } // Default to writeOffset if no offset value was given. const offsetVal = typeof offset === 'number' ? offset : this._writeOffset; @@ -154239,12 +154243,12 @@ class SmartBuffer { } smartbuffer.SmartBuffer = SmartBuffer; -var constants$3 = {}; +var constants$6 = {}; -Object.defineProperty(constants$3, "__esModule", { value: true }); -constants$3.SOCKS5_NO_ACCEPTABLE_AUTH = constants$3.SOCKS5_CUSTOM_AUTH_END = constants$3.SOCKS5_CUSTOM_AUTH_START = constants$3.SOCKS_INCOMING_PACKET_SIZES = constants$3.SocksClientState = constants$3.Socks5Response = constants$3.Socks5HostType = constants$3.Socks5Auth = constants$3.Socks4Response = constants$3.SocksCommand = constants$3.ERRORS = constants$3.DEFAULT_TIMEOUT = void 0; +Object.defineProperty(constants$6, "__esModule", { value: true }); +constants$6.SOCKS5_NO_ACCEPTABLE_AUTH = constants$6.SOCKS5_CUSTOM_AUTH_END = constants$6.SOCKS5_CUSTOM_AUTH_START = constants$6.SOCKS_INCOMING_PACKET_SIZES = constants$6.SocksClientState = constants$6.Socks5Response = constants$6.Socks5HostType = constants$6.Socks5Auth = constants$6.Socks4Response = constants$6.SocksCommand = constants$6.ERRORS = constants$6.DEFAULT_TIMEOUT = void 0; const DEFAULT_TIMEOUT = 30000; -constants$3.DEFAULT_TIMEOUT = DEFAULT_TIMEOUT; +constants$6.DEFAULT_TIMEOUT = DEFAULT_TIMEOUT; // prettier-ignore const ERRORS = { InvalidSocksCommand: 'An invalid SOCKS command was provided. 
Valid options are connect, bind, and associate.', @@ -154275,7 +154279,7 @@ const ERRORS = { InvalidSocks5IncomingConnectionResponse: 'Received invalid Socks5 incoming connection response', Socks5ProxyRejectedIncomingBoundConnection: 'Socks5 Proxy rejected incoming bound connection', }; -constants$3.ERRORS = ERRORS; +constants$6.ERRORS = ERRORS; const SOCKS_INCOMING_PACKET_SIZES = { Socks5InitialHandshakeResponse: 2, Socks5UserPassAuthenticationResponse: 2, @@ -154287,32 +154291,32 @@ const SOCKS_INCOMING_PACKET_SIZES = { // Command response + incoming connection (bind) Socks4Response: 8, // 2 header + 2 port + 4 ip }; -constants$3.SOCKS_INCOMING_PACKET_SIZES = SOCKS_INCOMING_PACKET_SIZES; +constants$6.SOCKS_INCOMING_PACKET_SIZES = SOCKS_INCOMING_PACKET_SIZES; var SocksCommand; (function (SocksCommand) { SocksCommand[SocksCommand["connect"] = 1] = "connect"; SocksCommand[SocksCommand["bind"] = 2] = "bind"; SocksCommand[SocksCommand["associate"] = 3] = "associate"; -})(SocksCommand || (constants$3.SocksCommand = SocksCommand = {})); +})(SocksCommand || (constants$6.SocksCommand = SocksCommand = {})); var Socks4Response; (function (Socks4Response) { Socks4Response[Socks4Response["Granted"] = 90] = "Granted"; Socks4Response[Socks4Response["Failed"] = 91] = "Failed"; Socks4Response[Socks4Response["Rejected"] = 92] = "Rejected"; Socks4Response[Socks4Response["RejectedIdent"] = 93] = "RejectedIdent"; -})(Socks4Response || (constants$3.Socks4Response = Socks4Response = {})); +})(Socks4Response || (constants$6.Socks4Response = Socks4Response = {})); var Socks5Auth; (function (Socks5Auth) { Socks5Auth[Socks5Auth["NoAuth"] = 0] = "NoAuth"; Socks5Auth[Socks5Auth["GSSApi"] = 1] = "GSSApi"; Socks5Auth[Socks5Auth["UserPass"] = 2] = "UserPass"; -})(Socks5Auth || (constants$3.Socks5Auth = Socks5Auth = {})); +})(Socks5Auth || (constants$6.Socks5Auth = Socks5Auth = {})); const SOCKS5_CUSTOM_AUTH_START = 0x80; -constants$3.SOCKS5_CUSTOM_AUTH_START = SOCKS5_CUSTOM_AUTH_START; 
+constants$6.SOCKS5_CUSTOM_AUTH_START = SOCKS5_CUSTOM_AUTH_START; const SOCKS5_CUSTOM_AUTH_END = 0xfe; -constants$3.SOCKS5_CUSTOM_AUTH_END = SOCKS5_CUSTOM_AUTH_END; +constants$6.SOCKS5_CUSTOM_AUTH_END = SOCKS5_CUSTOM_AUTH_END; const SOCKS5_NO_ACCEPTABLE_AUTH = 0xff; -constants$3.SOCKS5_NO_ACCEPTABLE_AUTH = SOCKS5_NO_ACCEPTABLE_AUTH; +constants$6.SOCKS5_NO_ACCEPTABLE_AUTH = SOCKS5_NO_ACCEPTABLE_AUTH; var Socks5Response; (function (Socks5Response) { Socks5Response[Socks5Response["Granted"] = 0] = "Granted"; @@ -154324,13 +154328,13 @@ var Socks5Response; Socks5Response[Socks5Response["TTLExpired"] = 6] = "TTLExpired"; Socks5Response[Socks5Response["CommandNotSupported"] = 7] = "CommandNotSupported"; Socks5Response[Socks5Response["AddressNotSupported"] = 8] = "AddressNotSupported"; -})(Socks5Response || (constants$3.Socks5Response = Socks5Response = {})); +})(Socks5Response || (constants$6.Socks5Response = Socks5Response = {})); var Socks5HostType; (function (Socks5HostType) { Socks5HostType[Socks5HostType["IPv4"] = 1] = "IPv4"; Socks5HostType[Socks5HostType["Hostname"] = 3] = "Hostname"; Socks5HostType[Socks5HostType["IPv6"] = 4] = "IPv6"; -})(Socks5HostType || (constants$3.Socks5HostType = Socks5HostType = {})); +})(Socks5HostType || (constants$6.Socks5HostType = Socks5HostType = {})); var SocksClientState; (function (SocksClientState) { SocksClientState[SocksClientState["Created"] = 0] = "Created"; @@ -154346,9 +154350,9 @@ var SocksClientState; SocksClientState[SocksClientState["Established"] = 10] = "Established"; SocksClientState[SocksClientState["Disconnected"] = 11] = "Disconnected"; SocksClientState[SocksClientState["Error"] = 99] = "Error"; -})(SocksClientState || (constants$3.SocksClientState = SocksClientState = {})); +})(SocksClientState || (constants$6.SocksClientState = SocksClientState = {})); -var helpers$2 = {}; +var helpers$4 = {}; var util$1 = {}; @@ -154407,14 +154411,14 @@ function isCorrect(defaultBits) { } common$2.isCorrect = isCorrect; -var 
constants$2 = {}; +var constants$5 = {}; -Object.defineProperty(constants$2, "__esModule", { value: true }); -constants$2.RE_SUBNET_STRING = constants$2.RE_ADDRESS = constants$2.GROUPS = constants$2.BITS = void 0; -constants$2.BITS = 32; -constants$2.GROUPS = 4; -constants$2.RE_ADDRESS = /^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/g; -constants$2.RE_SUBNET_STRING = /\/\d{1,2}$/; +Object.defineProperty(constants$5, "__esModule", { value: true }); +constants$5.RE_SUBNET_STRING = constants$5.RE_ADDRESS = constants$5.GROUPS = constants$5.BITS = void 0; +constants$5.BITS = 32; +constants$5.GROUPS = 4; +constants$5.RE_ADDRESS = /^(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/g; +constants$5.RE_SUBNET_STRING = /\/\d{1,2}$/; var addressError = {}; @@ -156021,7 +156025,7 @@ var sprintf = {}; } (sprintf)); /* eslint-disable no-param-reassign */ -var __createBinding$3 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { +var __createBinding$4 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { @@ -156032,22 +156036,22 @@ var __createBinding$3 = (commonjsGlobal && commonjsGlobal.__createBinding) || (O if (k2 === undefined) k2 = k; o[k2] = m[k]; })); -var __setModuleDefault$3 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) { +var __setModuleDefault$4 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); -var __importStar$3 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { +var __importStar$4 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$3(result, mod, k); - __setModuleDefault$3(result, mod); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$4(result, mod, k); + __setModuleDefault$4(result, mod); return result; }; Object.defineProperty(ipv4, "__esModule", { value: true }); ipv4.Address4 = void 0; -const common$1 = __importStar$3(common$2); -const constants$1 = __importStar$3(constants$2); +const common$1 = __importStar$4(common$2); +const constants$4 = __importStar$4(constants$5); const address_error_1$1 = addressError; const jsbn_1$1 = jsbnExports; const sprintf_js_1$3 = sprintf; @@ -156058,7 +156062,7 @@ const sprintf_js_1$3 = sprintf; */ class Address4 { constructor(address) { - this.groups = constants$1.GROUPS; + this.groups = constants$4.GROUPS; this.parsedAddress = []; this.parsedSubnet = ''; this.subnet = '/32'; @@ -156070,7 +156074,7 @@ class Address4 { * @instance * @returns {Boolean} */ - this.isCorrect = common$1.isCorrect(constants$1.BITS); + this.isCorrect = common$1.isCorrect(constants$4.BITS); /** * Returns true if the given address is in the subnet of the current address * @memberof Address4 @@ -156079,15 +156083,15 @@ class Address4 { */ this.isInSubnet = common$1.isInSubnet; this.address = address; - const subnet = constants$1.RE_SUBNET_STRING.exec(address); + const subnet = constants$4.RE_SUBNET_STRING.exec(address); if (subnet) { this.parsedSubnet = subnet[0].replace('/', ''); this.subnetMask = parseInt(this.parsedSubnet, 10); 
this.subnet = `/${this.subnetMask}`; - if (this.subnetMask < 0 || this.subnetMask > constants$1.BITS) { + if (this.subnetMask < 0 || this.subnetMask > constants$4.BITS) { throw new address_error_1$1.AddressError('Invalid subnet mask.'); } - address = address.replace(constants$1.RE_SUBNET_STRING, ''); + address = address.replace(constants$4.RE_SUBNET_STRING, ''); } this.addressMinusSuffix = address; this.parsedAddress = this.parse(address); @@ -156107,7 +156111,7 @@ class Address4 { */ parse(address) { const groups = address.split('.'); - if (!address.match(constants$1.RE_ADDRESS)) { + if (!address.match(constants$4.RE_ADDRESS)) { throw new address_error_1$1.AddressError('Invalid IPv4 address.'); } return groups; @@ -156191,7 +156195,7 @@ class Address4 { toGroup6() { const output = []; let i; - for (i = 0; i < constants$1.GROUPS; i += 2) { + for (i = 0; i < constants$4.GROUPS; i += 2) { const hex = (0, sprintf_js_1$3.sprintf)('%02x%02x', parseInt(this.parsedAddress[i], 10), parseInt(this.parsedAddress[i + 1], 10)); output.push((0, sprintf_js_1$3.sprintf)('%x', parseInt(hex, 16))); } @@ -156213,7 +156217,7 @@ class Address4 { * @returns {BigInteger} */ _startAddress() { - return new jsbn_1$1.BigInteger(this.mask() + '0'.repeat(constants$1.BITS - this.subnetMask), 2); + return new jsbn_1$1.BigInteger(this.mask() + '0'.repeat(constants$4.BITS - this.subnetMask), 2); } /** * The first address in the range given by this address' subnet. 
@@ -156243,7 +156247,7 @@ class Address4 { * @returns {BigInteger} */ _endAddress() { - return new jsbn_1$1.BigInteger(this.mask() + '1'.repeat(constants$1.BITS - this.subnetMask), 2); + return new jsbn_1$1.BigInteger(this.mask() + '1'.repeat(constants$4.BITS - this.subnetMask), 2); } /** * The last address in the range given by this address' subnet @@ -156332,7 +156336,7 @@ class Address4 { * @returns {string} */ binaryZeroPad() { - return this.bigInteger().toString(2).padStart(constants$1.BITS, '0'); + return this.bigInteger().toString(2).padStart(constants$4.BITS, '0'); } /** * Groups an IPv4 address for inclusion at the end of an IPv6 address @@ -156340,25 +156344,25 @@ class Address4 { */ groupForV6() { const segments = this.parsedAddress; - return this.address.replace(constants$1.RE_ADDRESS, (0, sprintf_js_1$3.sprintf)('%s.%s', segments.slice(0, 2).join('.'), segments.slice(2, 4).join('.'))); + return this.address.replace(constants$4.RE_ADDRESS, (0, sprintf_js_1$3.sprintf)('%s.%s', segments.slice(0, 2).join('.'), segments.slice(2, 4).join('.'))); } } ipv4.Address4 = Address4; var ipv6 = {}; -var constants = {}; +var constants$3 = {}; -Object.defineProperty(constants, "__esModule", { value: true }); -constants.RE_URL_WITH_PORT = constants.RE_URL = constants.RE_ZONE_STRING = constants.RE_SUBNET_STRING = constants.RE_BAD_ADDRESS = constants.RE_BAD_CHARACTERS = constants.TYPES = constants.SCOPES = constants.GROUPS = constants.BITS = void 0; -constants.BITS = 128; -constants.GROUPS = 8; +Object.defineProperty(constants$3, "__esModule", { value: true }); +constants$3.RE_URL_WITH_PORT = constants$3.RE_URL = constants$3.RE_ZONE_STRING = constants$3.RE_SUBNET_STRING = constants$3.RE_BAD_ADDRESS = constants$3.RE_BAD_CHARACTERS = constants$3.TYPES = constants$3.SCOPES = constants$3.GROUPS = constants$3.BITS = void 0; +constants$3.BITS = 128; +constants$3.GROUPS = 8; /** * Represents IPv6 address scopes * @memberof Address6 * @static */ -constants.SCOPES = { 
+constants$3.SCOPES = { 0: 'Reserved', 1: 'Interface local', 2: 'Link local', @@ -156373,7 +156377,7 @@ constants.SCOPES = { * @memberof Address6 * @static */ -constants.TYPES = { +constants$3.TYPES = { 'ff01::1/128': 'Multicast (All nodes on this interface)', 'ff01::2/128': 'Multicast (All routers on this interface)', 'ff02::1/128': 'Multicast (All nodes on this link)', @@ -156402,32 +156406,32 @@ constants.TYPES = { * @memberof Address6 * @static */ -constants.RE_BAD_CHARACTERS = /([^0-9a-f:/%])/gi; +constants$3.RE_BAD_CHARACTERS = /([^0-9a-f:/%])/gi; /** * A regular expression that matches an incorrect IPv6 address * @memberof Address6 * @static */ -constants.RE_BAD_ADDRESS = /([0-9a-f]{5,}|:{3,}|[^:]:$|^:[^:]|\/$)/gi; +constants$3.RE_BAD_ADDRESS = /([0-9a-f]{5,}|:{3,}|[^:]:$|^:[^:]|\/$)/gi; /** * A regular expression that matches an IPv6 subnet * @memberof Address6 * @static */ -constants.RE_SUBNET_STRING = /\/\d{1,3}(?=%|$)/; +constants$3.RE_SUBNET_STRING = /\/\d{1,3}(?=%|$)/; /** * A regular expression that matches an IPv6 zone * @memberof Address6 * @static */ -constants.RE_ZONE_STRING = /%.*$/; -constants.RE_URL = new RegExp(/^\[{0,1}([0-9a-f:]+)\]{0,1}/); -constants.RE_URL_WITH_PORT = new RegExp(/\[([0-9a-f:]+)\]:([0-9]{1,5})/); +constants$3.RE_ZONE_STRING = /%.*$/; +constants$3.RE_URL = new RegExp(/^\[{0,1}([0-9a-f:]+)\]{0,1}/); +constants$3.RE_URL_WITH_PORT = new RegExp(/\[([0-9a-f:]+)\]:([0-9]{1,5})/); -var helpers$1 = {}; +var helpers$3 = {}; -Object.defineProperty(helpers$1, "__esModule", { value: true }); -helpers$1.simpleGroup = helpers$1.spanLeadingZeroes = helpers$1.spanAll = helpers$1.spanAllZeroes = void 0; +Object.defineProperty(helpers$3, "__esModule", { value: true }); +helpers$3.simpleGroup = helpers$3.spanLeadingZeroes = helpers$3.spanAll = helpers$3.spanAllZeroes = void 0; const sprintf_js_1$2 = sprintf; /** * @returns {String} the string with all zeroes contained in a @@ -156435,7 +156439,7 @@ const sprintf_js_1$2 = sprintf; function 
spanAllZeroes(s) { return s.replace(/(0+)/g, '$1'); } -helpers$1.spanAllZeroes = spanAllZeroes; +helpers$3.spanAllZeroes = spanAllZeroes; /** * @returns {String} the string with each character contained in a */ @@ -156446,7 +156450,7 @@ function spanAll(s, offset = 0) { ) .join(''); } -helpers$1.spanAll = spanAll; +helpers$3.spanAll = spanAll; function spanLeadingZeroesSimple(group) { return group.replace(/^(0+)/, '$1'); } @@ -156457,7 +156461,7 @@ function spanLeadingZeroes(address) { const groups = address.split(':'); return groups.map((g) => spanLeadingZeroesSimple(g)).join(':'); } -helpers$1.spanLeadingZeroes = spanLeadingZeroes; +helpers$3.spanLeadingZeroes = spanLeadingZeroes; /** * Groups an address * @returns {String} a grouped address @@ -156471,11 +156475,11 @@ function simpleGroup(addressString, offset = 0) { return (0, sprintf_js_1$2.sprintf)('%s', i + offset, spanLeadingZeroesSimple(g)); }); } -helpers$1.simpleGroup = simpleGroup; +helpers$3.simpleGroup = simpleGroup; var regularExpressions = {}; -var __createBinding$2 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { +var __createBinding$3 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { @@ -156486,21 +156490,21 @@ var __createBinding$2 = (commonjsGlobal && commonjsGlobal.__createBinding) || (O if (k2 === undefined) k2 = k; o[k2] = m[k]; })); -var __setModuleDefault$2 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) { +var __setModuleDefault$3 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); -var __importStar$2 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { +var __importStar$3 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$2(result, mod, k); - __setModuleDefault$2(result, mod); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$3(result, mod, k); + __setModuleDefault$3(result, mod); return result; }; Object.defineProperty(regularExpressions, "__esModule", { value: true }); regularExpressions.possibleElisions = regularExpressions.simpleRegularExpression = regularExpressions.ADDRESS_BOUNDARY = regularExpressions.padGroup = regularExpressions.groupPossibilities = void 0; -const v6 = __importStar$2(constants); +const v6 = __importStar$3(constants$3); const sprintf_js_1$1 = sprintf; function groupPossibilities(possibilities) { return (0, sprintf_js_1$1.sprintf)('(%s)', possibilities.join('|')); @@ -156572,7 +156576,7 @@ regularExpressions.possibleElisions = possibleElisions; /* eslint-disable prefer-destructuring */ /* eslint-disable no-param-reassign */ -var __createBinding$1 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { +var __createBinding$2 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { @@ -156583,30 +156587,30 @@ var __createBinding$1 = (commonjsGlobal && commonjsGlobal.__createBinding) || (O if (k2 === undefined) k2 = k; o[k2] = m[k]; })); -var __setModuleDefault$1 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) { +var __setModuleDefault$2 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); -var __importStar$1 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { +var __importStar$2 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$1(result, mod, k); - __setModuleDefault$1(result, mod); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$2(result, mod, k); + __setModuleDefault$2(result, mod); return result; }; Object.defineProperty(ipv6, "__esModule", { value: true }); ipv6.Address6 = void 0; -const common = __importStar$1(common$2); -const constants4 = __importStar$1(constants$2); -const constants6 = __importStar$1(constants); -const helpers = __importStar$1(helpers$1); +const common = __importStar$2(common$2); +const constants4 = __importStar$2(constants$5); +const constants6 = __importStar$2(constants$3); +const helpers$2 = __importStar$2(helpers$3); const ipv4_1 = ipv4; const regular_expressions_1 = regularExpressions; const address_error_1 = addressError; const jsbn_1 = jsbnExports; const sprintf_js_1 = sprintf; -function assert$4(condition) { +function assert$8(condition) { if (!condition) { throw new Error('Assertion failed.'); } @@ -157474,15 +157478,15 @@ class Address6 { group() { if (this.elidedGroups === 
0) { // The simple case - return helpers.simpleGroup(this.address).join(':'); + return helpers$2.simpleGroup(this.address).join(':'); } - assert$4(typeof this.elidedGroups === 'number'); - assert$4(typeof this.elisionBegin === 'number'); + assert$8(typeof this.elidedGroups === 'number'); + assert$8(typeof this.elisionBegin === 'number'); // The elided case const output = []; const [left, right] = this.address.split('::'); if (left.length) { - output.push(...helpers.simpleGroup(left)); + output.push(...helpers$2.simpleGroup(left)); } else { output.push(''); @@ -157493,13 +157497,13 @@ class Address6 { } output.push((0, sprintf_js_1.sprintf)('', classes.join(' '))); if (right.length) { - output.push(...helpers.simpleGroup(right, this.elisionEnd)); + output.push(...helpers$2.simpleGroup(right, this.elisionEnd)); } else { output.push(''); } if (this.is4()) { - assert$4(this.address4 instanceof ipv4_1.Address4); + assert$8(this.address4 instanceof ipv4_1.Address4); output.pop(); output.push(this.address4.groupForV6()); } @@ -157533,7 +157537,7 @@ class Address6 { if (halves[0].length) { output.push((0, regular_expressions_1.simpleRegularExpression)(halves[0].split(':'))); } - assert$4(typeof address6.elidedGroups === 'number'); + assert$8(typeof address6.elidedGroups === 'number'); output.push((0, regular_expressions_1.possibleElisions)(address6.elidedGroups, halves[0].length !== 0, halves[1].length !== 0)); if (halves[1].length) { output.push((0, regular_expressions_1.simpleRegularExpression)(halves[1].split(':'))); @@ -157599,15 +157603,15 @@ ipv6.Address6 = Address6; Object.defineProperty(exports, "Address6", { enumerable: true, get: function () { return ipv6_1.Address6; } }); const address_error_1 = addressError; Object.defineProperty(exports, "AddressError", { enumerable: true, get: function () { return address_error_1.AddressError; } }); - const helpers = __importStar(helpers$1); + const helpers = __importStar(helpers$3); exports.v6 = { helpers }; } (ipAddress)); 
-Object.defineProperty(helpers$2, "__esModule", { value: true }); -helpers$2.ipToBuffer = helpers$2.int32ToIpv4 = helpers$2.ipv4ToInt32 = helpers$2.validateSocksClientChainOptions = helpers$2.validateSocksClientOptions = void 0; -const util_1$q = util$1; -const constants_1 = constants$3; +Object.defineProperty(helpers$4, "__esModule", { value: true }); +helpers$4.ipToBuffer = helpers$4.int32ToIpv4 = helpers$4.ipv4ToInt32 = helpers$4.validateSocksClientChainOptions = helpers$4.validateSocksClientOptions = void 0; +const util_1$s = util$1; +const constants_1$1 = constants$6; const stream$1 = Stream; const ip_address_1 = ipAddress; const net$1 = require$$0$4; @@ -157618,34 +157622,34 @@ const net$1 = require$$0$4; */ function validateSocksClientOptions(options, acceptedCommands = ['connect', 'bind', 'associate']) { // Check SOCKs command option. - if (!constants_1.SocksCommand[options.command]) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksCommand, options); + if (!constants_1$1.SocksCommand[options.command]) { + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksCommand, options); } // Check SocksCommand for acceptable command. 
if (acceptedCommands.indexOf(options.command) === -1) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksCommandForOperation, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksCommandForOperation, options); } // Check destination if (!isValidSocksRemoteHost(options.destination)) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsDestination, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsDestination, options); } // Check SOCKS proxy to use if (!isValidSocksProxy(options.proxy)) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsProxy, options); } // Validate custom auth (if set) validateCustomProxyAuth(options.proxy, options); // Check timeout if (options.timeout && !isValidTimeoutValue(options.timeout)) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsTimeout, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsTimeout, options); } // Check existing_socket (if provided) if (options.existing_socket && !(options.existing_socket instanceof stream$1.Duplex)) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsExistingSocket, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsExistingSocket, options); } } -helpers$2.validateSocksClientOptions = validateSocksClientOptions; +helpers$4.validateSocksClientOptions = validateSocksClientOptions; /** * Validates the SocksClientChainOptions * @param options { SocksClientChainOptions } @@ -157653,52 +157657,52 @@ helpers$2.validateSocksClientOptions = validateSocksClientOptions; function validateSocksClientChainOptions(options) { // Only connect is supported when chaining. 
if (options.command !== 'connect') { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksCommandChain, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksCommandChain, options); } // Check destination if (!isValidSocksRemoteHost(options.destination)) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsDestination, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsDestination, options); } // Validate proxies (length) if (!(options.proxies && Array.isArray(options.proxies) && options.proxies.length >= 2)) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxiesLength, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsProxiesLength, options); } // Validate proxies options.proxies.forEach((proxy) => { if (!isValidSocksProxy(proxy)) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsProxy, options); } // Validate custom auth (if set) validateCustomProxyAuth(proxy, options); }); // Check timeout if (options.timeout && !isValidTimeoutValue(options.timeout)) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsTimeout, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsTimeout, options); } } -helpers$2.validateSocksClientChainOptions = validateSocksClientChainOptions; +helpers$4.validateSocksClientChainOptions = validateSocksClientChainOptions; function validateCustomProxyAuth(proxy, options) { if (proxy.custom_auth_method !== undefined) { // Invalid auth method range - if (proxy.custom_auth_method < constants_1.SOCKS5_CUSTOM_AUTH_START || - proxy.custom_auth_method > constants_1.SOCKS5_CUSTOM_AUTH_END) { - throw new 
util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthRange, options); + if (proxy.custom_auth_method < constants_1$1.SOCKS5_CUSTOM_AUTH_START || + proxy.custom_auth_method > constants_1$1.SOCKS5_CUSTOM_AUTH_END) { + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsCustomAuthRange, options); } // Missing custom_auth_request_handler if (proxy.custom_auth_request_handler === undefined || typeof proxy.custom_auth_request_handler !== 'function') { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); } // Missing custom_auth_response_size if (proxy.custom_auth_response_size === undefined) { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); } // Missing/invalid custom_auth_response_handler if (proxy.custom_auth_response_handler === undefined || typeof proxy.custom_auth_response_handler !== 'function') { - throw new util_1$q.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); + throw new util_1$s.SocksClientError(constants_1$1.ERRORS.InvalidSocksClientOptionsCustomAuthOptions, options); } } } @@ -157737,7 +157741,7 @@ function ipv4ToInt32(ip) { // Convert the IPv4 address parts to an integer return address.toArray().reduce((acc, part) => (acc << 8) + part, 0); } -helpers$2.ipv4ToInt32 = ipv4ToInt32; +helpers$4.ipv4ToInt32 = ipv4ToInt32; function int32ToIpv4(int32) { // Extract each byte (octet) from the 32-bit integer const octet1 = (int32 >>> 24) & 0xff; @@ -157747,7 +157751,7 @@ function int32ToIpv4(int32) { // Combine the octets into a string in IPv4 format return [octet1, octet2, octet3, octet4].join('.'); } -helpers$2.int32ToIpv4 = 
int32ToIpv4; +helpers$4.int32ToIpv4 = int32ToIpv4; function ipToBuffer(ip) { if (net$1.isIPv4(ip)) { // Handle IPv4 addresses @@ -157763,7 +157767,7 @@ function ipToBuffer(ip) { throw new Error('Invalid IP address format'); } } -helpers$2.ipToBuffer = ipToBuffer; +helpers$4.ipToBuffer = ipToBuffer; var receivebuffer = {}; @@ -157824,8 +157828,8 @@ receivebuffer.ReceiveBuffer = ReceiveBuffer; const events_1 = require$$0$3; const net = require$$0$4; const smart_buffer_1 = smartbuffer; - const constants_1 = constants$3; - const helpers_1 = helpers$2; + const constants_1 = constants$6; + const helpers_1 = helpers$4; const receivebuffer_1 = receivebuffer; const util_1 = util$1; Object.defineProperty(exports, "SocksClientError", { enumerable: true, get: function () { return util_1.SocksClientError; } }); @@ -158624,7 +158628,7 @@ receivebuffer.ReceiveBuffer = ReceiveBuffer; } (build$1)); -var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { +var __createBinding$1 = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { @@ -158635,31 +158639,31 @@ var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Obj if (k2 === undefined) k2 = k; o[k2] = m[k]; })); -var __setModuleDefault = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) { +var __setModuleDefault$1 = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); -var __importStar = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { +var __importStar$1 = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$1(result, mod, k); + __setModuleDefault$1(result, mod); return result; }; var __importDefault$2 = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; -Object.defineProperty(dist, "__esModule", { value: true }); -var SocksProxyAgent_1 = dist.SocksProxyAgent = void 0; +Object.defineProperty(dist$6, "__esModule", { value: true }); +var SocksProxyAgent_1 = dist$6.SocksProxyAgent = void 0; const socks_1 = build$1; -const agent_base_1 = dist$2; -const debug_1 = __importDefault$2(srcExports); -const dns = __importStar(require$$3$1); -const net = __importStar(require$$0$4); -const tls = __importStar(require$$1$3); +const agent_base_1 = dist$8; +const debug_1$1 = __importDefault$2(srcExports); +const dns = __importStar$1(require$$3$1); +const net = __importStar$1(require$$0$4); +const tls = __importStar$1(require$$1$5); const url_1 = Url; -const debug = (0, debug_1.default)('socks-proxy-agent'); +const debug$2 = (0, debug_1$1.default)('socks-proxy-agent'); function parseSocksURL(url) { let lookup = false; let type = 5; @@ -158764,9 +158768,9 @@ class SocksProxyAgent extends agent_base_1.Agent { if (tlsSocket) tlsSocket.destroy(); }; - debug('Creating socks proxy connection: %o', socksOpts); + debug$2('Creating socks proxy connection: %o', 
socksOpts); const { socket } = await socks_1.SocksClient.createConnection(socksOpts); - debug('Successfully created socks proxy connection'); + debug$2('Successfully created socks proxy connection'); if (timeout !== null) { socket.setTimeout(timeout); socket.on('timeout', () => cleanup()); @@ -158774,15 +158778,15 @@ class SocksProxyAgent extends agent_base_1.Agent { if (opts.secureEndpoint) { // The proxy is connecting to a TLS server, so upgrade // this socket connection to a TLS connection. - debug('Upgrading socket connection to TLS'); + debug$2('Upgrading socket connection to TLS'); const servername = opts.servername || opts.host; const tlsSocket = tls.connect({ - ...omit(opts, 'host', 'path', 'port'), + ...omit$1(opts, 'host', 'path', 'port'), socket, servername: net.isIP(servername) ? undefined : servername, }); tlsSocket.once('error', (error) => { - debug('Socket TLS error', error.message); + debug$2('Socket TLS error', error.message); cleanup(tlsSocket); }); return tlsSocket; @@ -158797,8 +158801,8 @@ SocksProxyAgent.protocols = [ 'socks5', 'socks5h', ]; -SocksProxyAgent_1 = dist.SocksProxyAgent = SocksProxyAgent; -function omit(obj, ...keys) { +SocksProxyAgent_1 = dist$6.SocksProxyAgent = SocksProxyAgent; +function omit$1(obj, ...keys) { const ret = {}; let key; for (key in obj) { @@ -162171,13 +162175,14 @@ BigInt.prototype.toJSON = function() { return this.toString(); }; const isNode = !process.browser && typeof globalThis.window === "undefined"; +const crypto$2 = isNode ? 
require$$5.webcrypto : globalThis.crypto; const chunk = (arr, size) => [...Array(Math.ceil(arr.length / size))].map((_, i) => arr.slice(size * i, size + size * i)); function sleep$1(ms) { return new Promise((resolve) => setTimeout(resolve, ms)); } function validateUrl(url, protocols) { try { - const parsedUrl = new Url.URL(url); + const parsedUrl = new URL(url); if (protocols && protocols.length) { return protocols.map((p) => p.toLowerCase()).includes(parsedUrl.protocol); } @@ -162186,11 +162191,35 @@ function validateUrl(url, protocols) { return false; } } -function bytesToHex(bytes) { +function concatBytes$3(...arrays) { + const totalSize = arrays.reduce((acc, e) => acc + e.length, 0); + const merged = new Uint8Array(totalSize); + arrays.forEach((array, i, arrays2) => { + const offset = arrays2.slice(0, i).reduce((acc, e) => acc + e.length, 0); + merged.set(array, offset); + }); + return merged; +} +function bytesToBase64$1(bytes) { + return btoa(String.fromCharCode.apply(null, Array.from(bytes))); +} +function base64ToBytes$1(base64) { + return Uint8Array.from(atob(base64), (c) => c.charCodeAt(0)); +} +function bytesToHex$3(bytes) { return "0x" + Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join(""); } +function hexToBytes$3(hexString) { + if (hexString.slice(0, 2) === "0x") { + hexString = hexString.replace("0x", ""); + } + if (hexString.length % 2 !== 0) { + hexString = "0" + hexString; + } + return Uint8Array.from(hexString.match(/.{1,2}/g).map((byte) => parseInt(byte, 16))); +} function bytesToBN(bytes) { - return BigInt(bytesToHex(bytes)); + return BigInt(bytesToHex$3(bytes)); } function bnToBytes(bigint) { let hexString = typeof bigint === "bigint" ? 
bigint.toString(16) : bigint; @@ -162212,7 +162241,7 @@ function toFixedHex(numberish, length = 32) { return "0x" + BigInt(numberish).toString(16).padStart(length * 2, "0"); } function rBigInt(nbytes = 31) { - return bytesToBN(crypto.getRandomValues(new Uint8Array(nbytes))); + return bytesToBN(crypto$2.getRandomValues(new Uint8Array(nbytes))); } function substring(str, length = 10) { if (str.length < length * 2) { @@ -162221,7 +162250,7 @@ function substring(str, length = 10) { return `${str.substring(0, length)}...${str.substring(str.length - length)}`; } -var __async$e = (__this, __arguments, generator) => { +var __async$f = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -162242,7 +162271,7 @@ var __async$e = (__this, __arguments, generator) => { }); }; function multicall(Multicall2, calls) { - return __async$e(this, null, function* () { + return __async$f(this, null, function* () { const calldata = calls.map((call) => { var _a, _b, _c; const target = ((_a = call.contract) == null ? void 0 : _a.target) || call.address; @@ -162265,29 +162294,29 @@ function multicall(Multicall2, calls) { }); } -var __defProp$4 = Object.defineProperty; -var __defProps$4 = Object.defineProperties; -var __getOwnPropDescs$4 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols; +var __defProp$6 = Object.defineProperty; +var __defProps$5 = Object.defineProperties; +var __getOwnPropDescs$5 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$6 = Object.getOwnPropertySymbols; var __getProtoOf$1 = Object.getPrototypeOf; -var __hasOwnProp$4 = Object.prototype.hasOwnProperty; -var __propIsEnum$4 = Object.prototype.propertyIsEnumerable; +var __hasOwnProp$6 = Object.prototype.hasOwnProperty; +var __propIsEnum$6 = Object.prototype.propertyIsEnumerable; var __reflectGet$1 = Reflect.get; -var __defNormalProp$4 = (obj, key, value) => key in obj ? 
__defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$4 = (a, b) => { +var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$6 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$4.call(b, prop)) - __defNormalProp$4(a, prop, b[prop]); - if (__getOwnPropSymbols$4) - for (var prop of __getOwnPropSymbols$4(b)) { - if (__propIsEnum$4.call(b, prop)) - __defNormalProp$4(a, prop, b[prop]); + if (__hasOwnProp$6.call(b, prop)) + __defNormalProp$6(a, prop, b[prop]); + if (__getOwnPropSymbols$6) + for (var prop of __getOwnPropSymbols$6(b)) { + if (__propIsEnum$6.call(b, prop)) + __defNormalProp$6(a, prop, b[prop]); } return a; }; -var __spreadProps$4 = (a, b) => __defProps$4(a, __getOwnPropDescs$4(b)); +var __spreadProps$5 = (a, b) => __defProps$5(a, __getOwnPropDescs$5(b)); var __superGet$1 = (cls, obj, key) => __reflectGet$1(__getProtoOf$1(cls), key, obj); -var __async$d = (__this, __arguments, generator) => { +var __async$e = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -162333,7 +162362,7 @@ function getHttpAgent({ } } function fetchData(_0) { - return __async$d(this, arguments, function* (url, options = {}) { + return __async$e(this, arguments, function* (url, options = {}) { var _a, _b, _c; const MAX_RETRY = (_a = options.maxRetry) != null ? _a : 3; const RETRY_ON = (_b = options.retryOn) != null ? 
_b : 500; @@ -162425,7 +162454,7 @@ function fetchData(_0) { throw errorObject; }); } -const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$d(void 0, null, function* () { +const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$e(void 0, null, function* () { let signal; if (_signal) { const controller = new AbortController(); @@ -162434,7 +162463,7 @@ const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$d(void 0, nu controller.abort(); }); } - const init = __spreadProps$4(__spreadValues$4({}, options), { + const init = __spreadProps$5(__spreadValues$6({}, options), { method: req.method || "POST", headers: req.headers, body: req.body || void 0, @@ -162459,7 +162488,7 @@ const oracleMapper = /* @__PURE__ */ new Map(); const multicallMapper = /* @__PURE__ */ new Map(); function getGasOraclePlugin(networkKey, fetchOptions) { const gasStationApi = (fetchOptions == null ? void 0 : fetchOptions.gasStationApi) || "https://gasstation.polygon.technology/v2"; - return new FetchUrlFeeDataNetworkPlugin(gasStationApi, (fetchFeeData, provider, request) => __async$d(this, null, function* () { + return new FetchUrlFeeDataNetworkPlugin(gasStationApi, (fetchFeeData, provider, request) => __async$e(this, null, function* () { if (!oracleMapper.has(networkKey)) { oracleMapper.set(networkKey, GasPriceOracle__factory.connect(fetchOptions == null ? void 0 : fetchOptions.gasPriceOracle, provider)); } @@ -162541,7 +162570,7 @@ function getProviderWithNetId(netId, rpcUrl, config, fetchOptions) { provider.pollingInterval = (fetchOptions == null ? 
void 0 : fetchOptions.pollingInterval) || pollInterval * 1e3; return provider; } -const populateTransaction = (signer, tx) => __async$d(void 0, null, function* () { +const populateTransaction = (signer, tx) => __async$e(void 0, null, function* () { const provider = signer.provider; if (!tx.from) { tx.from = signer.address; @@ -162550,7 +162579,7 @@ const populateTransaction = (signer, tx) => __async$d(void 0, null, function* () throw new Error(errMsg); } const [feeData, nonce] = yield Promise.all([ - (() => __async$d(void 0, null, function* () { + (() => __async$e(void 0, null, function* () { if (tx.maxFeePerGas && tx.maxPriorityFeePerGas) { return new FeeData(null, BigInt(tx.maxFeePerGas), BigInt(tx.maxPriorityFeePerGas)); } @@ -162572,7 +162601,7 @@ const populateTransaction = (signer, tx) => __async$d(void 0, null, function* () ); } }))(), - (() => __async$d(void 0, null, function* () { + (() => __async$e(void 0, null, function* () { if (tx.nonce) { return tx.nonce; } @@ -162602,7 +162631,7 @@ const populateTransaction = (signer, tx) => __async$d(void 0, null, function* () delete tx.maxFeePerGas; delete tx.maxPriorityFeePerGas; } - tx.gasLimit = tx.gasLimit || (yield (() => __async$d(void 0, null, function* () { + tx.gasLimit = tx.gasLimit || (yield (() => __async$e(void 0, null, function* () { try { const gasLimit = yield provider.estimateGas(tx); return gasLimit === BigInt(21e3) ? 
gasLimit : gasLimit * (BigInt(1e4) + BigInt(signer.gasLimitBump)) / BigInt(1e4); @@ -162630,7 +162659,7 @@ class TornadoWallet extends Wallet { return new TornadoWallet(privateKey, provider, options); } populateTransaction(tx) { - return __async$d(this, null, function* () { + return __async$e(this, null, function* () { const txObject = yield populateTransaction(this, tx); this.nonce = txObject.nonce; return __superGet$1(TornadoWallet.prototype, this, "populateTransaction").call(this, txObject); @@ -162646,7 +162675,7 @@ class TornadoVoidSigner extends VoidSigner { this.bumpNonce = bumpNonce != null ? bumpNonce : false; } populateTransaction(tx) { - return __async$d(this, null, function* () { + return __async$e(this, null, function* () { const txObject = yield populateTransaction(this, tx); this.nonce = txObject.nonce; return __superGet$1(TornadoVoidSigner.prototype, this, "populateTransaction").call(this, txObject); @@ -162757,6 +162786,22 @@ const GET_NOTE_ACCOUNTS = ` } } `; +const GET_ECHO_EVENTS = ` + query getNoteAccounts($first: Int, $fromBlock: Int) { + noteAccounts(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + id + blockNumber + address + encryptedAccount + } + _meta { + block { + number + } + hasIndexingErrors + } + } +`; const GET_ENCRYPTED_NOTES = ` query getEncryptedNotes($first: Int, $fromBlock: Int) { encryptedNotes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { @@ -162773,27 +162818,80 @@ const GET_ENCRYPTED_NOTES = ` } } `; +const GET_GOVERNANCE_EVENTS = ` + query getGovernanceEvents($first: Int, $fromBlock: Int) { + proposals(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + proposalId + proposer + target + startTime + endTime + description + } + votes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) 
{ + blockNumber + logIndex + transactionHash + proposalId + voter + support + votes + from + input + } + delegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + account + delegateTo + } + undelegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + account + delegateFrom + } + _meta { + block { + number + } + hasIndexingErrors + } + } +`; +const GET_GOVERNANCE_APY = ` + stakeDailyBurns(first: 30, orderBy: date, orderDirection: desc) { + id + date + dailyAmountBurned + } +`; -var __defProp$3 = Object.defineProperty; -var __defProps$3 = Object.defineProperties; -var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols; -var __hasOwnProp$3 = Object.prototype.hasOwnProperty; -var __propIsEnum$3 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$3 = (a, b) => { +var __defProp$5 = Object.defineProperty; +var __defProps$4 = Object.defineProperties; +var __getOwnPropDescs$4 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$5 = Object.getOwnPropertySymbols; +var __hasOwnProp$5 = Object.prototype.hasOwnProperty; +var __propIsEnum$5 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$5 = (obj, key, value) => key in obj ? 
__defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$5 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$3.call(b, prop)) - __defNormalProp$3(a, prop, b[prop]); - if (__getOwnPropSymbols$3) - for (var prop of __getOwnPropSymbols$3(b)) { - if (__propIsEnum$3.call(b, prop)) - __defNormalProp$3(a, prop, b[prop]); + if (__hasOwnProp$5.call(b, prop)) + __defNormalProp$5(a, prop, b[prop]); + if (__getOwnPropSymbols$5) + for (var prop of __getOwnPropSymbols$5(b)) { + if (__propIsEnum$5.call(b, prop)) + __defNormalProp$5(a, prop, b[prop]); } return a; }; -var __spreadProps$3 = (a, b) => __defProps$3(a, __getOwnPropDescs$3(b)); -var __async$c = (__this, __arguments, generator) => { +var __spreadProps$4 = (a, b) => __defProps$4(a, __getOwnPropDescs$4(b)); +var __async$d = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -162816,7 +162914,7 @@ var __async$c = (__this, __arguments, generator) => { const isEmptyArray = (arr) => !Array.isArray(arr) || !arr.length; const first = 1e3; function queryGraph(_0) { - return __async$c(this, arguments, function* ({ + return __async$d(this, arguments, function* ({ graphApi, subgraphName, query, @@ -162825,7 +162923,7 @@ function queryGraph(_0) { }) { var _a; const graphUrl = `${graphApi}/subgraphs/name/${subgraphName}`; - const { data, errors } = yield fetchData(graphUrl, __spreadProps$3(__spreadValues$3({}, fetchDataOptions2), { + const { data, errors } = yield fetchData(graphUrl, __spreadProps$4(__spreadValues$5({}, fetchDataOptions2), { method: "POST", headers: { "Content-Type": "application/json" @@ -162845,7 +162943,7 @@ function queryGraph(_0) { }); } function getStatistic(_0) { - return __async$c(this, arguments, function* ({ + return __async$d(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -162892,7 +162990,7 @@ function getStatistic(_0) { }); } function 
getMeta(_0) { - return __async$c(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) { + return __async$d(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) { try { const { _meta: { @@ -162937,7 +163035,7 @@ function getRegisters({ }); } function getAllRegisters(_0) { - return __async$c(this, arguments, function* ({ + return __async$d(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -163026,7 +163124,7 @@ function getDeposits({ }); } function getAllDeposits(_0) { - return __async$c(this, arguments, function* ({ + return __async$d(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -163123,7 +163221,7 @@ function getWithdrawals({ }); } function getAllWithdrawals(_0) { - return __async$c(this, arguments, function* ({ + return __async$d(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -163199,7 +163297,7 @@ function getAllWithdrawals(_0) { }); } function getNoteAccounts(_0) { - return __async$c(this, arguments, function* ({ + return __async$d(this, arguments, function* ({ graphApi, subgraphName, address, @@ -163216,7 +163314,7 @@ function getNoteAccounts(_0) { subgraphName, query: GET_NOTE_ACCOUNTS, variables: { - address + address: address.toLowerCase() }, fetchDataOptions: fetchDataOptions2 }); @@ -163234,6 +163332,95 @@ function getNoteAccounts(_0) { } }); } +function getGraphEchoEvents({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2 +}) { + return queryGraph({ + graphApi, + subgraphName, + query: GET_ECHO_EVENTS, + variables: { + first, + fromBlock + }, + fetchDataOptions: fetchDataOptions2 + }); +} +function getAllGraphEchoEvents(_0) { + return __async$d(this, arguments, function* ({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2, + onProgress + }) { + try { + const events = []; + let lastSyncBlock = fromBlock; + while (true) { + let { + noteAccounts: result2, + 
_meta: { + // eslint-disable-next-line prefer-const + block: { number: currentBlock } + } + } = yield getGraphEchoEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions: fetchDataOptions2 }); + lastSyncBlock = currentBlock; + if (isEmptyArray(result2)) { + break; + } + const [firstEvent] = result2; + const [lastEvent2] = result2.slice(-1); + if (typeof onProgress === "function") { + onProgress({ + type: "EchoEvents", + fromBlock: Number(firstEvent.blockNumber), + toBlock: Number(lastEvent2.blockNumber), + count: result2.length + }); + } + if (result2.length < 900) { + events.push(...result2); + break; + } + result2 = result2.filter(({ blockNumber }) => blockNumber !== lastEvent2.blockNumber); + fromBlock = Number(lastEvent2.blockNumber); + events.push(...result2); + } + if (!events.length) { + return { + events: [], + lastSyncBlock + }; + } + const result = events.map((e) => { + const [transactionHash, logIndex] = e.id.split("-"); + return { + blockNumber: Number(e.blockNumber), + logIndex: Number(logIndex), + transactionHash, + address: getAddress(e.address), + encryptedAccount: e.encryptedAccount + }; + }); + const [lastEvent] = result.slice(-1); + return { + events: result, + lastSyncBlock: lastEvent && lastEvent.blockNumber >= lastSyncBlock ? 
lastEvent.blockNumber + 1 : lastSyncBlock + }; + } catch (err) { + console.log("Error from getAllGraphEchoEvents query"); + console.log(err); + return { + events: [], + lastSyncBlock: fromBlock + }; + } + }); +} function getEncryptedNotes({ graphApi, subgraphName, @@ -163252,7 +163439,7 @@ function getEncryptedNotes({ }); } function getAllEncryptedNotes(_0) { - return __async$c(this, arguments, function* ({ + return __async$d(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -163319,11 +163506,160 @@ function getAllEncryptedNotes(_0) { } }); } +function getGovernanceEvents({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2 +}) { + return queryGraph({ + graphApi, + subgraphName, + query: GET_GOVERNANCE_EVENTS, + variables: { + first, + fromBlock + }, + fetchDataOptions: fetchDataOptions2 + }); +} +function getAllGovernanceEvents(_0) { + return __async$d(this, arguments, function* ({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2, + onProgress + }) { + try { + const result = []; + let lastSyncBlock = fromBlock; + while (true) { + const { + proposals, + votes, + delegates, + undelegates, + _meta: { + block: { number: currentBlock } + } + } = yield getGovernanceEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions: fetchDataOptions2 }); + lastSyncBlock = currentBlock; + const eventsLength = proposals.length + votes.length + delegates.length + undelegates.length; + if (eventsLength === 0) { + break; + } + const formattedProposals = proposals.map( + ({ blockNumber, logIndex, transactionHash, proposalId, proposer, target, startTime, endTime, description }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "ProposalCreated", + id: Number(proposalId), + proposer: getAddress(proposer), + target: getAddress(target), + startTime: Number(startTime), + endTime: Number(endTime), + description + }; + } + ); + const formattedVotes = 
votes.map( + ({ blockNumber, logIndex, transactionHash, proposalId, voter, support, votes: votes2, from, input }) => { + if (!input || input.length > 2048) { + input = ""; + } + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Voted", + proposalId: Number(proposalId), + voter: getAddress(voter), + support, + votes: votes2, + from: getAddress(from), + input + }; + } + ); + const formattedDelegates = delegates.map( + ({ blockNumber, logIndex, transactionHash, account, delegateTo }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Delegated", + account: getAddress(account), + delegateTo: getAddress(delegateTo) + }; + } + ); + const formattedUndelegates = undelegates.map( + ({ blockNumber, logIndex, transactionHash, account, delegateFrom }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Undelegated", + account: getAddress(account), + delegateFrom: getAddress(delegateFrom) + }; + } + ); + let formattedEvents = [ + ...formattedProposals, + ...formattedVotes, + ...formattedDelegates, + ...formattedUndelegates + ].sort((a, b) => { + if (a.blockNumber === b.blockNumber) { + return a.logIndex - b.logIndex; + } + return a.blockNumber - b.blockNumber; + }); + if (eventsLength < 900) { + result.push(...formattedEvents); + break; + } + const [firstEvent] = formattedEvents; + const [lastEvent2] = formattedEvents.slice(-1); + if (typeof onProgress === "function") { + onProgress({ + type: "Governance Events", + fromBlock: Number(firstEvent.blockNumber), + toBlock: Number(lastEvent2.blockNumber), + count: eventsLength + }); + } + formattedEvents = formattedEvents.filter(({ blockNumber }) => blockNumber !== lastEvent2.blockNumber); + fromBlock = Number(lastEvent2.blockNumber); + result.push(...formattedEvents); + } + const [lastEvent] = result.slice(-1); + return { + events: result, + lastSyncBlock: 
lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock + }; + } catch (err) { + console.log("Error from getAllGovernance query"); + console.log(err); + return { + events: [], + lastSyncBlock: fromBlock + }; + } + }); +} var graph = /*#__PURE__*/Object.freeze({ __proto__: null, GET_DEPOSITS: GET_DEPOSITS, + GET_ECHO_EVENTS: GET_ECHO_EVENTS, GET_ENCRYPTED_NOTES: GET_ENCRYPTED_NOTES, + GET_GOVERNANCE_APY: GET_GOVERNANCE_APY, + GET_GOVERNANCE_EVENTS: GET_GOVERNANCE_EVENTS, GET_NOTE_ACCOUNTS: GET_NOTE_ACCOUNTS, GET_REGISTERED: GET_REGISTERED, GET_STATISTIC: GET_STATISTIC, @@ -163331,10 +163667,14 @@ var graph = /*#__PURE__*/Object.freeze({ _META: _META, getAllDeposits: getAllDeposits, getAllEncryptedNotes: getAllEncryptedNotes, + getAllGovernanceEvents: getAllGovernanceEvents, + getAllGraphEchoEvents: getAllGraphEchoEvents, getAllRegisters: getAllRegisters, getAllWithdrawals: getAllWithdrawals, getDeposits: getDeposits, getEncryptedNotes: getEncryptedNotes, + getGovernanceEvents: getGovernanceEvents, + getGraphEchoEvents: getGraphEchoEvents, getMeta: getMeta, getNoteAccounts: getNoteAccounts, getRegisters: getRegisters, @@ -163343,7 +163683,7 @@ var graph = /*#__PURE__*/Object.freeze({ queryGraph: queryGraph }); -var __async$b = (__this, __arguments, generator) => { +var __async$c = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -163382,7 +163722,7 @@ class BatchBlockService { this.retryOn = retryOn; } getBlock(blockTag) { - return __async$b(this, null, function* () { + return __async$c(this, null, function* () { const blockObject = yield this.provider.getBlock(blockTag); if (!blockObject) { const errMsg = `No block for ${blockTag}`; @@ -163392,9 +163732,9 @@ class BatchBlockService { }); } createBatchRequest(batchArray) { - return batchArray.map((blocks, index) => __async$b(this, null, function* () { + return batchArray.map((blocks, index) => __async$c(this, 
null, function* () { yield sleep$1(20 * index); - return (() => __async$b(this, null, function* () { + return (() => __async$c(this, null, function* () { let retries = 0; let err; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -163411,7 +163751,7 @@ class BatchBlockService { })); } getBatchBlocks(blocks) { - return __async$b(this, null, function* () { + return __async$c(this, null, function* () { let blockCount = 0; const results = []; for (const chunks of chunk(blocks, this.concurrencySize * this.batchSize)) { @@ -163449,7 +163789,7 @@ class BatchTransactionService { this.retryOn = retryOn; } getTransaction(txHash) { - return __async$b(this, null, function* () { + return __async$c(this, null, function* () { const txObject = yield this.provider.getTransaction(txHash); if (!txObject) { const errMsg = `No transaction for ${txHash}`; @@ -163459,9 +163799,9 @@ class BatchTransactionService { }); } createBatchRequest(batchArray) { - return batchArray.map((txs, index) => __async$b(this, null, function* () { + return batchArray.map((txs, index) => __async$c(this, null, function* () { yield sleep$1(20 * index); - return (() => __async$b(this, null, function* () { + return (() => __async$c(this, null, function* () { let retries = 0; let err; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -163478,7 +163818,7 @@ class BatchTransactionService { })); } getBatchTransactions(txs) { - return __async$b(this, null, function* () { + return __async$c(this, null, function* () { let txCount = 0; const results = []; for (const chunks of chunk(txs, this.concurrencySize * this.batchSize)) { @@ -163514,7 +163854,7 @@ class BatchEventsService { this.retryOn = retryOn; } getPastEvents(_0) { - return __async$b(this, arguments, function* ({ fromBlock, toBlock, type }) { + return __async$c(this, arguments, function* ({ fromBlock, toBlock, type }) { let err; let retries = 0; while (!this.shouldRetry && 
retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -163534,13 +163874,13 @@ class BatchEventsService { }); } createBatchRequest(batchArray) { - return batchArray.map((event, index) => __async$b(this, null, function* () { + return batchArray.map((event, index) => __async$c(this, null, function* () { yield sleep$1(20 * index); return this.getPastEvents(event); })); } getBatchEvents(_0) { - return __async$b(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) { + return __async$c(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) { if (!toBlock) { toBlock = yield this.provider.getBlockNumber(); } @@ -163571,29 +163911,29 @@ class BatchEventsService { } } -var __defProp$2 = Object.defineProperty; -var __defProps$2 = Object.defineProperties; -var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols; +var __defProp$4 = Object.defineProperty; +var __defProps$3 = Object.defineProperties; +var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols; var __getProtoOf = Object.getPrototypeOf; -var __hasOwnProp$2 = Object.prototype.hasOwnProperty; -var __propIsEnum$2 = Object.prototype.propertyIsEnumerable; +var __hasOwnProp$4 = Object.prototype.hasOwnProperty; +var __propIsEnum$4 = Object.prototype.propertyIsEnumerable; var __reflectGet = Reflect.get; -var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$2 = (a, b) => { +var __defNormalProp$4 = (obj, key, value) => key in obj ? 
__defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$4 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$2.call(b, prop)) - __defNormalProp$2(a, prop, b[prop]); - if (__getOwnPropSymbols$2) - for (var prop of __getOwnPropSymbols$2(b)) { - if (__propIsEnum$2.call(b, prop)) - __defNormalProp$2(a, prop, b[prop]); + if (__hasOwnProp$4.call(b, prop)) + __defNormalProp$4(a, prop, b[prop]); + if (__getOwnPropSymbols$4) + for (var prop of __getOwnPropSymbols$4(b)) { + if (__propIsEnum$4.call(b, prop)) + __defNormalProp$4(a, prop, b[prop]); } return a; }; -var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b)); +var __spreadProps$3 = (a, b) => __defProps$3(a, __getOwnPropDescs$3(b)); var __superGet = (cls, obj, key) => __reflectGet(__getProtoOf(cls), key, obj); -var __async$a = (__this, __arguments, generator) => { +var __async$b = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -163667,7 +164007,7 @@ class BaseEventsService { } /* eslint-enable @typescript-eslint/no-unused-vars */ formatEvents(events) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { return yield new Promise((resolve) => resolve(events)); }); } @@ -163675,7 +164015,7 @@ class BaseEventsService { * Get saved or cached events */ getEventsFromDB() { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { return { events: [], lastBlock: null @@ -163683,7 +164023,7 @@ class BaseEventsService { }); } getEventsFromCache() { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { return { events: [], lastBlock: null @@ -163691,7 +164031,7 @@ class BaseEventsService { }); } getSavedEvents() { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { let cachedEvents = yield 
this.getEventsFromDB(); if (!cachedEvents || !cachedEvents.events.length) { cachedEvents = yield this.getEventsFromCache(); @@ -163703,7 +164043,7 @@ class BaseEventsService { * Get latest events */ getEventsFromGraph(_0) { - return __async$a(this, arguments, function* ({ + return __async$b(this, arguments, function* ({ fromBlock, methodName = "" }) { @@ -163713,7 +164053,7 @@ class BaseEventsService { lastBlock: fromBlock }; } - const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$2({ + const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$4({ fromBlock }, this.getGraphParams())); return { @@ -163723,7 +164063,7 @@ class BaseEventsService { }); } getEventsFromRpc(_0) { - return __async$a(this, arguments, function* ({ + return __async$b(this, arguments, function* ({ fromBlock, toBlock }) { @@ -163761,7 +164101,7 @@ class BaseEventsService { }); } getLatestEvents(_0) { - return __async$a(this, arguments, function* ({ fromBlock }) { + return __async$b(this, arguments, function* ({ fromBlock }) { const allEvents = []; const graphEvents = yield this.getEventsFromGraph({ fromBlock }); const lastSyncBlock = graphEvents.lastBlock && graphEvents.lastBlock >= fromBlock ? 
graphEvents.lastBlock : fromBlock; @@ -163783,14 +164123,14 @@ class BaseEventsService { */ // eslint-disable-next-line @typescript-eslint/no-unused-vars saveEvents(_0) { - return __async$a(this, arguments, function* ({ events, lastBlock }) { + return __async$b(this, arguments, function* ({ events, lastBlock }) { }); } /** * Trigger saving and receiving latest events */ updateEvents() { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { const savedEvents = yield this.getSavedEvents(); let fromBlock = this.deployedBlock; if (savedEvents && savedEvents.lastBlock) { @@ -163864,7 +164204,7 @@ class BaseDepositsService extends BaseEventsService { }; } formatEvents(events) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { const type = this.getType().toLowerCase(); if (type === DEPOSIT) { const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { @@ -163883,7 +164223,7 @@ class BaseDepositsService extends BaseEventsService { ]); return formattedEvents.map((event) => { const { from } = txs.find(({ hash }) => hash === event.transactionHash); - return __spreadProps$2(__spreadValues$2({}, event), { + return __spreadProps$3(__spreadValues$4({}, event), { from }); }); @@ -163904,7 +164244,7 @@ class BaseDepositsService extends BaseEventsService { ]); return formattedEvents.map((event) => { const { timestamp } = blocks.find(({ number }) => number === event.blockNumber); - return __spreadProps$2(__spreadValues$2({}, event), { + return __spreadProps$3(__spreadValues$4({}, event), { timestamp }); }); @@ -163921,6 +164261,57 @@ class BaseDepositsService extends BaseEventsService { } } } +class BaseEchoService extends BaseEventsService { + constructor({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions: fetchDataOptions2 + }) { + super({ netId, provider, graphApi, subgraphName, contract: Echoer, deployedBlock, 
fetchDataOptions: fetchDataOptions2 }); + } + getInstanceName() { + return `echo_${this.netId}`; + } + getType() { + return "Echo"; + } + getGraphMethod() { + return "getAllGraphEchoEvents"; + } + formatEvents(events) { + return __async$b(this, null, function* () { + return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { + const { who, data } = args; + if (who && data) { + const eventObjects = { + blockNumber, + logIndex, + transactionHash + }; + return __spreadProps$3(__spreadValues$4({}, eventObjects), { + address: who, + encryptedAccount: data + }); + } + }).filter((e) => e); + }); + } + getEventsFromGraph(_0) { + return __async$b(this, arguments, function* ({ fromBlock }) { + if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { + return { + events: [], + lastBlock: fromBlock + }; + } + return __superGet(BaseEchoService.prototype, this, "getEventsFromGraph").call(this, { fromBlock }); + }); + } +} class BaseEncryptedNotesService extends BaseEventsService { constructor({ netId, @@ -163943,7 +164334,7 @@ class BaseEncryptedNotesService extends BaseEventsService { return "getAllEncryptedNotes"; } formatEvents(events) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const { encryptedNote } = args; if (encryptedNote) { @@ -163952,7 +164343,7 @@ class BaseEncryptedNotesService extends BaseEventsService { logIndex, transactionHash }; - return __spreadProps$2(__spreadValues$2({}, eventObjects), { + return __spreadProps$3(__spreadValues$4({}, eventObjects), { encryptedNote }); } @@ -163983,11 +164374,15 @@ class BaseGovernanceService extends BaseEventsService { return "*"; } getGraphMethod() { - return "governanceEvents"; + return "getAllGovernanceEvents"; } formatEvents(events) { - return __async$a(this, null, function* () { - const formattedEvents = events.map(({ blockNumber, index: logIndex, 
transactionHash, args, eventName: event }) => { + return __async$b(this, null, function* () { + const proposalEvents = []; + const votedEvents = []; + const delegatedEvents = []; + const undelegatedEvents = []; + events.forEach(({ blockNumber, index: logIndex, transactionHash, args, eventName: event }) => { const eventObjects = { blockNumber, logIndex, @@ -163996,60 +164391,61 @@ class BaseGovernanceService extends BaseEventsService { }; if (event === "ProposalCreated") { const { id, proposer, target, startTime, endTime, description } = args; - return __spreadProps$2(__spreadValues$2({}, eventObjects), { - id, + proposalEvents.push(__spreadProps$3(__spreadValues$4({}, eventObjects), { + id: Number(id), proposer, target, - startTime, - endTime, + startTime: Number(startTime), + endTime: Number(endTime), description - }); + })); } if (event === "Voted") { const { proposalId, voter, support, votes } = args; - return __spreadProps$2(__spreadValues$2({}, eventObjects), { - proposalId, + votedEvents.push(__spreadProps$3(__spreadValues$4({}, eventObjects), { + proposalId: Number(proposalId), voter, support, - votes - }); + votes, + from: "", + input: "" + })); } if (event === "Delegated") { const { account, to: delegateTo } = args; - return __spreadProps$2(__spreadValues$2({}, eventObjects), { + delegatedEvents.push(__spreadProps$3(__spreadValues$4({}, eventObjects), { account, delegateTo - }); + })); } if (event === "Undelegated") { const { account, from: delegateFrom } = args; - return __spreadProps$2(__spreadValues$2({}, eventObjects), { + undelegatedEvents.push(__spreadProps$3(__spreadValues$4({}, eventObjects), { account, delegateFrom - }); + })); } - }).filter((e) => e); - const votedEvents = formattedEvents.map((event, index) => __spreadProps$2(__spreadValues$2({}, event), { index })).filter(({ event }) => event === "Voted"); + }); if (votedEvents.length) { this.updateTransactionProgress({ percentage: 0 }); const txs = yield 
this.batchTransactionService.getBatchTransactions([ ...new Set(votedEvents.map(({ transactionHash }) => transactionHash)) ]); - votedEvents.forEach((event) => { + votedEvents.forEach((event, index) => { let { data: input, from } = txs.find((t) => t.hash === event.transactionHash); if (!input || input.length > 2048) { input = ""; } - formattedEvents[event.index].from = from; - formattedEvents[event.index].input = input; + votedEvents[index].from = from; + votedEvents[index].input = input; }); } - return formattedEvents; + return [...proposalEvents, ...votedEvents, ...delegatedEvents, ...undelegatedEvents]; }); } getEventsFromGraph(_0) { - return __async$a(this, arguments, function* ({ fromBlock }) { - if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { + return __async$b(this, arguments, function* ({ fromBlock }) { + if (!this.graphApi || !this.subgraphName || this.graphApi.includes("api.thegraph.com")) { return { events: [], lastBlock: fromBlock @@ -164083,14 +164479,14 @@ class BaseRegistryService extends BaseEventsService { return "getAllRegisters"; } formatEvents(events) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const eventObjects = { blockNumber, logIndex, transactionHash }; - return __spreadProps$2(__spreadValues$2({}, eventObjects), { + return __spreadProps$3(__spreadValues$4({}, eventObjects), { ensName: args.ensName, relayerAddress: args.relayerAddress }); @@ -164098,7 +164494,7 @@ class BaseRegistryService extends BaseEventsService { }); } fetchRelayers() { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { return (yield this.updateEvents()).events; }); } @@ -164253,7 +164649,7 @@ var flm = /*#__PURE__*/ hMap(flt, 9, 0), flrm = /*#__PURE__*/ hMap(flt, 9, 1); // fixed distance map var fdm = /*#__PURE__*/ hMap(fdt, 5, 0), fdrm = /*#__PURE__*/ hMap(fdt, 5, 1); // find max 
of array -var max = function (a) { +var max$1 = function (a) { var m = a[0]; for (var i = 1; i < a.length; ++i) { if (a[i] > m) @@ -164380,7 +164776,7 @@ var inflt = function (dat, st, buf, dict) { } pos += hcLen * 3; // code lengths bits - var clb = max(clt), clbmsk = (1 << clb) - 1; + var clb = max$1(clt), clbmsk = (1 << clb) - 1; // code lengths map var clm = hMap(clt, clb, 1); for (var i = 0; i < tl;) { @@ -164409,9 +164805,9 @@ var inflt = function (dat, st, buf, dict) { // length tree distance tree var lt = ldt.subarray(0, hLit), dt = ldt.subarray(hLit); // max length bits - lbt = max(lt); + lbt = max$1(lt); // max dist bits - dbt = max(dt); + dbt = max$1(dt); lm = hMap(lt, lbt, 1); dm = hMap(dt, dbt, 1); } @@ -164954,7 +165350,7 @@ var wrkr = function (fns, init, id, cb) { return wk(ch[id].c + ';onmessage=function(e){for(var k in e.data)self[k]=e.data[k];onmessage=' + init.toString() + '}', id, td, cbfs(td), cb); }; // base async inflate fn -var bInflt = function () { return [u8, u16, i32, fleb, fdeb, clim, fl, fd, flrm, fdrm, rev, ec, hMap, max, bits$3, bits16, shft, slc, err, inflt, inflateSync, pbf, gopt]; }; +var bInflt = function () { return [u8, u16, i32, fleb, fdeb, clim, fl, fd, flrm, fdrm, rev, ec, hMap, max$1, bits$3, bits16, shft, slc, err, inflt, inflateSync, pbf, gopt]; }; var bDflt = function () { return [u8, u16, i32, fleb, fdeb, clim, revfl, revfd, flm, flt, fdm, fdt, rev, deo, et, hMap, wbits, wbits16, hTree, ln, lc, clen, wfblk, wblk, shft, slc, dflt, dopt, deflateSync, pbf]; }; // post buf var pbf = function (msg) { return postMessage(msg, [msg.buffer]); }; @@ -165382,7 +165778,7 @@ function unzip(data, opts, cb) { return tAll; } -var __async$9 = (__this, __arguments, generator) => { +var __async$a = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -165403,7 +165799,7 @@ var __async$9 = (__this, __arguments, generator) => { }); }; function existsAsync(fileOrDir) { - 
return __async$9(this, null, function* () { + return __async$a(this, null, function* () { try { yield promises.stat(fileOrDir); return true; @@ -165434,27 +165830,26 @@ function unzipAsync(data) { }); }); } -function saveEvents(_0) { - return __async$9(this, arguments, function* ({ - name, +function saveUserFile(_0) { + return __async$a(this, arguments, function* ({ + fileName, userDirectory, - events + dataString }) { - const fileName = `${name}.json`.toLowerCase(); + fileName = fileName.toLowerCase(); const filePath = path$3.join(userDirectory, fileName); - const stringEvents = JSON.stringify(events, null, 2) + "\n"; const payload = yield zipAsync({ - [fileName]: new TextEncoder().encode(stringEvents) + [fileName]: new TextEncoder().encode(dataString) }); if (!(yield existsAsync(userDirectory))) { yield promises.mkdir(userDirectory, { recursive: true }); } yield promises.writeFile(filePath + ".zip", payload); - yield promises.writeFile(filePath, stringEvents); + yield promises.writeFile(filePath, dataString); }); } function loadSavedEvents(_0) { - return __async$9(this, arguments, function* ({ + return __async$a(this, arguments, function* ({ name, userDirectory, deployedBlock @@ -165483,7 +165878,7 @@ function loadSavedEvents(_0) { }); } function download(_0) { - return __async$9(this, arguments, function* ({ name, cacheDirectory }) { + return __async$a(this, arguments, function* ({ name, cacheDirectory }) { const fileName = `${name}.json`.toLowerCase(); const zipName = `${fileName}.zip`; const zipPath = path$3.join(cacheDirectory, zipName); @@ -165493,7 +165888,7 @@ function download(_0) { }); } function loadCachedEvents(_0) { - return __async$9(this, arguments, function* ({ + return __async$a(this, arguments, function* ({ name, cacheDirectory, deployedBlock @@ -165523,7 +165918,7 @@ function loadCachedEvents(_0) { }); } -var __async$8 = (__this, __arguments, generator) => { +var __async$9 = (__this, __arguments, generator) => { return new Promise((resolve, 
reject) => { var fulfilled = (value) => { try { @@ -165608,7 +166003,7 @@ class NodeDepositsService extends BaseDepositsService { } } getEventsFromDB() { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { if (!this.userDirectory) { console.log( "Updating events for", @@ -165639,7 +166034,7 @@ class NodeDepositsService extends BaseDepositsService { }); } getEventsFromCache() { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -165661,7 +166056,7 @@ class NodeDepositsService extends BaseDepositsService { }); } saveEvents(_0) { - return __async$8(this, arguments, function* ({ events, lastBlock }) { + return __async$9(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -165682,10 +166077,136 @@ class NodeDepositsService extends BaseDepositsService { ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" + }); + } + }); + } +} +class NodeEchoService extends BaseEchoService { + constructor({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions, + cacheDirectory, + userDirectory + }) { + super({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions + }); + this.cacheDirectory = cacheDirectory; + this.userDirectory = userDirectory; + } + updateEventProgress({ type, fromBlock, toBlock, count }) { + if (toBlock) { + console.log(`fromBlock - ${fromBlock}`); + console.log(`toBlock - ${toBlock}`); + if (count) { + console.log(`downloaded ${type} events count 
- ${count}`); + console.log("____________________________________________"); + console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock} +`); + } + } + } + updateGraphProgress({ type, fromBlock, toBlock, count }) { + if (toBlock) { + console.log(`fromBlock - ${fromBlock}`); + console.log(`toBlock - ${toBlock}`); + if (count) { + console.log(`downloaded ${type} events from graph node count - ${count}`); + console.log("____________________________________________"); + console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock} +`); + } + } + } + getEventsFromDB() { + return __async$9(this, null, function* () { + if (!this.userDirectory) { + console.log(`Updating events for ${this.netId} chain echo events +`); + console.log(`savedEvents count - ${0}`); + console.log(`savedEvents lastBlock - ${this.deployedBlock} +`); + return { + events: [], + lastBlock: this.deployedBlock + }; + } + const savedEvents = yield loadSavedEvents({ + name: this.getInstanceName(), + userDirectory: this.userDirectory, + deployedBlock: this.deployedBlock + }); + console.log(`Updating events for ${this.netId} chain echo events +`); + console.log(`savedEvents count - ${savedEvents.events.length}`); + console.log(`savedEvents lastBlock - ${savedEvents.lastBlock} +`); + return savedEvents; + }); + } + getEventsFromCache() { + return __async$9(this, null, function* () { + if (!this.cacheDirectory) { + console.log(`cachedEvents count - ${0}`); + console.log(`cachedEvents lastBlock - ${this.deployedBlock} +`); + return { + events: [], + lastBlock: this.deployedBlock + }; + } + const cachedEvents = yield loadCachedEvents({ + name: this.getInstanceName(), + cacheDirectory: this.cacheDirectory, + deployedBlock: this.deployedBlock + }); + console.log(`cachedEvents count - ${cachedEvents.events.length}`); + console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock} +`); + return cachedEvents; + }); + } + saveEvents(_0) { + return __async$9(this, arguments, function* ({ 
events, lastBlock }) { + const instanceName = this.getInstanceName(); + console.log("\ntotalEvents count - ", events.length); + console.log( + `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock} +` + ); + const eventTable = new Table(); + eventTable.push( + [{ colSpan: 2, content: "Echo Accounts", hAlign: "center" }], + ["Network", `${this.netId} chain`], + ["Events", `${events.length} events`], + [{ colSpan: 2, content: "Latest events" }], + ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => { + const eventIndex = events.length - index; + return [eventIndex, blockNumber]; + }) + ); + console.log(eventTable.toString() + "\n"); + if (this.userDirectory) { + yield saveUserFile({ + fileName: instanceName + ".json", + userDirectory: this.userDirectory, + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -165740,7 +166261,7 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { } } getEventsFromDB() { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { if (!this.userDirectory) { console.log(`Updating events for ${this.netId} chain encrypted events `); @@ -165766,7 +166287,7 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { }); } getEventsFromCache() { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -165788,7 +166309,7 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { }); } saveEvents(_0) { - return __async$8(this, arguments, function* ({ events, lastBlock }) { + return __async$9(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -165808,10 +166329,10 @@ class NodeEncryptedNotesService extends 
BaseEncryptedNotesService { ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -165871,7 +166392,7 @@ class NodeGovernanceService extends BaseGovernanceService { } } getEventsFromDB() { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { if (!this.userDirectory) { console.log(`Updating events for ${this.netId} chain governance events `); @@ -165897,7 +166418,7 @@ class NodeGovernanceService extends BaseGovernanceService { }); } getEventsFromCache() { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -165919,7 +166440,7 @@ class NodeGovernanceService extends BaseGovernanceService { }); } saveEvents(_0) { - return __async$8(this, arguments, function* ({ events, lastBlock }) { + return __async$9(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -165939,10 +166460,10 @@ class NodeGovernanceService extends BaseGovernanceService { ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -165997,7 +166518,7 @@ class NodeRegistryService extends BaseRegistryService { } } getEventsFromDB() { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { if (!this.userDirectory) { console.log(`Updating events for ${this.netId} chain registry events `); @@ -166023,7 
+166544,7 @@ class NodeRegistryService extends BaseRegistryService { }); } getEventsFromCache() { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -166045,7 +166566,7 @@ class NodeRegistryService extends BaseRegistryService { }); } saveEvents(_0) { - return __async$8(this, arguments, function* ({ events, lastBlock }) { + return __async$9(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -166065,10 +166586,10 @@ class NodeRegistryService extends BaseRegistryService { ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -166079,11 +166600,11 @@ var ajv$1 = {exports: {}}; var core$2 = {}; -var validate = {}; +var validate$1 = {}; var boolSchema = {}; -var errors$1 = {}; +var errors$2 = {}; var codegen = {}; @@ -167426,11 +167947,11 @@ names$1.default = names; keyValues.push([E.propertyName, propertyName]); } -} (errors$1)); +} (errors$2)); Object.defineProperty(boolSchema, "__esModule", { value: true }); boolSchema.boolOrEmptySchema = boolSchema.topBoolOrEmptySchema = void 0; -const errors_1$2 = errors$1; +const errors_1$2 = errors$2; const codegen_1$s = codegen; const names_1$6 = names$1; const boolError = { @@ -167531,7 +168052,7 @@ applicability.shouldUseRule = shouldUseRule; exports.reportTypeError = exports.checkDataTypes = exports.checkDataType = exports.coerceAndCheckDataType = exports.getJSONTypes = exports.getSchemaTypes = exports.DataType = void 0; const rules_1 = rules; const applicability_1 = applicability; - const errors_1 = errors$1; + 
const errors_1 = errors$2; const codegen_1 = codegen; const util_1 = util; var DataType; @@ -167735,7 +168256,7 @@ var defaults = {}; Object.defineProperty(defaults, "__esModule", { value: true }); defaults.assignDefaults = void 0; const codegen_1$r = codegen; -const util_1$p = util; +const util_1$r = util; function assignDefaults(it, ty) { const { properties, items } = it.schema; if (ty === "object" && properties) { @@ -167754,7 +168275,7 @@ function assignDefault(it, prop, defaultValue) { return; const childData = (0, codegen_1$r._) `${data}${(0, codegen_1$r.getProperty)(prop)}`; if (compositeRule) { - (0, util_1$p.checkStrictMode)(it, `default is ignored for: ${childData}`); + (0, util_1$r.checkStrictMode)(it, `default is ignored for: ${childData}`); return; } let condition = (0, codegen_1$r._) `${childData} === undefined`; @@ -167773,7 +168294,7 @@ var code = {}; Object.defineProperty(code, "__esModule", { value: true }); code.validateUnion = code.validateArray = code.usePattern = code.callValidateCode = code.schemaProperties = code.allSchemaProperties = code.noPropertyInData = code.propertyInData = code.isOwnProperty = code.hasPropFunc = code.reportMissingProp = code.checkMissingProp = code.checkReportMissingProp = void 0; const codegen_1$q = codegen; -const util_1$o = util; +const util_1$q = util; const names_1$5 = names$1; const util_2$1 = util; function checkReportMissingProp(cxt, prop) { @@ -167820,7 +168341,7 @@ function allSchemaProperties(schemaMap) { } code.allSchemaProperties = allSchemaProperties; function schemaProperties(it, schemaMap) { - return allSchemaProperties(schemaMap).filter((p) => !(0, util_1$o.alwaysValidSchema)(it, schemaMap[p])); + return allSchemaProperties(schemaMap).filter((p) => !(0, util_1$q.alwaysValidSchema)(it, schemaMap[p])); } code.schemaProperties = schemaProperties; function callValidateCode({ schemaCode, data, it: { gen, topSchemaRef, schemaPath, errorPath }, it }, func, context, passSchema) { @@ -167866,7 +168387,7 @@ 
function validateArray(cxt) { cxt.subschema({ keyword, dataProp: i, - dataPropType: util_1$o.Type.Num, + dataPropType: util_1$q.Type.Num, }, valid); gen.if((0, codegen_1$q.not)(valid), notValid); }); @@ -167878,7 +168399,7 @@ function validateUnion(cxt) { /* istanbul ignore if */ if (!Array.isArray(schema)) throw new Error("ajv implementation error"); - const alwaysValid = schema.some((sch) => (0, util_1$o.alwaysValidSchema)(it, sch)); + const alwaysValid = schema.some((sch) => (0, util_1$q.alwaysValidSchema)(it, sch)); if (alwaysValid && !it.opts.unevaluated) return; const valid = gen.let("valid", false); @@ -167905,7 +168426,7 @@ keyword.validateKeywordUsage = keyword.validSchemaType = keyword.funcKeywordCode const codegen_1$p = codegen; const names_1$4 = names$1; const code_1$9 = code; -const errors_1$1 = errors$1; +const errors_1$1 = errors$2; function macroKeywordCode(cxt, def) { const { gen, keyword, schema, parentSchema, it } = cxt; const macroSchema = def.macro.call(it.self, schema, parentSchema, it); @@ -168028,7 +168549,7 @@ var subschema = {}; Object.defineProperty(subschema, "__esModule", { value: true }); subschema.extendSubschemaMode = subschema.extendSubschemaData = subschema.getSubschema = void 0; const codegen_1$o = codegen; -const util_1$n = util; +const util_1$p = util; function getSubschema(it, { keyword, schemaProp, schema, schemaPath, errSchemaPath, topSchemaRef }) { if (keyword !== undefined && schema !== undefined) { throw new Error('both "keyword" and "schema" passed, only one allowed'); @@ -168044,7 +168565,7 @@ function getSubschema(it, { keyword, schemaProp, schema, schemaPath, errSchemaPa : { schema: sch[schemaProp], schemaPath: (0, codegen_1$o._) `${it.schemaPath}${(0, codegen_1$o.getProperty)(keyword)}${(0, codegen_1$o.getProperty)(schemaProp)}`, - errSchemaPath: `${it.errSchemaPath}/${keyword}/${(0, util_1$n.escapeFragment)(schemaProp)}`, + errSchemaPath: `${it.errSchemaPath}/${keyword}/${(0, util_1$p.escapeFragment)(schemaProp)}`, 
}; } if (schema !== undefined) { @@ -168070,7 +168591,7 @@ function extendSubschemaData(subschema, it, { dataProp, dataPropType: dpType, da const { errorPath, dataPathArr, opts } = it; const nextData = gen.let("data", (0, codegen_1$o._) `${it.data}${(0, codegen_1$o.getProperty)(dataProp)}`, true); dataContextProps(nextData); - subschema.errorPath = (0, codegen_1$o.str) `${errorPath}${(0, util_1$n.getErrorPath)(dataProp, dpType, opts.jsPropertySyntax)}`; + subschema.errorPath = (0, codegen_1$o.str) `${errorPath}${(0, util_1$p.getErrorPath)(dataProp, dpType, opts.jsPropertySyntax)}`; subschema.parentDataProperty = (0, codegen_1$o._) `${dataProp}`; subschema.dataPathArr = [...dataPathArr, subschema.parentDataProperty]; } @@ -168250,7 +168771,7 @@ var jsonSchemaTraverseExports = jsonSchemaTraverse.exports; Object.defineProperty(resolve$1, "__esModule", { value: true }); resolve$1.getSchemaRefs = resolve$1.resolveUrl = resolve$1.normalizeId = resolve$1._getFullPath = resolve$1.getFullPath = resolve$1.inlineRef = void 0; -const util_1$m = util; +const util_1$o = util; const equal$2 = fastDeepEqual; const traverse = jsonSchemaTraverseExports; // TODO refactor to use keyword definitions @@ -168310,7 +168831,7 @@ function countKeys(schema) { if (SIMPLE_INLINED.has(key)) continue; if (typeof schema[key] == "object") { - (0, util_1$m.eachItem)(schema[key], (sch) => (count += countKeys(sch))); + (0, util_1$o.eachItem)(schema[key], (sch) => (count += countKeys(sch))); } if (count === Infinity) return Infinity; @@ -168402,8 +168923,8 @@ function getSchemaRefs(schema, baseId) { } resolve$1.getSchemaRefs = getSchemaRefs; -Object.defineProperty(validate, "__esModule", { value: true }); -validate.getData = validate.KeywordCxt = validate.validateFunctionCode = void 0; +Object.defineProperty(validate$1, "__esModule", { value: true }); +validate$1.getData = validate$1.KeywordCxt = validate$1.validateFunctionCode = void 0; const boolSchema_1 = boolSchema; const dataType_1$1 = dataType; 
const applicability_1 = applicability; @@ -168414,8 +168935,8 @@ const subschema_1 = subschema; const codegen_1$n = codegen; const names_1$3 = names$1; const resolve_1$2 = resolve$1; -const util_1$l = util; -const errors_1 = errors$1; +const util_1$n = util; +const errors_1 = errors$2; // schema compilation - generates validation function, subschemaCode (below) is used for subschemas function validateFunctionCode(it) { if (isSchemaObj(it)) { @@ -168427,7 +168948,7 @@ function validateFunctionCode(it) { } validateFunction(it, () => (0, boolSchema_1.topBoolOrEmptySchema)(it)); } -validate.validateFunctionCode = validateFunctionCode; +validate$1.validateFunctionCode = validateFunctionCode; function validateFunction({ gen, validateName, schema, schemaEnv, opts }, body) { if (opts.code.es5) { gen.func(validateName, (0, codegen_1$n._) `${names_1$3.default.data}, ${names_1$3.default.valCxt}`, schemaEnv.$async, () => { @@ -168520,7 +169041,7 @@ function subSchemaObjCode(it, valid) { gen.var(valid, (0, codegen_1$n._) `${errsCount} === ${names_1$3.default.errors}`); } function checkKeywords(it) { - (0, util_1$l.checkUnknownRules)(it); + (0, util_1$n.checkUnknownRules)(it); checkRefsAndKeywords(it); } function typeAndKeywords(it, errsCount) { @@ -168532,14 +169053,14 @@ function typeAndKeywords(it, errsCount) { } function checkRefsAndKeywords(it) { const { schema, errSchemaPath, opts, self } = it; - if (schema.$ref && opts.ignoreKeywordsWithRef && (0, util_1$l.schemaHasRulesButRef)(schema, self.RULES)) { + if (schema.$ref && opts.ignoreKeywordsWithRef && (0, util_1$n.schemaHasRulesButRef)(schema, self.RULES)) { self.logger.warn(`$ref: keywords ignored in schema at path "${errSchemaPath}"`); } } function checkNoDefault(it) { const { schema, opts } = it; if (schema.default !== undefined && opts.useDefaults && opts.strictSchema) { - (0, util_1$l.checkStrictMode)(it, "default is ignored in the schema root"); + (0, util_1$n.checkStrictMode)(it, "default is ignored in the schema 
root"); } } function updateContext(it) { @@ -168584,7 +169105,7 @@ function assignEvaluated({ gen, evaluated, props, items }) { function schemaKeywords(it, types, typeErrors, errsCount) { const { gen, schema, data, allErrors, opts, self } = it; const { RULES } = self; - if (schema.$ref && (opts.ignoreKeywordsWithRef || !(0, util_1$l.schemaHasRulesButRef)(schema, RULES))) { + if (schema.$ref && (opts.ignoreKeywordsWithRef || !(0, util_1$n.schemaHasRulesButRef)(schema, RULES))) { gen.block(() => keywordCode(it, "$ref", RULES.all.$ref.definition)); // TODO typecast return; } @@ -168685,7 +169206,7 @@ function narrowSchemaTypes(it, withTypes) { function strictTypesError(it, msg) { const schemaPath = it.schemaEnv.baseId + it.errSchemaPath; msg += ` at "${schemaPath}" (strictTypes)`; - (0, util_1$l.checkStrictMode)(it, msg, it.opts.strictTypes); + (0, util_1$n.checkStrictMode)(it, msg, it.opts.strictTypes); } class KeywordCxt { constructor(it, def, keyword) { @@ -168696,7 +169217,7 @@ class KeywordCxt { this.data = it.data; this.schema = it.schema[keyword]; this.$data = def.$data && it.opts.$data && this.schema && this.schema.$data; - this.schemaValue = (0, util_1$l.schemaRefOrVal)(it, this.schema, keyword, this.$data); + this.schemaValue = (0, util_1$n.schemaRefOrVal)(it, this.schema, keyword, this.$data); this.schemaType = def.schemaType; this.parentSchema = it.schema; this.params = {}; @@ -168845,10 +169366,10 @@ class KeywordCxt { if (!it.opts.unevaluated) return; if (it.props !== true && schemaCxt.props !== undefined) { - it.props = util_1$l.mergeEvaluated.props(gen, schemaCxt.props, it.props, toName); + it.props = util_1$n.mergeEvaluated.props(gen, schemaCxt.props, it.props, toName); } if (it.items !== true && schemaCxt.items !== undefined) { - it.items = util_1$l.mergeEvaluated.items(gen, schemaCxt.items, it.items, toName); + it.items = util_1$n.mergeEvaluated.items(gen, schemaCxt.items, it.items, toName); } } mergeValidEvaluated(schemaCxt, valid) { @@ -168859,7 
+169380,7 @@ class KeywordCxt { } } } -validate.KeywordCxt = KeywordCxt; +validate$1.KeywordCxt = KeywordCxt; function keywordCode(it, keyword, def, ruleType) { const cxt = new KeywordCxt(it, def, keyword); if ("code" in def) { @@ -168909,7 +169430,7 @@ function getData($data, { dataLevel, dataNames, dataPathArr }) { const segments = jsonPointer.split("/"); for (const segment of segments) { if (segment) { - data = (0, codegen_1$n._) `${data}${(0, codegen_1$n.getProperty)((0, util_1$l.unescapeJsonPointer)(segment))}`; + data = (0, codegen_1$n._) `${data}${(0, codegen_1$n.getProperty)((0, util_1$n.unescapeJsonPointer)(segment))}`; expr = (0, codegen_1$n._) `${expr} && ${data}`; } } @@ -168918,7 +169439,7 @@ function getData($data, { dataLevel, dataNames, dataPathArr }) { return `Cannot access ${pointerType} ${up} levels up, current level is ${dataLevel}`; } } -validate.getData = getData; +validate$1.getData = getData; var validation_error = {}; @@ -168953,8 +169474,8 @@ const codegen_1$m = codegen; const validation_error_1 = validation_error; const names_1$2 = names$1; const resolve_1 = resolve$1; -const util_1$k = util; -const validate_1$1 = validate; +const util_1$m = util; +const validate_1$1 = validate$1; class SchemaEnv { constructor(env) { var _a; @@ -169164,7 +169685,7 @@ function getJsonPointer(parsedRef, { baseId, schema, root }) { for (const part of parsedRef.fragment.slice(1).split("/")) { if (typeof schema === "boolean") return; - const partSchema = schema[(0, util_1$k.unescapeFragment)(part)]; + const partSchema = schema[(0, util_1$m.unescapeFragment)(part)]; if (partSchema === undefined) return; schema = partSchema; @@ -169175,7 +169696,7 @@ function getJsonPointer(parsedRef, { baseId, schema, root }) { } } let env; - if (typeof schema != "boolean" && schema.$ref && !(0, util_1$k.schemaHasRulesButRef)(schema, this.RULES)) { + if (typeof schema != "boolean" && schema.$ref && !(0, util_1$m.schemaHasRulesButRef)(schema, this.RULES)) { const $ref = (0, 
resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schema.$ref); env = resolveSchema.call(this, root, $ref); } @@ -169190,7 +169711,7 @@ function getJsonPointer(parsedRef, { baseId, schema, root }) { var $id$1 = "https://raw.githubusercontent.com/ajv-validator/ajv/master/lib/refs/data.json#"; var description = "Meta-schema for $data reference (JSON AnySchema extension proposal)"; -var type$1 = "object"; +var type$2 = "object"; var required$1 = [ "$data" ]; @@ -169211,7 +169732,7 @@ var additionalProperties$1 = false; var require$$9 = { $id: $id$1, description: description, - type: type$1, + type: type$2, required: required$1, properties: properties$2, additionalProperties: additionalProperties$1 @@ -170637,7 +171158,7 @@ uri$1.default = uri; (function (exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.CodeGen = exports.Name = exports.nil = exports.stringify = exports.str = exports._ = exports.KeywordCxt = void 0; - var validate_1 = validate; + var validate_1 = validate$1; Object.defineProperty(exports, "KeywordCxt", { enumerable: true, get: function () { return validate_1.KeywordCxt; } }); var codegen_1 = codegen; Object.defineProperty(exports, "_", { enumerable: true, get: function () { return codegen_1._; } }); @@ -171277,7 +171798,7 @@ const code_1$8 = code; const codegen_1$l = codegen; const names_1$1 = names$1; const compile_1$1 = compile; -const util_1$j = util; +const util_1$l = util; const def$r = { keyword: "$ref", schemaType: "string", @@ -171367,23 +171888,23 @@ function callRef(cxt, v, sch, $async) { if (it.props !== true) { if (schEvaluated && !schEvaluated.dynamicProps) { if (schEvaluated.props !== undefined) { - it.props = util_1$j.mergeEvaluated.props(gen, schEvaluated.props, it.props); + it.props = util_1$l.mergeEvaluated.props(gen, schEvaluated.props, it.props); } } else { const props = gen.var("props", (0, codegen_1$l._) `${source}.evaluated.props`); - it.props = util_1$j.mergeEvaluated.props(gen, props, 
it.props, codegen_1$l.Name); + it.props = util_1$l.mergeEvaluated.props(gen, props, it.props, codegen_1$l.Name); } } if (it.items !== true) { if (schEvaluated && !schEvaluated.dynamicItems) { if (schEvaluated.items !== undefined) { - it.items = util_1$j.mergeEvaluated.items(gen, schEvaluated.items, it.items); + it.items = util_1$l.mergeEvaluated.items(gen, schEvaluated.items, it.items); } } else { const items = gen.var("items", (0, codegen_1$l._) `${source}.evaluated.items`); - it.items = util_1$j.mergeEvaluated.items(gen, items, it.items, codegen_1$l.Name); + it.items = util_1$l.mergeEvaluated.items(gen, items, it.items, codegen_1$l.Name); } } } @@ -171492,7 +172013,7 @@ ucs2length.code = 'require("ajv/dist/runtime/ucs2length").default'; Object.defineProperty(limitLength, "__esModule", { value: true }); const codegen_1$i = codegen; -const util_1$i = util; +const util_1$k = util; const ucs2length_1 = ucs2length$1; const error$g = { message({ keyword, schemaCode }) { @@ -171510,15 +172031,15 @@ const def$o = { code(cxt) { const { keyword, data, schemaCode, it } = cxt; const op = keyword === "maxLength" ? codegen_1$i.operators.GT : codegen_1$i.operators.LT; - const len = it.opts.unicode === false ? (0, codegen_1$i._) `${data}.length` : (0, codegen_1$i._) `${(0, util_1$i.useFunc)(cxt.gen, ucs2length_1.default)}(${data})`; + const len = it.opts.unicode === false ? 
(0, codegen_1$i._) `${data}.length` : (0, codegen_1$i._) `${(0, util_1$k.useFunc)(cxt.gen, ucs2length_1.default)}(${data})`; cxt.fail$data((0, codegen_1$i._) `${len} ${op} ${schemaCode}`); }, }; limitLength.default = def$o; -var pattern = {}; +var pattern$1 = {}; -Object.defineProperty(pattern, "__esModule", { value: true }); +Object.defineProperty(pattern$1, "__esModule", { value: true }); const code_1$7 = code; const codegen_1$h = codegen; const error$f = { @@ -171539,7 +172060,7 @@ const def$n = { cxt.fail$data((0, codegen_1$h._) `!${regExp}.test(${data})`); }, }; -pattern.default = def$n; +pattern$1.default = def$n; var limitProperties = {}; @@ -171571,7 +172092,7 @@ var required = {}; Object.defineProperty(required, "__esModule", { value: true }); const code_1$6 = code; const codegen_1$f = codegen; -const util_1$h = util; +const util_1$j = util; const error$d = { message: ({ params: { missingProperty } }) => (0, codegen_1$f.str) `must have required property '${missingProperty}'`, params: ({ params: { missingProperty } }) => (0, codegen_1$f._) `{missingProperty: ${missingProperty}}`, @@ -171599,7 +172120,7 @@ const def$l = { if ((props === null || props === void 0 ? 
void 0 : props[requiredKey]) === undefined && !definedProperties.has(requiredKey)) { const schemaPath = it.schemaEnv.baseId + it.errSchemaPath; const msg = `required property "${requiredKey}" is not defined at "${schemaPath}" (strictRequired)`; - (0, util_1$h.checkStrictMode)(it, msg, it.opts.strictRequired); + (0, util_1$j.checkStrictMode)(it, msg, it.opts.strictRequired); } } } @@ -171684,7 +172205,7 @@ equal$1.default = equal; Object.defineProperty(uniqueItems, "__esModule", { value: true }); const dataType_1 = dataType; const codegen_1$d = codegen; -const util_1$g = util; +const util_1$i = util; const equal_1$2 = equal$1; const error$b = { message: ({ params: { i, j } }) => (0, codegen_1$d.str) `must NOT have duplicate items (items ## ${j} and ${i} are identical)`, @@ -171733,7 +172254,7 @@ const def$j = { }); } function loopN2(i, j) { - const eql = (0, util_1$g.useFunc)(gen, equal_1$2.default); + const eql = (0, util_1$i.useFunc)(gen, equal_1$2.default); const outer = gen.name("outer"); gen.label(outer).for((0, codegen_1$d._) `;${i}--;`, () => gen.for((0, codegen_1$d._) `${j} = ${i}; ${j}--;`, () => gen.if((0, codegen_1$d._) `${eql}(${data}[${i}], ${data}[${j}])`, () => { cxt.error(); @@ -171748,7 +172269,7 @@ var _const = {}; Object.defineProperty(_const, "__esModule", { value: true }); const codegen_1$c = codegen; -const util_1$f = util; +const util_1$h = util; const equal_1$1 = equal$1; const error$a = { message: "must be equal to constant", @@ -171761,7 +172282,7 @@ const def$i = { code(cxt) { const { gen, data, $data, schemaCode, schema } = cxt; if ($data || (schema && typeof schema == "object")) { - cxt.fail$data((0, codegen_1$c._) `!${(0, util_1$f.useFunc)(gen, equal_1$1.default)}(${data}, ${schemaCode})`); + cxt.fail$data((0, codegen_1$c._) `!${(0, util_1$h.useFunc)(gen, equal_1$1.default)}(${data}, ${schemaCode})`); } else { cxt.fail((0, codegen_1$c._) `${schema} !== ${data}`); @@ -171774,7 +172295,7 @@ var _enum = {}; Object.defineProperty(_enum, 
"__esModule", { value: true }); const codegen_1$b = codegen; -const util_1$e = util; +const util_1$g = util; const equal_1 = equal$1; const error$9 = { message: "must be equal to one of the allowed values", @@ -171791,7 +172312,7 @@ const def$h = { throw new Error("enum must have non-empty array"); const useLoop = schema.length >= it.opts.loopEnum; let eql; - const getEql = () => (eql !== null && eql !== void 0 ? eql : (eql = (0, util_1$e.useFunc)(gen, equal_1.default))); + const getEql = () => (eql !== null && eql !== void 0 ? eql : (eql = (0, util_1$g.useFunc)(gen, equal_1.default))); let valid; if (useLoop || $data) { valid = gen.let("valid"); @@ -171823,7 +172344,7 @@ Object.defineProperty(validation$1, "__esModule", { value: true }); const limitNumber_1 = limitNumber; const multipleOf_1 = multipleOf; const limitLength_1 = limitLength; -const pattern_1 = pattern; +const pattern_1 = pattern$1; const limitProperties_1 = limitProperties; const required_1 = required; const limitItems_1 = limitItems; @@ -171858,7 +172379,7 @@ var additionalItems = {}; Object.defineProperty(additionalItems, "__esModule", { value: true }); additionalItems.validateAdditionalItems = void 0; const codegen_1$a = codegen; -const util_1$d = util; +const util_1$f = util; const error$8 = { message: ({ params: { len } }) => (0, codegen_1$a.str) `must NOT have more than ${len} items`, params: ({ params: { len } }) => (0, codegen_1$a._) `{limit: ${len}}`, @@ -171873,7 +172394,7 @@ const def$g = { const { parentSchema, it } = cxt; const { items } = parentSchema; if (!Array.isArray(items)) { - (0, util_1$d.checkStrictMode)(it, '"additionalItems" is ignored when "items" is not an array of schemas'); + (0, util_1$f.checkStrictMode)(it, '"additionalItems" is ignored when "items" is not an array of schemas'); return; } validateAdditionalItems(cxt, items); @@ -171887,14 +172408,14 @@ function validateAdditionalItems(cxt, items) { cxt.setParams({ len: items.length }); cxt.pass((0, codegen_1$a._) `${len} 
<= ${items.length}`); } - else if (typeof schema == "object" && !(0, util_1$d.alwaysValidSchema)(it, schema)) { + else if (typeof schema == "object" && !(0, util_1$f.alwaysValidSchema)(it, schema)) { const valid = gen.var("valid", (0, codegen_1$a._) `${len} <= ${items.length}`); // TODO var gen.if((0, codegen_1$a.not)(valid), () => validateItems(valid)); cxt.ok(valid); } function validateItems(valid) { gen.forRange("i", items.length, len, (i) => { - cxt.subschema({ keyword, dataProp: i, dataPropType: util_1$d.Type.Num }, valid); + cxt.subschema({ keyword, dataProp: i, dataPropType: util_1$f.Type.Num }, valid); if (!it.allErrors) gen.if((0, codegen_1$a.not)(valid), () => gen.break()); }); @@ -171910,7 +172431,7 @@ var items = {}; Object.defineProperty(items, "__esModule", { value: true }); items.validateTuple = void 0; const codegen_1$9 = codegen; -const util_1$c = util; +const util_1$e = util; const code_1$5 = code; const def$f = { keyword: "items", @@ -171922,7 +172443,7 @@ const def$f = { if (Array.isArray(schema)) return validateTuple(cxt, "additionalItems", schema); it.items = true; - if ((0, util_1$c.alwaysValidSchema)(it, schema)) + if ((0, util_1$e.alwaysValidSchema)(it, schema)) return; cxt.ok((0, code_1$5.validateArray)(cxt)); }, @@ -171931,12 +172452,12 @@ function validateTuple(cxt, extraItems, schArr = cxt.schema) { const { gen, parentSchema, data, keyword, it } = cxt; checkStrictTuple(parentSchema); if (it.opts.unevaluated && schArr.length && it.items !== true) { - it.items = util_1$c.mergeEvaluated.items(gen, schArr.length, it.items); + it.items = util_1$e.mergeEvaluated.items(gen, schArr.length, it.items); } const valid = gen.name("valid"); const len = gen.const("len", (0, codegen_1$9._) `${data}.length`); schArr.forEach((sch, i) => { - if ((0, util_1$c.alwaysValidSchema)(it, sch)) + if ((0, util_1$e.alwaysValidSchema)(it, sch)) return; gen.if((0, codegen_1$9._) `${len} > ${i}`, () => cxt.subschema({ keyword, @@ -171951,7 +172472,7 @@ function 
validateTuple(cxt, extraItems, schArr = cxt.schema) { const fullTuple = l === sch.minItems && (l === sch.maxItems || sch[extraItems] === false); if (opts.strictTuples && !fullTuple) { const msg = `"${keyword}" is ${l}-tuple, but minItems or maxItems/${extraItems} are not specified or different at path "${errSchemaPath}"`; - (0, util_1$c.checkStrictMode)(it, msg, opts.strictTuples); + (0, util_1$e.checkStrictMode)(it, msg, opts.strictTuples); } } } @@ -171973,7 +172494,7 @@ var items2020 = {}; Object.defineProperty(items2020, "__esModule", { value: true }); const codegen_1$8 = codegen; -const util_1$b = util; +const util_1$d = util; const code_1$4 = code; const additionalItems_1$1 = additionalItems; const error$7 = { @@ -171990,7 +172511,7 @@ const def$d = { const { schema, parentSchema, it } = cxt; const { prefixItems } = parentSchema; it.items = true; - if ((0, util_1$b.alwaysValidSchema)(it, schema)) + if ((0, util_1$d.alwaysValidSchema)(it, schema)) return; if (prefixItems) (0, additionalItems_1$1.validateAdditionalItems)(cxt, prefixItems); @@ -172004,7 +172525,7 @@ var contains = {}; Object.defineProperty(contains, "__esModule", { value: true }); const codegen_1$7 = codegen; -const util_1$a = util; +const util_1$c = util; const error$6 = { message: ({ params: { min, max } }) => max === undefined ? 
(0, codegen_1$7.str) `must contain at least ${min} valid item(s)` @@ -172033,15 +172554,15 @@ const def$c = { const len = gen.const("len", (0, codegen_1$7._) `${data}.length`); cxt.setParams({ min, max }); if (max === undefined && min === 0) { - (0, util_1$a.checkStrictMode)(it, `"minContains" == 0 without "maxContains": "contains" keyword ignored`); + (0, util_1$c.checkStrictMode)(it, `"minContains" == 0 without "maxContains": "contains" keyword ignored`); return; } if (max !== undefined && min > max) { - (0, util_1$a.checkStrictMode)(it, `"minContains" > "maxContains" is always invalid`); + (0, util_1$c.checkStrictMode)(it, `"minContains" > "maxContains" is always invalid`); cxt.fail(); return; } - if ((0, util_1$a.alwaysValidSchema)(it, schema)) { + if ((0, util_1$c.alwaysValidSchema)(it, schema)) { let cond = (0, codegen_1$7._) `${len} >= ${min}`; if (max !== undefined) cond = (0, codegen_1$7._) `${cond} && ${len} <= ${max}`; @@ -172073,7 +172594,7 @@ const def$c = { cxt.subschema({ keyword: "contains", dataProp: i, - dataPropType: util_1$a.Type.Num, + dataPropType: util_1$c.Type.Num, compositeRule: true, }, _valid); block(); @@ -172189,7 +172710,7 @@ var propertyNames = {}; Object.defineProperty(propertyNames, "__esModule", { value: true }); const codegen_1$6 = codegen; -const util_1$9 = util; +const util_1$b = util; const error$5 = { message: "property name must be valid", params: ({ params }) => (0, codegen_1$6._) `{propertyName: ${params.propertyName}}`, @@ -172201,7 +172722,7 @@ const def$b = { error: error$5, code(cxt) { const { gen, schema, data, it } = cxt; - if ((0, util_1$9.alwaysValidSchema)(it, schema)) + if ((0, util_1$b.alwaysValidSchema)(it, schema)) return; const valid = gen.name("valid"); gen.forIn("key", data, (key) => { @@ -172230,7 +172751,7 @@ Object.defineProperty(additionalProperties, "__esModule", { value: true }); const code_1$3 = code; const codegen_1$5 = codegen; const names_1 = names$1; -const util_1$8 = util; +const util_1$a = util; 
const error$4 = { message: "must NOT have additional properties", params: ({ params }) => (0, codegen_1$5._) `{additionalProperty: ${params.additionalProperty}}`, @@ -172249,7 +172770,7 @@ const def$a = { throw new Error("ajv implementation error"); const { allErrors, opts } = it; it.props = true; - if (opts.removeAdditional !== "all" && (0, util_1$8.alwaysValidSchema)(it, schema)) + if (opts.removeAdditional !== "all" && (0, util_1$a.alwaysValidSchema)(it, schema)) return; const props = (0, code_1$3.allSchemaProperties)(parentSchema.properties); const patProps = (0, code_1$3.allSchemaProperties)(parentSchema.patternProperties); @@ -172267,7 +172788,7 @@ const def$a = { let definedProp; if (props.length > 8) { // TODO maybe an option instead of hard-coded 8? - const propsSchema = (0, util_1$8.schemaRefOrVal)(it, parentSchema.properties, "properties"); + const propsSchema = (0, util_1$a.schemaRefOrVal)(it, parentSchema.properties, "properties"); definedProp = (0, code_1$3.isOwnProperty)(gen, propsSchema, key); } else if (props.length) { @@ -172296,7 +172817,7 @@ const def$a = { gen.break(); return; } - if (typeof schema == "object" && !(0, util_1$8.alwaysValidSchema)(it, schema)) { + if (typeof schema == "object" && !(0, util_1$a.alwaysValidSchema)(it, schema)) { const valid = gen.name("valid"); if (opts.removeAdditional === "failing") { applyAdditionalSchema(key, valid, false); @@ -172316,7 +172837,7 @@ const def$a = { const subschema = { keyword: "additionalProperties", dataProp: key, - dataPropType: util_1$8.Type.Str, + dataPropType: util_1$a.Type.Str, }; if (errors === false) { Object.assign(subschema, { @@ -172334,9 +172855,9 @@ additionalProperties.default = def$a; var properties$1 = {}; Object.defineProperty(properties$1, "__esModule", { value: true }); -const validate_1 = validate; +const validate_1 = validate$1; const code_1$2 = code; -const util_1$7 = util; +const util_1$9 = util; const additionalProperties_1$1 = additionalProperties; const def$9 = { 
keyword: "properties", @@ -172352,9 +172873,9 @@ const def$9 = { it.definedProperties.add(prop); } if (it.opts.unevaluated && allProps.length && it.props !== true) { - it.props = util_1$7.mergeEvaluated.props(gen, (0, util_1$7.toHash)(allProps), it.props); + it.props = util_1$9.mergeEvaluated.props(gen, (0, util_1$9.toHash)(allProps), it.props); } - const properties = allProps.filter((p) => !(0, util_1$7.alwaysValidSchema)(it, schema[p])); + const properties = allProps.filter((p) => !(0, util_1$9.alwaysValidSchema)(it, schema[p])); if (properties.length === 0) return; const valid = gen.name("valid"); @@ -172391,7 +172912,7 @@ var patternProperties = {}; Object.defineProperty(patternProperties, "__esModule", { value: true }); const code_1$1 = code; const codegen_1$4 = codegen; -const util_1$6 = util; +const util_1$8 = util; const util_2 = util; const def$8 = { keyword: "patternProperties", @@ -172401,7 +172922,7 @@ const def$8 = { const { gen, schema, data, parentSchema, it } = cxt; const { opts } = it; const patterns = (0, code_1$1.allSchemaProperties)(schema); - const alwaysValidPatterns = patterns.filter((p) => (0, util_1$6.alwaysValidSchema)(it, schema[p])); + const alwaysValidPatterns = patterns.filter((p) => (0, util_1$8.alwaysValidSchema)(it, schema[p])); if (patterns.length === 0 || (alwaysValidPatterns.length === patterns.length && (!it.opts.unevaluated || it.props === true))) { @@ -172431,7 +172952,7 @@ const def$8 = { function checkMatchingProperties(pat) { for (const prop in checkProperties) { if (new RegExp(pat).test(prop)) { - (0, util_1$6.checkStrictMode)(it, `property ${prop} matches pattern ${pat} (use allowMatchingProperties)`); + (0, util_1$8.checkStrictMode)(it, `property ${prop} matches pattern ${pat} (use allowMatchingProperties)`); } } } @@ -172465,14 +172986,14 @@ patternProperties.default = def$8; var not = {}; Object.defineProperty(not, "__esModule", { value: true }); -const util_1$5 = util; +const util_1$7 = util; const def$7 = { keyword: 
"not", schemaType: ["object", "boolean"], trackErrors: true, code(cxt) { const { gen, schema, it } = cxt; - if ((0, util_1$5.alwaysValidSchema)(it, schema)) { + if ((0, util_1$7.alwaysValidSchema)(it, schema)) { cxt.fail(); return; } @@ -172506,7 +173027,7 @@ var oneOf = {}; Object.defineProperty(oneOf, "__esModule", { value: true }); const codegen_1$3 = codegen; -const util_1$4 = util; +const util_1$6 = util; const error$3 = { message: "must match exactly one schema in oneOf", params: ({ params }) => (0, codegen_1$3._) `{passingSchemas: ${params.passing}}`, @@ -172534,7 +173055,7 @@ const def$5 = { function validateOneOf() { schArr.forEach((sch, i) => { let schCxt; - if ((0, util_1$4.alwaysValidSchema)(it, sch)) { + if ((0, util_1$6.alwaysValidSchema)(it, sch)) { gen.var(schValid, true); } else { @@ -172566,7 +173087,7 @@ oneOf.default = def$5; var allOf = {}; Object.defineProperty(allOf, "__esModule", { value: true }); -const util_1$3 = util; +const util_1$5 = util; const def$4 = { keyword: "allOf", schemaType: "array", @@ -172577,7 +173098,7 @@ const def$4 = { throw new Error("ajv implementation error"); const valid = gen.name("valid"); schema.forEach((sch, i) => { - if ((0, util_1$3.alwaysValidSchema)(it, sch)) + if ((0, util_1$5.alwaysValidSchema)(it, sch)) return; const schCxt = cxt.subschema({ keyword: "allOf", schemaProp: i }, valid); cxt.ok(valid); @@ -172591,7 +173112,7 @@ var _if = {}; Object.defineProperty(_if, "__esModule", { value: true }); const codegen_1$2 = codegen; -const util_1$2 = util; +const util_1$4 = util; const error$2 = { message: ({ params }) => (0, codegen_1$2.str) `must match "${params.ifClause}" schema`, params: ({ params }) => (0, codegen_1$2._) `{failingKeyword: ${params.ifClause}}`, @@ -172604,7 +173125,7 @@ const def$3 = { code(cxt) { const { gen, parentSchema, it } = cxt; if (parentSchema.then === undefined && parentSchema.else === undefined) { - (0, util_1$2.checkStrictMode)(it, '"if" without "then" and "else" is ignored'); + (0, 
util_1$4.checkStrictMode)(it, '"if" without "then" and "else" is ignored'); } const hasThen = hasSchema(it, "then"); const hasElse = hasSchema(it, "else"); @@ -172650,20 +173171,20 @@ const def$3 = { }; function hasSchema(it, keyword) { const schema = it.schema[keyword]; - return schema !== undefined && !(0, util_1$2.alwaysValidSchema)(it, schema); + return schema !== undefined && !(0, util_1$4.alwaysValidSchema)(it, schema); } _if.default = def$3; var thenElse = {}; Object.defineProperty(thenElse, "__esModule", { value: true }); -const util_1$1 = util; +const util_1$3 = util; const def$2 = { keyword: ["then", "else"], schemaType: ["object", "boolean"], code({ keyword, parentSchema, it }) { if (parentSchema.if === undefined) - (0, util_1$1.checkStrictMode)(it, `"${keyword}" without "if" is ignored`); + (0, util_1$3.checkStrictMode)(it, `"${keyword}" without "if" is ignored`); }, }; thenElse.default = def$2; @@ -172848,7 +173369,7 @@ draft7.default = draft7Vocabularies; var discriminator = {}; -var types = {}; +var types$2 = {}; (function (exports) { Object.defineProperty(exports, "__esModule", { value: true }); @@ -172858,15 +173379,15 @@ var types = {}; DiscrError["Mapping"] = "mapping"; })(exports.DiscrError || (exports.DiscrError = {})); -} (types)); +} (types$2)); Object.defineProperty(discriminator, "__esModule", { value: true }); const codegen_1 = codegen; -const types_1 = types; +const types_1$1 = types$2; const compile_1 = compile; -const util_1 = util; +const util_1$2 = util; const error = { - message: ({ params: { discrError, tagName } }) => discrError === types_1.DiscrError.Tag + message: ({ params: { discrError, tagName } }) => discrError === types_1$1.DiscrError.Tag ? 
`tag "${tagName}" must be string` : `value of tag "${tagName}" must be in oneOf`, params: ({ params: { discrError, tag, tagName } }) => (0, codegen_1._) `{error: ${discrError}, tag: ${tagName}, tagValue: ${tag}}`, @@ -172891,7 +173412,7 @@ const def = { throw new Error("discriminator: requires oneOf keyword"); const valid = gen.let("valid", false); const tag = gen.const("tag", (0, codegen_1._) `${data}${(0, codegen_1.getProperty)(tagName)}`); - gen.if((0, codegen_1._) `typeof ${tag} == "string"`, () => validateMapping(), () => cxt.error(false, { discrError: types_1.DiscrError.Tag, tag, tagName })); + gen.if((0, codegen_1._) `typeof ${tag} == "string"`, () => validateMapping(), () => cxt.error(false, { discrError: types_1$1.DiscrError.Tag, tag, tagName })); cxt.ok(valid); function validateMapping() { const mapping = getMapping(); @@ -172901,7 +173422,7 @@ const def = { gen.assign(valid, applyTagSchema(mapping[tagValue])); } gen.else(); - cxt.error(false, { discrError: types_1.DiscrError.Mapping, tag, tagName }); + cxt.error(false, { discrError: types_1$1.DiscrError.Mapping, tag, tagName }); gen.endIf(); } function applyTagSchema(schemaProp) { @@ -172917,7 +173438,7 @@ const def = { let tagRequired = true; for (let i = 0; i < oneOf.length; i++) { let sch = oneOf[i]; - if ((sch === null || sch === void 0 ? void 0 : sch.$ref) && !(0, util_1.schemaHasRulesButRef)(sch, it.self.RULES)) { + if ((sch === null || sch === void 0 ? void 0 : sch.$ref) && !(0, util_1$2.schemaHasRulesButRef)(sch, it.self.RULES)) { sch = compile_1.resolveRef.call(it.self, it.schemaEnv.root, it.baseId, sch === null || sch === void 0 ? 
void 0 : sch.$ref); if (sch instanceof compile_1.SchemaEnv) sch = sch.schema; @@ -173005,7 +173526,7 @@ var definitions = { ] } }; -var type = [ +var type$1 = [ "object", "boolean" ]; @@ -173206,7 +173727,7 @@ var require$$3 = { $id: $id, title: title, definitions: definitions, - type: type, + type: type$1, properties: properties, "default": true }; @@ -173245,7 +173766,7 @@ var require$$3 = { module.exports = exports = Ajv; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = Ajv; - var validate_1 = validate; + var validate_1 = validate$1; Object.defineProperty(exports, "KeywordCxt", { enumerable: true, get: function () { return validate_1.KeywordCxt; } }); var codegen_1 = codegen; Object.defineProperty(exports, "_", { enumerable: true, get: function () { return codegen_1._; } }); @@ -173264,6 +173785,677 @@ var require$$3 = { var ajvExports = ajv$1.exports; var Ajv = /*@__PURE__*/getDefaultExportFromCjs(ajvExports); +var __defProp$3 = Object.defineProperty; +var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols; +var __hasOwnProp$3 = Object.prototype.hasOwnProperty; +var __propIsEnum$3 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$3 = (obj, key, value) => key in obj ? 
__defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$3 = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp$3.call(b, prop)) + __defNormalProp$3(a, prop, b[prop]); + if (__getOwnPropSymbols$3) + for (var prop of __getOwnPropSymbols$3(b)) { + if (__propIsEnum$3.call(b, prop)) + __defNormalProp$3(a, prop, b[prop]); + } + return a; +}; +var NetId = /* @__PURE__ */ ((NetId2) => { + NetId2[NetId2["MAINNET"] = 1] = "MAINNET"; + NetId2[NetId2["BSC"] = 56] = "BSC"; + NetId2[NetId2["POLYGON"] = 137] = "POLYGON"; + NetId2[NetId2["OPTIMISM"] = 10] = "OPTIMISM"; + NetId2[NetId2["ARBITRUM"] = 42161] = "ARBITRUM"; + NetId2[NetId2["GNOSIS"] = 100] = "GNOSIS"; + NetId2[NetId2["AVALANCHE"] = 43114] = "AVALANCHE"; + NetId2[NetId2["SEPOLIA"] = 11155111] = "SEPOLIA"; + return NetId2; +})(NetId || {}); +const theGraph = { + name: "Hosted Graph", + url: "https://api.thegraph.com" +}; +const tornado = { + name: "Tornado Subgraphs", + url: "https://tornadocash-rpc.com" +}; +const defaultConfig = { + [1 /* MAINNET */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 80, + fast: 50, + standard: 25, + low: 8 + }, + nativeCurrency: "eth", + currencyName: "ETH", + explorerUrl: "https://etherscan.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Ethereum Mainnet", + deployedBlock: 9116966, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + mevblockerRPC: { + name: "MevblockerRPC", + url: "https://rpc.mevblocker.io" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/ethereum-mainnet" + }, + noderealRPC: { + name: "NodeReal RPC", + url: "https://eth-mainnet.nodereal.io/v1/1659dfb40aa24bbb8153a677b98064d7" + }, + 
notadegenRPC: { + name: "NotADegen RPC", + url: "https://rpc.notadegen.com/eth" + }, + keydonixRPC: { + name: "Keydonix RPC", + url: "https://ethereum.keydonix.com/v1/mainnet" + }, + oneRPC: { + name: "1RPC", + url: "https://1rpc.io/eth" + } + }, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", + echoContract: "0x9B27DD5Bb15d42DC224FCD0B7caEbBe16161Df42", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornContract: "0x77777FeDdddFfC19Ff86DB637967013e6C6A116C", + governanceContract: "0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce", + stakingRewardsContract: "0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29", + registryContract: "0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2", + aggregatorContract: "0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49", + reverseRecordsContract: "0x3671aE578E63FdF66ad4F3E12CC0c0d71Ac7510C", + tornadoSubgraph: "tornadocash/mainnet-tornado-subgraph", + registrySubgraph: "tornadocash/tornado-relayer-registry", + governanceSubgraph: "tornadocash/tornado-governance", + subgraphs: { + tornado, + theGraph + }, + tokens: { + eth: { + instanceAddress: { + "0.1": "0x12D66f87A04A9E220743712cE6d9bB1B5616B8Fc", + "1": "0x47CE0C6eD5B0Ce3d3A51fdb1C52DC66a7c3c2936", + "10": "0x910Cbd523D972eb0a6f4cAe4618aD62622b39DbF", + "100": "0xA160cdAB225685dA1d56aa342Ad8841c3b53f291" + }, + symbol: "ETH", + decimals: 18 + }, + dai: { + instanceAddress: { + "100": "0xD4B88Df4D29F5CedD6857912842cff3b20C8Cfa3", + "1000": "0xFD8610d20aA15b7B2E3Be39B396a1bC3516c7144", + "10000": "0x07687e702b410Fa43f4cB4Af7FA097918ffD2730", + "100000": "0x23773E65ed146A459791799d01336DB287f25334" + }, + tokenAddress: "0x6B175474E89094C44Da98b954EedeAC495271d0F", + tokenGasLimit: 7e4, + symbol: "DAI", + decimals: 18, + gasLimit: 7e5 + }, + cdai: { + instanceAddress: { + "5000": "0x22aaA7720ddd5388A3c0A3333430953C68f1849b", + "50000": "0x03893a7c7463AE47D46bc7f091665f1893656003", + "500000": 
"0x2717c5e28cf931547B621a5dddb772Ab6A35B701", + "5000000": "0xD21be7248e0197Ee08E0c20D4a96DEBdaC3D20Af" + }, + tokenAddress: "0x5d3a536E4D6DbD6114cc1Ead35777bAB948E3643", + tokenGasLimit: 2e5, + symbol: "cDAI", + decimals: 8, + gasLimit: 7e5 + }, + usdc: { + instanceAddress: { + "100": "0xd96f2B1c14Db8458374d9Aca76E26c3D18364307", + "1000": "0x4736dCf1b7A3d580672CcE6E7c65cd5cc9cFBa9D" + }, + tokenAddress: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", + tokenGasLimit: 7e4, + symbol: "USDC", + decimals: 6, + gasLimit: 7e5 + }, + usdt: { + instanceAddress: { + "100": "0x169AD27A470D064DEDE56a2D3ff727986b15D52B", + "1000": "0x0836222F2B2B24A3F36f98668Ed8F0B38D1a872f" + }, + tokenAddress: "0xdAC17F958D2ee523a2206206994597C13D831ec7", + tokenGasLimit: 7e4, + symbol: "USDT", + decimals: 6, + gasLimit: 7e5 + }, + wbtc: { + instanceAddress: { + "0.1": "0x178169B423a011fff22B9e3F3abeA13414dDD0F1", + "1": "0x610B717796ad172B316836AC95a2ffad065CeaB4", + "10": "0xbB93e510BbCD0B7beb5A853875f9eC60275CF498" + }, + tokenAddress: "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", + tokenGasLimit: 7e4, + symbol: "WBTC", + decimals: 8, + gasLimit: 7e5 + } + }, + ensSubdomainKey: "mainnet-tornado", + pollInterval: 15, + constants: { + GOVERNANCE_BLOCK: 11474695, + NOTE_ACCOUNT_BLOCK: 11842486, + ENCRYPTED_NOTES_BLOCK: 12143762, + REGISTRY_BLOCK: 14173129, + MINING_BLOCK_TIME: 15 + } + }, + [56 /* BSC */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 5, + fast: 5, + standard: 5, + low: 5 + }, + nativeCurrency: "bnb", + currencyName: "BNB", + explorerUrl: "https://bscscan.com", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Binance Smart Chain", + deployedBlock: 8158799, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: 
"0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornadoSubgraph: "tornadocash/bsc-tornado-subgraph", + subgraphs: { + tornado, + theGraph + }, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com/bsc" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://bsc-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/bsc-mainnet" + }, + noderealRPC: { + name: "NodeReal RPC", + url: "https://bsc-mainnet.nodereal.io/v1/64a9df0874fb4a93b9d0a3849de012d3" + }, + oneRPC: { + name: "1RPC", + url: "https://1rpc.io/bnb" + } + }, + tokens: { + bnb: { + instanceAddress: { + "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", + "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", + "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" + }, + symbol: "BNB", + decimals: 18 + } + }, + ensSubdomainKey: "bsc-tornado", + pollInterval: 10, + constants: { + NOTE_ACCOUNT_BLOCK: 8159269, + ENCRYPTED_NOTES_BLOCK: 8159269 + } + }, + [137 /* POLYGON */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 100, + fast: 75, + standard: 50, + low: 30 + }, + nativeCurrency: "matic", + currencyName: "MATIC", + explorerUrl: "https://polygonscan.com", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Polygon (Matic) Network", + deployedBlock: 16257962, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + gasPriceOracleContract: "0xF81A8D8D3581985D3969fe53bFA67074aDFa8F3C", + tornadoSubgraph: "tornadocash/matic-tornado-subgraph", + subgraphs: { + tornado, + theGraph + }, + rpcUrls: { + chainnodes: { + name: "Tornado RPC", 
+ url: "https://polygon-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/polygon-mainnet" + }, + oneRpc: { + name: "1RPC", + url: "https://1rpc.io/matic" + } + }, + tokens: { + matic: { + instanceAddress: { + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", + "1000": "0xdf231d99Ff8b6c6CBF4E9B9a945CBAcEF9339178", + "10000": "0xaf4c0B70B2Ea9FB7487C7CbB37aDa259579fe040", + "100000": "0xa5C2254e4253490C54cef0a4347fddb8f75A4998" + }, + symbol: "MATIC", + decimals: 18 + } + }, + ensSubdomainKey: "polygon-tornado", + pollInterval: 10, + constants: { + NOTE_ACCOUNT_BLOCK: 16257996, + ENCRYPTED_NOTES_BLOCK: 16257996 + } + }, + [10 /* OPTIMISM */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 1e-3, + fast: 1e-3, + standard: 1e-3, + low: 1e-3 + }, + nativeCurrency: "eth", + currencyName: "ETH", + explorerUrl: "https://optimistic.etherscan.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Optimism", + deployedBlock: 2243689, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + ovmGasPriceOracleContract: "0x420000000000000000000000000000000000000F", + tornadoSubgraph: "tornadocash/optimism-tornado-subgraph", + subgraphs: { + tornado, + theGraph + }, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com/op" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://optimism-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + optimism: { + name: "Optimism RPC", + url: "https://mainnet.optimism.io" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/optimism-mainnet" + }, + oneRpc: { + 
name: "1RPC", + url: "https://1rpc.io/op" + } + }, + tokens: { + eth: { + instanceAddress: { + "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", + "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", + "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" + }, + symbol: "ETH", + decimals: 18 + } + }, + ensSubdomainKey: "optimism-tornado", + pollInterval: 15, + constants: { + NOTE_ACCOUNT_BLOCK: 2243694, + ENCRYPTED_NOTES_BLOCK: 2243694 + } + }, + [42161 /* ARBITRUM */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 4, + fast: 3, + standard: 2.52, + low: 2.29 + }, + nativeCurrency: "eth", + currencyName: "ETH", + explorerUrl: "https://arbiscan.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Arbitrum One", + deployedBlock: 3430648, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornadoSubgraph: "tornadocash/arbitrum-tornado-subgraph", + subgraphs: { + tornado, + theGraph + }, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com/arbitrum" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://arbitrum-one.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + arbitrum: { + name: "Arbitrum RPC", + url: "https://arb1.arbitrum.io/rpc" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/arbitrum-one" + }, + oneRpc: { + name: "1rpc", + url: "https://1rpc.io/arb" + } + }, + tokens: { + eth: { + instanceAddress: { + "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", + "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", + "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" + 
}, + symbol: "ETH", + decimals: 18 + } + }, + ensSubdomainKey: "arbitrum-tornado", + pollInterval: 15, + constants: { + NOTE_ACCOUNT_BLOCK: 3430605, + ENCRYPTED_NOTES_BLOCK: 3430605 + } + }, + [100 /* GNOSIS */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 6, + fast: 5, + standard: 4, + low: 1 + }, + nativeCurrency: "xdai", + currencyName: "xDAI", + explorerUrl: "https://gnosisscan.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Gnosis Chain", + deployedBlock: 17754561, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornadoSubgraph: "tornadocash/xdai-tornado-subgraph", + subgraphs: { + tornado, + theGraph + }, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com/gnosis" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://gnosis-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + gnosis: { + name: "Gnosis RPC", + url: "https://rpc.gnosischain.com" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/arbitrum-one" + }, + blockPi: { + name: "BlockPi", + url: "https://gnosis.blockpi.network/v1/rpc/public" + } + }, + tokens: { + xdai: { + instanceAddress: { + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", + "1000": "0xdf231d99Ff8b6c6CBF4E9B9a945CBAcEF9339178", + "10000": "0xaf4c0B70B2Ea9FB7487C7CbB37aDa259579fe040", + "100000": "0xa5C2254e4253490C54cef0a4347fddb8f75A4998" + }, + symbol: "xDAI", + decimals: 18 + } + }, + ensSubdomainKey: "gnosis-tornado", + pollInterval: 15, + constants: { + NOTE_ACCOUNT_BLOCK: 17754564, + ENCRYPTED_NOTES_BLOCK: 17754564 + } + }, + [43114 /* AVALANCHE */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 225, + 
fast: 35, + standard: 25, + low: 25 + }, + nativeCurrency: "avax", + currencyName: "AVAX", + explorerUrl: "https://snowtrace.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Avalanche Mainnet", + deployedBlock: 4429818, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornadoSubgraph: "tornadocash/avalanche-tornado-subgraph", + subgraphs: { + theGraph + }, + rpcUrls: { + oneRPC: { + name: "OneRPC", + url: "https://1rpc.io/avax/c" + }, + avalancheRPC: { + name: "Avalanche RPC", + url: "https://api.avax.network/ext/bc/C/rpc" + }, + meowRPC: { + name: "Meow RPC", + url: "https://avax.meowrpc.com" + } + }, + tokens: { + avax: { + instanceAddress: { + "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", + "500": "0xaf8d1839c3c67cf571aa74B5c12398d4901147B3" + }, + symbol: "AVAX", + decimals: 18 + } + }, + ensSubdomainKey: "avalanche-tornado", + pollInterval: 10, + constants: { + NOTE_ACCOUNT_BLOCK: 4429813, + ENCRYPTED_NOTES_BLOCK: 4429813 + } + }, + [11155111 /* SEPOLIA */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 2, + fast: 2, + standard: 2, + low: 2 + }, + nativeCurrency: "eth", + currencyName: "SepoliaETH", + explorerUrl: "https://sepolia.etherscan.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Ethereum Sepolia", + deployedBlock: 5594395, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + tornContract: "0x3AE6667167C0f44394106E197904519D808323cA", + 
governanceContract: "0xe5324cD7602eeb387418e594B87aCADee08aeCAD", + stakingRewardsContract: "0x6d0018890751Efd31feb8166711B16732E2b496b", + registryContract: "0x1428e5d2356b13778A13108b10c440C83011dfB8", + aggregatorContract: "0x4088712AC9fad39ea133cdb9130E465d235e9642", + reverseRecordsContract: "0xEc29700C0283e5Be64AcdFe8077d6cC95dE23C23", + tornadoSubgraph: "tornadocash/sepolia-tornado-subgraph", + subgraphs: { + tornado + }, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com/sepolia" + }, + sepolia: { + name: "Sepolia RPC", + url: "https://rpc.sepolia.org" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://sepolia.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + } + }, + tokens: { + eth: { + instanceAddress: { + "0.1": "0x8C4A04d872a6C1BE37964A21ba3a138525dFF50b", + "1": "0x8cc930096B4Df705A007c4A039BDFA1320Ed2508", + "10": "0x8D10d506D29Fc62ABb8A290B99F66dB27Fc43585", + "100": "0x44c5C92ed73dB43888210264f0C8b36Fd68D8379" + }, + symbol: "ETH", + decimals: 18 + }, + dai: { + instanceAddress: { + "100": "0x6921fd1a97441dd603a997ED6DDF388658daf754", + "1000": "0x50a637770F5d161999420F7d70d888DE47207145", + "10000": "0xecD649870407cD43923A816Cc6334a5bdf113621", + "100000": "0x73B4BD04bF83206B6e979BE2507098F92EDf4F90" + }, + tokenAddress: "0xFF34B3d4Aee8ddCd6F9AFFFB6Fe49bD371b8a357", + tokenGasLimit: 7e4, + symbol: "DAI", + decimals: 18, + gasLimit: 7e5 + } + }, + ensSubdomainKey: "sepolia-tornado", + pollInterval: 15, + constants: { + GOVERNANCE_BLOCK: 5594395, + NOTE_ACCOUNT_BLOCK: 5594395, + ENCRYPTED_NOTES_BLOCK: 5594395, + MINING_BLOCK_TIME: 15 + } + } +}; +const enabledChains = Object.values(NetId); +let customConfig = {}; +function getNetworkConfig() { + const allConfig = __spreadValues$3(__spreadValues$3({}, defaultConfig), customConfig); + return enabledChains.reduce((acc, curr) => { + acc[curr] = allConfig[curr]; + return acc; + }, {}); +} +function getConfig(netId) { + const allConfig = 
getNetworkConfig(); + const chainConfig = allConfig[netId]; + if (!chainConfig) { + const errMsg = `No config found for network ${netId}!`; + throw new Error(errMsg); + } + return chainConfig; +} +function getInstanceByAddress({ netId, address }) { + const { tokens } = getConfig(netId); + for (const [currency, { instanceAddress }] of Object.entries(tokens)) { + for (const [amount, instance] of Object.entries(instanceAddress)) { + if (instance === address) { + return { + amount, + currency + }; + } + } + } +} +function getSubdomains() { + const allConfig = getNetworkConfig(); + return enabledChains.map((chain) => allConfig[chain].ensSubdomainKey); +} + const addressType = { type: "string", pattern: "^0x[a-fA-F0-9]{40}$" }; const bnType = { type: "string", BN: true }; const statusSchema = { @@ -173338,7 +174530,7 @@ function getStatusSchema(netId, config) { } ); schema.properties.instances = instances; - if (Number(netId) === 1) { + if (netId === NetId.MAINNET) { const _tokens = Object.keys(tokens).filter((t) => t !== nativeCurrency); const ethPrices = { type: "object", @@ -173493,7 +174685,7 @@ function toArray$2(s, radix) { } -function add$2(a, b) { +function add$4(a, b) { return BigInt(a) + BigInt(b); } @@ -173513,7 +174705,7 @@ function square$2(a) { return BigInt(a) * BigInt(a); } -function pow$2(a, b) { +function pow$3(a, b) { return BigInt(a) ** BigInt(b); } @@ -173529,23 +174721,23 @@ function div$2(a, b) { return BigInt(a) / BigInt(b); } -function mod$2(a, b) { +function mod$3(a, b) { return BigInt(a) % BigInt(b); } -function eq$2(a, b) { +function eq$5(a, b) { return BigInt(a) == BigInt(b); } -function neq$2(a, b) { +function neq$5(a, b) { return BigInt(a) != BigInt(b); } -function lt$2(a, b) { +function lt$6(a, b) { return BigInt(a) < BigInt(b); } -function gt$2(a, b) { +function gt$7(a, b) { return BigInt(a) > BigInt(b); } @@ -173584,7 +174776,7 @@ function lnot$2(a) { var Scalar_native = /*#__PURE__*/Object.freeze({ __proto__: null, abs: abs$2, - add: 
add$2, + add: add$4, band: band$2, bitLength: bitLength$2, bits: bits$2, @@ -173592,12 +174784,12 @@ var Scalar_native = /*#__PURE__*/Object.freeze({ bxor: bxor$2, div: div$2, e: e$2, - eq: eq$2, + eq: eq$5, exp: exp$2, fromArray: fromArray$2, fromString: fromString$2, geq: geq$2, - gt: gt$2, + gt: gt$7, isNegative: isNegative$2, isOdd: isOdd$2, isZero: isZero$2, @@ -173605,13 +174797,13 @@ var Scalar_native = /*#__PURE__*/Object.freeze({ leq: leq$2, lnot: lnot$2, lor: lor$2, - lt: lt$2, - mod: mod$2, + lt: lt$6, + mod: mod$3, mul: mul$2, naf: naf$2, neg: neg$2, - neq: neq$2, - pow: pow$2, + neq: neq$5, + pow: pow$3, shiftLeft: shiftLeft$2, shiftRight: shiftRight$2, shl: shl$2, @@ -175162,7 +176354,7 @@ function toArray$1(s, radix) { return bigInt$e(s).toArray(radix); } -function add$1(a, b) { +function add$3(a, b) { return bigInt$e(a).add(bigInt$e(b)); } @@ -175182,7 +176374,7 @@ function square$1(a) { return bigInt$e(a).square(); } -function pow$1(a, b) { +function pow$2(a, b) { return bigInt$e(a).pow(bigInt$e(b)); } @@ -175198,23 +176390,23 @@ function div$1(a, b) { return bigInt$e(a).divide(bigInt$e(b)); } -function mod$1(a, b) { +function mod$2(a, b) { return bigInt$e(a).mod(bigInt$e(b)); } -function eq$1(a, b) { +function eq$4(a, b) { return bigInt$e(a).eq(bigInt$e(b)); } -function neq$1(a, b) { +function neq$4(a, b) { return bigInt$e(a).neq(bigInt$e(b)); } -function lt$1(a, b) { +function lt$5(a, b) { return bigInt$e(a).lt(bigInt$e(b)); } -function gt$1(a, b) { +function gt$6(a, b) { return bigInt$e(a).gt(bigInt$e(b)); } @@ -175253,7 +176445,7 @@ function lnot$1(a) { var Scalar_bigint = /*#__PURE__*/Object.freeze({ __proto__: null, abs: abs$1, - add: add$1, + add: add$3, band: band$1, bitLength: bitLength$1, bits: bits$1, @@ -175261,12 +176453,12 @@ var Scalar_bigint = /*#__PURE__*/Object.freeze({ bxor: bxor$1, div: div$1, e: e$1, - eq: eq$1, + eq: eq$4, exp: exp$1, fromArray: fromArray$1, fromString: fromString$1, geq: geq$1, - gt: gt$1, + gt: gt$6, 
isNegative: isNegative$1, isOdd: isOdd$1, isZero: isZero$1, @@ -175274,13 +176466,13 @@ var Scalar_bigint = /*#__PURE__*/Object.freeze({ leq: leq$1, lnot: lnot$1, lor: lor$1, - lt: lt$1, - mod: mod$1, + lt: lt$5, + mod: mod$2, mul: mul$1, naf: naf$1, neg: neg$1, - neq: neq$1, - pow: pow$1, + neq: neq$4, + pow: pow$2, shiftLeft: shiftLeft$1, shiftRight: shiftRight$1, shl: shl$1, @@ -175380,20 +176572,20 @@ let { bits, toNumber: toNumber$1, toArray, - add, + add: add$2, sub, neg, mul, square, - pow, + pow: pow$1, exp, abs, div, - mod, - eq, - neq, - lt, - gt, + mod: mod$1, + eq: eq$3, + neq: neq$3, + lt: lt$4, + gt: gt$5, leq, geq, band, @@ -175407,7 +176599,7 @@ let { var _Scalar = /*#__PURE__*/Object.freeze({ __proto__: null, abs: abs, - add: add, + add: add$2, band: band, bitLength: bitLength, bits: bits, @@ -175415,14 +176607,14 @@ var _Scalar = /*#__PURE__*/Object.freeze({ bxor: bxor, div: div, e: e, - eq: eq, + eq: eq$3, exp: exp, fromArray: fromArray, fromRprBE: fromRprBE, fromRprLE: fromRprLE, fromString: fromString, geq: geq, - gt: gt, + gt: gt$5, isNegative: isNegative, isOdd: isOdd, isZero: isZero, @@ -175430,14 +176622,14 @@ var _Scalar = /*#__PURE__*/Object.freeze({ leq: leq, lnot: lnot, lor: lor, - lt: lt, - mod: mod, + lt: lt$4, + mod: mod$1, mul: mul, naf: naf, neg: neg, - neq: neq, + neq: neq$3, one: one, - pow: pow, + pow: pow$1, shiftLeft: shiftLeft, shiftRight: shiftRight, shl: shl, @@ -175516,7 +176708,7 @@ class ChaCha { } nextU64() { - return add(mul(this.nextU32(), 0x100000000), this.nextU32()); + return add$2(mul(this.nextU32(), 0x100000000), this.nextU32()); } nextBool() { @@ -175580,7 +176772,7 @@ function getThreadRng() { return threadRng; } -var utils$d = {}; +var utils$j = {}; /* Copyright 2019 0KIMS association. 
@@ -175603,7 +176795,7 @@ var utils$d = {}; const bigInt$d = BigIntegerExports$1; -utils$d.bigInt2BytesLE = function bigInt2BytesLE(_a, len) { +utils$j.bigInt2BytesLE = function bigInt2BytesLE(_a, len) { const b = Array(len); let v = bigInt$d(_a); for (let i=0; i. */ -const utils$c = utils$d; +const utils$i = utils$j; var build_int = function buildInt(module, n64, _prefix) { @@ -175662,7 +176854,7 @@ var build_int = function buildInt(module, n64, _prefix) { const n32 = n64*2; const n8 = n64*8; - module.alloc(n8, utils$c.bigInt2BytesLE(1, n8)); + module.alloc(n8, utils$i.bigInt2BytesLE(1, n8)); function buildCopy() { const f = module.addFunction(prefix+"_copy"); @@ -177142,7 +178334,7 @@ function buildBatchConvertion$2(module, fnName, internalFnName, sizeIn, sizeOut, const bigInt$c = BigIntegerExports$1; const buildInt = build_int; -const utils$b = utils$d; +const utils$h = utils$j; const buildExp$2 = build_timesscalar; const buildBatchInverse$2 = build_batchinverse; const buildBatchConvertion$1 = build_batchconvertion; @@ -177158,18 +178350,18 @@ var build_f1m = function buildF1m(module, _q, _prefix, _intPrefix) { if (module.modules[prefix]) return prefix; // already builded const intPrefix = buildInt(module, n64, _intPrefix); - const pq = module.alloc(n8, utils$b.bigInt2BytesLE(q, n8)); + const pq = module.alloc(n8, utils$h.bigInt2BytesLE(q, n8)); - module.alloc(utils$b.bigInt2BytesLE(bigInt$c.one.shiftLeft(n64*64).mod(q), n8)); - const pR2 = module.alloc(utils$b.bigInt2BytesLE(bigInt$c.one.shiftLeft(n64*64).square().mod(q), n8)); - const pOne = module.alloc(utils$b.bigInt2BytesLE(bigInt$c.one.shiftLeft(n64*64).mod(q), n8)); - const pZero = module.alloc(utils$b.bigInt2BytesLE(bigInt$c.zero, n8)); + module.alloc(utils$h.bigInt2BytesLE(bigInt$c.one.shiftLeft(n64*64).mod(q), n8)); + const pR2 = module.alloc(utils$h.bigInt2BytesLE(bigInt$c.one.shiftLeft(n64*64).square().mod(q), n8)); + const pOne = 
module.alloc(utils$h.bigInt2BytesLE(bigInt$c.one.shiftLeft(n64*64).mod(q), n8)); + const pZero = module.alloc(utils$h.bigInt2BytesLE(bigInt$c.zero, n8)); const _minusOne = q.minus(bigInt$c.one); const _e = _minusOne.shiftRight(1); // e = (p-1)/2 - const pe = module.alloc(n8, utils$b.bigInt2BytesLE(_e, n8)); + const pe = module.alloc(n8, utils$h.bigInt2BytesLE(_e, n8)); const _ePlusOne = _e.add(bigInt$c.one); // e = (p-1)/2 - const pePlusOne = module.alloc(n8, utils$b.bigInt2BytesLE(_ePlusOne, n8)); + const pePlusOne = module.alloc(n8, utils$h.bigInt2BytesLE(_ePlusOne, n8)); module.modules[prefix] = { pq: pq, @@ -177938,7 +179130,7 @@ var build_f1m = function buildF1m(module, _q, _prefix, _intPrefix) { while (!_nqr.modPow(_e, q).equals(_minusOne)) _nqr = _nqr.add(bigInt$c.one); } - module.alloc(utils$b.bigInt2BytesLE(_nqr.shiftLeft(n64*64).mod(q), n8)); + module.alloc(utils$h.bigInt2BytesLE(_nqr.shiftLeft(n64*64).mod(q), n8)); let s2 = 0; let _t = _minusOne; @@ -177947,13 +179139,13 @@ var build_f1m = function buildF1m(module, _q, _prefix, _intPrefix) { s2++; _t = _t.shiftRight(1); } - const pt = module.alloc(n8, utils$b.bigInt2BytesLE(_t, n8)); + const pt = module.alloc(n8, utils$h.bigInt2BytesLE(_t, n8)); const _nqrToT = _nqr.modPow(_t, q); - const pNqrToT = module.alloc(utils$b.bigInt2BytesLE(_nqrToT.shiftLeft(n64*64).mod(q), n8)); + const pNqrToT = module.alloc(utils$h.bigInt2BytesLE(_nqrToT.shiftLeft(n64*64).mod(q), n8)); const _tPlusOneOver2 = _t.add(1).shiftRight(1); - const ptPlusOneOver2 = module.alloc(n8, utils$b.bigInt2BytesLE(_tPlusOneOver2, n8)); + const ptPlusOneOver2 = module.alloc(n8, utils$h.bigInt2BytesLE(_tPlusOneOver2, n8)); function buildSqrt() { @@ -178339,7 +179531,7 @@ var build_f1 = function buildF1(module, _q, _prefix, _f1mPrefix, _intPrefix) { const buildExp$1 = build_timesscalar; const buildBatchInverse$1 = build_batchinverse; const bigInt$a = BigIntegerExports$1; -const utils$a = utils$d; +const utils$g = utils$j; var build_f2m = 
function buildF2m(module, mulNonResidueFn, prefix, f1mPrefix) { @@ -178779,8 +179971,8 @@ var build_f2m = function buildF2m(module, mulNonResidueFn, prefix, f1mPrefix) { const c = f.getCodeBuilder(); - const e34 = c.i32_const(module.alloc(utils$a.bigInt2BytesLE(bigInt$a(q).minus(bigInt$a(3)).divide(4), f1n8 ))); - const e12 = c.i32_const(module.alloc(utils$a.bigInt2BytesLE(bigInt$a(q).minus(bigInt$a(1)).divide(2), f1n8 ))); + const e34 = c.i32_const(module.alloc(utils$g.bigInt2BytesLE(bigInt$a(q).minus(bigInt$a(3)).divide(4), f1n8 ))); + const e12 = c.i32_const(module.alloc(utils$g.bigInt2BytesLE(bigInt$a(q).minus(bigInt$a(1)).divide(2), f1n8 ))); const a = c.getLocal("a"); const a1 = c.i32_const(module.alloc(f1n8*2)); @@ -178847,7 +180039,7 @@ var build_f2m = function buildF2m(module, mulNonResidueFn, prefix, f1mPrefix) { const c = f.getCodeBuilder(); - const e34 = c.i32_const(module.alloc(utils$a.bigInt2BytesLE(bigInt$a(q).minus(bigInt$a(3)).divide(4), f1n8 ))); + const e34 = c.i32_const(module.alloc(utils$g.bigInt2BytesLE(bigInt$a(q).minus(bigInt$a(3)).divide(4), f1n8 ))); const a = c.getLocal("a"); const a1 = c.i32_const(module.alloc(f1n8*2)); @@ -181793,7 +182985,7 @@ var build_curve_jacobian_a0 = function buildCurve(module, prefix, prefixField, p */ const bigInt$9 = BigIntegerExports$1; -const utils$9 = utils$d; +const utils$f = utils$j; var build_fft = function buildFFT(module, prefix, gPrefix, fPrefix, opGtimesF) { @@ -181832,7 +183024,7 @@ var build_fft = function buildFFT(module, prefix, gPrefix, fPrefix, opGtimesF) { for (let i=0; i (b==-1 ? 
0xFF: b) ); const pExponentNafBytes = module.alloc(exponentNafBytes); - module.alloc(utils$8.bigInt2BytesLE(exponent, 32)); + module.alloc(utils$e.bigInt2BytesLE(exponent, 32)); const f = module.addFunction(prefix+ "__cyclotomicExp_"+fnName); f.addParam("x", "i32"); @@ -185026,7 +186218,7 @@ var build_bn128 = function buildBN128(module, _prefix) { const exponent = bigInt$8("552484233613224096312617126783173147097382103762957654188882734314196910839907541213974502761540629817009608548654680343627701153829446747810907373256841551006201639677726139946029199968412598804882391702273019083653272047566316584365559776493027495458238373902875937659943504873220554161550525926302303331747463515644711876653177129578303191095900909191624817826566688241804408081892785725967931714097716709526092261278071952560171111444072049229123565057483750161460024353346284167282452756217662335528813519139808291170539072125381230815729071544861602750936964829313608137325426383735122175229541155376346436093930287402089517426973178917569713384748081827255472576937471496195752727188261435633271238710131736096299798168852925540549342330775279877006784354801422249722573783561685179618816480037695005515426162362431072245638324744480"); - const pExponent = module.alloc(utils$8.bigInt2BytesLE( exponent, 352 )); + const pExponent = module.alloc(utils$e.bigInt2BytesLE( exponent, 352 )); const c = f.getCodeBuilder(); @@ -185135,7 +186327,7 @@ var build_bn128 = function buildBN128(module, _prefix) { }; const bigInt$7 = BigIntegerExports$1; -const utils$7 = utils$d; +const utils$d = utils$j; const buildF1m =build_f1m; const buildF1 =build_f1; @@ -185169,11 +186361,11 @@ var build_bls12381 = function buildBLS12381(module, _prefix) { const frsize = n8r; - const pr = module.alloc(utils$7.bigInt2BytesLE( r, frsize )); + const pr = module.alloc(utils$d.bigInt2BytesLE( r, frsize )); const f1mPrefix = buildF1m(module, q, "f1m", "intq"); buildF1(module, r, "fr", "frm", "intr"); - const pG1b = 
module.alloc(utils$7.bigInt2BytesLE( toMontgomery(bigInt$7(4)), f1size )); + const pG1b = module.alloc(utils$d.bigInt2BytesLE( toMontgomery(bigInt$7(4)), f1size )); const g1mPrefix = buildCurve(module, "g1m", "f1m", pG1b); buildFFT$1(module, "frm", "frm", "frm", "frm_mul"); @@ -185183,8 +186375,8 @@ var build_bls12381 = function buildBLS12381(module, _prefix) { const f2mPrefix = buildF2m(module, "f1m_neg", "f2m", "f1m"); const pG2b = module.alloc([ - ...utils$7.bigInt2BytesLE( toMontgomery(bigInt$7("4")), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(bigInt$7("4")), f1size ) + ...utils$d.bigInt2BytesLE( toMontgomery(bigInt$7("4")), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(bigInt$7("4")), f1size ) ]); const g2mPrefix = buildCurve(module, "g2m", "f2m", pG2b); @@ -185240,9 +186432,9 @@ var build_bls12381 = function buildBLS12381(module, _prefix) { const pG1gen = module.alloc( [ - ...utils$7.bigInt2BytesLE( toMontgomery(G1gen[0]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G1gen[1]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G1gen[2]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G1gen[0]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G1gen[1]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G1gen[2]), f1size ), ] ); @@ -185254,9 +186446,9 @@ var build_bls12381 = function buildBLS12381(module, _prefix) { const pG1zero = module.alloc( [ - ...utils$7.bigInt2BytesLE( toMontgomery(G1zero[0]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G1zero[1]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G1zero[2]), f1size ) + ...utils$d.bigInt2BytesLE( toMontgomery(G1zero[0]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G1zero[1]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G1zero[2]), f1size ) ] ); @@ -185275,12 +186467,12 @@ var build_bls12381 = function buildBLS12381(module, _prefix) { const pG2gen = module.alloc( [ - ...utils$7.bigInt2BytesLE( toMontgomery(G2gen[0][0]), f1size ), - 
...utils$7.bigInt2BytesLE( toMontgomery(G2gen[0][1]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G2gen[1][0]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G2gen[1][1]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G2gen[2][0]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G2gen[2][1]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2gen[0][0]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2gen[0][1]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2gen[1][0]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2gen[1][1]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2gen[2][0]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2gen[2][1]), f1size ), ] ); @@ -185299,43 +186491,43 @@ var build_bls12381 = function buildBLS12381(module, _prefix) { const pG2zero = module.alloc( [ - ...utils$7.bigInt2BytesLE( toMontgomery(G2zero[0][0]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G2zero[0][1]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G2zero[1][0]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G2zero[1][1]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G2zero[2][0]), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(G2zero[2][1]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2zero[0][0]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2zero[0][1]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2zero[1][0]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2zero[1][1]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2zero[2][0]), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(G2zero[2][1]), f1size ), ] ); const pOneT = module.alloc([ - ...utils$7.bigInt2BytesLE( toMontgomery(1), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(0), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(0), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(0), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(0), f1size ), - ...utils$7.bigInt2BytesLE( 
toMontgomery(0), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(0), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(0), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(0), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(0), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(0), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(1), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(0), f1size ), ]); module.alloc([ - ...utils$7.bigInt2BytesLE( toMontgomery( bigInt$7(2).modInv(q)), f1size ), - ...utils$7.bigInt2BytesLE( bigInt$7(0), f1size ) + ...utils$d.bigInt2BytesLE( toMontgomery( bigInt$7(2).modInv(q)), f1size ), + ...utils$d.bigInt2BytesLE( bigInt$7(0), f1size ) ]); const pBls12381Twist = module.alloc([ - ...utils$7.bigInt2BytesLE( toMontgomery(1), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery(1), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(1), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery(1), f1size ), ]); module.alloc([ - ...utils$7.bigInt2BytesLE( toMontgomery("4"), f1size ), - ...utils$7.bigInt2BytesLE( toMontgomery("4"), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery("4"), f1size ), + ...utils$d.bigInt2BytesLE( toMontgomery("4"), f1size ), ]); function build_mulNR2() { @@ -186121,8 +187313,8 @@ var build_bls12381 = function buildBLS12381(module, _prefix) { const Rc1 = c.i32_add(c.getLocal("r"), 
c.i32_const(i*f2size + f1size)); const coef = mul2(F12[Math.floor(i/3)][n%12] , F6[i%3][n%6]); const pCoef = module.alloc([ - ...utils$7.bigInt2BytesLE(toMontgomery(coef[0]), n8q), - ...utils$7.bigInt2BytesLE(toMontgomery(coef[1]), n8q), + ...utils$d.bigInt2BytesLE(toMontgomery(coef[0]), n8q), + ...utils$d.bigInt2BytesLE(toMontgomery(coef[1]), n8q), ]); if (n%2 == 1) { f.addCode( @@ -186447,7 +187639,7 @@ var build_bls12381 = function buildBLS12381(module, _prefix) { const exponent = bigInt$7("322277361516934140462891564586510139908379969514828494218366688025288661041104682794998680497580008899973249814104447692778988208376779573819485263026159588510513834876303014016798809919343532899164848730280942609956670917565618115867287399623286813270357901731510188149934363360381614501334086825442271920079363289954510565375378443704372994881406797882676971082200626541916413184642520269678897559532260949334760604962086348898118982248842634379637598665468817769075878555493752214492790122785850202957575200176084204422751485957336465472324810982833638490904279282696134323072515220044451592646885410572234451732790590013479358343841220074174848221722017083597872017638514103174122784843925578370430843522959600095676285723737049438346544753168912974976791528535276317256904336520179281145394686565050419250614107803233314658825463117900250701199181529205942363159325765991819433914303908860460720581408201373164047773794825411011922305820065611121544561808414055302212057471395719432072209245600258134364584636810093520285711072578721435517884103526483832733289802426157301542744476740008494780363354305116978805620671467071400711358839553375340724899735460480144599782014906586543813292157922220645089192130209334926661588737007768565838519456601560804957985667880395221049249803753582637708560"); - const pExponent = module.alloc(utils$7.bigInt2BytesLE( exponent, 544 )); + const pExponent = module.alloc(utils$d.bigInt2BytesLE( exponent, 544 )); const c = f.getCodeBuilder(); @@ -186543,17 
+187735,17 @@ var build_bls12381 = function buildBLS12381(module, _prefix) { ]; const wInv = c.i32_const(module.alloc([ - ...utils$7.bigInt2BytesLE(toMontgomery(WINV[0]), n8q), - ...utils$7.bigInt2BytesLE(toMontgomery(WINV[1]), n8q), + ...utils$d.bigInt2BytesLE(toMontgomery(WINV[0]), n8q), + ...utils$d.bigInt2BytesLE(toMontgomery(WINV[1]), n8q), ])); - const frob2X = c.i32_const(module.alloc(utils$7.bigInt2BytesLE(toMontgomery(FROB2X), n8q))); + const frob2X = c.i32_const(module.alloc(utils$d.bigInt2BytesLE(toMontgomery(FROB2X), n8q))); const frob3Y = c.i32_const(module.alloc([ - ...utils$7.bigInt2BytesLE(toMontgomery(FROB3Y[0]), n8q), - ...utils$7.bigInt2BytesLE(toMontgomery(FROB3Y[1]), n8q), + ...utils$d.bigInt2BytesLE(toMontgomery(FROB3Y[0]), n8q), + ...utils$d.bigInt2BytesLE(toMontgomery(FROB3Y[1]), n8q), ])); - const z = c.i32_const(module.alloc(utils$7.bigInt2BytesLE(finalExpZ, 8))); + const z = c.i32_const(module.alloc(utils$d.bigInt2BytesLE(finalExpZ, 8))); const px = c.getLocal("p"); const py = c.i32_add(c.getLocal("p"), c.i32_const(f2size)); @@ -186654,10 +187846,10 @@ var build_bls12381 = function buildBLS12381(module, _prefix) { const BETA2 = bigInt$7("793479390729215512621379701633421447060886740281060493010456487427281649075476305620758731620350"); const Z2M1D3 = finalExpZ.times(finalExpZ).minus(bigInt$7.one).divide(bigInt$7(3)); - const beta = c.i32_const(module.alloc(utils$7.bigInt2BytesLE(toMontgomery(BETA), n8q))); - const beta2 = c.i32_const(module.alloc(utils$7.bigInt2BytesLE(toMontgomery(BETA2), n8q))); + const beta = c.i32_const(module.alloc(utils$d.bigInt2BytesLE(toMontgomery(BETA), n8q))); + const beta2 = c.i32_const(module.alloc(utils$d.bigInt2BytesLE(toMontgomery(BETA2), n8q))); - const z2m1d3 = c.i32_const(module.alloc(utils$7.bigInt2BytesLE(Z2M1D3, 16))); + const z2m1d3 = c.i32_const(module.alloc(utils$d.bigInt2BytesLE(Z2M1D3, 16))); const px = c.getLocal("p"); @@ -187086,13 +188278,13 @@ var utils_bigint = /*#__PURE__*/Object.freeze({ 
unstringifyBigInts: unstringifyBigInts$5 }); -let utils$6 = {}; +let utils$c = {}; const supportsNativeBigInt = typeof BigInt === "function"; if (supportsNativeBigInt) { - Object.assign(utils$6, utils_native); + Object.assign(utils$c, utils_native); } else { - Object.assign(utils$6, utils_bigint); + Object.assign(utils$c, utils_bigint); } @@ -187112,7 +188304,7 @@ function _revSlow(idx, bits) { return res; } -utils$6.bitReverse = function bitReverse(idx, bits) { +utils$c.bitReverse = function bitReverse(idx, bits) { return ( _revTable[idx >>> 24] | (_revTable[(idx >>> 16) & 0xFF] << 8) | @@ -187122,19 +188314,19 @@ utils$6.bitReverse = function bitReverse(idx, bits) { }; -utils$6.log2 = function log2( V ) +utils$c.log2 = function log2( V ) { return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) | ( ( V & 0xAAAAAAAA ) !== 0 ) ); }; -utils$6.buffReverseBits = function buffReverseBits(buff, eSize) { +utils$c.buffReverseBits = function buffReverseBits(buff, eSize) { const n = buff.byteLength /eSize; - const bits = utils$6.log2(n); + const bits = utils$c.log2(n); if (n != (1 << bits)) { throw new Error("Invalid number of pointers"); } for (let i=0; ir) { const tmp = buff.slice(i*eSize, (i+1)*eSize); buff.set( buff.slice(r*eSize, (r+1)*eSize), i*eSize); @@ -187144,7 +188336,7 @@ utils$6.buffReverseBits = function buffReverseBits(buff, eSize) { }; -utils$6.array2buffer = function(arr, sG) { +utils$c.array2buffer = function(arr, sG) { const buff = new Uint8Array(sG*arr.length); for (let i=0; i. 
*/ -const utils$4 = utils$5; +const utils$a = utils$b; let CodeBuilder$1 = class CodeBuilder { constructor(func) { @@ -190440,63 +191632,63 @@ let CodeBuilder$1 = class CodeBuilder { const idx = this.func.localIdxByName[localName]; if (idx === undefined) throw new Error(`Local Variable not defined: Function: ${this.functionName} local: ${localName} `); - return [...valCode, 0x21, ...utils$4.varuint32( idx )]; + return [...valCode, 0x21, ...utils$a.varuint32( idx )]; } teeLocal(localName, valCode) { const idx = this.func.localIdxByName[localName]; if (idx === undefined) throw new Error(`Local Variable not defined: Function: ${this.functionName} local: ${localName} `); - return [...valCode, 0x22, ...utils$4.varuint32( idx )]; + return [...valCode, 0x22, ...utils$a.varuint32( idx )]; } getLocal(localName) { const idx = this.func.localIdxByName[localName]; if (idx === undefined) throw new Error(`Local Variable not defined: Function: ${this.functionName} local: ${localName} `); - return [0x20, ...utils$4.varuint32( idx )]; + return [0x20, ...utils$a.varuint32( idx )]; } i64_load8_s(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 0 : _align; // 8 bits alignment by default - return [...idxCode, 0x30, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x30, align, ...utils$a.varuint32(offset)]; } i64_load8_u(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 0 : _align; // 8 bits alignment by default - return [...idxCode, 0x31, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x31, align, ...utils$a.varuint32(offset)]; } i64_load16_s(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 
1 : _align; // 16 bits alignment by default - return [...idxCode, 0x32, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x32, align, ...utils$a.varuint32(offset)]; } i64_load16_u(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 1 : _align; // 16 bits alignment by default - return [...idxCode, 0x33, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x33, align, ...utils$a.varuint32(offset)]; } i64_load32_s(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 2 : _align; // 32 bits alignment by default - return [...idxCode, 0x34, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x34, align, ...utils$a.varuint32(offset)]; } i64_load32_u(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 2 : _align; // 32 bits alignment by default - return [...idxCode, 0x35, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x35, align, ...utils$a.varuint32(offset)]; } i64_load(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 
3 : _align; // 64 bits alignment by default - return [...idxCode, 0x29, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x29, align, ...utils$a.varuint32(offset)]; } @@ -190515,7 +191707,7 @@ let CodeBuilder$1 = class CodeBuilder { align = _align; codeVal = _codeVal; } - return [...idxCode, ...codeVal, 0x37, align, ...utils$4.varuint32(offset)]; + return [...idxCode, ...codeVal, 0x37, align, ...utils$a.varuint32(offset)]; } i64_store32(idxCode, _offset, _align, _codeVal) { @@ -190533,7 +191725,7 @@ let CodeBuilder$1 = class CodeBuilder { align = _align; codeVal = _codeVal; } - return [...idxCode, ...codeVal, 0x3e, align, ...utils$4.varuint32(offset)]; + return [...idxCode, ...codeVal, 0x3e, align, ...utils$a.varuint32(offset)]; } @@ -190552,7 +191744,7 @@ let CodeBuilder$1 = class CodeBuilder { align = _align; codeVal = _codeVal; } - return [...idxCode, ...codeVal, 0x3d, align, ...utils$4.varuint32(offset)]; + return [...idxCode, ...codeVal, 0x3d, align, ...utils$a.varuint32(offset)]; } @@ -190571,37 +191763,37 @@ let CodeBuilder$1 = class CodeBuilder { align = _align; codeVal = _codeVal; } - return [...idxCode, ...codeVal, 0x3c, align, ...utils$4.varuint32(offset)]; + return [...idxCode, ...codeVal, 0x3c, align, ...utils$a.varuint32(offset)]; } i32_load8_s(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 0 : _align; // 32 bits alignment by default - return [...idxCode, 0x2c, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x2c, align, ...utils$a.varuint32(offset)]; } i32_load8_u(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 0 : _align; // 32 bits alignment by default - return [...idxCode, 0x2d, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x2d, align, ...utils$a.varuint32(offset)]; } i32_load16_s(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 
1 : _align; // 32 bits alignment by default - return [...idxCode, 0x2e, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x2e, align, ...utils$a.varuint32(offset)]; } i32_load16_u(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 1 : _align; // 32 bits alignment by default - return [...idxCode, 0x2f, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x2f, align, ...utils$a.varuint32(offset)]; } i32_load(idxCode, _offset, _align) { const offset = _offset || 0; const align = (_align === undefined) ? 2 : _align; // 32 bits alignment by default - return [...idxCode, 0x28, align, ...utils$4.varuint32(offset)]; + return [...idxCode, 0x28, align, ...utils$a.varuint32(offset)]; } i32_store(idxCode, _offset, _align, _codeVal) { @@ -190619,7 +191811,7 @@ let CodeBuilder$1 = class CodeBuilder { align = _align; codeVal = _codeVal; } - return [...idxCode, ...codeVal, 0x36, align, ...utils$4.varuint32(offset)]; + return [...idxCode, ...codeVal, 0x36, align, ...utils$a.varuint32(offset)]; } @@ -190638,7 +191830,7 @@ let CodeBuilder$1 = class CodeBuilder { align = _align; codeVal = _codeVal; } - return [...idxCode, ...codeVal, 0x3b, align, ...utils$4.varuint32(offset)]; + return [...idxCode, ...codeVal, 0x3b, align, ...utils$a.varuint32(offset)]; } i32_store8(idxCode, _offset, _align, _codeVal) { @@ -190656,14 +191848,14 @@ let CodeBuilder$1 = class CodeBuilder { align = _align; codeVal = _codeVal; } - return [...idxCode, ...codeVal, 0x3a, align, ...utils$4.varuint32(offset)]; + return [...idxCode, ...codeVal, 0x3a, align, ...utils$a.varuint32(offset)]; } call(fnName, ...args) { const idx = this.module.functionIdxByName[fnName]; if (idx === undefined) throw new Error(`Function not defined: Function: ${fnName}`); - return [...[].concat(...args), 0x10, ...utils$4.varuint32(idx)]; + return [...[].concat(...args), 0x10, ...utils$a.varuint32(idx)]; } call_indirect(fnIdx, ...args) { @@ -190682,13 +191874,13 @@ let 
CodeBuilder$1 = class CodeBuilder { loop(...args) { return [0x03, 0x40, ...[].concat(...[...args]), 0x0b]; } - br_if(relPath, condCode) { return [...condCode, 0x0d, ...utils$4.varuint32(relPath)]; } - br(relPath) { return [0x0c, ...utils$4.varuint32(relPath)]; } + br_if(relPath, condCode) { return [...condCode, 0x0d, ...utils$a.varuint32(relPath)]; } + br(relPath) { return [0x0c, ...utils$a.varuint32(relPath)]; } ret(rCode) { return [...rCode, 0x0f]; } drop(dCode) { return [...dCode, 0x1a]; } - i64_const(num) { return [0x42, ...utils$4.varint64(num)]; } - i32_const(num) { return [0x41, ...utils$4.varint32(num)]; } + i64_const(num) { return [0x42, ...utils$a.varint64(num)]; } + i32_const(num) { return [0x41, ...utils$a.varint32(num)]; } i64_eqz(opcode) { return [...opcode, 0x50]; } @@ -190779,7 +191971,7 @@ var codebuilder = CodeBuilder$1; */ const CodeBuilder = codebuilder; -const utils$3 = utils$5; +const utils$9 = utils$b; const typeCodes = { "i32": 0x7f, @@ -190843,25 +192035,25 @@ let FunctionBuilder$1 = class FunctionBuilder { } getSignature() { - const params = [...utils$3.varuint32(this.params.length), ...this.params.map((p) => typeCodes[p.type])]; + const params = [...utils$9.varuint32(this.params.length), ...this.params.map((p) => typeCodes[p.type])]; const returns = this.returnType ? 
[0x01, typeCodes[this.returnType]] : [0]; return [0x60, ...params, ...returns]; } getBody() { const locals = this.locals.map((l) => [ - ...utils$3.varuint32(l.length), + ...utils$9.varuint32(l.length), typeCodes[l.type] ]); const body = [ - ...utils$3.varuint32(this.locals.length), + ...utils$9.varuint32(this.locals.length), ...[].concat(...locals), ...this.code, 0x0b ]; return [ - ...utils$3.varuint32(body.length), + ...utils$9.varuint32(body.length), ...body ]; } @@ -190897,7 +192089,7 @@ var functionbuilder = FunctionBuilder$1; */ const FunctionBuilder = functionbuilder; -const utils$2 = utils$5; +const utils$8 = utils$b; let ModuleBuilder$1 = class ModuleBuilder { @@ -190921,8 +192113,8 @@ let ModuleBuilder$1 = class ModuleBuilder { build() { this._setSignatures(); return new Uint8Array([ - ...utils$2.u32(0x6d736100), - ...utils$2.u32(1), + ...utils$8.u32(0x6d736100), + ...utils$8.u32(1), ...this._buildType(), ...this._buildImport(), ...this._buildFunctionDeclarations(), @@ -191030,13 +192222,13 @@ let ModuleBuilder$1 = class ModuleBuilder { const signatureIdxByName = {}; if (this.functionsTable.length>0) { const signature = this.functions[this.functionsTable[0]].getSignature(); - const signatureName = "s_"+utils$2.toHexString(signature); + const signatureName = "s_"+utils$8.toHexString(signature); signatureIdxByName[signatureName] = 0; this.signatures.push(signature); } for (let i=0; i= BYTES_MIN$1, 'digestLength must be at least ' + BYTES_MIN$1 + ', was given ' + digestLength); - assert$2(digestLength <= BYTES_MAX$1, 'digestLength must be at most ' + BYTES_MAX$1 + ', was given ' + digestLength); + assert$6(digestLength >= BYTES_MIN$1, 'digestLength must be at least ' + BYTES_MIN$1 + ', was given ' + digestLength); + assert$6(digestLength <= BYTES_MAX$1, 'digestLength must be at most ' + BYTES_MAX$1 + ', was given ' + digestLength); if (key != null) { - assert$2(key instanceof Uint8Array, 'key must be Uint8Array or Buffer'); - assert$2(key.length >= 
KEYBYTES_MIN$1, 'key must be at least ' + KEYBYTES_MIN$1 + ', was given ' + key.length); - assert$2(key.length <= KEYBYTES_MAX$1, 'key must be at least ' + KEYBYTES_MAX$1 + ', was given ' + key.length); + assert$6(key instanceof Uint8Array, 'key must be Uint8Array or Buffer'); + assert$6(key.length >= KEYBYTES_MIN$1, 'key must be at least ' + KEYBYTES_MIN$1 + ', was given ' + key.length); + assert$6(key.length <= KEYBYTES_MAX$1, 'key must be at least ' + KEYBYTES_MAX$1 + ', was given ' + key.length); } if (salt != null) { - assert$2(salt instanceof Uint8Array, 'salt must be Uint8Array or Buffer'); - assert$2(salt.length === SALTBYTES$1, 'salt must be exactly ' + SALTBYTES$1 + ', was given ' + salt.length); + assert$6(salt instanceof Uint8Array, 'salt must be Uint8Array or Buffer'); + assert$6(salt.length === SALTBYTES$1, 'salt must be exactly ' + SALTBYTES$1 + ', was given ' + salt.length); } if (personal != null) { - assert$2(personal instanceof Uint8Array, 'personal must be Uint8Array or Buffer'); - assert$2(personal.length === PERSONALBYTES$1, 'personal must be exactly ' + PERSONALBYTES$1 + ', was given ' + personal.length); + assert$6(personal instanceof Uint8Array, 'personal must be Uint8Array or Buffer'); + assert$6(personal.length === PERSONALBYTES$1, 'personal must be exactly ' + PERSONALBYTES$1 + ', was given ' + personal.length); } } @@ -191779,8 +192971,8 @@ Blake2b$1.prototype._realloc = function (size) { }; Blake2b$1.prototype.update = function (input) { - assert$2(this.finalized === false, 'Hash instance finalized'); - assert$2(input instanceof Uint8Array, 'input must be Uint8Array or Buffer'); + assert$6(this.finalized === false, 'Hash instance finalized'); + assert$6(input instanceof Uint8Array, 'input must be Uint8Array or Buffer'); if (head + input.length > this._memory.length) this._realloc(head + input.length); this._memory.set(input, head); @@ -191789,7 +192981,7 @@ Blake2b$1.prototype.update = function (input) { }; Blake2b$1.prototype.digest = 
function (enc) { - assert$2(this.finalized === false, 'Hash instance finalized'); + assert$6(this.finalized === false, 'Hash instance finalized'); this.finalized = true; freeList.push(this.pointer); @@ -191803,7 +192995,7 @@ Blake2b$1.prototype.digest = function (enc) { return b4a.toString(this._memory, enc, this.pointer + 128, this.pointer + 128 + this.digestLength) } - assert$2(enc instanceof Uint8Array && enc.length >= this.digestLength, 'input must be Uint8Array or Buffer'); + assert$6(enc instanceof Uint8Array && enc.length >= this.digestLength, 'input must be Uint8Array or Buffer'); for (var i = 0; i < this.digestLength; i++) { enc[i] = this._memory[this.pointer + 128 + i]; } @@ -191837,7 +193029,7 @@ function noop () {} var blake2bWasmExports = blake2bWasm.exports; -var assert$1 = nanoassert; +var assert$5 = nanoassert; var b2wasm = blake2bWasmExports; // 64-bit unsigned addition @@ -192050,15 +193242,15 @@ function Blake2b (outlen, key, salt, personal) { } Blake2b.prototype.update = function (input) { - assert$1(input instanceof Uint8Array, 'input must be Uint8Array or Buffer'); + assert$5(input instanceof Uint8Array, 'input must be Uint8Array or Buffer'); blake2bUpdate(this, input); return this }; Blake2b.prototype.digest = function (out) { var buf = (!out || out === 'binary' || out === 'hex') ? 
new Uint8Array(this.outlen) : out; - assert$1(buf instanceof Uint8Array, 'out must be "binary", "hex", Uint8Array, or Buffer'); - assert$1(buf.length >= this.outlen, 'out must have at least outlen bytes of space'); + assert$5(buf instanceof Uint8Array, 'out must be "binary", "hex", Uint8Array, or Buffer'); + assert$5(buf.length >= this.outlen, 'out must have at least outlen bytes of space'); blake2bFinal(this, buf); if (out === 'hex') return hexSlice(buf) return buf @@ -192116,20 +193308,20 @@ var Proto = Blake2b; blake2b$2.exports = function createHash (outlen, key, salt, personal, noAssert) { if (noAssert !== true) { - assert$1(outlen >= BYTES_MIN, 'outlen must be at least ' + BYTES_MIN + ', was given ' + outlen); - assert$1(outlen <= BYTES_MAX, 'outlen must be at most ' + BYTES_MAX + ', was given ' + outlen); + assert$5(outlen >= BYTES_MIN, 'outlen must be at least ' + BYTES_MIN + ', was given ' + outlen); + assert$5(outlen <= BYTES_MAX, 'outlen must be at most ' + BYTES_MAX + ', was given ' + outlen); if (key != null) { - assert$1(key instanceof Uint8Array, 'key must be Uint8Array or Buffer'); - assert$1(key.length >= KEYBYTES_MIN, 'key must be at least ' + KEYBYTES_MIN + ', was given ' + key.length); - assert$1(key.length <= KEYBYTES_MAX, 'key must be at most ' + KEYBYTES_MAX + ', was given ' + key.length); + assert$5(key instanceof Uint8Array, 'key must be Uint8Array or Buffer'); + assert$5(key.length >= KEYBYTES_MIN, 'key must be at least ' + KEYBYTES_MIN + ', was given ' + key.length); + assert$5(key.length <= KEYBYTES_MAX, 'key must be at most ' + KEYBYTES_MAX + ', was given ' + key.length); } if (salt != null) { - assert$1(salt instanceof Uint8Array, 'salt must be Uint8Array or Buffer'); - assert$1(salt.length === SALTBYTES, 'salt must be exactly ' + SALTBYTES + ', was given ' + salt.length); + assert$5(salt instanceof Uint8Array, 'salt must be Uint8Array or Buffer'); + assert$5(salt.length === SALTBYTES, 'salt must be exactly ' + SALTBYTES + ', was given 
' + salt.length); } if (personal != null) { - assert$1(personal instanceof Uint8Array, 'personal must be Uint8Array or Buffer'); - assert$1(personal.length === PERSONALBYTES, 'personal must be exactly ' + PERSONALBYTES + ', was given ' + personal.length); + assert$5(personal instanceof Uint8Array, 'personal must be Uint8Array or Buffer'); + assert$5(personal.length === PERSONALBYTES, 'personal must be exactly ' + PERSONALBYTES + ', was given ' + personal.length); } } @@ -192195,7 +193387,7 @@ function requireBuffer_list () { function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } var _require = require$$0$7, Buffer = _require.Buffer; - var _require2 = require$$1$2, + var _require2 = require$$1$3, inspect = _require2.inspect; var custom = inspect && inspect.custom || 'inspect'; function copyBuffer(src, target, offset) { @@ -192473,12 +193665,12 @@ function requireDestroy () { return destroy_1; } -var errors = {}; +var errors$1 = {}; var hasRequiredErrors; function requireErrors () { - if (hasRequiredErrors) return errors; + if (hasRequiredErrors) return errors$1; hasRequiredErrors = 1; const codes = {}; @@ -192594,8 +193786,8 @@ function requireErrors () { }, TypeError); createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); - errors.codes = codes; - return errors; + errors$1.codes = codes; + return errors$1; } var state; @@ -192694,7 +193886,7 @@ function requireNode () { * For Node.js, simply re-export the core `util.deprecate` function. 
*/ - node = require$$1$2.deprecate; + node = require$$1$3.deprecate; return node; } @@ -194158,7 +195350,7 @@ function require_stream_readable () { } /**/ - var debugUtil = require$$1$2; + var debugUtil = require$$1$3; var debug; if (debugUtil && debugUtil.debuglog) { debug = debugUtil.debuglog('stream'); @@ -196208,19 +197400,19 @@ function requireBlake384 () { return blake384; } -var lib$1; +var lib$2; var hasRequiredLib; function requireLib () { - if (hasRequiredLib) return lib$1; + if (hasRequiredLib) return lib$2; hasRequiredLib = 1; - lib$1 = { + lib$2 = { Blake224: requireBlake224(), Blake256: requireBlake256(), Blake384: requireBlake384(), Blake512: requireBlake512() }; - return lib$1; + return lib$2; } var js; @@ -196736,7 +197928,7 @@ function addSlice(array) { function isInteger(value) { return (typeof (value) === "number" && value == value && (value % 1) === 0); } -function isBytes(value) { +function isBytes$4(value) { if (value == null) { return false; } @@ -196779,7 +197971,7 @@ function arrayify(value, options) { if (isHexable(value)) { value = value.toHexString(); } - if (isHexString(value)) { + if (isHexString$2(value)) { let hex = value.substring(2); if (hex.length % 2) { if (options.hexPad === "left") { @@ -196798,12 +197990,12 @@ function arrayify(value, options) { } return addSlice(new Uint8Array(result)); } - if (isBytes(value)) { + if (isBytes$4(value)) { return addSlice(new Uint8Array(value)); } return logger$1.throwArgumentError("invalid arrayify value", "value", value); } -function isHexString(value, length) { +function isHexString$2(value, length) { if (typeof (value) !== "string" || !value.match(/^0x[0-9A-Fa-f]*$/)) { return false; } @@ -196813,7 +198005,7 @@ function isHexString(value, length) { return true; } -var sha3$1 = {exports: {}}; +var sha3$3 = {exports: {}}; /** * [js-sha3]{@link https://github.com/emn178/js-sha3} @@ -197466,13 +198658,13 @@ var sha3$1 = {exports: {}}; } } })(); -} (sha3$1)); +} (sha3$3)); -var sha3Exports = 
sha3$1.exports; -var sha3 = /*@__PURE__*/getDefaultExportFromCjs(sha3Exports); +var sha3Exports = sha3$3.exports; +var sha3$2 = /*@__PURE__*/getDefaultExportFromCjs(sha3Exports); function keccak256(data) { - return '0x' + sha3.keccak_256(arrayify(data)); + return '0x' + sha3$2.keccak_256(arrayify(data)); } const version = "strings/5.7.0"; @@ -197859,11 +199051,11 @@ var poseidonConstants = { // License: LGPL-3.0+ // -const { unstringifyBigInts: unstringifyBigInts$3 } = utils$1; +const { unstringifyBigInts: unstringifyBigInts$3 } = utils$7; unstringifyBigInts$3(poseidonConstants); -var __async$7 = (__this, __arguments, generator) => { +var __async$8 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -197888,13 +199080,13 @@ class Pedersen { this.pedersenPromise = this.initPedersen(); } initPedersen() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { this.pedersenHash = yield buildPedersenHash(); this.babyJub = this.pedersenHash.babyJub; }); } unpackPoint(buffer) { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { var _a, _b; yield this.pedersenPromise; return (_b = this.babyJub) == null ? void 0 : _b.unpackPoint((_a = this.pedersenHash) == null ? 
void 0 : _a.hash(buffer)); @@ -197907,13 +199099,13 @@ class Pedersen { } const pedersen = new Pedersen(); function buffPedersenHash(buffer) { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { const [hash] = yield pedersen.unpackPoint(buffer); return pedersen.toStringBuffer(hash); }); } -var __async$6 = (__this, __arguments, generator) => { +var __async$7 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -197934,7 +199126,7 @@ var __async$6 = (__this, __arguments, generator) => { }); }; function createDeposit(_0) { - return __async$6(this, arguments, function* ({ nullifier, secret }) { + return __async$7(this, arguments, function* ({ nullifier, secret }) { const preimage = new Uint8Array([...leInt2Buff$3(nullifier), ...leInt2Buff$3(secret)]); const noteHex = toFixedHex(bytesToBN(preimage), 62); const commitment = BigInt(yield buffPedersenHash(preimage)); @@ -197994,7 +199186,7 @@ class Deposit { ); } static createNote(_0) { - return __async$6(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) { + return __async$7(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) { if (!nullifier) { nullifier = rBigInt(31); } @@ -198008,7 +199200,7 @@ class Deposit { const newDeposit = new Deposit({ currency: currency.toLowerCase(), amount, - netId: Number(netId), + netId, note: `tornado-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.noteHex}`, noteHex: depositObject.noteHex, invoice: `tornadoInvoice-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.commitmentHex}`, @@ -198021,7 +199213,7 @@ class Deposit { }); } static parseNote(noteString) { - return __async$6(this, null, function* () { + return __async$7(this, null, function* () { const noteRegex = new RegExp("tornado-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{124})", "g"); const match = noteRegex.exec(noteString); if (!match) { @@ -198084,6 
+199276,17935 @@ class Invoice { } } +var dist$5 = {}; + +var personalSign$1 = {}; + +var dist$4 = {}; + +var constants$2 = {}; + +var secp256k1$1 = {}; + +var secp256k1 = {}; + +var sha256 = {}; + +var _sha2 = {}; + +var _assert$1 = {}; + +Object.defineProperty(_assert$1, "__esModule", { value: true }); +_assert$1.output = _assert$1.exists = _assert$1.hash = _assert$1.bytes = _assert$1.bool = _assert$1.number = void 0; +function number$3(n) { + if (!Number.isSafeInteger(n) || n < 0) + throw new Error(`Wrong positive integer: ${n}`); +} +_assert$1.number = number$3; +function bool$2(b) { + if (typeof b !== 'boolean') + throw new Error(`Expected boolean, not ${b}`); +} +_assert$1.bool = bool$2; +// copied from utils +function isBytes$3(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +function bytes$3(b, ...lengths) { + if (!isBytes$3(b)) + throw new Error('Expected Uint8Array'); + if (lengths.length > 0 && !lengths.includes(b.length)) + throw new Error(`Expected Uint8Array of length ${lengths}, not of length=${b.length}`); +} +_assert$1.bytes = bytes$3; +function hash$1(hash) { + if (typeof hash !== 'function' || typeof hash.create !== 'function') + throw new Error('Hash should be wrapped by utils.wrapConstructor'); + number$3(hash.outputLen); + number$3(hash.blockLen); +} +_assert$1.hash = hash$1; +function exists$1(instance, checkFinished = true) { + if (instance.destroyed) + throw new Error('Hash instance has been destroyed'); + if (checkFinished && instance.finished) + throw new Error('Hash#digest() has already been called'); +} +_assert$1.exists = exists$1; +function output$1(out, instance) { + bytes$3(out); + const min = instance.outputLen; + if (out.length < min) { + throw new Error(`digestInto() expects output buffer of length at least ${min}`); + } +} +_assert$1.output = output$1; +const assert$4 = { number: number$3, bool: bool$2, bytes: bytes$3, hash: hash$1, exists: exists$1, 
output: output$1 }; +_assert$1.default = assert$4; + +var utils$6 = {}; + +var crypto$1 = {}; + +Object.defineProperty(crypto$1, "__esModule", { value: true }); +crypto$1.crypto = void 0; +crypto$1.crypto = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined; + +(function (exports) { + /*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + Object.defineProperty(exports, "__esModule", { value: true }); + exports.randomBytes = exports.wrapXOFConstructorWithOpts = exports.wrapConstructorWithOpts = exports.wrapConstructor = exports.checkOpts = exports.Hash = exports.concatBytes = exports.toBytes = exports.utf8ToBytes = exports.asyncLoop = exports.nextTick = exports.hexToBytes = exports.bytesToHex = exports.isLE = exports.rotr = exports.createView = exports.u32 = exports.u8 = void 0; + // We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. + // node.js versions earlier than v19 don't declare it in global scope. + // For node.js, package.json#exports field mapping rewrites import + // from `crypto` to `cryptoNode`, which imports native module. + // Makes the utils un-importable in browsers without a bundler. + // Once node.js 18 is deprecated (2025-04-30), we can just drop the import. 
+ const crypto_1 = crypto$1; + // Cast array to different type + const u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); + exports.u8 = u8; + const u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); + exports.u32 = u32; + function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); + } + // Cast array to view + const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); + exports.createView = createView; + // The rotate right (circular right shift) operation for uint32 + const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift); + exports.rotr = rotr; + // big-endian hardware is rare. Just in case someone still decides to run hashes: + // early-throw an error because we don't support BE yet. + // Other libraries would silently corrupt the data instead of throwing an error, + // when they don't support it. + exports.isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44; + if (!exports.isLE) + throw new Error('Non little-endian hardware is not supported'); + // Array where index 0xf0 (240) is mapped to string 'f0' + const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); + /** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ + function bytesToHex(bytes) { + if (!isBytes(bytes)) + throw new Error('Uint8Array expected'); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; + } + exports.bytesToHex = bytesToHex; + // We use optimized technique to convert hex string to byte array + const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 }; + function asciiToBase16(char) { + if (char >= asciis._0 && char <= asciis._9) + return char - asciis._0; + if (char >= asciis._A && char <= asciis._F) + 
return char - (asciis._A - 10); + if (char >= asciis._a && char <= asciis._f) + return char - (asciis._a - 10); + return; + } + /** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ + function hexToBytes(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const hl = hex.length; + const al = hl / 2; + if (hl % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + hl); + const array = new Uint8Array(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = asciiToBase16(hex.charCodeAt(hi)); + const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); + if (n1 === undefined || n2 === undefined) { + const char = hex[hi] + hex[hi + 1]; + throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); + } + array[ai] = n1 * 16 + n2; + } + return array; + } + exports.hexToBytes = hexToBytes; + // There is no setImmediate in browser and setTimeout is slow. + // call of async fn will return Promise, which will be fullfiled only on + // next scheduler queue processing step and this is exactly what we need. 
+ const nextTick = async () => { }; + exports.nextTick = nextTick; + // Returns control to thread each 'tick' ms to avoid blocking + async function asyncLoop(iters, tick, cb) { + let ts = Date.now(); + for (let i = 0; i < iters; i++) { + cb(i); + // Date.now() is not monotonic, so in case if clock goes backwards we return return control too + const diff = Date.now() - ts; + if (diff >= 0 && diff < tick) + continue; + await (0, exports.nextTick)(); + ts += diff; + } + } + exports.asyncLoop = asyncLoop; + /** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ + function utf8ToBytes(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 + } + exports.utf8ToBytes = utf8ToBytes; + /** + * Normalizes (non-hex) string or Uint8Array to Uint8Array. + * Warning: when Uint8Array is passed, it would NOT get copied. + * Keep in mind for future mutable operations. + */ + function toBytes(data) { + if (typeof data === 'string') + data = utf8ToBytes(data); + if (!isBytes(data)) + throw new Error(`expected Uint8Array, got ${typeof data}`); + return data; + } + exports.toBytes = toBytes; + /** + * Copies several Uint8Arrays into one. 
+ */ + function concatBytes(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + if (!isBytes(a)) + throw new Error('Uint8Array expected'); + sum += a.length; + } + const res = new Uint8Array(sum); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; + } + exports.concatBytes = concatBytes; + // For runtime check if class implements interface + class Hash { + // Safe version that clones internal state + clone() { + return this._cloneInto(); + } + } + exports.Hash = Hash; + const toStr = {}.toString; + function checkOpts(defaults, opts) { + if (opts !== undefined && toStr.call(opts) !== '[object Object]') + throw new Error('Options should be object or undefined'); + const merged = Object.assign(defaults, opts); + return merged; + } + exports.checkOpts = checkOpts; + function wrapConstructor(hashCons) { + const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); + const tmp = hashCons(); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = () => hashCons(); + return hashC; + } + exports.wrapConstructor = wrapConstructor; + function wrapConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; + } + exports.wrapConstructorWithOpts = wrapConstructorWithOpts; + function wrapXOFConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; + } + exports.wrapXOFConstructorWithOpts = wrapXOFConstructorWithOpts; + /** + * Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS. 
+ */ + function randomBytes(bytesLength = 32) { + if (crypto_1.crypto && typeof crypto_1.crypto.getRandomValues === 'function') { + return crypto_1.crypto.getRandomValues(new Uint8Array(bytesLength)); + } + throw new Error('crypto.getRandomValues must be defined'); + } + exports.randomBytes = randomBytes; + +} (utils$6)); + +Object.defineProperty(_sha2, "__esModule", { value: true }); +_sha2.SHA2 = void 0; +const _assert_js_1$2 = _assert$1; +const utils_js_1$7 = utils$6; +// Polyfill for Safari 14 +function setBigUint64(view, byteOffset, value, isLE) { + if (typeof view.setBigUint64 === 'function') + return view.setBigUint64(byteOffset, value, isLE); + const _32n = BigInt(32); + const _u32_max = BigInt(0xffffffff); + const wh = Number((value >> _32n) & _u32_max); + const wl = Number(value & _u32_max); + const h = isLE ? 4 : 0; + const l = isLE ? 0 : 4; + view.setUint32(byteOffset + h, wh, isLE); + view.setUint32(byteOffset + l, wl, isLE); +} +// Base SHA2 class (RFC 6234) +class SHA2 extends utils_js_1$7.Hash { + constructor(blockLen, outputLen, padOffset, isLE) { + super(); + this.blockLen = blockLen; + this.outputLen = outputLen; + this.padOffset = padOffset; + this.isLE = isLE; + this.finished = false; + this.length = 0; + this.pos = 0; + this.destroyed = false; + this.buffer = new Uint8Array(blockLen); + this.view = (0, utils_js_1$7.createView)(this.buffer); + } + update(data) { + (0, _assert_js_1$2.exists)(this); + const { view, buffer, blockLen } = this; + data = (0, utils_js_1$7.toBytes)(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + // Fast path: we have at least one block in input, cast it to view and process + if (take === blockLen) { + const dataView = (0, utils_js_1$7.createView)(data); + for (; blockLen <= len - pos; pos += blockLen) + this.process(dataView, pos); + continue; + } + buffer.set(data.subarray(pos, pos + take), this.pos); + this.pos += take; + pos += take; + if 
(this.pos === blockLen) { + this.process(view, 0); + this.pos = 0; + } + } + this.length += data.length; + this.roundClean(); + return this; + } + digestInto(out) { + (0, _assert_js_1$2.exists)(this); + (0, _assert_js_1$2.output)(out, this); + this.finished = true; + // Padding + // We can avoid allocation of buffer for padding completely if it + // was previously not allocated here. But it won't change performance. + const { buffer, view, blockLen, isLE } = this; + let { pos } = this; + // append the bit '1' to the message + buffer[pos++] = 0b10000000; + this.buffer.subarray(pos).fill(0); + // we have less than padOffset left in buffer, so we cannot put length in current block, need process it and pad again + if (this.padOffset > blockLen - pos) { + this.process(view, 0); + pos = 0; + } + // Pad until full block byte with zeros + for (let i = pos; i < blockLen; i++) + buffer[i] = 0; + // Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that + // You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen. + // So we just write lowest 64 bits of that value. 
+ setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE); + this.process(view, 0); + const oview = (0, utils_js_1$7.createView)(out); + const len = this.outputLen; + // NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT + if (len % 4) + throw new Error('_sha2: outputLen should be aligned to 32bit'); + const outLen = len / 4; + const state = this.get(); + if (outLen > state.length) + throw new Error('_sha2: outputLen bigger than state'); + for (let i = 0; i < outLen; i++) + oview.setUint32(4 * i, state[i], isLE); + } + digest() { + const { buffer, outputLen } = this; + this.digestInto(buffer); + const res = buffer.slice(0, outputLen); + this.destroy(); + return res; + } + _cloneInto(to) { + to || (to = new this.constructor()); + to.set(...this.get()); + const { blockLen, buffer, length, finished, destroyed, pos } = this; + to.length = length; + to.pos = pos; + to.finished = finished; + to.destroyed = destroyed; + if (length % blockLen) + to.buffer.set(buffer); + return to; + } +} +_sha2.SHA2 = SHA2; + +Object.defineProperty(sha256, "__esModule", { value: true }); +sha256.sha224 = sha256.sha256 = void 0; +const _sha2_js_1 = _sha2; +const utils_js_1$6 = utils$6; +// SHA2-256 need to try 2^128 hashes to execute birthday attack. +// BTC network is doing 2^67 hashes/sec as per early 2023. +// Choice: a ? 
b : c +const Chi = (a, b, c) => (a & b) ^ (~a & c); +// Majority function, true if any two inpust is true +const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c); +// Round constants: +// first 32 bits of the fractional parts of the cube roots of the first 64 primes 2..311) +// prettier-ignore +const SHA256_K = /* @__PURE__ */ new Uint32Array([ + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, + 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, + 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, + 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, + 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, + 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, + 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 +]); +// Initial state (first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19): +// prettier-ignore +const IV = /* @__PURE__ */ new Uint32Array([ + 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 +]); +// Temporary buffer, not used to store anything between runs +// Named this way because it matches specification. +const SHA256_W = /* @__PURE__ */ new Uint32Array(64); +class SHA256 extends _sha2_js_1.SHA2 { + constructor() { + super(64, 32, 8, false); + // We cannot use array here since array allows indexing by variable + // which means optimizer/compiler cannot use registers. 
+ this.A = IV[0] | 0; + this.B = IV[1] | 0; + this.C = IV[2] | 0; + this.D = IV[3] | 0; + this.E = IV[4] | 0; + this.F = IV[5] | 0; + this.G = IV[6] | 0; + this.H = IV[7] | 0; + } + get() { + const { A, B, C, D, E, F, G, H } = this; + return [A, B, C, D, E, F, G, H]; + } + // prettier-ignore + set(A, B, C, D, E, F, G, H) { + this.A = A | 0; + this.B = B | 0; + this.C = C | 0; + this.D = D | 0; + this.E = E | 0; + this.F = F | 0; + this.G = G | 0; + this.H = H | 0; + } + process(view, offset) { + // Extend the first 16 words into the remaining 48 words w[16..63] of the message schedule array + for (let i = 0; i < 16; i++, offset += 4) + SHA256_W[i] = view.getUint32(offset, false); + for (let i = 16; i < 64; i++) { + const W15 = SHA256_W[i - 15]; + const W2 = SHA256_W[i - 2]; + const s0 = (0, utils_js_1$6.rotr)(W15, 7) ^ (0, utils_js_1$6.rotr)(W15, 18) ^ (W15 >>> 3); + const s1 = (0, utils_js_1$6.rotr)(W2, 17) ^ (0, utils_js_1$6.rotr)(W2, 19) ^ (W2 >>> 10); + SHA256_W[i] = (s1 + SHA256_W[i - 7] + s0 + SHA256_W[i - 16]) | 0; + } + // Compression function main loop, 64 rounds + let { A, B, C, D, E, F, G, H } = this; + for (let i = 0; i < 64; i++) { + const sigma1 = (0, utils_js_1$6.rotr)(E, 6) ^ (0, utils_js_1$6.rotr)(E, 11) ^ (0, utils_js_1$6.rotr)(E, 25); + const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0; + const sigma0 = (0, utils_js_1$6.rotr)(A, 2) ^ (0, utils_js_1$6.rotr)(A, 13) ^ (0, utils_js_1$6.rotr)(A, 22); + const T2 = (sigma0 + Maj(A, B, C)) | 0; + H = G; + G = F; + F = E; + E = (D + T1) | 0; + D = C; + C = B; + B = A; + A = (T1 + T2) | 0; + } + // Add the compressed chunk to the current hash value + A = (A + this.A) | 0; + B = (B + this.B) | 0; + C = (C + this.C) | 0; + D = (D + this.D) | 0; + E = (E + this.E) | 0; + F = (F + this.F) | 0; + G = (G + this.G) | 0; + H = (H + this.H) | 0; + this.set(A, B, C, D, E, F, G, H); + } + roundClean() { + SHA256_W.fill(0); + } + destroy() { + this.set(0, 0, 0, 0, 0, 0, 0, 0); + 
this.buffer.fill(0); + } +} +// Constants from https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf +class SHA224 extends SHA256 { + constructor() { + super(); + this.A = 0xc1059ed8 | 0; + this.B = 0x367cd507 | 0; + this.C = 0x3070dd17 | 0; + this.D = 0xf70e5939 | 0; + this.E = 0xffc00b31 | 0; + this.F = 0x68581511 | 0; + this.G = 0x64f98fa7 | 0; + this.H = 0xbefa4fa4 | 0; + this.outputLen = 28; + } +} +/** + * SHA2-256 hash function + * @param message - data that would be hashed + */ +sha256.sha256 = (0, utils_js_1$6.wrapConstructor)(() => new SHA256()); +sha256.sha224 = (0, utils_js_1$6.wrapConstructor)(() => new SHA224()); + +var modular = {}; + +var utils$5 = {}; + +Object.defineProperty(utils$5, "__esModule", { value: true }); +utils$5.validateObject = utils$5.createHmacDrbg = utils$5.bitMask = utils$5.bitSet = utils$5.bitGet = utils$5.bitLen = utils$5.utf8ToBytes = utils$5.equalBytes = utils$5.concatBytes = utils$5.ensureBytes = utils$5.numberToVarBytesBE = utils$5.numberToBytesLE = utils$5.numberToBytesBE = utils$5.bytesToNumberLE = utils$5.bytesToNumberBE = utils$5.hexToBytes = utils$5.hexToNumber = utils$5.numberToHexUnpadded = utils$5.bytesToHex = utils$5.isBytes = void 0; +/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// 100 lines of code in the file are duplicated from noble-hashes (utils). +// This is OK: `abstract` directory does not use noble-hashes. +// User may opt-in into using different hashing library. This way, noble-hashes +// won't be included into their bundle. 
+const _0n$4 = BigInt(0); +const _1n$4 = BigInt(1); +const _2n$3 = BigInt(2); +function isBytes$2(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +utils$5.isBytes = isBytes$2; +// Array where index 0xf0 (240) is mapped to string 'f0' +const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); +/** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ +function bytesToHex$2(bytes) { + if (!isBytes$2(bytes)) + throw new Error('Uint8Array expected'); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; +} +utils$5.bytesToHex = bytesToHex$2; +function numberToHexUnpadded(num) { + const hex = num.toString(16); + return hex.length & 1 ? `0${hex}` : hex; +} +utils$5.numberToHexUnpadded = numberToHexUnpadded; +function hexToNumber$1(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + // Big Endian + return BigInt(hex === '' ? 
'0' : `0x${hex}`); +} +utils$5.hexToNumber = hexToNumber$1; +// We use optimized technique to convert hex string to byte array +const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 }; +function asciiToBase16(char) { + if (char >= asciis._0 && char <= asciis._9) + return char - asciis._0; + if (char >= asciis._A && char <= asciis._F) + return char - (asciis._A - 10); + if (char >= asciis._a && char <= asciis._f) + return char - (asciis._a - 10); + return; +} +/** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ +function hexToBytes$2(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const hl = hex.length; + const al = hl / 2; + if (hl % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + hl); + const array = new Uint8Array(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = asciiToBase16(hex.charCodeAt(hi)); + const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); + if (n1 === undefined || n2 === undefined) { + const char = hex[hi] + hex[hi + 1]; + throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); + } + array[ai] = n1 * 16 + n2; + } + return array; +} +utils$5.hexToBytes = hexToBytes$2; +// BE: Big Endian, LE: Little Endian +function bytesToNumberBE(bytes) { + return hexToNumber$1(bytesToHex$2(bytes)); +} +utils$5.bytesToNumberBE = bytesToNumberBE; +function bytesToNumberLE(bytes) { + if (!isBytes$2(bytes)) + throw new Error('Uint8Array expected'); + return hexToNumber$1(bytesToHex$2(Uint8Array.from(bytes).reverse())); +} +utils$5.bytesToNumberLE = bytesToNumberLE; +function numberToBytesBE(n, len) { + return hexToBytes$2(n.toString(16).padStart(len * 2, '0')); +} +utils$5.numberToBytesBE = numberToBytesBE; +function numberToBytesLE(n, len) { + return numberToBytesBE(n, len).reverse(); +} +utils$5.numberToBytesLE = numberToBytesLE; +// Unpadded, rarely used +function 
numberToVarBytesBE(n) { + return hexToBytes$2(numberToHexUnpadded(n)); +} +utils$5.numberToVarBytesBE = numberToVarBytesBE; +/** + * Takes hex string or Uint8Array, converts to Uint8Array. + * Validates output length. + * Will throw error for other types. + * @param title descriptive title for an error e.g. 'private key' + * @param hex hex string or Uint8Array + * @param expectedLength optional, will compare to result array's length + * @returns + */ +function ensureBytes(title, hex, expectedLength) { + let res; + if (typeof hex === 'string') { + try { + res = hexToBytes$2(hex); + } + catch (e) { + throw new Error(`${title} must be valid hex string, got "${hex}". Cause: ${e}`); + } + } + else if (isBytes$2(hex)) { + // Uint8Array.from() instead of hash.slice() because node.js Buffer + // is instance of Uint8Array, and its slice() creates **mutable** copy + res = Uint8Array.from(hex); + } + else { + throw new Error(`${title} must be hex string or Uint8Array`); + } + const len = res.length; + if (typeof expectedLength === 'number' && len !== expectedLength) + throw new Error(`${title} expected ${expectedLength} bytes, got ${len}`); + return res; +} +utils$5.ensureBytes = ensureBytes; +/** + * Copies several Uint8Arrays into one. 
+ */ +function concatBytes$2(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + if (!isBytes$2(a)) + throw new Error('Uint8Array expected'); + sum += a.length; + } + let res = new Uint8Array(sum); + let pad = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; +} +utils$5.concatBytes = concatBytes$2; +// Compares 2 u8a-s in kinda constant time +function equalBytes(a, b) { + if (a.length !== b.length) + return false; + let diff = 0; + for (let i = 0; i < a.length; i++) + diff |= a[i] ^ b[i]; + return diff === 0; +} +utils$5.equalBytes = equalBytes; +/** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ +function utf8ToBytes$1(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 +} +utils$5.utf8ToBytes = utf8ToBytes$1; +// Bit operations +/** + * Calculates amount of bits in a bigint. + * Same as `n.toString(2).length` + */ +function bitLen(n) { + let len; + for (len = 0; n > _0n$4; n >>= _1n$4, len += 1) + ; + return len; +} +utils$5.bitLen = bitLen; +/** + * Gets single bit at position. + * NOTE: first bit position is 0 (same as arrays) + * Same as `!!+Array.from(n.toString(2)).reverse()[pos]` + */ +function bitGet(n, pos) { + return (n >> BigInt(pos)) & _1n$4; +} +utils$5.bitGet = bitGet; +/** + * Sets single bit at position. + */ +const bitSet = (n, pos, value) => { + return n | ((value ? _1n$4 : _0n$4) << BigInt(pos)); +}; +utils$5.bitSet = bitSet; +/** + * Calculate mask for N bits. Not using ** operator with bigints because of old engines. 
+ * Same as BigInt(`0b${Array(i).fill('1').join('')}`) + */ +const bitMask = (n) => (_2n$3 << BigInt(n - 1)) - _1n$4; +utils$5.bitMask = bitMask; +// DRBG +const u8n = (data) => new Uint8Array(data); // creates Uint8Array +const u8fr = (arr) => Uint8Array.from(arr); // another shortcut +/** + * Minimal HMAC-DRBG from NIST 800-90 for RFC6979 sigs. + * @returns function that will call DRBG until 2nd arg returns something meaningful + * @example + * const drbg = createHmacDRBG(32, 32, hmac); + * drbg(seed, bytesToKey); // bytesToKey must return Key or undefined + */ +function createHmacDrbg(hashLen, qByteLen, hmacFn) { + if (typeof hashLen !== 'number' || hashLen < 2) + throw new Error('hashLen must be a number'); + if (typeof qByteLen !== 'number' || qByteLen < 2) + throw new Error('qByteLen must be a number'); + if (typeof hmacFn !== 'function') + throw new Error('hmacFn must be a function'); + // Step B, Step C: set hashLen to 8*ceil(hlen/8) + let v = u8n(hashLen); // Minimal non-full-spec HMAC-DRBG from NIST 800-90 for RFC6979 sigs. + let k = u8n(hashLen); // Steps B and C of RFC6979 3.2: set hashLen, in our case always same + let i = 0; // Iterations counter, will throw when over 1000 + const reset = () => { + v.fill(1); + k.fill(0); + i = 0; + }; + const h = (...b) => hmacFn(k, v, ...b); // hmac(k)(v, ...values) + const reseed = (seed = u8n()) => { + // HMAC-DRBG reseed() function. 
Steps D-G + k = h(u8fr([0x00]), seed); // k = hmac(k || v || 0x00 || seed) + v = h(); // v = hmac(k || v) + if (seed.length === 0) + return; + k = h(u8fr([0x01]), seed); // k = hmac(k || v || 0x01 || seed) + v = h(); // v = hmac(k || v) + }; + const gen = () => { + // HMAC-DRBG generate() function + if (i++ >= 1000) + throw new Error('drbg: tried 1000 values'); + let len = 0; + const out = []; + while (len < qByteLen) { + v = h(); + const sl = v.slice(); + out.push(sl); + len += v.length; + } + return concatBytes$2(...out); + }; + const genUntil = (seed, pred) => { + reset(); + reseed(seed); // Steps D-G + let res = undefined; // Step H: grind until k is in [1..n-1] + while (!(res = pred(gen()))) + reseed(); + reset(); + return res; + }; + return genUntil; +} +utils$5.createHmacDrbg = createHmacDrbg; +// Validating curves and fields +const validatorFns = { + bigint: (val) => typeof val === 'bigint', + function: (val) => typeof val === 'function', + boolean: (val) => typeof val === 'boolean', + string: (val) => typeof val === 'string', + stringOrUint8Array: (val) => typeof val === 'string' || isBytes$2(val), + isSafeInteger: (val) => Number.isSafeInteger(val), + array: (val) => Array.isArray(val), + field: (val, object) => object.Fp.isValid(val), + hash: (val) => typeof val === 'function' && Number.isSafeInteger(val.outputLen), +}; +// type Record = { [P in K]: T; } +function validateObject(object, validators, optValidators = {}) { + const checkField = (fieldName, type, isOptional) => { + const checkVal = validatorFns[type]; + if (typeof checkVal !== 'function') + throw new Error(`Invalid validator "${type}", expected function`); + const val = object[fieldName]; + if (isOptional && val === undefined) + return; + if (!checkVal(val, object)) { + throw new Error(`Invalid param ${String(fieldName)}=${val} (${typeof val}), expected ${type}`); + } + }; + for (const [fieldName, type] of Object.entries(validators)) + checkField(fieldName, type, false); + for (const 
[fieldName, type] of Object.entries(optValidators)) + checkField(fieldName, type, true); + return object; +} +utils$5.validateObject = validateObject; + +Object.defineProperty(modular, "__esModule", { value: true }); +modular.mapHashToField = modular.getMinHashLength = modular.getFieldBytesLength = modular.hashToPrivateScalar = modular.FpSqrtEven = modular.FpSqrtOdd = modular.Field = modular.nLength = modular.FpIsSquare = modular.FpDiv = modular.FpInvertBatch = modular.FpPow = modular.validateField = modular.isNegativeLE = modular.FpSqrt = modular.tonelliShanks = modular.invert = modular.pow2 = modular.pow = modular.mod = void 0; +/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// Utilities for modular arithmetics and finite fields +const utils_js_1$5 = utils$5; +// prettier-ignore +const _0n$3 = BigInt(0), _1n$3 = BigInt(1), _2n$2 = BigInt(2), _3n = BigInt(3); +// prettier-ignore +const _4n = BigInt(4), _5n = BigInt(5), _8n = BigInt(8); +// prettier-ignore +BigInt(9); BigInt(16); +// Calculates a modulo b +function mod(a, b) { + const result = a % b; + return result >= _0n$3 ? result : b + result; +} +modular.mod = mod; +/** + * Efficiently raise num to power and do modular division. + * Unsafe in some contexts: uses ladder, so can expose bigint bits. + * @example + * pow(2n, 6n, 11n) // 64n % 11n == 9n + */ +// TODO: use field version && remove +function pow(num, power, modulo) { + if (modulo <= _0n$3 || power < _0n$3) + throw new Error('Expected power/modulo > 0'); + if (modulo === _1n$3) + return _0n$3; + let res = _1n$3; + while (power > _0n$3) { + if (power & _1n$3) + res = (res * num) % modulo; + num = (num * num) % modulo; + power >>= _1n$3; + } + return res; +} +modular.pow = pow; +// Does x ^ (2 ^ power) mod p. 
// pow2(30, 4) == 30 ^ (2 ^ 4)
function pow2(x, power, modulo) {
    // Repeated squaring: each iteration squares the accumulator mod `modulo`.
    let res = x;
    while (power-- > _0n$3) {
        res *= res;
        res %= modulo;
    }
    return res;
}
modular.pow2 = pow2;
// Inverses number over modulo.
// Returns x such that (number * x) ≡ 1 (mod modulo); throws when gcd(number, modulo) != 1.
function invert(number, modulo) {
    if (number === _0n$3 || modulo <= _0n$3) {
        throw new Error(`invert: expected positive integers, got n=${number} mod=${modulo}`);
    }
    // Euclidean GCD https://brilliant.org/wiki/extended-euclidean-algorithm/
    // Fermat's little theorem "CT-like" version inv(n) = n^(m-2) mod m is 30x slower.
    let a = mod(number, modulo);
    let b = modulo;
    // x tracks the Bézout coefficient of `number`; u is its next value.
    // prettier-ignore
    let x = _0n$3, u = _1n$3;
    while (a !== _0n$3) {
        // JIT applies optimization if those two lines follow each other
        const q = b / a;
        const r = b % a;
        const m = x - u * q;
        // prettier-ignore
        b = a, a = r, x = u, u = m;
    }
    const gcd = b;
    if (gcd !== _1n$3)
        throw new Error('invert: does not exist');
    return mod(x, modulo);
}
modular.invert = invert;
/**
 * Tonelli-Shanks square root search algorithm.
 * 1. https://eprint.iacr.org/2012/685.pdf (page 12)
 * 2. Square Roots from 1; 24, 51, 10 to Dan Shanks
 * Will start an infinite loop if field order P is not prime.
 * @param P field order
 * @returns function that takes field Fp (created from P) and number n
 */
function tonelliShanks(P) {
    // Legendre constant: used to calculate Legendre symbol (a | p),
    // which denotes the value of a^((p-1)/2) (mod p).
    // (a | p) ≡ 1    if a is a square (mod p)
    // (a | p) ≡ -1   if a is not a square (mod p)
    // (a | p) ≡ 0    if a ≡ 0 (mod p)
    const legendreC = (P - _1n$3) / _2n$2;
    let Q, S, Z;
    // Step 1: By factoring out powers of 2 from p - 1,
    // find q and s such that p - 1 = q*(2^s) with q odd
    for (Q = P - _1n$3, S = 0; Q % _2n$2 === _0n$3; Q /= _2n$2, S++)
        ;
    // Step 2: Select a non-square z such that (z | p) ≡ -1 and set c ≡ zq
    for (Z = _2n$2; Z < P && pow(Z, legendreC, P) !== P - _1n$3; Z++)
        ;
    // Fast-path: for P ≡ 3 (mod 4) the root is simply n^((P+1)/4).
    if (S === 1) {
        const p1div4 = (P + _1n$3) / _4n;
        return function tonelliFast(Fp, n) {
            const root = Fp.pow(n, p1div4);
            if (!Fp.eql(Fp.sqr(root), n))
                throw new Error('Cannot find square root');
            return root;
        };
    }
    // Slow-path: full Tonelli-Shanks loop.
    const Q1div2 = (Q + _1n$3) / _2n$2;
    return function tonelliSlow(Fp, n) {
        // Step 0: Check that n is indeed a square: (n | p) should not be ≡ -1
        if (Fp.pow(n, legendreC) === Fp.neg(Fp.ONE))
            throw new Error('Cannot find square root');
        let r = S;
        // TODO: will fail at Fp2/etc
        let g = Fp.pow(Fp.mul(Fp.ONE, Z), Q); // will update both x and b
        let x = Fp.pow(n, Q1div2); // first guess at the square root
        let b = Fp.pow(n, Q); // first guess at the fudge factor
        while (!Fp.eql(b, Fp.ONE)) {
            if (Fp.eql(b, Fp.ZERO))
                return Fp.ZERO; // https://en.wikipedia.org/wiki/Tonelli%E2%80%93Shanks_algorithm (4. If t = 0, return r = 0)
            // Find m such b^(2^m)==1
            let m = 1;
            for (let t2 = Fp.sqr(b); m < r; m++) {
                if (Fp.eql(t2, Fp.ONE))
                    break;
                t2 = Fp.sqr(t2); // t2 *= t2
            }
            // NOTE: r-m-1 can be bigger than 32, need to convert to bigint before shift, otherwise there will be overflow
            const ge = Fp.pow(g, _1n$3 << BigInt(r - m - 1)); // ge = 2^(r-m-1)
            g = Fp.sqr(ge); // g = ge * ge
            x = Fp.mul(x, ge); // x *= ge
            b = Fp.mul(b, g); // b *= g
            r = m;
        }
        return x;
    };
}
modular.tonelliShanks = tonelliShanks;
// Returns the fastest available square-root routine for prime field order P.
// NOTE: different algorithms can give different roots, it is up to user to decide which one they want.
// For example there is FpSqrtOdd/FpSqrtEven to choice root based on oddness (used for hash-to-curve).
function FpSqrt(P) {
    // P ≡ 3 (mod 4)
    // √n = n^((P+1)/4)
    if (P % _4n === _3n) {
        // Not all roots possible!
        // const ORDER =
        //   0x1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaabn;
        // const NUM = 72057594037927816n;
        const p1div4 = (P + _1n$3) / _4n;
        return function sqrt3mod4(Fp, n) {
            const root = Fp.pow(n, p1div4);
            // Throw if root**2 != n
            if (!Fp.eql(Fp.sqr(root), n))
                throw new Error('Cannot find square root');
            return root;
        };
    }
    // Atkin algorithm for q ≡ 5 (mod 8), https://eprint.iacr.org/2012/685.pdf (page 10)
    if (P % _8n === _5n) {
        const c1 = (P - _5n) / _8n;
        return function sqrt5mod8(Fp, n) {
            const n2 = Fp.mul(n, _2n$2);
            const v = Fp.pow(n2, c1);
            const nv = Fp.mul(n, v);
            const i = Fp.mul(Fp.mul(nv, _2n$2), v);
            const root = Fp.mul(nv, Fp.sub(i, Fp.ONE));
            if (!Fp.eql(Fp.sqr(root), n))
                throw new Error('Cannot find square root');
            return root;
        };
    }
    // Other cases: Tonelli-Shanks algorithm
    return tonelliShanks(P);
}
modular.FpSqrt = FpSqrt;
// Little-endian check for first LE bit (last BE bit);
const isNegativeLE = (num, modulo) => (mod(num, modulo) & _1n$3) === _1n$3;
modular.isNegativeLE = isNegativeLE;
// prettier-ignore
const FIELD_FIELDS = [
    'create', 'isValid', 'is0', 'neg', 'inv', 'sqrt', 'sqr',
    'eql', 'add', 'sub', 'mul', 'pow', 'div',
    'addN', 'subN', 'mulN', 'sqrN'
];
// Validates that `field` has the shape of an IField: bigint ORDER/MASK,
// integer BYTES/BITS, and a function for every name in FIELD_FIELDS.
function validateField(field) {
    const initial = {
        ORDER: 'bigint',
        MASK: 'bigint',
        BYTES: 'isSafeInteger',
        BITS: 'isSafeInteger',
    };
    const opts = FIELD_FIELDS.reduce((map, val) => {
        map[val] = 'function';
        return map;
    }, initial);
    return (0, utils_js_1$5.validateObject)(field, opts);
}
modular.validateField = validateField;
// Generic field functions
/**
 * Same as `pow` but for Fp: non-constant-time.
 * Unsafe in some contexts: uses ladder, so can expose bigint bits.
 */
function FpPow(f, num, power) {
    // Should have same speed as pow for bigints
    // TODO: benchmark!
    if (power < _0n$3)
        throw new Error('Expected power > 0');
    if (power === _0n$3)
        return f.ONE;
    if (power === _1n$3)
        return num;
    let p = f.ONE;
    let d = num;
    while (power > _0n$3) {
        if (power & _1n$3)
            p = f.mul(p, d);
        d = f.sqr(d);
        power >>= _1n$3;
    }
    return p;
}
modular.FpPow = FpPow;
/**
 * Efficiently invert an array of Field elements (Montgomery batch inversion).
 * `inv(0)` will return `undefined` here: make sure to throw an error.
 */
function FpInvertBatch(f, nums) {
    const tmp = new Array(nums.length);
    // Walk from first to last, multiply them by each other MOD p
    const lastMultiplied = nums.reduce((acc, num, i) => {
        if (f.is0(num))
            return acc; // zeros are skipped: their tmp slot stays undefined
        tmp[i] = acc;
        return f.mul(acc, num);
    }, f.ONE);
    // Invert last element
    const inverted = f.inv(lastMultiplied);
    // Walk from last to first, multiply them by inverted each other MOD p
    nums.reduceRight((acc, num, i) => {
        if (f.is0(num))
            return acc;
        tmp[i] = f.mul(acc, tmp[i]);
        return f.mul(acc, num);
    }, inverted);
    return tmp;
}
modular.FpInvertBatch = FpInvertBatch;
// Division in Fp; accepts a plain bigint or a field element as divisor.
function FpDiv(f, lhs, rhs) {
    return f.mul(lhs, typeof rhs === 'bigint' ? invert(rhs, f.ORDER) : f.inv(rhs));
}
modular.FpDiv = FpDiv;
// This function returns True whenever the value x is a square in the field F.
function FpIsSquare(f) {
    const legendreConst = (f.ORDER - _1n$3) / _2n$2; // Integer arithmetic
    return (x) => {
        const p = f.pow(x, legendreConst);
        return f.eql(p, f.ZERO) || f.eql(p, f.ONE);
    };
}
modular.FpIsSquare = FpIsSquare;
// CURVE.n lengths
function nLength(n, nBitLength) {
    // Bit size, byte size of CURVE.n
    const _nBitLength = nBitLength !== undefined ? nBitLength : n.toString(2).length;
    const nByteLength = Math.ceil(_nBitLength / 8);
    return { nBitLength: _nBitLength, nByteLength };
}
modular.nLength = nLength;
/**
 * Initializes a finite field over prime. **Non-primes are not supported.**
 * Do not init in loop: slow. Very fragile: always run a benchmark on a change.
 * Major performance optimizations:
 * * a) denormalized operations like mulN instead of mul
 * * b) same object shape: never add or remove keys
 * * c) Object.freeze
 * @param ORDER prime positive bigint
 * @param bitLen how many bits the field consumes
 * @param isLE (def: false) if encoding / decoding should be in little-endian
 * @param redef optional faster redefinitions of sqrt and other methods
 */
function Field(ORDER, bitLen, isLE = false, redef = {}) {
    if (ORDER <= _0n$3)
        throw new Error(`Expected Field ORDER > 0, got ${ORDER}`);
    const { nBitLength: BITS, nByteLength: BYTES } = nLength(ORDER, bitLen);
    if (BYTES > 2048)
        throw new Error('Field lengths over 2048 bytes are not supported');
    const sqrtP = FpSqrt(ORDER);
    const f = Object.freeze({
        ORDER,
        BITS,
        BYTES,
        MASK: (0, utils_js_1$5.bitMask)(BITS),
        ZERO: _0n$3,
        ONE: _1n$3,
        create: (num) => mod(num, ORDER),
        isValid: (num) => {
            if (typeof num !== 'bigint')
                throw new Error(`Invalid field element: expected bigint, got ${typeof num}`);
            return _0n$3 <= num && num < ORDER; // 0 is valid element, but it's not invertible
        },
        is0: (num) => num === _0n$3,
        isOdd: (num) => (num & _1n$3) === _1n$3,
        neg: (num) => mod(-num, ORDER),
        eql: (lhs, rhs) => lhs === rhs,
        sqr: (num) => mod(num * num, ORDER),
        add: (lhs, rhs) => mod(lhs + rhs, ORDER),
        sub: (lhs, rhs) => mod(lhs - rhs, ORDER),
        mul: (lhs, rhs) => mod(lhs * rhs, ORDER),
        pow: (num, power) => FpPow(f, num, power),
        div: (lhs, rhs) => mod(lhs * invert(rhs, ORDER), ORDER),
        // Same as above, but doesn't normalize
        sqrN: (num) => num * num,
        addN: (lhs, rhs) => lhs + rhs,
        subN: (lhs, rhs) => lhs - rhs,
        mulN: (lhs, rhs) => lhs * rhs,
        inv: (num) => invert(num, ORDER),
        sqrt: redef.sqrt || ((n) => sqrtP(f, n)),
        invertBatch: (lst) => FpInvertBatch(f, lst),
        // TODO: do we really need constant cmov?
        // We don't have const-time bigints anyway, so probably will be not very useful
        cmov: (a, b, c) => (c ? b : a),
        toBytes: (num) => (isLE ? (0, utils_js_1$5.numberToBytesLE)(num, BYTES) : (0, utils_js_1$5.numberToBytesBE)(num, BYTES)),
        fromBytes: (bytes) => {
            if (bytes.length !== BYTES)
                throw new Error(`Fp.fromBytes: expected ${BYTES}, got ${bytes.length}`);
            return isLE ? (0, utils_js_1$5.bytesToNumberLE)(bytes) : (0, utils_js_1$5.bytesToNumberBE)(bytes);
        },
    });
    return Object.freeze(f);
}
modular.Field = Field;
// Square root that is guaranteed odd; requires the field to define isOdd.
function FpSqrtOdd(Fp, elm) {
    if (!Fp.isOdd)
        throw new Error(`Field doesn't have isOdd`);
    const root = Fp.sqrt(elm);
    return Fp.isOdd(root) ? root : Fp.neg(root);
}
modular.FpSqrtOdd = FpSqrtOdd;
// Square root that is guaranteed even; requires the field to define isOdd.
function FpSqrtEven(Fp, elm) {
    if (!Fp.isOdd)
        throw new Error(`Field doesn't have isOdd`);
    const root = Fp.sqrt(elm);
    return Fp.isOdd(root) ? Fp.neg(root) : root;
}
modular.FpSqrtEven = FpSqrtEven;
/**
 * "Constant-time" private key generation utility.
 * Same as mapHashToField, but accepts less bytes (40 instead of 48 for 32-byte field).
 * Which makes it slightly more biased, less secure.
 * @deprecated use mapHashToField instead
 */
function hashToPrivateScalar(hash, groupOrder, isLE = false) {
    hash = (0, utils_js_1$5.ensureBytes)('privateHash', hash);
    const hashLen = hash.length;
    const minLen = nLength(groupOrder).nByteLength + 8;
    if (minLen < 24 || hashLen < minLen || hashLen > 1024)
        throw new Error(`hashToPrivateScalar: expected ${minLen}-1024 bytes of input, got ${hashLen}`);
    const num = isLE ? (0, utils_js_1$5.bytesToNumberLE)(hash) : (0, utils_js_1$5.bytesToNumberBE)(hash);
    // `mod(x, n-1) + 1` maps into [1..n-1]: never 0, never >= n
    return mod(num, groupOrder - _1n$3) + _1n$3;
}
modular.hashToPrivateScalar = hashToPrivateScalar;
/**
 * Returns total number of bytes consumed by the field element.
 * For example, 32 bytes for usual 256-bit weierstrass curve.
 * @param fieldOrder number of field elements, usually CURVE.n
 * @returns byte length of field
 */
function getFieldBytesLength(fieldOrder) {
    if (typeof fieldOrder !== 'bigint')
        throw new Error('field order must be bigint');
    const bitLength = fieldOrder.toString(2).length;
    return Math.ceil(bitLength / 8);
}
modular.getFieldBytesLength = getFieldBytesLength;
/**
 * Returns minimal amount of bytes that can be safely reduced
 * by field order.
 * Should be 2^-128 for 128-bit curve such as P256.
 * @param fieldOrder number of field elements, usually CURVE.n
 * @returns byte length of target hash
 */
function getMinHashLength(fieldOrder) {
    const length = getFieldBytesLength(fieldOrder);
    return length + Math.ceil(length / 2);
}
modular.getMinHashLength = getMinHashLength;
/**
 * "Constant-time" private key generation utility.
 * Can take (n + n/2) or more bytes of uniform input e.g. from CSPRNG or KDF
 * and convert them into private scalar, with the modulo bias being negligible.
 * Needs at least 48 bytes of input for 32-byte private key.
 * https://research.kudelskisecurity.com/2020/07/28/the-definitive-guide-to-modulo-bias-and-how-to-avoid-it/
 * FIPS 186-5, A.2 https://csrc.nist.gov/publications/detail/fips/186/5/final
 * RFC 9380, https://www.rfc-editor.org/rfc/rfc9380#section-5
 * @param key hash output from SHA3 or a similar function
 * @param fieldOrder size of subgroup - (e.g. secp256k1.CURVE.n)
 * @param isLE interpret hash bytes as LE num
 * @returns valid private scalar
 */
function mapHashToField(key, fieldOrder, isLE = false) {
    const len = key.length;
    const fieldLen = getFieldBytesLength(fieldOrder);
    const minLen = getMinHashLength(fieldOrder);
    // No small numbers: need to understand bias story. No huge numbers: easier to detect JS timings.
    if (len < 16 || len < minLen || len > 1024)
        throw new Error(`expected ${minLen}-1024 bytes of input, got ${len}`);
    // BUGFIX: LE/BE were swapped (isLE previously selected bytesToNumberBE).
    // Must match the encoding used for the result below and in hashToPrivateScalar:
    // when isLE is set, the input bytes are interpreted as a little-endian number.
    const num = isLE ? (0, utils_js_1$5.bytesToNumberLE)(key) : (0, utils_js_1$5.bytesToNumberBE)(key);
    // `mod(x, 11)` can sometimes produce 0. `mod(x, 10) + 1` is the same, but no 0
    const reduced = mod(num, fieldOrder - _1n$3) + _1n$3;
    return isLE ? (0, utils_js_1$5.numberToBytesLE)(reduced, fieldLen) : (0, utils_js_1$5.numberToBytesBE)(reduced, fieldLen);
}
modular.mapHashToField = mapHashToField;

var weierstrass = {};

var curve = {};

Object.defineProperty(curve, "__esModule", { value: true });
curve.validateBasic = curve.wNAF = void 0;
/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */
// Abelian group utilities
const modular_js_1$1 = modular;
const utils_js_1$4 = utils$5;
const _0n$2 = BigInt(0);
const _1n$2 = BigInt(1);
// Elliptic curve multiplication of Point by scalar. Fragile.
// Scalars should always be less than curve order: this should be checked inside of a curve itself.
+// Creates precomputation tables for fast multiplication: +// - private scalar is split by fixed size windows of W bits +// - every window point is collected from window's table & added to accumulator +// - since windows are different, same point inside tables won't be accessed more than once per calc +// - each multiplication is 'Math.ceil(CURVE_ORDER / 𝑊) + 1' point additions (fixed for any scalar) +// - +1 window is neccessary for wNAF +// - wNAF reduces table size: 2x less memory + 2x faster generation, but 10% slower multiplication +// TODO: Research returning 2d JS array of windows, instead of a single window. This would allow +// windows to be in different memory locations +function wNAF(c, bits) { + const constTimeNegate = (condition, item) => { + const neg = item.negate(); + return condition ? neg : item; + }; + const opts = (W) => { + const windows = Math.ceil(bits / W) + 1; // +1, because + const windowSize = 2 ** (W - 1); // -1 because we skip zero + return { windows, windowSize }; + }; + return { + constTimeNegate, + // non-const time multiplication ladder + unsafeLadder(elm, n) { + let p = c.ZERO; + let d = elm; + while (n > _0n$2) { + if (n & _1n$2) + p = p.add(d); + d = d.double(); + n >>= _1n$2; + } + return p; + }, + /** + * Creates a wNAF precomputation window. Used for caching. + * Default window size is set by `utils.precompute()` and is equal to 8. + * Number of precomputed points depends on the curve size: + * 2^(𝑊−1) * (Math.ceil(𝑛 / 𝑊) + 1), where: + * - 𝑊 is the window size + * - 𝑛 is the bitlength of the curve order. + * For a 256-bit curve and window size 8, the number of precomputed points is 128 * 33 = 4224. 
+ * @returns precomputed point tables flattened to a single array + */ + precomputeWindow(elm, W) { + const { windows, windowSize } = opts(W); + const points = []; + let p = elm; + let base = p; + for (let window = 0; window < windows; window++) { + base = p; + points.push(base); + // =1, because we skip zero + for (let i = 1; i < windowSize; i++) { + base = base.add(p); + points.push(base); + } + p = base.double(); + } + return points; + }, + /** + * Implements ec multiplication using precomputed tables and w-ary non-adjacent form. + * @param W window size + * @param precomputes precomputed tables + * @param n scalar (we don't check here, but should be less than curve order) + * @returns real and fake (for const-time) points + */ + wNAF(W, precomputes, n) { + // TODO: maybe check that scalar is less than group order? wNAF behavious is undefined otherwise + // But need to carefully remove other checks before wNAF. ORDER == bits here + const { windows, windowSize } = opts(W); + let p = c.ZERO; + let f = c.BASE; + const mask = BigInt(2 ** W - 1); // Create mask with W ones: 0b1111 for W=4 etc. + const maxNumber = 2 ** W; + const shiftBy = BigInt(W); + for (let window = 0; window < windows; window++) { + const offset = window * windowSize; + // Extract W bits. + let wbits = Number(n & mask); + // Shift number by W bits. + n >>= shiftBy; + // If the bits are bigger than max size, we'll split those. + // +224 => 256 - 32 + if (wbits > windowSize) { + wbits -= maxNumber; + n += _1n$2; + } + // This code was first written with assumption that 'f' and 'p' will never be infinity point: + // since each addition is multiplied by 2 ** W, it cannot cancel each other. However, + // there is negate now: it is possible that negated element from low value + // would be the same as high element, which will create carry into next window. + // It's not obvious how this can fail, but still worth investigating later. + // Check if we're onto Zero point. 
+ // Add random point inside current window to f. + const offset1 = offset; + const offset2 = offset + Math.abs(wbits) - 1; // -1 because we skip zero + const cond1 = window % 2 !== 0; + const cond2 = wbits < 0; + if (wbits === 0) { + // The most important part for const-time getPublicKey + f = f.add(constTimeNegate(cond1, precomputes[offset1])); + } + else { + p = p.add(constTimeNegate(cond2, precomputes[offset2])); + } + } + // JIT-compiler should not eliminate f here, since it will later be used in normalizeZ() + // Even if the variable is still unused, there are some checks which will + // throw an exception, so compiler needs to prove they won't happen, which is hard. + // At this point there is a way to F be infinity-point even if p is not, + // which makes it less const-time: around 1 bigint multiply. + return { p, f }; + }, + wNAFCached(P, precomputesMap, n, transform) { + // @ts-ignore + const W = P._WINDOW_SIZE || 1; + // Calculate precomputes on a first run, reuse them after + let comp = precomputesMap.get(P); + if (!comp) { + comp = this.precomputeWindow(P, W); + if (W !== 1) { + precomputesMap.set(P, transform(comp)); + } + } + return this.wNAF(W, comp, n); + }, + }; +} +curve.wNAF = wNAF; +function validateBasic(curve) { + (0, modular_js_1$1.validateField)(curve.Fp); + (0, utils_js_1$4.validateObject)(curve, { + n: 'bigint', + h: 'bigint', + Gx: 'field', + Gy: 'field', + }, { + nBitLength: 'isSafeInteger', + nByteLength: 'isSafeInteger', + }); + // Set defaults + return Object.freeze({ + ...(0, modular_js_1$1.nLength)(curve.n, curve.nBitLength), + ...curve, + ...{ p: curve.Fp.ORDER }, + }); +} +curve.validateBasic = validateBasic; + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.mapToCurveSimpleSWU = exports.SWUFpSqrtRatio = exports.weierstrass = exports.weierstrassPoints = exports.DER = void 0; + /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + // Short Weierstrass curve. 
The formula is: y² = x³ + ax + b + const mod = modular; + const ut = utils$5; + const utils_js_1 = utils$5; + const curve_js_1 = curve; + function validatePointOpts(curve) { + const opts = (0, curve_js_1.validateBasic)(curve); + ut.validateObject(opts, { + a: 'field', + b: 'field', + }, { + allowedPrivateKeyLengths: 'array', + wrapPrivateKey: 'boolean', + isTorsionFree: 'function', + clearCofactor: 'function', + allowInfinityPoint: 'boolean', + fromBytes: 'function', + toBytes: 'function', + }); + const { endo, Fp, a } = opts; + if (endo) { + if (!Fp.eql(a, Fp.ZERO)) { + throw new Error('Endomorphism can only be defined for Koblitz curves that have a=0'); + } + if (typeof endo !== 'object' || + typeof endo.beta !== 'bigint' || + typeof endo.splitScalar !== 'function') { + throw new Error('Expected endomorphism with beta: bigint and splitScalar: function'); + } + } + return Object.freeze({ ...opts }); + } + // ASN.1 DER encoding utilities + const { bytesToNumberBE: b2n, hexToBytes: h2b } = ut; + exports.DER = { + // asn.1 DER encoding utils + Err: class DERErr extends Error { + constructor(m = '') { + super(m); + } + }, + _parseInt(data) { + const { Err: E } = exports.DER; + if (data.length < 2 || data[0] !== 0x02) + throw new E('Invalid signature integer tag'); + const len = data[1]; + const res = data.subarray(2, len + 2); + if (!len || res.length !== len) + throw new E('Invalid signature integer: wrong length'); + // https://crypto.stackexchange.com/a/57734 Leftmost bit of first byte is 'negative' flag, + // since we always use positive integers here. 
It must always be empty: + // - add zero byte if exists + // - if next byte doesn't have a flag, leading zero is not allowed (minimal encoding) + if (res[0] & 0b10000000) + throw new E('Invalid signature integer: negative'); + if (res[0] === 0x00 && !(res[1] & 0b10000000)) + throw new E('Invalid signature integer: unnecessary leading zero'); + return { d: b2n(res), l: data.subarray(len + 2) }; // d is data, l is left + }, + toSig(hex) { + // parse DER signature + const { Err: E } = exports.DER; + const data = typeof hex === 'string' ? h2b(hex) : hex; + if (!ut.isBytes(data)) + throw new Error('ui8a expected'); + let l = data.length; + if (l < 2 || data[0] != 0x30) + throw new E('Invalid signature tag'); + if (data[1] !== l - 2) + throw new E('Invalid signature: incorrect length'); + const { d: r, l: sBytes } = exports.DER._parseInt(data.subarray(2)); + const { d: s, l: rBytesLeft } = exports.DER._parseInt(sBytes); + if (rBytesLeft.length) + throw new E('Invalid signature: left bytes after parsing'); + return { r, s }; + }, + hexFromSig(sig) { + // Add leading zero if first byte has negative bit enabled. More details in '_parseInt' + const slice = (s) => (Number.parseInt(s[0], 16) & 0b1000 ? '00' + s : s); + const h = (num) => { + const hex = num.toString(16); + return hex.length & 1 ? 
`0${hex}` : hex; + }; + const s = slice(h(sig.s)); + const r = slice(h(sig.r)); + const shl = s.length / 2; + const rhl = r.length / 2; + const sl = h(shl); + const rl = h(rhl); + return `30${h(rhl + shl + 4)}02${rl}${r}02${sl}${s}`; + }, + }; + // Be friendly to bad ECMAScript parsers by not using bigint literals + // prettier-ignore + const _0n = BigInt(0), _1n = BigInt(1), _2n = BigInt(2), _3n = BigInt(3), _4n = BigInt(4); + function weierstrassPoints(opts) { + const CURVE = validatePointOpts(opts); + const { Fp } = CURVE; // All curves has same field / group length as for now, but they can differ + const toBytes = CURVE.toBytes || + ((_c, point, _isCompressed) => { + const a = point.toAffine(); + return ut.concatBytes(Uint8Array.from([0x04]), Fp.toBytes(a.x), Fp.toBytes(a.y)); + }); + const fromBytes = CURVE.fromBytes || + ((bytes) => { + // const head = bytes[0]; + const tail = bytes.subarray(1); + // if (head !== 0x04) throw new Error('Only non-compressed encoding is supported'); + const x = Fp.fromBytes(tail.subarray(0, Fp.BYTES)); + const y = Fp.fromBytes(tail.subarray(Fp.BYTES, 2 * Fp.BYTES)); + return { x, y }; + }); + /** + * y² = x³ + ax + b: Short weierstrass curve formula + * @returns y² + */ + function weierstrassEquation(x) { + const { a, b } = CURVE; + const x2 = Fp.sqr(x); // x * x + const x3 = Fp.mul(x2, x); // x2 * x + return Fp.add(Fp.add(x3, Fp.mul(x, a)), b); // x3 + a * x + b + } + // Validate whether the passed curve params are valid. + // We check if curve equation works for generator point. + // `assertValidity()` won't work: `isTorsionFree()` is not available at this point in bls12-381. + // ProjectivePoint class has not been initialized yet. 
+ if (!Fp.eql(Fp.sqr(CURVE.Gy), weierstrassEquation(CURVE.Gx))) + throw new Error('bad generator point: equation left != right'); + // Valid group elements reside in range 1..n-1 + function isWithinCurveOrder(num) { + return typeof num === 'bigint' && _0n < num && num < CURVE.n; + } + function assertGE(num) { + if (!isWithinCurveOrder(num)) + throw new Error('Expected valid bigint: 0 < bigint < curve.n'); + } + // Validates if priv key is valid and converts it to bigint. + // Supports options allowedPrivateKeyLengths and wrapPrivateKey. + function normPrivateKeyToScalar(key) { + const { allowedPrivateKeyLengths: lengths, nByteLength, wrapPrivateKey, n } = CURVE; + if (lengths && typeof key !== 'bigint') { + if (ut.isBytes(key)) + key = ut.bytesToHex(key); + // Normalize to hex string, pad. E.g. P521 would norm 130-132 char hex to 132-char bytes + if (typeof key !== 'string' || !lengths.includes(key.length)) + throw new Error('Invalid key'); + key = key.padStart(nByteLength * 2, '0'); + } + let num; + try { + num = + typeof key === 'bigint' + ? key + : ut.bytesToNumberBE((0, utils_js_1.ensureBytes)('private key', key, nByteLength)); + } + catch (error) { + throw new Error(`private key must be ${nByteLength} bytes, hex or bigint, not ${typeof key}`); + } + if (wrapPrivateKey) + num = mod.mod(num, n); // disabled by default, enabled for BLS + assertGE(num); // num in range [1..N-1] + return num; + } + const pointPrecomputes = new Map(); + function assertPrjPoint(other) { + if (!(other instanceof Point)) + throw new Error('ProjectivePoint expected'); + } + /** + * Projective Point works in 3d / projective (homogeneous) coordinates: (x, y, z) ∋ (x=x/z, y=y/z) + * Default Point works in 2d / affine coordinates: (x, y) + * We're doing calculations in projective, because its operations don't require costly inversion. 
+ */ + class Point { + constructor(px, py, pz) { + this.px = px; + this.py = py; + this.pz = pz; + if (px == null || !Fp.isValid(px)) + throw new Error('x required'); + if (py == null || !Fp.isValid(py)) + throw new Error('y required'); + if (pz == null || !Fp.isValid(pz)) + throw new Error('z required'); + } + // Does not validate if the point is on-curve. + // Use fromHex instead, or call assertValidity() later. + static fromAffine(p) { + const { x, y } = p || {}; + if (!p || !Fp.isValid(x) || !Fp.isValid(y)) + throw new Error('invalid affine point'); + if (p instanceof Point) + throw new Error('projective point not allowed'); + const is0 = (i) => Fp.eql(i, Fp.ZERO); + // fromAffine(x:0, y:0) would produce (x:0, y:0, z:1), but we need (x:0, y:1, z:0) + if (is0(x) && is0(y)) + return Point.ZERO; + return new Point(x, y, Fp.ONE); + } + get x() { + return this.toAffine().x; + } + get y() { + return this.toAffine().y; + } + /** + * Takes a bunch of Projective Points but executes only one + * inversion on all of them. Inversion is very slow operation, + * so this improves performance massively. + * Optimization: converts a list of projective points to a list of identical points with Z=1. + */ + static normalizeZ(points) { + const toInv = Fp.invertBatch(points.map((p) => p.pz)); + return points.map((p, i) => p.toAffine(toInv[i])).map(Point.fromAffine); + } + /** + * Converts hash string or Uint8Array to Point. + * @param hex short/long ECDSA hex + */ + static fromHex(hex) { + const P = Point.fromAffine(fromBytes((0, utils_js_1.ensureBytes)('pointHex', hex))); + P.assertValidity(); + return P; + } + // Multiplies generator point by privateKey. + static fromPrivateKey(privateKey) { + return Point.BASE.multiply(normPrivateKeyToScalar(privateKey)); + } + // "Private method", don't use it directly + _setWindowSize(windowSize) { + this._WINDOW_SIZE = windowSize; + pointPrecomputes.delete(this); + } + // A point on curve is valid if it conforms to equation. 
+ assertValidity() { + if (this.is0()) { + // (0, 1, 0) aka ZERO is invalid in most contexts. + // In BLS, ZERO can be serialized, so we allow it. + // (0, 0, 0) is wrong representation of ZERO and is always invalid. + if (CURVE.allowInfinityPoint && !Fp.is0(this.py)) + return; + throw new Error('bad point: ZERO'); + } + // Some 3rd-party test vectors require different wording between here & `fromCompressedHex` + const { x, y } = this.toAffine(); + // Check if x, y are valid field elements + if (!Fp.isValid(x) || !Fp.isValid(y)) + throw new Error('bad point: x or y not FE'); + const left = Fp.sqr(y); // y² + const right = weierstrassEquation(x); // x³ + ax + b + if (!Fp.eql(left, right)) + throw new Error('bad point: equation left != right'); + if (!this.isTorsionFree()) + throw new Error('bad point: not in prime-order subgroup'); + } + hasEvenY() { + const { y } = this.toAffine(); + if (Fp.isOdd) + return !Fp.isOdd(y); + throw new Error("Field doesn't support isOdd"); + } + /** + * Compare one point to another. + */ + equals(other) { + assertPrjPoint(other); + const { px: X1, py: Y1, pz: Z1 } = this; + const { px: X2, py: Y2, pz: Z2 } = other; + const U1 = Fp.eql(Fp.mul(X1, Z2), Fp.mul(X2, Z1)); + const U2 = Fp.eql(Fp.mul(Y1, Z2), Fp.mul(Y2, Z1)); + return U1 && U2; + } + /** + * Flips point to one corresponding to (x, -y) in Affine coordinates. + */ + negate() { + return new Point(this.px, Fp.neg(this.py), this.pz); + } + // Renes-Costello-Batina exception-free doubling formula. + // There is 30% faster Jacobian formula, but it is not complete. + // https://eprint.iacr.org/2015/1060, algorithm 3 + // Cost: 8M + 3S + 3*a + 2*b3 + 15add. 
+ double() { + const { a, b } = CURVE; + const b3 = Fp.mul(b, _3n); + const { px: X1, py: Y1, pz: Z1 } = this; + let X3 = Fp.ZERO, Y3 = Fp.ZERO, Z3 = Fp.ZERO; // prettier-ignore + let t0 = Fp.mul(X1, X1); // step 1 + let t1 = Fp.mul(Y1, Y1); + let t2 = Fp.mul(Z1, Z1); + let t3 = Fp.mul(X1, Y1); + t3 = Fp.add(t3, t3); // step 5 + Z3 = Fp.mul(X1, Z1); + Z3 = Fp.add(Z3, Z3); + X3 = Fp.mul(a, Z3); + Y3 = Fp.mul(b3, t2); + Y3 = Fp.add(X3, Y3); // step 10 + X3 = Fp.sub(t1, Y3); + Y3 = Fp.add(t1, Y3); + Y3 = Fp.mul(X3, Y3); + X3 = Fp.mul(t3, X3); + Z3 = Fp.mul(b3, Z3); // step 15 + t2 = Fp.mul(a, t2); + t3 = Fp.sub(t0, t2); + t3 = Fp.mul(a, t3); + t3 = Fp.add(t3, Z3); + Z3 = Fp.add(t0, t0); // step 20 + t0 = Fp.add(Z3, t0); + t0 = Fp.add(t0, t2); + t0 = Fp.mul(t0, t3); + Y3 = Fp.add(Y3, t0); + t2 = Fp.mul(Y1, Z1); // step 25 + t2 = Fp.add(t2, t2); + t0 = Fp.mul(t2, t3); + X3 = Fp.sub(X3, t0); + Z3 = Fp.mul(t2, t1); + Z3 = Fp.add(Z3, Z3); // step 30 + Z3 = Fp.add(Z3, Z3); + return new Point(X3, Y3, Z3); + } + // Renes-Costello-Batina exception-free addition formula. + // There is 30% faster Jacobian formula, but it is not complete. + // https://eprint.iacr.org/2015/1060, algorithm 1 + // Cost: 12M + 0S + 3*a + 3*b3 + 23add. 
+ add(other) { + assertPrjPoint(other); + const { px: X1, py: Y1, pz: Z1 } = this; + const { px: X2, py: Y2, pz: Z2 } = other; + let X3 = Fp.ZERO, Y3 = Fp.ZERO, Z3 = Fp.ZERO; // prettier-ignore + const a = CURVE.a; + const b3 = Fp.mul(CURVE.b, _3n); + let t0 = Fp.mul(X1, X2); // step 1 + let t1 = Fp.mul(Y1, Y2); + let t2 = Fp.mul(Z1, Z2); + let t3 = Fp.add(X1, Y1); + let t4 = Fp.add(X2, Y2); // step 5 + t3 = Fp.mul(t3, t4); + t4 = Fp.add(t0, t1); + t3 = Fp.sub(t3, t4); + t4 = Fp.add(X1, Z1); + let t5 = Fp.add(X2, Z2); // step 10 + t4 = Fp.mul(t4, t5); + t5 = Fp.add(t0, t2); + t4 = Fp.sub(t4, t5); + t5 = Fp.add(Y1, Z1); + X3 = Fp.add(Y2, Z2); // step 15 + t5 = Fp.mul(t5, X3); + X3 = Fp.add(t1, t2); + t5 = Fp.sub(t5, X3); + Z3 = Fp.mul(a, t4); + X3 = Fp.mul(b3, t2); // step 20 + Z3 = Fp.add(X3, Z3); + X3 = Fp.sub(t1, Z3); + Z3 = Fp.add(t1, Z3); + Y3 = Fp.mul(X3, Z3); + t1 = Fp.add(t0, t0); // step 25 + t1 = Fp.add(t1, t0); + t2 = Fp.mul(a, t2); + t4 = Fp.mul(b3, t4); + t1 = Fp.add(t1, t2); + t2 = Fp.sub(t0, t2); // step 30 + t2 = Fp.mul(a, t2); + t4 = Fp.add(t4, t2); + t0 = Fp.mul(t1, t4); + Y3 = Fp.add(Y3, t0); + t0 = Fp.mul(t5, t4); // step 35 + X3 = Fp.mul(t3, X3); + X3 = Fp.sub(X3, t0); + t0 = Fp.mul(t3, t1); + Z3 = Fp.mul(t5, Z3); + Z3 = Fp.add(Z3, t0); // step 40 + return new Point(X3, Y3, Z3); + } + subtract(other) { + return this.add(other.negate()); + } + is0() { + return this.equals(Point.ZERO); + } + wNAF(n) { + return wnaf.wNAFCached(this, pointPrecomputes, n, (comp) => { + const toInv = Fp.invertBatch(comp.map((p) => p.pz)); + return comp.map((p, i) => p.toAffine(toInv[i])).map(Point.fromAffine); + }); + } + /** + * Non-constant-time multiplication. Uses double-and-add algorithm. + * It's faster, but should only be used when you don't care about + * an exposed private key e.g. sig verification, which works over *public* keys. 
+ */ + multiplyUnsafe(n) { + const I = Point.ZERO; + if (n === _0n) + return I; + assertGE(n); // Will throw on 0 + if (n === _1n) + return this; + const { endo } = CURVE; + if (!endo) + return wnaf.unsafeLadder(this, n); + // Apply endomorphism + let { k1neg, k1, k2neg, k2 } = endo.splitScalar(n); + let k1p = I; + let k2p = I; + let d = this; + while (k1 > _0n || k2 > _0n) { + if (k1 & _1n) + k1p = k1p.add(d); + if (k2 & _1n) + k2p = k2p.add(d); + d = d.double(); + k1 >>= _1n; + k2 >>= _1n; + } + if (k1neg) + k1p = k1p.negate(); + if (k2neg) + k2p = k2p.negate(); + k2p = new Point(Fp.mul(k2p.px, endo.beta), k2p.py, k2p.pz); + return k1p.add(k2p); + } + /** + * Constant time multiplication. + * Uses wNAF method. Windowed method may be 10% faster, + * but takes 2x longer to generate and consumes 2x memory. + * Uses precomputes when available. + * Uses endomorphism for Koblitz curves. + * @param scalar by which the point would be multiplied + * @returns New point + */ + multiply(scalar) { + assertGE(scalar); + let n = scalar; + let point, fake; // Fake point is used to const-time mult + const { endo } = CURVE; + if (endo) { + const { k1neg, k1, k2neg, k2 } = endo.splitScalar(n); + let { p: k1p, f: f1p } = this.wNAF(k1); + let { p: k2p, f: f2p } = this.wNAF(k2); + k1p = wnaf.constTimeNegate(k1neg, k1p); + k2p = wnaf.constTimeNegate(k2neg, k2p); + k2p = new Point(Fp.mul(k2p.px, endo.beta), k2p.py, k2p.pz); + point = k1p.add(k2p); + fake = f1p.add(f2p); + } + else { + const { p, f } = this.wNAF(n); + point = p; + fake = f; + } + // Normalize `z` for both points, but return only real one + return Point.normalizeZ([point, fake])[0]; + } + /** + * Efficiently calculate `aP + bQ`. Unsafe, can expose private key, if used incorrectly. + * Not using Strauss-Shamir trick: precomputation tables are faster. + * The trick could be useful if both P and Q are not G (not in our case). 
+ * @returns non-zero affine point + */ + multiplyAndAddUnsafe(Q, a, b) { + const G = Point.BASE; // No Strauss-Shamir trick: we have 10% faster G precomputes + const mul = (P, a // Select faster multiply() method + ) => (a === _0n || a === _1n || !P.equals(G) ? P.multiplyUnsafe(a) : P.multiply(a)); + const sum = mul(this, a).add(mul(Q, b)); + return sum.is0() ? undefined : sum; + } + // Converts Projective point to affine (x, y) coordinates. + // Can accept precomputed Z^-1 - for example, from invertBatch. + // (x, y, z) ∋ (x=x/z, y=y/z) + toAffine(iz) { + const { px: x, py: y, pz: z } = this; + const is0 = this.is0(); + // If invZ was 0, we return zero point. However we still want to execute + // all operations, so we replace invZ with a random number, 1. + if (iz == null) + iz = is0 ? Fp.ONE : Fp.inv(z); + const ax = Fp.mul(x, iz); + const ay = Fp.mul(y, iz); + const zz = Fp.mul(z, iz); + if (is0) + return { x: Fp.ZERO, y: Fp.ZERO }; + if (!Fp.eql(zz, Fp.ONE)) + throw new Error('invZ was invalid'); + return { x: ax, y: ay }; + } + isTorsionFree() { + const { h: cofactor, isTorsionFree } = CURVE; + if (cofactor === _1n) + return true; // No subgroups, always torsion-free + if (isTorsionFree) + return isTorsionFree(Point, this); + throw new Error('isTorsionFree() has not been declared for the elliptic curve'); + } + clearCofactor() { + const { h: cofactor, clearCofactor } = CURVE; + if (cofactor === _1n) + return this; // Fast-path + if (clearCofactor) + return clearCofactor(Point, this); + return this.multiplyUnsafe(CURVE.h); + } + toRawBytes(isCompressed = true) { + this.assertValidity(); + return toBytes(Point, this, isCompressed); + } + toHex(isCompressed = true) { + return ut.bytesToHex(this.toRawBytes(isCompressed)); + } + } + Point.BASE = new Point(CURVE.Gx, CURVE.Gy, Fp.ONE); + Point.ZERO = new Point(Fp.ZERO, Fp.ONE, Fp.ZERO); + const _bits = CURVE.nBitLength; + const wnaf = (0, curve_js_1.wNAF)(Point, CURVE.endo ? 
Math.ceil(_bits / 2) : _bits); + // Validate if generator point is on curve + return { + CURVE, + ProjectivePoint: Point, + normPrivateKeyToScalar, + weierstrassEquation, + isWithinCurveOrder, + }; + } + exports.weierstrassPoints = weierstrassPoints; + function validateOpts(curve) { + const opts = (0, curve_js_1.validateBasic)(curve); + ut.validateObject(opts, { + hash: 'hash', + hmac: 'function', + randomBytes: 'function', + }, { + bits2int: 'function', + bits2int_modN: 'function', + lowS: 'boolean', + }); + return Object.freeze({ lowS: true, ...opts }); + } + function weierstrass(curveDef) { + const CURVE = validateOpts(curveDef); + const { Fp, n: CURVE_ORDER } = CURVE; + const compressedLen = Fp.BYTES + 1; // e.g. 33 for 32 + const uncompressedLen = 2 * Fp.BYTES + 1; // e.g. 65 for 32 + function isValidFieldElement(num) { + return _0n < num && num < Fp.ORDER; // 0 is banned since it's not invertible FE + } + function modN(a) { + return mod.mod(a, CURVE_ORDER); + } + function invN(a) { + return mod.invert(a, CURVE_ORDER); + } + const { ProjectivePoint: Point, normPrivateKeyToScalar, weierstrassEquation, isWithinCurveOrder, } = weierstrassPoints({ + ...CURVE, + toBytes(_c, point, isCompressed) { + const a = point.toAffine(); + const x = Fp.toBytes(a.x); + const cat = ut.concatBytes; + if (isCompressed) { + return cat(Uint8Array.from([point.hasEvenY() ? 
0x02 : 0x03]), x); + } + else { + return cat(Uint8Array.from([0x04]), x, Fp.toBytes(a.y)); + } + }, + fromBytes(bytes) { + const len = bytes.length; + const head = bytes[0]; + const tail = bytes.subarray(1); + // this.assertValidity() is done inside of fromHex + if (len === compressedLen && (head === 0x02 || head === 0x03)) { + const x = ut.bytesToNumberBE(tail); + if (!isValidFieldElement(x)) + throw new Error('Point is not on curve'); + const y2 = weierstrassEquation(x); // y² = x³ + ax + b + let y = Fp.sqrt(y2); // y = y² ^ (p+1)/4 + const isYOdd = (y & _1n) === _1n; + // ECDSA + const isHeadOdd = (head & 1) === 1; + if (isHeadOdd !== isYOdd) + y = Fp.neg(y); + return { x, y }; + } + else if (len === uncompressedLen && head === 0x04) { + const x = Fp.fromBytes(tail.subarray(0, Fp.BYTES)); + const y = Fp.fromBytes(tail.subarray(Fp.BYTES, 2 * Fp.BYTES)); + return { x, y }; + } + else { + throw new Error(`Point of length ${len} was invalid. Expected ${compressedLen} compressed bytes or ${uncompressedLen} uncompressed bytes`); + } + }, + }); + const numToNByteStr = (num) => ut.bytesToHex(ut.numberToBytesBE(num, CURVE.nByteLength)); + function isBiggerThanHalfOrder(number) { + const HALF = CURVE_ORDER >> _1n; + return number > HALF; + } + function normalizeS(s) { + return isBiggerThanHalfOrder(s) ? modN(-s) : s; + } + // slice bytes num + const slcNum = (b, from, to) => ut.bytesToNumberBE(b.slice(from, to)); + /** + * ECDSA signature with its (r, s) properties. Supports DER & compact representations. 
+ */ + class Signature { + constructor(r, s, recovery) { + this.r = r; + this.s = s; + this.recovery = recovery; + this.assertValidity(); + } + // pair (bytes of r, bytes of s) + static fromCompact(hex) { + const l = CURVE.nByteLength; + hex = (0, utils_js_1.ensureBytes)('compactSignature', hex, l * 2); + return new Signature(slcNum(hex, 0, l), slcNum(hex, l, 2 * l)); + } + // DER encoded ECDSA signature + // https://bitcoin.stackexchange.com/questions/57644/what-are-the-parts-of-a-bitcoin-transaction-input-script + static fromDER(hex) { + const { r, s } = exports.DER.toSig((0, utils_js_1.ensureBytes)('DER', hex)); + return new Signature(r, s); + } + assertValidity() { + // can use assertGE here + if (!isWithinCurveOrder(this.r)) + throw new Error('r must be 0 < r < CURVE.n'); + if (!isWithinCurveOrder(this.s)) + throw new Error('s must be 0 < s < CURVE.n'); + } + addRecoveryBit(recovery) { + return new Signature(this.r, this.s, recovery); + } + recoverPublicKey(msgHash) { + const { r, s, recovery: rec } = this; + const h = bits2int_modN((0, utils_js_1.ensureBytes)('msgHash', msgHash)); // Truncate hash + if (rec == null || ![0, 1, 2, 3].includes(rec)) + throw new Error('recovery id invalid'); + const radj = rec === 2 || rec === 3 ? r + CURVE.n : r; + if (radj >= Fp.ORDER) + throw new Error('recovery id 2 or 3 invalid'); + const prefix = (rec & 1) === 0 ? '02' : '03'; + const R = Point.fromHex(prefix + numToNByteStr(radj)); + const ir = invN(radj); // r^-1 + const u1 = modN(-h * ir); // -hr^-1 + const u2 = modN(s * ir); // sr^-1 + const Q = Point.BASE.multiplyAndAddUnsafe(R, u1, u2); // (sr^-1)R-(hr^-1)G = -(hr^-1)G + (sr^-1) + if (!Q) + throw new Error('point at infinify'); // unsafe is fine: no priv data leaked + Q.assertValidity(); + return Q; + } + // Signatures should be low-s, to prevent malleability. + hasHighS() { + return isBiggerThanHalfOrder(this.s); + } + normalizeS() { + return this.hasHighS() ? 
new Signature(this.r, modN(-this.s), this.recovery) : this; + } + // DER-encoded + toDERRawBytes() { + return ut.hexToBytes(this.toDERHex()); + } + toDERHex() { + return exports.DER.hexFromSig({ r: this.r, s: this.s }); + } + // padded bytes of r, then padded bytes of s + toCompactRawBytes() { + return ut.hexToBytes(this.toCompactHex()); + } + toCompactHex() { + return numToNByteStr(this.r) + numToNByteStr(this.s); + } + } + const utils = { + isValidPrivateKey(privateKey) { + try { + normPrivateKeyToScalar(privateKey); + return true; + } + catch (error) { + return false; + } + }, + normPrivateKeyToScalar: normPrivateKeyToScalar, + /** + * Produces cryptographically secure private key from random of size + * (groupLen + ceil(groupLen / 2)) with modulo bias being negligible. + */ + randomPrivateKey: () => { + const length = mod.getMinHashLength(CURVE.n); + return mod.mapHashToField(CURVE.randomBytes(length), CURVE.n); + }, + /** + * Creates precompute table for an arbitrary EC point. Makes point "cached". + * Allows to massively speed-up `point.multiply(scalar)`. + * @returns cached point + * @example + * const fast = utils.precompute(8, ProjectivePoint.fromHex(someonesPubKey)); + * fast.multiply(privKey); // much faster ECDH now + */ + precompute(windowSize = 8, point = Point.BASE) { + point._setWindowSize(windowSize); + point.multiply(BigInt(3)); // 3 is arbitrary, just need any number here + return point; + }, + }; + /** + * Computes public key for a private key. Checks for validity of the private key. + * @param privateKey private key + * @param isCompressed whether to return compact (default), or full key + * @returns Public key, full when isCompressed=false; short when isCompressed=true + */ + function getPublicKey(privateKey, isCompressed = true) { + return Point.fromPrivateKey(privateKey).toRawBytes(isCompressed); + } + /** + * Quick and dirty check for item being public key. Does not validate hex, or being on-curve. 
+ */ + function isProbPub(item) { + const arr = ut.isBytes(item); + const str = typeof item === 'string'; + const len = (arr || str) && item.length; + if (arr) + return len === compressedLen || len === uncompressedLen; + if (str) + return len === 2 * compressedLen || len === 2 * uncompressedLen; + if (item instanceof Point) + return true; + return false; + } + /** + * ECDH (Elliptic Curve Diffie Hellman). + * Computes shared public key from private key and public key. + * Checks: 1) private key validity 2) shared key is on-curve. + * Does NOT hash the result. + * @param privateA private key + * @param publicB different public key + * @param isCompressed whether to return compact (default), or full key + * @returns shared public key + */ + function getSharedSecret(privateA, publicB, isCompressed = true) { + if (isProbPub(privateA)) + throw new Error('first arg must be private key'); + if (!isProbPub(publicB)) + throw new Error('second arg must be public key'); + const b = Point.fromHex(publicB); // check for being on-curve + return b.multiply(normPrivateKeyToScalar(privateA)).toRawBytes(isCompressed); + } + // RFC6979: ensure ECDSA msg is X bytes and < N. RFC suggests optional truncating via bits2octets. + // FIPS 186-4 4.6 suggests the leftmost min(nBitLen, outLen) bits, which matches bits2int. + // bits2int can produce res>N, we can do mod(res, N) since the bitLen is the same. + // int2octets can't be used; pads small msgs with 0: unacceptatble for trunc as per RFC vectors + const bits2int = CURVE.bits2int || + function (bytes) { + // For curves with nBitLength % 8 !== 0: bits2octets(bits2octets(m)) !== bits2octets(m) + // for some cases, since bytes.length * 8 is not actual bitLength. + const num = ut.bytesToNumberBE(bytes); // check for == u8 done here + const delta = bytes.length * 8 - CURVE.nBitLength; // truncate to nBitLength leftmost bits + return delta > 0 ? 
num >> BigInt(delta) : num; + }; + const bits2int_modN = CURVE.bits2int_modN || + function (bytes) { + return modN(bits2int(bytes)); // can't use bytesToNumberBE here + }; + // NOTE: pads output with zero as per spec + const ORDER_MASK = ut.bitMask(CURVE.nBitLength); + /** + * Converts to bytes. Checks if num in `[0..ORDER_MASK-1]` e.g.: `[0..2^256-1]`. + */ + function int2octets(num) { + if (typeof num !== 'bigint') + throw new Error('bigint expected'); + if (!(_0n <= num && num < ORDER_MASK)) + throw new Error(`bigint expected < 2^${CURVE.nBitLength}`); + // works with order, can have different size than numToField! + return ut.numberToBytesBE(num, CURVE.nByteLength); + } + // Steps A, D of RFC6979 3.2 + // Creates RFC6979 seed; converts msg/privKey to numbers. + // Used only in sign, not in verify. + // NOTE: we cannot assume here that msgHash has same amount of bytes as curve order, this will be wrong at least for P521. + // Also it can be bigger for P224 + SHA256 + function prepSig(msgHash, privateKey, opts = defaultSigOpts) { + if (['recovered', 'canonical'].some((k) => k in opts)) + throw new Error('sign() legacy options not supported'); + const { hash, randomBytes } = CURVE; + let { lowS, prehash, extraEntropy: ent } = opts; // generates low-s sigs by default + if (lowS == null) + lowS = true; // RFC6979 3.2: we skip step A, because we already provide hash + msgHash = (0, utils_js_1.ensureBytes)('msgHash', msgHash); + if (prehash) + msgHash = (0, utils_js_1.ensureBytes)('prehashed msgHash', hash(msgHash)); + // We can't later call bits2octets, since nested bits2int is broken for curves + // with nBitLength % 8 !== 0. Because of that, we unwrap it here as int2octets call. + // const bits2octets = (bits) => int2octets(bits2int_modN(bits)) + const h1int = bits2int_modN(msgHash); + const d = normPrivateKeyToScalar(privateKey); // validate private key, convert to bigint + const seedArgs = [int2octets(d), int2octets(h1int)]; + // extraEntropy. 
RFC6979 3.6: additional k' (optional). + if (ent != null) { + // K = HMAC_K(V || 0x00 || int2octets(x) || bits2octets(h1) || k') + const e = ent === true ? randomBytes(Fp.BYTES) : ent; // generate random bytes OR pass as-is + seedArgs.push((0, utils_js_1.ensureBytes)('extraEntropy', e)); // check for being bytes + } + const seed = ut.concatBytes(...seedArgs); // Step D of RFC6979 3.2 + const m = h1int; // NOTE: no need to call bits2int second time here, it is inside truncateHash! + // Converts signature params into point w r/s, checks result for validity. + function k2sig(kBytes) { + // RFC 6979 Section 3.2, step 3: k = bits2int(T) + const k = bits2int(kBytes); // Cannot use fields methods, since it is group element + if (!isWithinCurveOrder(k)) + return; // Important: all mod() calls here must be done over N + const ik = invN(k); // k^-1 mod n + const q = Point.BASE.multiply(k).toAffine(); // q = Gk + const r = modN(q.x); // r = q.x mod n + if (r === _0n) + return; + // Can use scalar blinding b^-1(bm + bdr) where b ∈ [1,q−1] according to + // https://tches.iacr.org/index.php/TCHES/article/view/7337/6509. We've decided against it: + // a) dependency on CSPRNG b) 15% slowdown c) doesn't really help since bigints are not CT + const s = modN(ik * modN(m + r * d)); // Not using blinding here + if (s === _0n) + return; + let recovery = (q.x === r ? 0 : 2) | Number(q.y & _1n); // recovery bit (2 or 3, when q.x > n) + let normS = s; + if (lowS && isBiggerThanHalfOrder(s)) { + normS = normalizeS(s); // if lowS was passed, ensure s is always + recovery ^= 1; // // in the bottom half of N + } + return new Signature(r, normS, recovery); // use normS, not s + } + return { seed, k2sig }; + } + const defaultSigOpts = { lowS: CURVE.lowS, prehash: false }; + const defaultVerOpts = { lowS: CURVE.lowS, prehash: false }; + /** + * Signs message hash with a private key. 
+ * ``` + * sign(m, d, k) where + * (x, y) = G × k + * r = x mod n + * s = (m + dr)/k mod n + * ``` + * @param msgHash NOT message. msg needs to be hashed to `msgHash`, or use `prehash`. + * @param privKey private key + * @param opts lowS for non-malleable sigs. extraEntropy for mixing randomness into k. prehash will hash first arg. + * @returns signature with recovery param + */ + function sign(msgHash, privKey, opts = defaultSigOpts) { + const { seed, k2sig } = prepSig(msgHash, privKey, opts); // Steps A, D of RFC6979 3.2. + const C = CURVE; + const drbg = ut.createHmacDrbg(C.hash.outputLen, C.nByteLength, C.hmac); + return drbg(seed, k2sig); // Steps B, C, D, E, F, G + } + // Enable precomputes. Slows down first publicKey computation by 20ms. + Point.BASE._setWindowSize(8); + // utils.precompute(8, ProjectivePoint.BASE) + /** + * Verifies a signature against message hash and public key. + * Rejects lowS signatures by default: to override, + * specify option `{lowS: false}`. Implements section 4.1.4 from https://www.secg.org/sec1-v2.pdf: + * + * ``` + * verify(r, s, h, P) where + * U1 = hs^-1 mod n + * U2 = rs^-1 mod n + * R = U1⋅G - U2⋅P + * mod(R.x, n) == r + * ``` + */ + function verify(signature, msgHash, publicKey, opts = defaultVerOpts) { + const sg = signature; + msgHash = (0, utils_js_1.ensureBytes)('msgHash', msgHash); + publicKey = (0, utils_js_1.ensureBytes)('publicKey', publicKey); + if ('strict' in opts) + throw new Error('options.strict was renamed to lowS'); + const { lowS, prehash } = opts; + let _sig = undefined; + let P; + try { + if (typeof sg === 'string' || ut.isBytes(sg)) { + // Signature can be represented in 2 ways: compact (2*nByteLength) & DER (variable-length). + // Since DER can also be 2*nByteLength bytes, we check for it first. 
+ try { + _sig = Signature.fromDER(sg); + } + catch (derError) { + if (!(derError instanceof exports.DER.Err)) + throw derError; + _sig = Signature.fromCompact(sg); + } + } + else if (typeof sg === 'object' && typeof sg.r === 'bigint' && typeof sg.s === 'bigint') { + const { r, s } = sg; + _sig = new Signature(r, s); + } + else { + throw new Error('PARSE'); + } + P = Point.fromHex(publicKey); + } + catch (error) { + if (error.message === 'PARSE') + throw new Error(`signature must be Signature instance, Uint8Array or hex string`); + return false; + } + if (lowS && _sig.hasHighS()) + return false; + if (prehash) + msgHash = CURVE.hash(msgHash); + const { r, s } = _sig; + const h = bits2int_modN(msgHash); // Cannot use fields methods, since it is group element + const is = invN(s); // s^-1 + const u1 = modN(h * is); // u1 = hs^-1 mod n + const u2 = modN(r * is); // u2 = rs^-1 mod n + const R = Point.BASE.multiplyAndAddUnsafe(P, u1, u2)?.toAffine(); // R = u1⋅G + u2⋅P + if (!R) + return false; + const v = modN(R.x); + return v === r; + } + return { + CURVE, + getPublicKey, + getSharedSecret, + sign, + verify, + ProjectivePoint: Point, + Signature, + utils, + }; + } + exports.weierstrass = weierstrass; + /** + * Implementation of the Shallue and van de Woestijne method for any weierstrass curve. + * TODO: check if there is a way to merge this with uvRatio in Edwards; move to modular. + * b = True and y = sqrt(u / v) if (u / v) is square in F, and + * b = False and y = sqrt(Z * (u / v)) otherwise. + * @param Fp + * @param Z + * @returns + */ + function SWUFpSqrtRatio(Fp, Z) { + // Generic implementation + const q = Fp.ORDER; + let l = _0n; + for (let o = q - _1n; o % _2n === _0n; o /= _2n) + l += _1n; + const c1 = l; // 1. c1, the largest integer such that 2^c1 divides q - 1. + // We need 2n ** c1 and 2n ** (c1-1). We can't use **; but we can use <<. 
+ // 2n ** c1 == 2n << (c1-1) + const _2n_pow_c1_1 = _2n << (c1 - _1n - _1n); + const _2n_pow_c1 = _2n_pow_c1_1 * _2n; + const c2 = (q - _1n) / _2n_pow_c1; // 2. c2 = (q - 1) / (2^c1) # Integer arithmetic + const c3 = (c2 - _1n) / _2n; // 3. c3 = (c2 - 1) / 2 # Integer arithmetic + const c4 = _2n_pow_c1 - _1n; // 4. c4 = 2^c1 - 1 # Integer arithmetic + const c5 = _2n_pow_c1_1; // 5. c5 = 2^(c1 - 1) # Integer arithmetic + const c6 = Fp.pow(Z, c2); // 6. c6 = Z^c2 + const c7 = Fp.pow(Z, (c2 + _1n) / _2n); // 7. c7 = Z^((c2 + 1) / 2) + let sqrtRatio = (u, v) => { + let tv1 = c6; // 1. tv1 = c6 + let tv2 = Fp.pow(v, c4); // 2. tv2 = v^c4 + let tv3 = Fp.sqr(tv2); // 3. tv3 = tv2^2 + tv3 = Fp.mul(tv3, v); // 4. tv3 = tv3 * v + let tv5 = Fp.mul(u, tv3); // 5. tv5 = u * tv3 + tv5 = Fp.pow(tv5, c3); // 6. tv5 = tv5^c3 + tv5 = Fp.mul(tv5, tv2); // 7. tv5 = tv5 * tv2 + tv2 = Fp.mul(tv5, v); // 8. tv2 = tv5 * v + tv3 = Fp.mul(tv5, u); // 9. tv3 = tv5 * u + let tv4 = Fp.mul(tv3, tv2); // 10. tv4 = tv3 * tv2 + tv5 = Fp.pow(tv4, c5); // 11. tv5 = tv4^c5 + let isQR = Fp.eql(tv5, Fp.ONE); // 12. isQR = tv5 == 1 + tv2 = Fp.mul(tv3, c7); // 13. tv2 = tv3 * c7 + tv5 = Fp.mul(tv4, tv1); // 14. tv5 = tv4 * tv1 + tv3 = Fp.cmov(tv2, tv3, isQR); // 15. tv3 = CMOV(tv2, tv3, isQR) + tv4 = Fp.cmov(tv5, tv4, isQR); // 16. tv4 = CMOV(tv5, tv4, isQR) + // 17. for i in (c1, c1 - 1, ..., 2): + for (let i = c1; i > _1n; i--) { + let tv5 = i - _2n; // 18. tv5 = i - 2 + tv5 = _2n << (tv5 - _1n); // 19. tv5 = 2^tv5 + let tvv5 = Fp.pow(tv4, tv5); // 20. tv5 = tv4^tv5 + const e1 = Fp.eql(tvv5, Fp.ONE); // 21. e1 = tv5 == 1 + tv2 = Fp.mul(tv3, tv1); // 22. tv2 = tv3 * tv1 + tv1 = Fp.mul(tv1, tv1); // 23. tv1 = tv1 * tv1 + tvv5 = Fp.mul(tv4, tv1); // 24. tv5 = tv4 * tv1 + tv3 = Fp.cmov(tv2, tv3, e1); // 25. tv3 = CMOV(tv2, tv3, e1) + tv4 = Fp.cmov(tvv5, tv4, e1); // 26. 
tv4 = CMOV(tv5, tv4, e1) + } + return { isValid: isQR, value: tv3 }; + }; + if (Fp.ORDER % _4n === _3n) { + // sqrt_ratio_3mod4(u, v) + const c1 = (Fp.ORDER - _3n) / _4n; // 1. c1 = (q - 3) / 4 # Integer arithmetic + const c2 = Fp.sqrt(Fp.neg(Z)); // 2. c2 = sqrt(-Z) + sqrtRatio = (u, v) => { + let tv1 = Fp.sqr(v); // 1. tv1 = v^2 + const tv2 = Fp.mul(u, v); // 2. tv2 = u * v + tv1 = Fp.mul(tv1, tv2); // 3. tv1 = tv1 * tv2 + let y1 = Fp.pow(tv1, c1); // 4. y1 = tv1^c1 + y1 = Fp.mul(y1, tv2); // 5. y1 = y1 * tv2 + const y2 = Fp.mul(y1, c2); // 6. y2 = y1 * c2 + const tv3 = Fp.mul(Fp.sqr(y1), v); // 7. tv3 = y1^2; 8. tv3 = tv3 * v + const isQR = Fp.eql(tv3, u); // 9. isQR = tv3 == u + let y = Fp.cmov(y2, y1, isQR); // 10. y = CMOV(y2, y1, isQR) + return { isValid: isQR, value: y }; // 11. return (isQR, y) isQR ? y : y*c2 + }; + } + // No curves uses that + // if (Fp.ORDER % _8n === _5n) // sqrt_ratio_5mod8 + return sqrtRatio; + } + exports.SWUFpSqrtRatio = SWUFpSqrtRatio; + /** + * Simplified Shallue-van de Woestijne-Ulas Method + * https://www.rfc-editor.org/rfc/rfc9380#section-6.6.2 + */ + function mapToCurveSimpleSWU(Fp, opts) { + mod.validateField(Fp); + if (!Fp.isValid(opts.A) || !Fp.isValid(opts.B) || !Fp.isValid(opts.Z)) + throw new Error('mapToCurveSimpleSWU: invalid opts'); + const sqrtRatio = SWUFpSqrtRatio(Fp, opts.Z); + if (!Fp.isOdd) + throw new Error('Fp.isOdd is not implemented!'); + // Input: u, an element of F. + // Output: (x, y), a point on E. + return (u) => { + // prettier-ignore + let tv1, tv2, tv3, tv4, tv5, tv6, x, y; + tv1 = Fp.sqr(u); // 1. tv1 = u^2 + tv1 = Fp.mul(tv1, opts.Z); // 2. tv1 = Z * tv1 + tv2 = Fp.sqr(tv1); // 3. tv2 = tv1^2 + tv2 = Fp.add(tv2, tv1); // 4. tv2 = tv2 + tv1 + tv3 = Fp.add(tv2, Fp.ONE); // 5. tv3 = tv2 + 1 + tv3 = Fp.mul(tv3, opts.B); // 6. tv3 = B * tv3 + tv4 = Fp.cmov(opts.Z, Fp.neg(tv2), !Fp.eql(tv2, Fp.ZERO)); // 7. tv4 = CMOV(Z, -tv2, tv2 != 0) + tv4 = Fp.mul(tv4, opts.A); // 8. 
tv4 = A * tv4 + tv2 = Fp.sqr(tv3); // 9. tv2 = tv3^2 + tv6 = Fp.sqr(tv4); // 10. tv6 = tv4^2 + tv5 = Fp.mul(tv6, opts.A); // 11. tv5 = A * tv6 + tv2 = Fp.add(tv2, tv5); // 12. tv2 = tv2 + tv5 + tv2 = Fp.mul(tv2, tv3); // 13. tv2 = tv2 * tv3 + tv6 = Fp.mul(tv6, tv4); // 14. tv6 = tv6 * tv4 + tv5 = Fp.mul(tv6, opts.B); // 15. tv5 = B * tv6 + tv2 = Fp.add(tv2, tv5); // 16. tv2 = tv2 + tv5 + x = Fp.mul(tv1, tv3); // 17. x = tv1 * tv3 + const { isValid, value } = sqrtRatio(tv2, tv6); // 18. (is_gx1_square, y1) = sqrt_ratio(tv2, tv6) + y = Fp.mul(tv1, u); // 19. y = tv1 * u -> Z * u^3 * y1 + y = Fp.mul(y, value); // 20. y = y * y1 + x = Fp.cmov(x, tv3, isValid); // 21. x = CMOV(x, tv3, is_gx1_square) + y = Fp.cmov(y, value, isValid); // 22. y = CMOV(y, y1, is_gx1_square) + const e1 = Fp.isOdd(u) === Fp.isOdd(y); // 23. e1 = sgn0(u) == sgn0(y) + y = Fp.cmov(Fp.neg(y), y, e1); // 24. y = CMOV(-y, y, e1) + x = Fp.div(x, tv4); // 25. x = x / tv4 + return { x, y }; + }; + } + exports.mapToCurveSimpleSWU = mapToCurveSimpleSWU; + +} (weierstrass)); + +var hashToCurve = {}; + +Object.defineProperty(hashToCurve, "__esModule", { value: true }); +hashToCurve.createHasher = hashToCurve.isogenyMap = hashToCurve.hash_to_field = hashToCurve.expand_message_xof = hashToCurve.expand_message_xmd = void 0; +const modular_js_1 = modular; +const utils_js_1$3 = utils$5; +function validateDST(dst) { + if ((0, utils_js_1$3.isBytes)(dst)) + return dst; + if (typeof dst === 'string') + return (0, utils_js_1$3.utf8ToBytes)(dst); + throw new Error('DST must be Uint8Array or string'); +} +// Octet Stream to Integer. "spec" implementation of os2ip is 2.5x slower vs bytesToNumberBE. 
+const os2ip = utils_js_1$3.bytesToNumberBE; +// Integer to Octet Stream (numberToBytesBE) +function i2osp(value, length) { + if (value < 0 || value >= 1 << (8 * length)) { + throw new Error(`bad I2OSP call: value=${value} length=${length}`); + } + const res = Array.from({ length }).fill(0); + for (let i = length - 1; i >= 0; i--) { + res[i] = value & 0xff; + value >>>= 8; + } + return new Uint8Array(res); +} +function strxor(a, b) { + const arr = new Uint8Array(a.length); + for (let i = 0; i < a.length; i++) { + arr[i] = a[i] ^ b[i]; + } + return arr; +} +function abytes(item) { + if (!(0, utils_js_1$3.isBytes)(item)) + throw new Error('Uint8Array expected'); +} +function isNum(item) { + if (!Number.isSafeInteger(item)) + throw new Error('number expected'); +} +// Produces a uniformly random byte string using a cryptographic hash function H that outputs b bits +// https://www.rfc-editor.org/rfc/rfc9380#section-5.3.1 +function expand_message_xmd(msg, DST, lenInBytes, H) { + abytes(msg); + abytes(DST); + isNum(lenInBytes); + // https://www.rfc-editor.org/rfc/rfc9380#section-5.3.3 + if (DST.length > 255) + DST = H((0, utils_js_1$3.concatBytes)((0, utils_js_1$3.utf8ToBytes)('H2C-OVERSIZE-DST-'), DST)); + const { outputLen: b_in_bytes, blockLen: r_in_bytes } = H; + const ell = Math.ceil(lenInBytes / b_in_bytes); + if (ell > 255) + throw new Error('Invalid xmd length'); + const DST_prime = (0, utils_js_1$3.concatBytes)(DST, i2osp(DST.length, 1)); + const Z_pad = i2osp(0, r_in_bytes); + const l_i_b_str = i2osp(lenInBytes, 2); // len_in_bytes_str + const b = new Array(ell); + const b_0 = H((0, utils_js_1$3.concatBytes)(Z_pad, msg, l_i_b_str, i2osp(0, 1), DST_prime)); + b[0] = H((0, utils_js_1$3.concatBytes)(b_0, i2osp(1, 1), DST_prime)); + for (let i = 1; i <= ell; i++) { + const args = [strxor(b_0, b[i - 1]), i2osp(i + 1, 1), DST_prime]; + b[i] = H((0, utils_js_1$3.concatBytes)(...args)); + } + const pseudo_random_bytes = (0, utils_js_1$3.concatBytes)(...b); + return 
pseudo_random_bytes.slice(0, lenInBytes); +} +hashToCurve.expand_message_xmd = expand_message_xmd; +// Produces a uniformly random byte string using an extendable-output function (XOF) H. +// 1. The collision resistance of H MUST be at least k bits. +// 2. H MUST be an XOF that has been proved indifferentiable from +// a random oracle under a reasonable cryptographic assumption. +// https://www.rfc-editor.org/rfc/rfc9380#section-5.3.2 +function expand_message_xof(msg, DST, lenInBytes, k, H) { + abytes(msg); + abytes(DST); + isNum(lenInBytes); + // https://www.rfc-editor.org/rfc/rfc9380#section-5.3.3 + // DST = H('H2C-OVERSIZE-DST-' || a_very_long_DST, Math.ceil((lenInBytes * k) / 8)); + if (DST.length > 255) { + const dkLen = Math.ceil((2 * k) / 8); + DST = H.create({ dkLen }).update((0, utils_js_1$3.utf8ToBytes)('H2C-OVERSIZE-DST-')).update(DST).digest(); + } + if (lenInBytes > 65535 || DST.length > 255) + throw new Error('expand_message_xof: invalid lenInBytes'); + return (H.create({ dkLen: lenInBytes }) + .update(msg) + .update(i2osp(lenInBytes, 2)) + // 2. DST_prime = DST || I2OSP(len(DST), 1) + .update(DST) + .update(i2osp(DST.length, 1)) + .digest()); +} +hashToCurve.expand_message_xof = expand_message_xof; +/** + * Hashes arbitrary-length byte strings to a list of one or more elements of a finite field F + * https://www.rfc-editor.org/rfc/rfc9380#section-5.2 + * @param msg a byte string containing the message to hash + * @param count the number of elements of F to output + * @param options `{DST: string, p: bigint, m: number, k: number, expand: 'xmd' | 'xof', hash: H}`, see above + * @returns [u_0, ..., u_(count - 1)], a list of field elements. 
+ */ +function hash_to_field(msg, count, options) { + (0, utils_js_1$3.validateObject)(options, { + DST: 'stringOrUint8Array', + p: 'bigint', + m: 'isSafeInteger', + k: 'isSafeInteger', + hash: 'hash', + }); + const { p, k, m, hash, expand, DST: _DST } = options; + abytes(msg); + isNum(count); + const DST = validateDST(_DST); + const log2p = p.toString(2).length; + const L = Math.ceil((log2p + k) / 8); // section 5.1 of ietf draft link above + const len_in_bytes = count * m * L; + let prb; // pseudo_random_bytes + if (expand === 'xmd') { + prb = expand_message_xmd(msg, DST, len_in_bytes, hash); + } + else if (expand === 'xof') { + prb = expand_message_xof(msg, DST, len_in_bytes, k, hash); + } + else if (expand === '_internal_pass') { + // for internal tests only + prb = msg; + } + else { + throw new Error('expand must be "xmd" or "xof"'); + } + const u = new Array(count); + for (let i = 0; i < count; i++) { + const e = new Array(m); + for (let j = 0; j < m; j++) { + const elm_offset = L * (j + i * m); + const tv = prb.subarray(elm_offset, elm_offset + L); + e[j] = (0, modular_js_1.mod)(os2ip(tv), p); + } + u[i] = e; + } + return u; +} +hashToCurve.hash_to_field = hash_to_field; +function isogenyMap(field, map) { + // Make same order as in spec + const COEFF = map.map((i) => Array.from(i).reverse()); + return (x, y) => { + const [xNum, xDen, yNum, yDen] = COEFF.map((val) => val.reduce((acc, i) => field.add(field.mul(acc, x), i))); + x = field.div(xNum, xDen); // xNum / xDen + y = field.mul(y, field.div(yNum, yDen)); // y * (yNum / yDev) + return { x, y }; + }; +} +hashToCurve.isogenyMap = isogenyMap; +function createHasher(Point, mapToCurve, def) { + if (typeof mapToCurve !== 'function') + throw new Error('mapToCurve() must be defined'); + return { + // Encodes byte string to elliptic curve. 
+ // hash_to_curve from https://www.rfc-editor.org/rfc/rfc9380#section-3 + hashToCurve(msg, options) { + const u = hash_to_field(msg, 2, { ...def, DST: def.DST, ...options }); + const u0 = Point.fromAffine(mapToCurve(u[0])); + const u1 = Point.fromAffine(mapToCurve(u[1])); + const P = u0.add(u1).clearCofactor(); + P.assertValidity(); + return P; + }, + // Encodes byte string to elliptic curve. + // encode_to_curve from https://www.rfc-editor.org/rfc/rfc9380#section-3 + encodeToCurve(msg, options) { + const u = hash_to_field(msg, 1, { ...def, DST: def.encodeDST, ...options }); + const P = Point.fromAffine(mapToCurve(u[0])).clearCofactor(); + P.assertValidity(); + return P; + }, + }; +} +hashToCurve.createHasher = createHasher; + +var _shortw_utils = {}; + +var hmac = {}; + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.hmac = exports.HMAC = void 0; + const _assert_js_1 = _assert$1; + const utils_js_1 = utils$6; + // HMAC (RFC 2104) + class HMAC extends utils_js_1.Hash { + constructor(hash, _key) { + super(); + this.finished = false; + this.destroyed = false; + (0, _assert_js_1.hash)(hash); + const key = (0, utils_js_1.toBytes)(_key); + this.iHash = hash.create(); + if (typeof this.iHash.update !== 'function') + throw new Error('Expected instance of class which extends utils.Hash'); + this.blockLen = this.iHash.blockLen; + this.outputLen = this.iHash.outputLen; + const blockLen = this.blockLen; + const pad = new Uint8Array(blockLen); + // blockLen can be bigger than outputLen + pad.set(key.length > blockLen ? 
hash.create().update(key).digest() : key); + for (let i = 0; i < pad.length; i++) + pad[i] ^= 0x36; + this.iHash.update(pad); + // By doing update (processing of first block) of outer hash here we can re-use it between multiple calls via clone + this.oHash = hash.create(); + // Undo internal XOR && apply outer XOR + for (let i = 0; i < pad.length; i++) + pad[i] ^= 0x36 ^ 0x5c; + this.oHash.update(pad); + pad.fill(0); + } + update(buf) { + (0, _assert_js_1.exists)(this); + this.iHash.update(buf); + return this; + } + digestInto(out) { + (0, _assert_js_1.exists)(this); + (0, _assert_js_1.bytes)(out, this.outputLen); + this.finished = true; + this.iHash.digestInto(out); + this.oHash.update(out); + this.oHash.digestInto(out); + this.destroy(); + } + digest() { + const out = new Uint8Array(this.oHash.outputLen); + this.digestInto(out); + return out; + } + _cloneInto(to) { + // Create new instance without calling constructor since key already in state and we don't know it. + to || (to = Object.create(Object.getPrototypeOf(this), {})); + const { oHash, iHash, finished, destroyed, blockLen, outputLen } = this; + to = to; + to.finished = finished; + to.destroyed = destroyed; + to.blockLen = blockLen; + to.outputLen = outputLen; + to.oHash = oHash._cloneInto(to.oHash); + to.iHash = iHash._cloneInto(to.iHash); + return to; + } + destroy() { + this.destroyed = true; + this.oHash.destroy(); + this.iHash.destroy(); + } + } + exports.HMAC = HMAC; + /** + * HMAC: RFC2104 message authentication code. + * @param hash - function that would be used e.g. sha256 + * @param key - message key + * @param message - message data + */ + const hmac = (hash, key, message) => new HMAC(hash, key).update(message).digest(); + exports.hmac = hmac; + exports.hmac.create = (hash, key) => new HMAC(hash, key); + +} (hmac)); + +Object.defineProperty(_shortw_utils, "__esModule", { value: true }); +_shortw_utils.createCurve = _shortw_utils.getHash = void 0; +/*! 
noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +const hmac_1 = hmac; +const utils_1$7 = utils$6; +const weierstrass_js_1 = weierstrass; +// connects noble-curves to noble-hashes +function getHash(hash) { + return { + hash, + hmac: (key, ...msgs) => (0, hmac_1.hmac)(hash, key, (0, utils_1$7.concatBytes)(...msgs)), + randomBytes: utils_1$7.randomBytes, + }; +} +_shortw_utils.getHash = getHash; +function createCurve(curveDef, defHash) { + const create = (hash) => (0, weierstrass_js_1.weierstrass)({ ...curveDef, ...getHash(hash) }); + return Object.freeze({ ...create(defHash), create }); +} +_shortw_utils.createCurve = createCurve; + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.encodeToCurve = exports.hashToCurve = exports.schnorr = exports.secp256k1 = void 0; + /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + const sha256_1 = sha256; + const utils_1 = utils$6; + const modular_js_1 = modular; + const weierstrass_js_1 = weierstrass; + const utils_js_1 = utils$5; + const hash_to_curve_js_1 = hashToCurve; + const _shortw_utils_js_1 = _shortw_utils; + const secp256k1P = BigInt('0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f'); + const secp256k1N = BigInt('0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141'); + const _1n = BigInt(1); + const _2n = BigInt(2); + const divNearest = (a, b) => (a + b / _2n) / b; + /** + * √n = n^((p+1)/4) for fields p = 3 mod 4. We unwrap the loop and multiply bit-by-bit. 
+ * (P+1n/4n).toString(2) would produce bits [223x 1, 0, 22x 1, 4x 0, 11, 00] + */ + function sqrtMod(y) { + const P = secp256k1P; + // prettier-ignore + const _3n = BigInt(3), _6n = BigInt(6), _11n = BigInt(11), _22n = BigInt(22); + // prettier-ignore + const _23n = BigInt(23), _44n = BigInt(44), _88n = BigInt(88); + const b2 = (y * y * y) % P; // x^3, 11 + const b3 = (b2 * b2 * y) % P; // x^7 + const b6 = ((0, modular_js_1.pow2)(b3, _3n, P) * b3) % P; + const b9 = ((0, modular_js_1.pow2)(b6, _3n, P) * b3) % P; + const b11 = ((0, modular_js_1.pow2)(b9, _2n, P) * b2) % P; + const b22 = ((0, modular_js_1.pow2)(b11, _11n, P) * b11) % P; + const b44 = ((0, modular_js_1.pow2)(b22, _22n, P) * b22) % P; + const b88 = ((0, modular_js_1.pow2)(b44, _44n, P) * b44) % P; + const b176 = ((0, modular_js_1.pow2)(b88, _88n, P) * b88) % P; + const b220 = ((0, modular_js_1.pow2)(b176, _44n, P) * b44) % P; + const b223 = ((0, modular_js_1.pow2)(b220, _3n, P) * b3) % P; + const t1 = ((0, modular_js_1.pow2)(b223, _23n, P) * b22) % P; + const t2 = ((0, modular_js_1.pow2)(t1, _6n, P) * b2) % P; + const root = (0, modular_js_1.pow2)(t2, _2n, P); + if (!Fp.eql(Fp.sqr(root), y)) + throw new Error('Cannot find square root'); + return root; + } + const Fp = (0, modular_js_1.Field)(secp256k1P, undefined, undefined, { sqrt: sqrtMod }); + exports.secp256k1 = (0, _shortw_utils_js_1.createCurve)({ + a: BigInt(0), // equation params: a, b + b: BigInt(7), // Seem to be rigid: bitcointalk.org/index.php?topic=289795.msg3183975#msg3183975 + Fp, // Field's prime: 2n**256n - 2n**32n - 2n**9n - 2n**8n - 2n**7n - 2n**6n - 2n**4n - 1n + n: secp256k1N, // Curve order, total count of valid points in the field + // Base point (x, y) aka generator point + Gx: BigInt('55066263022277343669578718895168534326250603453777594175500187360389116729240'), + Gy: BigInt('32670510020758816978083085130507043184471273380659243275938904335757337482424'), + h: BigInt(1), // Cofactor + lowS: true, // Allow only low-S 
signatures by default in sign() and verify() + /** + * secp256k1 belongs to Koblitz curves: it has efficiently computable endomorphism. + * Endomorphism uses 2x less RAM, speeds up precomputation by 2x and ECDH / key recovery by 20%. + * For precomputed wNAF it trades off 1/2 init time & 1/3 ram for 20% perf hit. + * Explanation: https://gist.github.com/paulmillr/eb670806793e84df628a7c434a873066 + */ + endo: { + beta: BigInt('0x7ae96a2b657c07106e64479eac3434e99cf0497512f58995c1396c28719501ee'), + splitScalar: (k) => { + const n = secp256k1N; + const a1 = BigInt('0x3086d221a7d46bcde86c90e49284eb15'); + const b1 = -_1n * BigInt('0xe4437ed6010e88286f547fa90abfe4c3'); + const a2 = BigInt('0x114ca50f7a8e2f3f657c1108d9d44cfd8'); + const b2 = a1; + const POW_2_128 = BigInt('0x100000000000000000000000000000000'); // (2n**128n).toString(16) + const c1 = divNearest(b2 * k, n); + const c2 = divNearest(-b1 * k, n); + let k1 = (0, modular_js_1.mod)(k - c1 * a1 - c2 * a2, n); + let k2 = (0, modular_js_1.mod)(-c1 * b1 - c2 * b2, n); + const k1neg = k1 > POW_2_128; + const k2neg = k2 > POW_2_128; + if (k1neg) + k1 = n - k1; + if (k2neg) + k2 = n - k2; + if (k1 > POW_2_128 || k2 > POW_2_128) { + throw new Error('splitScalar: Endomorphism failed, k=' + k); + } + return { k1neg, k1, k2neg, k2 }; + }, + }, + }, sha256_1.sha256); + // Schnorr signatures are superior to ECDSA from above. Below is Schnorr-specific BIP0340 code. 
+ // https://github.com/bitcoin/bips/blob/master/bip-0340.mediawiki + const _0n = BigInt(0); + const fe = (x) => typeof x === 'bigint' && _0n < x && x < secp256k1P; + const ge = (x) => typeof x === 'bigint' && _0n < x && x < secp256k1N; + /** An object mapping tags to their tagged hash prefix of [SHA256(tag) | SHA256(tag)] */ + const TAGGED_HASH_PREFIXES = {}; + function taggedHash(tag, ...messages) { + let tagP = TAGGED_HASH_PREFIXES[tag]; + if (tagP === undefined) { + const tagH = (0, sha256_1.sha256)(Uint8Array.from(tag, (c) => c.charCodeAt(0))); + tagP = (0, utils_js_1.concatBytes)(tagH, tagH); + TAGGED_HASH_PREFIXES[tag] = tagP; + } + return (0, sha256_1.sha256)((0, utils_js_1.concatBytes)(tagP, ...messages)); + } + // ECDSA compact points are 33-byte. Schnorr is 32: we strip first byte 0x02 or 0x03 + const pointToBytes = (point) => point.toRawBytes(true).slice(1); + const numTo32b = (n) => (0, utils_js_1.numberToBytesBE)(n, 32); + const modP = (x) => (0, modular_js_1.mod)(x, secp256k1P); + const modN = (x) => (0, modular_js_1.mod)(x, secp256k1N); + const Point = exports.secp256k1.ProjectivePoint; + const GmulAdd = (Q, a, b) => Point.BASE.multiplyAndAddUnsafe(Q, a, b); + // Calculate point, scalar and bytes + function schnorrGetExtPubKey(priv) { + let d_ = exports.secp256k1.utils.normPrivateKeyToScalar(priv); // same method executed in fromPrivateKey + let p = Point.fromPrivateKey(d_); // P = d'⋅G; 0 < d' < n check is done inside + const scalar = p.hasEvenY() ? d_ : modN(-d_); + return { scalar: scalar, bytes: pointToBytes(p) }; + } + /** + * lift_x from BIP340. Convert 32-byte x coordinate to elliptic curve point. + * @returns valid point checked for being on-curve + */ + function lift_x(x) { + if (!fe(x)) + throw new Error('bad x: need 0 < x < p'); // Fail if x ≥ p. + const xx = modP(x * x); + const c = modP(xx * x + BigInt(7)); // Let c = x³ + 7 mod p. + let y = sqrtMod(c); // Let y = c^(p+1)/4 mod p. 
+ if (y % _2n !== _0n) + y = modP(-y); // Return the unique point P such that x(P) = x and + const p = new Point(x, y, _1n); // y(P) = y if y mod 2 = 0 or y(P) = p-y otherwise. + p.assertValidity(); + return p; + } + /** + * Create tagged hash, convert it to bigint, reduce modulo-n. + */ + function challenge(...args) { + return modN((0, utils_js_1.bytesToNumberBE)(taggedHash('BIP0340/challenge', ...args))); + } + /** + * Schnorr public key is just `x` coordinate of Point as per BIP340. + */ + function schnorrGetPublicKey(privateKey) { + return schnorrGetExtPubKey(privateKey).bytes; // d'=int(sk). Fail if d'=0 or d'≥n. Ret bytes(d'⋅G) + } + /** + * Creates Schnorr signature as per BIP340. Verifies itself before returning anything. + * auxRand is optional and is not the sole source of k generation: bad CSPRNG won't be dangerous. + */ + function schnorrSign(message, privateKey, auxRand = (0, utils_1.randomBytes)(32)) { + const m = (0, utils_js_1.ensureBytes)('message', message); + const { bytes: px, scalar: d } = schnorrGetExtPubKey(privateKey); // checks for isWithinCurveOrder + const a = (0, utils_js_1.ensureBytes)('auxRand', auxRand, 32); // Auxiliary random data a: a 32-byte array + const t = numTo32b(d ^ (0, utils_js_1.bytesToNumberBE)(taggedHash('BIP0340/aux', a))); // Let t be the byte-wise xor of bytes(d) and hash/aux(a) + const rand = taggedHash('BIP0340/nonce', t, px, m); // Let rand = hash/nonce(t || bytes(P) || m) + const k_ = modN((0, utils_js_1.bytesToNumberBE)(rand)); // Let k' = int(rand) mod n + if (k_ === _0n) + throw new Error('sign failed: k is zero'); // Fail if k' = 0. + const { bytes: rx, scalar: k } = schnorrGetExtPubKey(k_); // Let R = k'⋅G. + const e = challenge(rx, px, m); // Let e = int(hash/challenge(bytes(R) || bytes(P) || m)) mod n. + const sig = new Uint8Array(64); // Let sig = bytes(R) || bytes((k + ed) mod n). 
+ sig.set(rx, 0); + sig.set(numTo32b(modN(k + e * d)), 32); + // If Verify(bytes(P), m, sig) (see below) returns failure, abort + if (!schnorrVerify(sig, m, px)) + throw new Error('sign: Invalid signature produced'); + return sig; + } + /** + * Verifies Schnorr signature. + * Will swallow errors & return false except for initial type validation of arguments. + */ + function schnorrVerify(signature, message, publicKey) { + const sig = (0, utils_js_1.ensureBytes)('signature', signature, 64); + const m = (0, utils_js_1.ensureBytes)('message', message); + const pub = (0, utils_js_1.ensureBytes)('publicKey', publicKey, 32); + try { + const P = lift_x((0, utils_js_1.bytesToNumberBE)(pub)); // P = lift_x(int(pk)); fail if that fails + const r = (0, utils_js_1.bytesToNumberBE)(sig.subarray(0, 32)); // Let r = int(sig[0:32]); fail if r ≥ p. + if (!fe(r)) + return false; + const s = (0, utils_js_1.bytesToNumberBE)(sig.subarray(32, 64)); // Let s = int(sig[32:64]); fail if s ≥ n. + if (!ge(s)) + return false; + const e = challenge(numTo32b(r), pointToBytes(P), m); // int(challenge(bytes(r)||bytes(P)||m))%n + const R = GmulAdd(P, s, modN(-e)); // R = s⋅G - e⋅P + if (!R || !R.hasEvenY() || R.toAffine().x !== r) + return false; // -eP == (n-e)P + return true; // Fail if is_infinite(R) / not has_even_y(R) / x(R) ≠ r. 
+ } + catch (error) { + return false; + } + } + exports.schnorr = (() => ({ + getPublicKey: schnorrGetPublicKey, + sign: schnorrSign, + verify: schnorrVerify, + utils: { + randomPrivateKey: exports.secp256k1.utils.randomPrivateKey, + lift_x, + pointToBytes, + numberToBytesBE: utils_js_1.numberToBytesBE, + bytesToNumberBE: utils_js_1.bytesToNumberBE, + taggedHash, + mod: modular_js_1.mod, + }, + }))(); + const isoMap = /* @__PURE__ */ (() => (0, hash_to_curve_js_1.isogenyMap)(Fp, [ + // xNum + [ + '0x8e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38daaaaa8c7', + '0x7d3d4c80bc321d5b9f315cea7fd44c5d595d2fc0bf63b92dfff1044f17c6581', + '0x534c328d23f234e6e2a413deca25caece4506144037c40314ecbd0b53d9dd262', + '0x8e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38daaaaa88c', + ], + // xDen + [ + '0xd35771193d94918a9ca34ccbb7b640dd86cd409542f8487d9fe6b745781eb49b', + '0xedadc6f64383dc1df7c4b2d51b54225406d36b641f5e41bbc52a56612a8c6d14', + '0x0000000000000000000000000000000000000000000000000000000000000001', // LAST 1 + ], + // yNum + [ + '0x4bda12f684bda12f684bda12f684bda12f684bda12f684bda12f684b8e38e23c', + '0xc75e0c32d5cb7c0fa9d0a54b12a0a6d5647ab046d686da6fdffc90fc201d71a3', + '0x29a6194691f91a73715209ef6512e576722830a201be2018a765e85a9ecee931', + '0x2f684bda12f684bda12f684bda12f684bda12f684bda12f684bda12f38e38d84', + ], + // yDen + [ + '0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffff93b', + '0x7a06534bb8bdb49fd5e9e6632722c2989467c1bfc8e8d978dfb425d2685c2573', + '0x6484aa716545ca2cf3a70c3fa8fe337e0a3d21162f0d6299a7bf8192bfd2a76f', + '0x0000000000000000000000000000000000000000000000000000000000000001', // LAST 1 + ], + ].map((i) => i.map((j) => BigInt(j)))))(); + const mapSWU = /* @__PURE__ */ (() => (0, weierstrass_js_1.mapToCurveSimpleSWU)(Fp, { + A: BigInt('0x3f8731abdd661adca08a5558f0f5d272e953d363cb6f0e5d405447c01a444533'), + B: BigInt('1771'), + Z: Fp.create(BigInt('-11')), + }))(); + const htf = /* @__PURE__ */ (() => (0, 
hash_to_curve_js_1.createHasher)(exports.secp256k1.ProjectivePoint, (scalars) => { + const { x, y } = mapSWU(Fp.create(scalars[0])); + return isoMap(x, y); + }, { + DST: 'secp256k1_XMD:SHA-256_SSWU_RO_', + encodeDST: 'secp256k1_XMD:SHA-256_SSWU_NU_', + p: Fp.ORDER, + m: 1, + k: 128, + expand: 'xmd', + hash: sha256_1.sha256, + }))(); + exports.hashToCurve = (() => htf.hashToCurve)(); + exports.encodeToCurve = (() => htf.encodeToCurve)(); + +} (secp256k1)); + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.secp256k1 = void 0; + var secp256k1_1 = secp256k1; + Object.defineProperty(exports, "secp256k1", { enumerable: true, get: function () { return secp256k1_1.secp256k1; } }); +} (secp256k1$1)); + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.MAX_WITHDRAWALS_PER_PAYLOAD = exports.RLP_EMPTY_STRING = exports.KECCAK256_RLP = exports.KECCAK256_RLP_S = exports.KECCAK256_RLP_ARRAY = exports.KECCAK256_RLP_ARRAY_S = exports.KECCAK256_NULL = exports.KECCAK256_NULL_S = exports.TWO_POW256 = exports.SECP256K1_ORDER_DIV_2 = exports.SECP256K1_ORDER = exports.MAX_INTEGER_BIGINT = exports.MAX_INTEGER = exports.MAX_UINT64 = void 0; + const buffer_1 = require$$0$7; + const secp256k1_1 = secp256k1$1; + /** + * 2^64-1 + */ + exports.MAX_UINT64 = BigInt('0xffffffffffffffff'); + /** + * The max integer that the evm can handle (2^256-1) + */ + exports.MAX_INTEGER = BigInt('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'); + /** + * The max integer that the evm can handle (2^256-1) as a bigint + * 2^256-1 equals to 340282366920938463463374607431768211455 + * We use literal value instead of calculated value for compatibility issue. 
+ */ + exports.MAX_INTEGER_BIGINT = BigInt('115792089237316195423570985008687907853269984665640564039457584007913129639935'); + exports.SECP256K1_ORDER = secp256k1_1.secp256k1.CURVE.n; + exports.SECP256K1_ORDER_DIV_2 = secp256k1_1.secp256k1.CURVE.n / BigInt(2); + /** + * 2^256 + */ + exports.TWO_POW256 = BigInt('0x10000000000000000000000000000000000000000000000000000000000000000'); + /** + * Keccak-256 hash of null + */ + exports.KECCAK256_NULL_S = 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470'; + /** + * Keccak-256 hash of null + */ + exports.KECCAK256_NULL = buffer_1.Buffer.from(exports.KECCAK256_NULL_S, 'hex'); + /** + * Keccak-256 of an RLP of an empty array + */ + exports.KECCAK256_RLP_ARRAY_S = '1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347'; + /** + * Keccak-256 of an RLP of an empty array + */ + exports.KECCAK256_RLP_ARRAY = buffer_1.Buffer.from(exports.KECCAK256_RLP_ARRAY_S, 'hex'); + /** + * Keccak-256 hash of the RLP of null + */ + exports.KECCAK256_RLP_S = '56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421'; + /** + * Keccak-256 hash of the RLP of null + */ + exports.KECCAK256_RLP = buffer_1.Buffer.from(exports.KECCAK256_RLP_S, 'hex'); + /** + * RLP encoded empty string + */ + exports.RLP_EMPTY_STRING = buffer_1.Buffer.from([0x80]); + exports.MAX_WITHDRAWALS_PER_PAYLOAD = 16; + +} (constants$2)); + +var units = {}; + +Object.defineProperty(units, "__esModule", { value: true }); +units.GWEI_TO_WEI = void 0; +/** Easy conversion from Gwei to wei */ +units.GWEI_TO_WEI = BigInt(1000000000); + +var account = {}; + +var dist$3 = {}; + +Object.defineProperty(dist$3, "__esModule", { value: true }); +dist$3.RLP = dist$3.utils = dist$3.decode = dist$3.encode = void 0; +/** + * RLP Encoding based on https://ethereum.org/en/developers/docs/data-structures-and-encoding/rlp/ + * This function takes in data, converts it to Uint8Array if not, + * and adds a length for recursion. 
+ * @param input Will be converted to Uint8Array + * @returns Uint8Array of encoded data + **/ +function encode(input) { + if (Array.isArray(input)) { + const output = []; + let outputLength = 0; + for (let i = 0; i < input.length; i++) { + const encoded = encode(input[i]); + output.push(encoded); + outputLength += encoded.length; + } + return concatBytes$1(encodeLength(outputLength, 192), ...output); + } + const inputBuf = toBytes(input); + if (inputBuf.length === 1 && inputBuf[0] < 128) { + return inputBuf; + } + return concatBytes$1(encodeLength(inputBuf.length, 128), inputBuf); +} +dist$3.encode = encode; +/** + * Slices a Uint8Array, throws if the slice goes out-of-bounds of the Uint8Array. + * E.g. `safeSlice(hexToBytes('aa'), 1, 2)` will throw. + * @param input + * @param start + * @param end + */ +function safeSlice(input, start, end) { + if (end > input.length) { + throw new Error('invalid RLP (safeSlice): end slice of Uint8Array out-of-bounds'); + } + return input.slice(start, end); +} +/** + * Parse integers. 
Check if there is no leading zeros + * @param v The value to parse + */ +function decodeLength(v) { + if (v[0] === 0) { + throw new Error('invalid RLP: extra zeros'); + } + return parseHexByte(bytesToHex$1(v)); +} +function encodeLength(len, offset) { + if (len < 56) { + return Uint8Array.from([len + offset]); + } + const hexLength = numberToHex$1(len); + const lLength = hexLength.length / 2; + const firstByte = numberToHex$1(offset + 55 + lLength); + return Uint8Array.from(hexToBytes$1(firstByte + hexLength)); +} +function decode(input, stream = false) { + if (typeof input === 'undefined' || input === null || input.length === 0) { + return Uint8Array.from([]); + } + const inputBytes = toBytes(input); + const decoded = _decode(inputBytes); + if (stream) { + return decoded; + } + if (decoded.remainder.length !== 0) { + throw new Error('invalid RLP: remainder must be zero'); + } + return decoded.data; +} +dist$3.decode = decode; +/** Decode an input with RLP */ +function _decode(input) { + let length, llength, data, innerRemainder, d; + const decoded = []; + const firstByte = input[0]; + if (firstByte <= 0x7f) { + // a single byte whose value is in the [0x00, 0x7f] range, that byte is its own RLP encoding. + return { + data: input.slice(0, 1), + remainder: input.slice(1), + }; + } + else if (firstByte <= 0xb7) { + // string is 0-55 bytes long. A single byte with value 0x80 plus the length of the string followed by the string + // The range of the first byte is [0x80, 0xb7] + length = firstByte - 0x7f; + // set 0x80 null to 0 + if (firstByte === 0x80) { + data = Uint8Array.from([]); + } + else { + data = safeSlice(input, 1, length); + } + if (length === 2 && data[0] < 0x80) { + throw new Error('invalid RLP encoding: invalid prefix, single byte < 0x80 are not prefixed'); + } + return { + data, + remainder: input.slice(length), + }; + } + else if (firstByte <= 0xbf) { + // string is greater than 55 bytes long. 
A single byte with the value (0xb7 plus the length of the length), + // followed by the length, followed by the string + llength = firstByte - 0xb6; + if (input.length - 1 < llength) { + throw new Error('invalid RLP: not enough bytes for string length'); + } + length = decodeLength(safeSlice(input, 1, llength)); + if (length <= 55) { + throw new Error('invalid RLP: expected string length to be greater than 55'); + } + data = safeSlice(input, llength, length + llength); + return { + data, + remainder: input.slice(length + llength), + }; + } + else if (firstByte <= 0xf7) { + // a list between 0-55 bytes long + length = firstByte - 0xbf; + innerRemainder = safeSlice(input, 1, length); + while (innerRemainder.length) { + d = _decode(innerRemainder); + decoded.push(d.data); + innerRemainder = d.remainder; + } + return { + data: decoded, + remainder: input.slice(length), + }; + } + else { + // a list over 55 bytes long + llength = firstByte - 0xf6; + length = decodeLength(safeSlice(input, 1, llength)); + if (length < 56) { + throw new Error('invalid RLP: encoded list too short'); + } + const totalLength = llength + length; + if (totalLength > input.length) { + throw new Error('invalid RLP: total length is larger than the data'); + } + innerRemainder = safeSlice(input, llength, totalLength); + while (innerRemainder.length) { + d = _decode(innerRemainder); + decoded.push(d.data); + innerRemainder = d.remainder; + } + return { + data: decoded, + remainder: input.slice(totalLength), + }; + } +} +const cachedHexes = Array.from({ length: 256 }, (_v, i) => i.toString(16).padStart(2, '0')); +function bytesToHex$1(uint8a) { + // Pre-caching chars with `cachedHexes` speeds this up 6x + let hex = ''; + for (let i = 0; i < uint8a.length; i++) { + hex += cachedHexes[uint8a[i]]; + } + return hex; +} +function parseHexByte(hexByte) { + const byte = Number.parseInt(hexByte, 16); + if (Number.isNaN(byte)) + throw new Error('Invalid byte sequence'); + return byte; +} +// Caching slows it 
down 2-3x +function hexToBytes$1(hex) { + if (typeof hex !== 'string') { + throw new TypeError('hexToBytes: expected string, got ' + typeof hex); + } + if (hex.length % 2) + throw new Error('hexToBytes: received invalid unpadded hex'); + const array = new Uint8Array(hex.length / 2); + for (let i = 0; i < array.length; i++) { + const j = i * 2; + array[i] = parseHexByte(hex.slice(j, j + 2)); + } + return array; +} +/** Concatenates two Uint8Arrays into one. */ +function concatBytes$1(...arrays) { + if (arrays.length === 1) + return arrays[0]; + const length = arrays.reduce((a, arr) => a + arr.length, 0); + const result = new Uint8Array(length); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const arr = arrays[i]; + result.set(arr, pad); + pad += arr.length; + } + return result; +} +function utf8ToBytes(utf) { + return new TextEncoder().encode(utf); +} +/** Transform an integer into its hexadecimal value */ +function numberToHex$1(integer) { + if (integer < 0) { + throw new Error('Invalid integer as argument, must be unsigned!'); + } + const hex = integer.toString(16); + return hex.length % 2 ? `0${hex}` : hex; +} +/** Pad a string to be even */ +function padToEven$1(a) { + return a.length % 2 ? `0${a}` : a; +} +/** Check if a string is prefixed by 0x */ +function isHexPrefixed$1(str) { + return str.length >= 2 && str[0] === '0' && str[1] === 'x'; +} +/** Removes 0x from a given String */ +function stripHexPrefix$1(str) { + if (typeof str !== 'string') { + return str; + } + return isHexPrefixed$1(str) ? 
str.slice(2) : str; +} +/** Transform anything into a Uint8Array */ +function toBytes(v) { + if (v instanceof Uint8Array) { + return v; + } + if (typeof v === 'string') { + if (isHexPrefixed$1(v)) { + return hexToBytes$1(padToEven$1(stripHexPrefix$1(v))); + } + return utf8ToBytes(v); + } + if (typeof v === 'number' || typeof v === 'bigint') { + if (!v) { + return Uint8Array.from([]); + } + return hexToBytes$1(numberToHex$1(v)); + } + if (v === null || v === undefined) { + return Uint8Array.from([]); + } + throw new Error('toBytes: received unsupported type ' + typeof v); +} +dist$3.utils = { + bytesToHex: bytesToHex$1, + concatBytes: concatBytes$1, + hexToBytes: hexToBytes$1, + utf8ToBytes, +}; +dist$3.RLP = { encode, decode }; + +var keccak = {}; + +var sha3$1 = {}; + +var _u64$1 = {}; + +Object.defineProperty(_u64$1, "__esModule", { value: true }); +_u64$1.add5L = _u64$1.add5H = _u64$1.add4H = _u64$1.add4L = _u64$1.add3H = _u64$1.add3L = _u64$1.add = _u64$1.rotlBL = _u64$1.rotlBH = _u64$1.rotlSL = _u64$1.rotlSH = _u64$1.rotr32L = _u64$1.rotr32H = _u64$1.rotrBL = _u64$1.rotrBH = _u64$1.rotrSL = _u64$1.rotrSH = _u64$1.shrSL = _u64$1.shrSH = _u64$1.toBig = _u64$1.split = _u64$1.fromBig = void 0; +const U32_MASK64$1 = /* @__PURE__ */ BigInt(2 ** 32 - 1); +const _32n$1 = /* @__PURE__ */ BigInt(32); +// We are not using BigUint64Array, because they are extremely slow as per 2022 +function fromBig$1(n, le = false) { + if (le) + return { h: Number(n & U32_MASK64$1), l: Number((n >> _32n$1) & U32_MASK64$1) }; + return { h: Number((n >> _32n$1) & U32_MASK64$1) | 0, l: Number(n & U32_MASK64$1) | 0 }; +} +_u64$1.fromBig = fromBig$1; +function split$1(lst, le = false) { + let Ah = new Uint32Array(lst.length); + let Al = new Uint32Array(lst.length); + for (let i = 0; i < lst.length; i++) { + const { h, l } = fromBig$1(lst[i], le); + [Ah[i], Al[i]] = [h, l]; + } + return [Ah, Al]; +} +_u64$1.split = split$1; +const toBig$1 = (h, l) => (BigInt(h >>> 0) << _32n$1) | BigInt(l >>> 
0); +_u64$1.toBig = toBig$1; +// for Shift in [0, 32) +const shrSH$1 = (h, _l, s) => h >>> s; +_u64$1.shrSH = shrSH$1; +const shrSL$1 = (h, l, s) => (h << (32 - s)) | (l >>> s); +_u64$1.shrSL = shrSL$1; +// Right rotate for Shift in [1, 32) +const rotrSH$1 = (h, l, s) => (h >>> s) | (l << (32 - s)); +_u64$1.rotrSH = rotrSH$1; +const rotrSL$1 = (h, l, s) => (h << (32 - s)) | (l >>> s); +_u64$1.rotrSL = rotrSL$1; +// Right rotate for Shift in (32, 64), NOTE: 32 is special case. +const rotrBH$1 = (h, l, s) => (h << (64 - s)) | (l >>> (s - 32)); +_u64$1.rotrBH = rotrBH$1; +const rotrBL$1 = (h, l, s) => (h >>> (s - 32)) | (l << (64 - s)); +_u64$1.rotrBL = rotrBL$1; +// Right rotate for shift===32 (just swaps l&h) +const rotr32H$1 = (_h, l) => l; +_u64$1.rotr32H = rotr32H$1; +const rotr32L$1 = (h, _l) => h; +_u64$1.rotr32L = rotr32L$1; +// Left rotate for Shift in [1, 32) +const rotlSH$1 = (h, l, s) => (h << s) | (l >>> (32 - s)); +_u64$1.rotlSH = rotlSH$1; +const rotlSL$1 = (h, l, s) => (l << s) | (h >>> (32 - s)); +_u64$1.rotlSL = rotlSL$1; +// Left rotate for Shift in (32, 64), NOTE: 32 is special case. +const rotlBH$1 = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); +_u64$1.rotlBH = rotlBH$1; +const rotlBL$1 = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); +_u64$1.rotlBL = rotlBL$1; +// JS uses 32-bit signed integers for bitwise operations which means we cannot +// simple take carry out of low bit sum by shift, we need to use division. 
+function add$1(Ah, Al, Bh, Bl) { + const l = (Al >>> 0) + (Bl >>> 0); + return { h: (Ah + Bh + ((l / 2 ** 32) | 0)) | 0, l: l | 0 }; +} +_u64$1.add = add$1; +// Addition with more than 2 elements +const add3L$1 = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0); +_u64$1.add3L = add3L$1; +const add3H$1 = (low, Ah, Bh, Ch) => (Ah + Bh + Ch + ((low / 2 ** 32) | 0)) | 0; +_u64$1.add3H = add3H$1; +const add4L$1 = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0); +_u64$1.add4L = add4L$1; +const add4H$1 = (low, Ah, Bh, Ch, Dh) => (Ah + Bh + Ch + Dh + ((low / 2 ** 32) | 0)) | 0; +_u64$1.add4H = add4H$1; +const add5L$1 = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0); +_u64$1.add5L = add5L$1; +const add5H$1 = (low, Ah, Bh, Ch, Dh, Eh) => (Ah + Bh + Ch + Dh + Eh + ((low / 2 ** 32) | 0)) | 0; +_u64$1.add5H = add5H$1; +// prettier-ignore +const u64$1 = { + fromBig: fromBig$1, split: split$1, toBig: toBig$1, + shrSH: shrSH$1, shrSL: shrSL$1, + rotrSH: rotrSH$1, rotrSL: rotrSL$1, rotrBH: rotrBH$1, rotrBL: rotrBL$1, + rotr32H: rotr32H$1, rotr32L: rotr32L$1, + rotlSH: rotlSH$1, rotlSL: rotlSL$1, rotlBH: rotlBH$1, rotlBL: rotlBL$1, + add: add$1, add3L: add3L$1, add3H: add3H$1, add4L: add4L$1, add4H: add4H$1, add5H: add5H$1, add5L: add5L$1, +}; +_u64$1.default = u64$1; + +Object.defineProperty(sha3$1, "__esModule", { value: true }); +sha3$1.shake256 = sha3$1.shake128 = sha3$1.keccak_512 = sha3$1.keccak_384 = sha3$1.keccak_256 = sha3$1.keccak_224 = sha3$1.sha3_512 = sha3$1.sha3_384 = sha3$1.sha3_256 = sha3$1.sha3_224 = sha3$1.Keccak = sha3$1.keccakP = void 0; +const _assert_js_1$1 = _assert$1; +const _u64_js_1$1 = _u64$1; +const utils_js_1$2 = utils$6; +// SHA3 (keccak) is based on a new design: basically, the internal state is bigger than output size. +// It's called a sponge function. 
+// Various per round constants calculations +const [SHA3_PI$1, SHA3_ROTL$1, _SHA3_IOTA$1] = [[], [], []]; +const _0n$1 = /* @__PURE__ */ BigInt(0); +const _1n$1 = /* @__PURE__ */ BigInt(1); +const _2n$1 = /* @__PURE__ */ BigInt(2); +const _7n$1 = /* @__PURE__ */ BigInt(7); +const _256n$1 = /* @__PURE__ */ BigInt(256); +const _0x71n$1 = /* @__PURE__ */ BigInt(0x71); +for (let round = 0, R = _1n$1, x = 1, y = 0; round < 24; round++) { + // Pi + [x, y] = [y, (2 * x + 3 * y) % 5]; + SHA3_PI$1.push(2 * (5 * y + x)); + // Rotational + SHA3_ROTL$1.push((((round + 1) * (round + 2)) / 2) % 64); + // Iota + let t = _0n$1; + for (let j = 0; j < 7; j++) { + R = ((R << _1n$1) ^ ((R >> _7n$1) * _0x71n$1)) % _256n$1; + if (R & _2n$1) + t ^= _1n$1 << ((_1n$1 << /* @__PURE__ */ BigInt(j)) - _1n$1); + } + _SHA3_IOTA$1.push(t); +} +const [SHA3_IOTA_H$1, SHA3_IOTA_L$1] = /* @__PURE__ */ (0, _u64_js_1$1.split)(_SHA3_IOTA$1, true); +// Left rotation (without 0, 32, 64) +const rotlH$1 = (h, l, s) => (s > 32 ? (0, _u64_js_1$1.rotlBH)(h, l, s) : (0, _u64_js_1$1.rotlSH)(h, l, s)); +const rotlL$1 = (h, l, s) => (s > 32 ? 
(0, _u64_js_1$1.rotlBL)(h, l, s) : (0, _u64_js_1$1.rotlSL)(h, l, s)); +// Same as keccakf1600, but allows to skip some rounds +function keccakP$1(s, rounds = 24) { + const B = new Uint32Array(5 * 2); + // NOTE: all indices are x2 since we store state as u32 instead of u64 (bigints to slow in js) + for (let round = 24 - rounds; round < 24; round++) { + // Theta θ + for (let x = 0; x < 10; x++) + B[x] = s[x] ^ s[x + 10] ^ s[x + 20] ^ s[x + 30] ^ s[x + 40]; + for (let x = 0; x < 10; x += 2) { + const idx1 = (x + 8) % 10; + const idx0 = (x + 2) % 10; + const B0 = B[idx0]; + const B1 = B[idx0 + 1]; + const Th = rotlH$1(B0, B1, 1) ^ B[idx1]; + const Tl = rotlL$1(B0, B1, 1) ^ B[idx1 + 1]; + for (let y = 0; y < 50; y += 10) { + s[x + y] ^= Th; + s[x + y + 1] ^= Tl; + } + } + // Rho (ρ) and Pi (π) + let curH = s[2]; + let curL = s[3]; + for (let t = 0; t < 24; t++) { + const shift = SHA3_ROTL$1[t]; + const Th = rotlH$1(curH, curL, shift); + const Tl = rotlL$1(curH, curL, shift); + const PI = SHA3_PI$1[t]; + curH = s[PI]; + curL = s[PI + 1]; + s[PI] = Th; + s[PI + 1] = Tl; + } + // Chi (χ) + for (let y = 0; y < 50; y += 10) { + for (let x = 0; x < 10; x++) + B[x] = s[y + x]; + for (let x = 0; x < 10; x++) + s[y + x] ^= ~B[(x + 2) % 10] & B[(x + 4) % 10]; + } + // Iota (ι) + s[0] ^= SHA3_IOTA_H$1[round]; + s[1] ^= SHA3_IOTA_L$1[round]; + } + B.fill(0); +} +sha3$1.keccakP = keccakP$1; +let Keccak$1 = class Keccak extends utils_js_1$2.Hash { + // NOTE: we accept arguments in bytes instead of bits here. + constructor(blockLen, suffix, outputLen, enableXOF = false, rounds = 24) { + super(); + this.blockLen = blockLen; + this.suffix = suffix; + this.outputLen = outputLen; + this.enableXOF = enableXOF; + this.rounds = rounds; + this.pos = 0; + this.posOut = 0; + this.finished = false; + this.destroyed = false; + // Can be passed from user as dkLen + (0, _assert_js_1$1.number)(outputLen); + // 1600 = 5x5 matrix of 64bit. 
1600 bits === 200 bytes + if (0 >= this.blockLen || this.blockLen >= 200) + throw new Error('Sha3 supports only keccak-f1600 function'); + this.state = new Uint8Array(200); + this.state32 = (0, utils_js_1$2.u32)(this.state); + } + keccak() { + keccakP$1(this.state32, this.rounds); + this.posOut = 0; + this.pos = 0; + } + update(data) { + (0, _assert_js_1$1.exists)(this); + const { blockLen, state } = this; + data = (0, utils_js_1$2.toBytes)(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + for (let i = 0; i < take; i++) + state[this.pos++] ^= data[pos++]; + if (this.pos === blockLen) + this.keccak(); + } + return this; + } + finish() { + if (this.finished) + return; + this.finished = true; + const { state, suffix, pos, blockLen } = this; + // Do the padding + state[pos] ^= suffix; + if ((suffix & 0x80) !== 0 && pos === blockLen - 1) + this.keccak(); + state[blockLen - 1] ^= 0x80; + this.keccak(); + } + writeInto(out) { + (0, _assert_js_1$1.exists)(this, false); + (0, _assert_js_1$1.bytes)(out); + this.finish(); + const bufferOut = this.state; + const { blockLen } = this; + for (let pos = 0, len = out.length; pos < len;) { + if (this.posOut >= blockLen) + this.keccak(); + const take = Math.min(blockLen - this.posOut, len - pos); + out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos); + this.posOut += take; + pos += take; + } + return out; + } + xofInto(out) { + // Sha3/Keccak usage with XOF is probably mistake, only SHAKE instances can do XOF + if (!this.enableXOF) + throw new Error('XOF is not possible for this instance'); + return this.writeInto(out); + } + xof(bytes) { + (0, _assert_js_1$1.number)(bytes); + return this.xofInto(new Uint8Array(bytes)); + } + digestInto(out) { + (0, _assert_js_1$1.output)(out, this); + if (this.finished) + throw new Error('digest() was already called'); + this.writeInto(out); + this.destroy(); + return out; + } + digest() { + return 
this.digestInto(new Uint8Array(this.outputLen)); + } + destroy() { + this.destroyed = true; + this.state.fill(0); + } + _cloneInto(to) { + const { blockLen, suffix, outputLen, rounds, enableXOF } = this; + to || (to = new Keccak(blockLen, suffix, outputLen, enableXOF, rounds)); + to.state32.set(this.state32); + to.pos = this.pos; + to.posOut = this.posOut; + to.finished = this.finished; + to.rounds = rounds; + // Suffix can change in cSHAKE + to.suffix = suffix; + to.outputLen = outputLen; + to.enableXOF = enableXOF; + to.destroyed = this.destroyed; + return to; + } +}; +sha3$1.Keccak = Keccak$1; +const gen$1 = (suffix, blockLen, outputLen) => (0, utils_js_1$2.wrapConstructor)(() => new Keccak$1(blockLen, suffix, outputLen)); +sha3$1.sha3_224 = gen$1(0x06, 144, 224 / 8); +/** + * SHA3-256 hash function + * @param message - that would be hashed + */ +sha3$1.sha3_256 = gen$1(0x06, 136, 256 / 8); +sha3$1.sha3_384 = gen$1(0x06, 104, 384 / 8); +sha3$1.sha3_512 = gen$1(0x06, 72, 512 / 8); +sha3$1.keccak_224 = gen$1(0x01, 144, 224 / 8); +/** + * keccak-256 hash function. Different from SHA3-256. + * @param message - that would be hashed + */ +sha3$1.keccak_256 = gen$1(0x01, 136, 256 / 8); +sha3$1.keccak_384 = gen$1(0x01, 104, 384 / 8); +sha3$1.keccak_512 = gen$1(0x01, 72, 512 / 8); +const genShake$1 = (suffix, blockLen, outputLen) => (0, utils_js_1$2.wrapXOFConstructorWithOpts)((opts = {}) => new Keccak$1(blockLen, suffix, opts.dkLen === undefined ? outputLen : opts.dkLen, true)); +sha3$1.shake128 = genShake$1(0x1f, 168, 128 / 8); +sha3$1.shake256 = genShake$1(0x1f, 136, 256 / 8); + +var utils$4 = {exports: {}}; + +utils$4.exports; + +(function (module, exports) { + var __importDefault = (commonjsGlobal && commonjsGlobal.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; + }; + Object.defineProperty(exports, "__esModule", { value: true }); + exports.crypto = exports.wrapHash = exports.equalsBytes = exports.hexToBytes = exports.bytesToUtf8 = exports.utf8ToBytes = exports.createView = exports.concatBytes = exports.toHex = exports.bytesToHex = exports.assertBytes = exports.assertBool = void 0; + const _assert_1 = __importDefault(_assert$1); + const utils_1 = utils$6; + const assertBool = _assert_1.default.bool; + exports.assertBool = assertBool; + const assertBytes = _assert_1.default.bytes; + exports.assertBytes = assertBytes; + var utils_2 = utils$6; + Object.defineProperty(exports, "bytesToHex", { enumerable: true, get: function () { return utils_2.bytesToHex; } }); + Object.defineProperty(exports, "toHex", { enumerable: true, get: function () { return utils_2.bytesToHex; } }); + Object.defineProperty(exports, "concatBytes", { enumerable: true, get: function () { return utils_2.concatBytes; } }); + Object.defineProperty(exports, "createView", { enumerable: true, get: function () { return utils_2.createView; } }); + Object.defineProperty(exports, "utf8ToBytes", { enumerable: true, get: function () { return utils_2.utf8ToBytes; } }); + // buf.toString('utf8') -> bytesToUtf8(buf) + function bytesToUtf8(data) { + if (!(data instanceof Uint8Array)) { + throw new TypeError(`bytesToUtf8 expected Uint8Array, got ${typeof data}`); + } + return new TextDecoder().decode(data); + } + exports.bytesToUtf8 = bytesToUtf8; + function hexToBytes(data) { + const sliced = data.startsWith("0x") ? 
data.substring(2) : data; + return (0, utils_1.hexToBytes)(sliced); + } + exports.hexToBytes = hexToBytes; + // buf.equals(buf2) -> equalsBytes(buf, buf2) + function equalsBytes(a, b) { + if (a.length !== b.length) { + return false; + } + for (let i = 0; i < a.length; i++) { + if (a[i] !== b[i]) { + return false; + } + } + return true; + } + exports.equalsBytes = equalsBytes; + // Internal utils + function wrapHash(hash) { + return (msg) => { + _assert_1.default.bytes(msg); + return hash(msg); + }; + } + exports.wrapHash = wrapHash; + // TODO(v3): switch away from node crypto, remove this unnecessary variable. + exports.crypto = (() => { + const webCrypto = typeof globalThis === "object" && "crypto" in globalThis ? globalThis.crypto : undefined; + const nodeRequire = typeof commonjsRequire === "function" && + commonjsRequire.bind(module); + return { + node: nodeRequire && !webCrypto ? nodeRequire("crypto") : undefined, + web: webCrypto + }; + })(); +} (utils$4, utils$4.exports)); + +var utilsExports = utils$4.exports; + +Object.defineProperty(keccak, "__esModule", { value: true }); +keccak.keccak512 = keccak.keccak384 = keccak.keccak256 = keccak.keccak224 = void 0; +const sha3_1 = sha3$1; +const utils_js_1$1 = utilsExports; +keccak.keccak224 = (0, utils_js_1$1.wrapHash)(sha3_1.keccak_224); +keccak.keccak256 = (() => { + const k = (0, utils_js_1$1.wrapHash)(sha3_1.keccak_256); + k.create = sha3_1.keccak_256.create; + return k; +})(); +keccak.keccak384 = (0, utils_js_1$1.wrapHash)(sha3_1.keccak_384); +keccak.keccak512 = (0, utils_js_1$1.wrapHash)(sha3_1.keccak_512); + +var bytes$2 = {}; + +var helpers$1 = {}; + +var internal = {}; + +/* +The MIT License + +Copyright (c) 2016 Nick Dodson. 
nickdodson.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE + */ +Object.defineProperty(internal, "__esModule", { value: true }); +internal.isHexString = internal.getKeys = internal.fromAscii = internal.fromUtf8 = internal.toAscii = internal.arrayContainsArray = internal.getBinarySize = internal.padToEven = internal.stripHexPrefix = internal.isHexPrefixed = void 0; +/** + * Returns a `Boolean` on whether or not the a `String` starts with '0x' + * @param str the string input value + * @return a boolean if it is or is not hex prefixed + * @throws if the str input is not a string + */ +function isHexPrefixed(str) { + if (typeof str !== 'string') { + throw new Error(`[isHexPrefixed] input must be type 'string', received type ${typeof str}`); + } + return str[0] === '0' && str[1] === 'x'; +} +internal.isHexPrefixed = isHexPrefixed; +/** + * Removes '0x' from a given `String` if present + * @param str the string value + * @returns the string without 0x prefix + */ +const stripHexPrefix = (str) 
=> { + if (typeof str !== 'string') + throw new Error(`[stripHexPrefix] input must be type 'string', received ${typeof str}`); + return isHexPrefixed(str) ? str.slice(2) : str; +}; +internal.stripHexPrefix = stripHexPrefix; +/** + * Pads a `String` to have an even length + * @param value + * @return output + */ +function padToEven(value) { + let a = value; + if (typeof a !== 'string') { + throw new Error(`[padToEven] value must be type 'string', received ${typeof a}`); + } + if (a.length % 2) + a = `0${a}`; + return a; +} +internal.padToEven = padToEven; +/** + * Get the binary size of a string + * @param str + * @returns the number of bytes contained within the string + */ +function getBinarySize(str) { + if (typeof str !== 'string') { + throw new Error(`[getBinarySize] method requires input type 'string', received ${typeof str}`); + } + return Buffer.byteLength(str, 'utf8'); +} +internal.getBinarySize = getBinarySize; +/** + * Returns TRUE if the first specified array contains all elements + * from the second one. FALSE otherwise. + * + * @param superset + * @param subset + * + */ +function arrayContainsArray(superset, subset, some) { + if (Array.isArray(superset) !== true) { + throw new Error(`[arrayContainsArray] method requires input 'superset' to be an array, got type '${typeof superset}'`); + } + if (Array.isArray(subset) !== true) { + throw new Error(`[arrayContainsArray] method requires input 'subset' to be an array, got type '${typeof subset}'`); + } + return subset[some === true ? 
'some' : 'every']((value) => superset.indexOf(value) >= 0); +} +internal.arrayContainsArray = arrayContainsArray; +/** + * Should be called to get ascii from its hex representation + * + * @param string in hex + * @returns ascii string representation of hex value + */ +function toAscii(hex) { + let str = ''; + let i = 0; + const l = hex.length; + if (hex.substring(0, 2) === '0x') + i = 2; + for (; i < l; i += 2) { + const code = parseInt(hex.substr(i, 2), 16); + str += String.fromCharCode(code); + } + return str; +} +internal.toAscii = toAscii; +/** + * Should be called to get hex representation (prefixed by 0x) of utf8 string + * + * @param string + * @param optional padding + * @returns hex representation of input string + */ +function fromUtf8(stringValue) { + const str = Buffer.from(stringValue, 'utf8'); + return `0x${padToEven(str.toString('hex')).replace(/^0+|0+$/g, '')}`; +} +internal.fromUtf8 = fromUtf8; +/** + * Should be called to get hex representation (prefixed by 0x) of ascii string + * + * @param string + * @param optional padding + * @returns hex representation of input string + */ +function fromAscii(stringValue) { + let hex = ''; + for (let i = 0; i < stringValue.length; i++) { + const code = stringValue.charCodeAt(i); + const n = code.toString(16); + hex += n.length < 2 ? `0${n}` : n; + } + return `0x${hex}`; +} +internal.fromAscii = fromAscii; +/** + * Returns the keys from an array of objects. 
+ * @example + * ```js + * getKeys([{a: '1', b: '2'}, {a: '3', b: '4'}], 'a') => ['1', '3'] + *```` + * @param params + * @param key + * @param allowEmpty + * @returns output just a simple array of output keys + */ +function getKeys(params, key, allowEmpty) { + if (!Array.isArray(params)) { + throw new Error(`[getKeys] method expects input 'params' to be an array, got ${typeof params}`); + } + if (typeof key !== 'string') { + throw new Error(`[getKeys] method expects input 'key' to be type 'string', got ${typeof params}`); + } + const result = []; + for (let i = 0; i < params.length; i++) { + let value = params[i][key]; + if (allowEmpty === true && !value) { + value = ''; + } + else if (typeof value !== 'string') { + throw new Error(`invalid abi - expected type 'string', received ${typeof value}`); + } + result.push(value); + } + return result; +} +internal.getKeys = getKeys; +/** + * Is the string a hex string. + * + * @param value + * @param length + * @returns output the string is a hex string + */ +function isHexString$1(value, length) { + if (typeof value !== 'string' || !value.match(/^0x[0-9A-Fa-f]*$/)) + return false; + if (typeof length !== 'undefined' && length > 0 && value.length !== 2 + 2 * length) + return false; + return true; +} +internal.isHexString = isHexString$1; + +Object.defineProperty(helpers$1, "__esModule", { value: true }); +helpers$1.assertIsString = helpers$1.assertIsArray = helpers$1.assertIsBuffer = helpers$1.assertIsHexString = void 0; +const internal_1 = internal; +/** + * Throws if a string is not hex prefixed + * @param {string} input string to check hex prefix of + */ +const assertIsHexString$1 = function (input) { + if (!(0, internal_1.isHexString)(input)) { + const msg = `This method only supports 0x-prefixed hex strings but input was: ${input}`; + throw new Error(msg); + } +}; +helpers$1.assertIsHexString = assertIsHexString$1; +/** + * Throws if input is not a buffer + * @param {Buffer} input value to check + */ +const 
assertIsBuffer = function (input) { + if (!Buffer.isBuffer(input)) { + const msg = `This method only supports Buffer but input was: ${input}`; + throw new Error(msg); + } +}; +helpers$1.assertIsBuffer = assertIsBuffer; +/** + * Throws if input is not an array + * @param {number[]} input value to check + */ +const assertIsArray = function (input) { + if (!Array.isArray(input)) { + const msg = `This method only supports number arrays but input was: ${input}`; + throw new Error(msg); + } +}; +helpers$1.assertIsArray = assertIsArray; +/** + * Throws if input is not a string + * @param {string} input value to check + */ +const assertIsString = function (input) { + if (typeof input !== 'string') { + const msg = `This method only supports strings but input was: ${input}`; + throw new Error(msg); + } +}; +helpers$1.assertIsString = assertIsString; + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.intToUnpaddedBuffer = exports.bigIntToUnpaddedBuffer = exports.bigIntToHex = exports.bufArrToArr = exports.arrToBufArr = exports.validateNoLeadingZeroes = exports.baToJSON = exports.toUtf8 = exports.short = exports.addHexPrefix = exports.toUnsigned = exports.fromSigned = exports.bufferToInt = exports.bigIntToBuffer = exports.bufferToBigInt = exports.bufferToHex = exports.toBuffer = exports.unpadHexString = exports.unpadArray = exports.unpadBuffer = exports.setLengthRight = exports.setLengthLeft = exports.zeros = exports.intToBuffer = exports.intToHex = void 0; + const helpers_1 = helpers$1; + const internal_1 = internal; + /** + * Converts a `Number` into a hex `String` + * @param {Number} i + * @return {String} + */ + const intToHex = function (i) { + if (!Number.isSafeInteger(i) || i < 0) { + throw new Error(`Received an invalid integer type: ${i}`); + } + return `0x${i.toString(16)}`; + }; + exports.intToHex = intToHex; + /** + * Converts an `Number` to a `Buffer` + * @param {Number} i + * @return {Buffer} + */ + const 
intToBuffer = function (i) { + const hex = (0, exports.intToHex)(i); + return Buffer.from((0, internal_1.padToEven)(hex.slice(2)), 'hex'); + }; + exports.intToBuffer = intToBuffer; + /** + * Returns a buffer filled with 0s. + * @param bytes the number of bytes the buffer should be + */ + const zeros = function (bytes) { + return Buffer.allocUnsafe(bytes).fill(0); + }; + exports.zeros = zeros; + /** + * Pads a `Buffer` with zeros till it has `length` bytes. + * Truncates the beginning or end of input if its length exceeds `length`. + * @param msg the value to pad (Buffer) + * @param length the number of bytes the output should be + * @param right whether to start padding form the left or right + * @return (Buffer) + */ + const setLength = function (msg, length, right) { + const buf = (0, exports.zeros)(length); + if (right) { + if (msg.length < length) { + msg.copy(buf); + return buf; + } + return msg.slice(0, length); + } + else { + if (msg.length < length) { + msg.copy(buf, length - msg.length); + return buf; + } + return msg.slice(-length); + } + }; + /** + * Left Pads a `Buffer` with leading zeros till it has `length` bytes. + * Or it truncates the beginning if it exceeds. + * @param msg the value to pad (Buffer) + * @param length the number of bytes the output should be + * @return (Buffer) + */ + const setLengthLeft = function (msg, length) { + (0, helpers_1.assertIsBuffer)(msg); + return setLength(msg, length, false); + }; + exports.setLengthLeft = setLengthLeft; + /** + * Right Pads a `Buffer` with trailing zeros till it has `length` bytes. + * it truncates the end if it exceeds. + * @param msg the value to pad (Buffer) + * @param length the number of bytes the output should be + * @return (Buffer) + */ + const setLengthRight = function (msg, length) { + (0, helpers_1.assertIsBuffer)(msg); + return setLength(msg, length, true); + }; + exports.setLengthRight = setLengthRight; + /** + * Trims leading zeros from a `Buffer`, `String` or `Number[]`. 
+ * @param a (Buffer|Array|String) + * @return (Buffer|Array|String) + */ + const stripZeros = function (a) { + let first = a[0]; + while (a.length > 0 && first.toString() === '0') { + a = a.slice(1); + first = a[0]; + } + return a; + }; + /** + * Trims leading zeros from a `Buffer`. + * @param a (Buffer) + * @return (Buffer) + */ + const unpadBuffer = function (a) { + (0, helpers_1.assertIsBuffer)(a); + return stripZeros(a); + }; + exports.unpadBuffer = unpadBuffer; + /** + * Trims leading zeros from an `Array` (of numbers). + * @param a (number[]) + * @return (number[]) + */ + const unpadArray = function (a) { + (0, helpers_1.assertIsArray)(a); + return stripZeros(a); + }; + exports.unpadArray = unpadArray; + /** + * Trims leading zeros from a hex-prefixed `String`. + * @param a (String) + * @return (String) + */ + const unpadHexString = function (a) { + (0, helpers_1.assertIsHexString)(a); + a = (0, internal_1.stripHexPrefix)(a); + return ('0x' + stripZeros(a)); + }; + exports.unpadHexString = unpadHexString; + /** + * Attempts to turn a value into a `Buffer`. + * Inputs supported: `Buffer`, `String` (hex-prefixed), `Number`, null/undefined, `BigInt` and other objects + * with a `toArray()` or `toBuffer()` method. + * @param v the value + */ + const toBuffer = function (v) { + if (v === null || v === undefined) { + return Buffer.allocUnsafe(0); + } + if (Buffer.isBuffer(v)) { + return Buffer.from(v); + } + if (Array.isArray(v) || v instanceof Uint8Array) { + return Buffer.from(v); + } + if (typeof v === 'string') { + if (!(0, internal_1.isHexString)(v)) { + throw new Error(`Cannot convert string to buffer. 
toBuffer only supports 0x-prefixed hex strings and this string was given: ${v}`); + } + return Buffer.from((0, internal_1.padToEven)((0, internal_1.stripHexPrefix)(v)), 'hex'); + } + if (typeof v === 'number') { + return (0, exports.intToBuffer)(v); + } + if (typeof v === 'bigint') { + if (v < BigInt(0)) { + throw new Error(`Cannot convert negative bigint to buffer. Given: ${v}`); + } + let n = v.toString(16); + if (n.length % 2) + n = '0' + n; + return Buffer.from(n, 'hex'); + } + if (v.toArray) { + // converts a BN to a Buffer + return Buffer.from(v.toArray()); + } + if (v.toBuffer) { + return Buffer.from(v.toBuffer()); + } + throw new Error('invalid type'); + }; + exports.toBuffer = toBuffer; + /** + * Converts a `Buffer` into a `0x`-prefixed hex `String`. + * @param buf `Buffer` object to convert + */ + const bufferToHex = function (buf) { + buf = (0, exports.toBuffer)(buf); + return '0x' + buf.toString('hex'); + }; + exports.bufferToHex = bufferToHex; + /** + * Converts a {@link Buffer} to a {@link bigint} + */ + function bufferToBigInt(buf) { + const hex = (0, exports.bufferToHex)(buf); + if (hex === '0x') { + return BigInt(0); + } + return BigInt(hex); + } + exports.bufferToBigInt = bufferToBigInt; + /** + * Converts a {@link bigint} to a {@link Buffer} + */ + function bigIntToBuffer(num) { + return (0, exports.toBuffer)('0x' + num.toString(16)); + } + exports.bigIntToBuffer = bigIntToBuffer; + /** + * Converts a `Buffer` to a `Number`. + * @param buf `Buffer` object to convert + * @throws If the input number exceeds 53 bits. + */ + const bufferToInt = function (buf) { + const res = Number(bufferToBigInt(buf)); + if (!Number.isSafeInteger(res)) + throw new Error('Number exceeds 53 bits'); + return res; + }; + exports.bufferToInt = bufferToInt; + /** + * Interprets a `Buffer` as a signed integer and returns a `BigInt`. Assumes 256-bit numbers. 
+ * @param num Signed integer value + */ + const fromSigned = function (num) { + return BigInt.asIntN(256, bufferToBigInt(num)); + }; + exports.fromSigned = fromSigned; + /** + * Converts a `BigInt` to an unsigned integer and returns it as a `Buffer`. Assumes 256-bit numbers. + * @param num + */ + const toUnsigned = function (num) { + return bigIntToBuffer(BigInt.asUintN(256, num)); + }; + exports.toUnsigned = toUnsigned; + /** + * Adds "0x" to a given `String` if it does not already start with "0x". + */ + const addHexPrefix = function (str) { + if (typeof str !== 'string') { + return str; + } + return (0, internal_1.isHexPrefixed)(str) ? str : '0x' + str; + }; + exports.addHexPrefix = addHexPrefix; + /** + * Shortens a string or buffer's hex string representation to maxLength (default 50). + * + * Examples: + * + * Input: '657468657265756d000000000000000000000000000000000000000000000000' + * Output: '657468657265756d0000000000000000000000000000000000…' + */ + function short(buffer, maxLength = 50) { + const bufferStr = Buffer.isBuffer(buffer) ? buffer.toString('hex') : buffer; + if (bufferStr.length <= maxLength) { + return bufferStr; + } + return bufferStr.slice(0, maxLength) + '…'; + } + exports.short = short; + /** + * Returns the utf8 string representation from a hex string. + * + * Examples: + * + * Input 1: '657468657265756d000000000000000000000000000000000000000000000000' + * Input 2: '657468657265756d' + * Input 3: '000000000000000000000000000000000000000000000000657468657265756d' + * + * Output (all 3 input variants): 'ethereum' + * + * Note that this method is not intended to be used with hex strings + * representing quantities in both big endian or little endian notation. 
+ * + * @param string Hex string, should be `0x` prefixed + * @return Utf8 string + */ + const toUtf8 = function (hex) { + const zerosRegexp = /^(00)+|(00)+$/g; + hex = (0, internal_1.stripHexPrefix)(hex); + if (hex.length % 2 !== 0) { + throw new Error('Invalid non-even hex string input for toUtf8() provided'); + } + const bufferVal = Buffer.from(hex.replace(zerosRegexp, ''), 'hex'); + return bufferVal.toString('utf8'); + }; + exports.toUtf8 = toUtf8; + /** + * Converts a `Buffer` or `Array` to JSON. + * @param ba (Buffer|Array) + * @return (Array|String|null) + */ + const baToJSON = function (ba) { + if (Buffer.isBuffer(ba)) { + return `0x${ba.toString('hex')}`; + } + else if (ba instanceof Array) { + const array = []; + for (let i = 0; i < ba.length; i++) { + array.push((0, exports.baToJSON)(ba[i])); + } + return array; + } + }; + exports.baToJSON = baToJSON; + /** + * Checks provided Buffers for leading zeroes and throws if found. + * + * Examples: + * + * Valid values: 0x1, 0x, 0x01, 0x1234 + * Invalid values: 0x0, 0x00, 0x001, 0x0001 + * + * Note: This method is useful for validating that RLP encoded integers comply with the rule that all + * integer values encoded to RLP must be in the most compact form and contain no leading zero bytes + * @param values An object containing string keys and Buffer values + * @throws if any provided value is found to have leading zero bytes + */ + const validateNoLeadingZeroes = function (values) { + for (const [k, v] of Object.entries(values)) { + if (v !== undefined && v.length > 0 && v[0] === 0) { + throw new Error(`${k} cannot have leading zeroes, received: ${v.toString('hex')}`); + } + } + }; + exports.validateNoLeadingZeroes = validateNoLeadingZeroes; + function arrToBufArr(arr) { + if (!Array.isArray(arr)) { + return Buffer.from(arr); + } + return arr.map((a) => arrToBufArr(a)); + } + exports.arrToBufArr = arrToBufArr; + function bufArrToArr(arr) { + if (!Array.isArray(arr)) { + return Uint8Array.from(arr ?? 
[]); + } + return arr.map((a) => bufArrToArr(a)); + } + exports.bufArrToArr = bufArrToArr; + /** + * Converts a {@link bigint} to a `0x` prefixed hex string + */ + const bigIntToHex = (num) => { + return '0x' + num.toString(16); + }; + exports.bigIntToHex = bigIntToHex; + /** + * Convert value from bigint to an unpadded Buffer + * (useful for RLP transport) + * @param value value to convert + */ + function bigIntToUnpaddedBuffer(value) { + return (0, exports.unpadBuffer)(bigIntToBuffer(value)); + } + exports.bigIntToUnpaddedBuffer = bigIntToUnpaddedBuffer; + function intToUnpaddedBuffer(value) { + return (0, exports.unpadBuffer)((0, exports.intToBuffer)(value)); + } + exports.intToUnpaddedBuffer = intToUnpaddedBuffer; + +} (bytes$2)); + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.accountBodyToRLP = exports.accountBodyToSlim = exports.accountBodyFromSlim = exports.isZeroAddress = exports.zeroAddress = exports.importPublic = exports.privateToAddress = exports.privateToPublic = exports.publicToAddress = exports.pubToAddress = exports.isValidPublic = exports.isValidPrivate = exports.generateAddress2 = exports.generateAddress = exports.isValidChecksumAddress = exports.toChecksumAddress = exports.isValidAddress = exports.Account = void 0; + const rlp_1 = dist$3; + const keccak_1 = keccak; + const secp256k1_1 = secp256k1$1; + const utils_1 = utilsExports; + const bytes_1 = bytes$2; + const constants_1 = constants$2; + const helpers_1 = helpers$1; + const internal_1 = internal; + const _0n = BigInt(0); + class Account { + /** + * This constructor assigns and validates the values. + * Use the static factory methods to assist in creating an Account from varying data types. 
+ */ + constructor(nonce = _0n, balance = _0n, storageRoot = constants_1.KECCAK256_RLP, codeHash = constants_1.KECCAK256_NULL) { + this.nonce = nonce; + this.balance = balance; + this.storageRoot = storageRoot; + this.codeHash = codeHash; + this._validate(); + } + static fromAccountData(accountData) { + const { nonce, balance, storageRoot, codeHash } = accountData; + return new Account(nonce !== undefined ? (0, bytes_1.bufferToBigInt)((0, bytes_1.toBuffer)(nonce)) : undefined, balance !== undefined ? (0, bytes_1.bufferToBigInt)((0, bytes_1.toBuffer)(balance)) : undefined, storageRoot !== undefined ? (0, bytes_1.toBuffer)(storageRoot) : undefined, codeHash !== undefined ? (0, bytes_1.toBuffer)(codeHash) : undefined); + } + static fromRlpSerializedAccount(serialized) { + const values = (0, bytes_1.arrToBufArr)(rlp_1.RLP.decode(Uint8Array.from(serialized))); + if (!Array.isArray(values)) { + throw new Error('Invalid serialized account input. Must be array'); + } + return this.fromValuesArray(values); + } + static fromValuesArray(values) { + const [nonce, balance, storageRoot, codeHash] = values; + return new Account((0, bytes_1.bufferToBigInt)(nonce), (0, bytes_1.bufferToBigInt)(balance), storageRoot, codeHash); + } + _validate() { + if (this.nonce < _0n) { + throw new Error('nonce must be greater than zero'); + } + if (this.balance < _0n) { + throw new Error('balance must be greater than zero'); + } + if (this.storageRoot.length !== 32) { + throw new Error('storageRoot must have a length of 32'); + } + if (this.codeHash.length !== 32) { + throw new Error('codeHash must have a length of 32'); + } + } + /** + * Returns a Buffer Array of the raw Buffers for the account, in order. + */ + raw() { + return [ + (0, bytes_1.bigIntToUnpaddedBuffer)(this.nonce), + (0, bytes_1.bigIntToUnpaddedBuffer)(this.balance), + this.storageRoot, + this.codeHash, + ]; + } + /** + * Returns the RLP serialization of the account as a `Buffer`. 
+ */ + serialize() { + return Buffer.from(rlp_1.RLP.encode((0, bytes_1.bufArrToArr)(this.raw()))); + } + /** + * Returns a `Boolean` determining if the account is a contract. + */ + isContract() { + return !this.codeHash.equals(constants_1.KECCAK256_NULL); + } + /** + * Returns a `Boolean` determining if the account is empty complying to the definition of + * account emptiness in [EIP-161](https://eips.ethereum.org/EIPS/eip-161): + * "An account is considered empty when it has no code and zero nonce and zero balance." + */ + isEmpty() { + return this.balance === _0n && this.nonce === _0n && this.codeHash.equals(constants_1.KECCAK256_NULL); + } + } + exports.Account = Account; + /** + * Checks if the address is a valid. Accepts checksummed addresses too. + */ + const isValidAddress = function (hexAddress) { + try { + (0, helpers_1.assertIsString)(hexAddress); + } + catch (e) { + return false; + } + return /^0x[0-9a-fA-F]{40}$/.test(hexAddress); + }; + exports.isValidAddress = isValidAddress; + /** + * Returns a checksummed address. + * + * If an eip1191ChainId is provided, the chainId will be included in the checksum calculation. This + * has the effect of checksummed addresses for one chain having invalid checksums for others. + * For more details see [EIP-1191](https://eips.ethereum.org/EIPS/eip-1191). + * + * WARNING: Checksums with and without the chainId will differ and the EIP-1191 checksum is not + * backwards compatible to the original widely adopted checksum format standard introduced in + * [EIP-55](https://eips.ethereum.org/EIPS/eip-55), so this will break in existing applications. + * Usage of this EIP is therefore discouraged unless you have a very targeted use case. 
+ */ + const toChecksumAddress = function (hexAddress, eip1191ChainId) { + (0, helpers_1.assertIsHexString)(hexAddress); + const address = (0, internal_1.stripHexPrefix)(hexAddress).toLowerCase(); + let prefix = ''; + if (eip1191ChainId !== undefined) { + const chainId = (0, bytes_1.bufferToBigInt)((0, bytes_1.toBuffer)(eip1191ChainId)); + prefix = chainId.toString() + '0x'; + } + const buf = Buffer.from(prefix + address, 'utf8'); + const hash = (0, utils_1.bytesToHex)((0, keccak_1.keccak256)(buf)); + let ret = '0x'; + for (let i = 0; i < address.length; i++) { + if (parseInt(hash[i], 16) >= 8) { + ret += address[i].toUpperCase(); + } + else { + ret += address[i]; + } + } + return ret; + }; + exports.toChecksumAddress = toChecksumAddress; + /** + * Checks if the address is a valid checksummed address. + * + * See toChecksumAddress' documentation for details about the eip1191ChainId parameter. + */ + const isValidChecksumAddress = function (hexAddress, eip1191ChainId) { + return (0, exports.isValidAddress)(hexAddress) && (0, exports.toChecksumAddress)(hexAddress, eip1191ChainId) === hexAddress; + }; + exports.isValidChecksumAddress = isValidChecksumAddress; + /** + * Generates an address of a newly created contract. 
+ * @param from The address which is creating this new address + * @param nonce The nonce of the from account + */ + const generateAddress = function (from, nonce) { + (0, helpers_1.assertIsBuffer)(from); + (0, helpers_1.assertIsBuffer)(nonce); + if ((0, bytes_1.bufferToBigInt)(nonce) === BigInt(0)) { + // in RLP we want to encode null in the case of zero nonce + // read the RLP documentation for an answer if you dare + return Buffer.from((0, keccak_1.keccak256)(rlp_1.RLP.encode((0, bytes_1.bufArrToArr)([from, null])))).slice(-20); + } + // Only take the lower 160bits of the hash + return Buffer.from((0, keccak_1.keccak256)(rlp_1.RLP.encode((0, bytes_1.bufArrToArr)([from, nonce])))).slice(-20); + }; + exports.generateAddress = generateAddress; + /** + * Generates an address for a contract created using CREATE2. + * @param from The address which is creating this new address + * @param salt A salt + * @param initCode The init code of the contract being created + */ + const generateAddress2 = function (from, salt, initCode) { + (0, helpers_1.assertIsBuffer)(from); + (0, helpers_1.assertIsBuffer)(salt); + (0, helpers_1.assertIsBuffer)(initCode); + if (from.length !== 20) { + throw new Error('Expected from to be of length 20'); + } + if (salt.length !== 32) { + throw new Error('Expected salt to be of length 32'); + } + const address = (0, keccak_1.keccak256)(Buffer.concat([Buffer.from('ff', 'hex'), from, salt, (0, keccak_1.keccak256)(initCode)])); + return (0, bytes_1.toBuffer)(address).slice(-20); + }; + exports.generateAddress2 = generateAddress2; + /** + * Checks if the private key satisfies the rules of the curve secp256k1. + */ + const isValidPrivate = function (privateKey) { + return secp256k1_1.secp256k1.utils.isValidPrivateKey(privateKey); + }; + exports.isValidPrivate = isValidPrivate; + /** + * Checks if the public key satisfies the rules of the curve secp256k1 + * and the requirements of Ethereum. 
+ * @param publicKey The two points of an uncompressed key, unless sanitize is enabled + * @param sanitize Accept public keys in other formats + */ + const isValidPublic = function (publicKey, sanitize = false) { + (0, helpers_1.assertIsBuffer)(publicKey); + if (publicKey.length === 64) { + // Convert to SEC1 for secp256k1 + // Automatically checks whether point is on curve + try { + secp256k1_1.secp256k1.ProjectivePoint.fromHex(Buffer.concat([Buffer.from([4]), publicKey])); + return true; + } + catch (e) { + return false; + } + } + if (!sanitize) { + return false; + } + try { + secp256k1_1.secp256k1.ProjectivePoint.fromHex(publicKey); + return true; + } + catch (e) { + return false; + } + }; + exports.isValidPublic = isValidPublic; + /** + * Returns the ethereum address of a given public key. + * Accepts "Ethereum public keys" and SEC1 encoded keys. + * @param pubKey The two points of an uncompressed key, unless sanitize is enabled + * @param sanitize Accept public keys in other formats + */ + const pubToAddress = function (pubKey, sanitize = false) { + (0, helpers_1.assertIsBuffer)(pubKey); + if (sanitize && pubKey.length !== 64) { + pubKey = Buffer.from(secp256k1_1.secp256k1.ProjectivePoint.fromHex(pubKey).toRawBytes(false).slice(1)); + } + if (pubKey.length !== 64) { + throw new Error('Expected pubKey to be of length 64'); + } + // Only take the lower 160bits of the hash + return Buffer.from((0, keccak_1.keccak256)(pubKey)).slice(-20); + }; + exports.pubToAddress = pubToAddress; + exports.publicToAddress = exports.pubToAddress; + /** + * Returns the ethereum public key of a given private key. 
+ * @param privateKey A private key must be 256 bits wide + */ + const privateToPublic = function (privateKey) { + (0, helpers_1.assertIsBuffer)(privateKey); + // skip the type flag and use the X, Y points + return Buffer.from(secp256k1_1.secp256k1.ProjectivePoint.fromPrivateKey(privateKey).toRawBytes(false).slice(1)); + }; + exports.privateToPublic = privateToPublic; + /** + * Returns the ethereum address of a given private key. + * @param privateKey A private key must be 256 bits wide + */ + const privateToAddress = function (privateKey) { + return (0, exports.publicToAddress)((0, exports.privateToPublic)(privateKey)); + }; + exports.privateToAddress = privateToAddress; + /** + * Converts a public key to the Ethereum format. + */ + const importPublic = function (publicKey) { + (0, helpers_1.assertIsBuffer)(publicKey); + if (publicKey.length !== 64) { + publicKey = Buffer.from(secp256k1_1.secp256k1.ProjectivePoint.fromHex(publicKey).toRawBytes(false).slice(1)); + } + return publicKey; + }; + exports.importPublic = importPublic; + /** + * Returns the zero address. + */ + const zeroAddress = function () { + const addressLength = 20; + const addr = (0, bytes_1.zeros)(addressLength); + return (0, bytes_1.bufferToHex)(addr); + }; + exports.zeroAddress = zeroAddress; + /** + * Checks if a given address is the zero address. + */ + const isZeroAddress = function (hexAddress) { + try { + (0, helpers_1.assertIsString)(hexAddress); + } + catch (e) { + return false; + } + const zeroAddr = (0, exports.zeroAddress)(); + return zeroAddr === hexAddress; + }; + exports.isZeroAddress = isZeroAddress; + function accountBodyFromSlim(body) { + const [nonce, balance, storageRoot, codeHash] = body; + return [ + nonce, + balance, + (0, bytes_1.arrToBufArr)(storageRoot).length === 0 ? constants_1.KECCAK256_RLP : storageRoot, + (0, bytes_1.arrToBufArr)(codeHash).length === 0 ? 
constants_1.KECCAK256_NULL : codeHash, + ]; + } + exports.accountBodyFromSlim = accountBodyFromSlim; + const emptyUint8Arr = new Uint8Array(0); + function accountBodyToSlim(body) { + const [nonce, balance, storageRoot, codeHash] = body; + return [ + nonce, + balance, + (0, bytes_1.arrToBufArr)(storageRoot).equals(constants_1.KECCAK256_RLP) ? emptyUint8Arr : storageRoot, + (0, bytes_1.arrToBufArr)(codeHash).equals(constants_1.KECCAK256_NULL) ? emptyUint8Arr : codeHash, + ]; + } + exports.accountBodyToSlim = accountBodyToSlim; + /** + * Converts a slim account (per snap protocol spec) to the RLP encoded version of the account + * @param body Array of 4 Buffer-like items to represent the account + * @returns RLP encoded version of the account + */ + function accountBodyToRLP(body, couldBeSlim = true) { + const accountBody = couldBeSlim ? accountBodyFromSlim(body) : body; + return (0, bytes_1.arrToBufArr)(rlp_1.RLP.encode(accountBody)); + } + exports.accountBodyToRLP = accountBodyToRLP; + +} (account)); + +var address$1 = {}; + +Object.defineProperty(address$1, "__esModule", { value: true }); +address$1.Address = void 0; +const account_1 = account; +const bytes_1$3 = bytes$2; +/** + * Handling and generating Ethereum addresses + */ +class Address { + constructor(buf) { + if (buf.length !== 20) { + throw new Error('Invalid address length'); + } + this.buf = buf; + } + /** + * Returns the zero address. + */ + static zero() { + return new Address((0, bytes_1$3.zeros)(20)); + } + /** + * Returns an Address object from a hex-encoded string. + * @param str - Hex-encoded address + */ + static fromString(str) { + if (!(0, account_1.isValidAddress)(str)) { + throw new Error('Invalid address'); + } + return new Address((0, bytes_1$3.toBuffer)(str)); + } + /** + * Returns an address for a given public key. 
+ * @param pubKey The two points of an uncompressed key + */ + static fromPublicKey(pubKey) { + if (!Buffer.isBuffer(pubKey)) { + throw new Error('Public key should be Buffer'); + } + const buf = (0, account_1.pubToAddress)(pubKey); + return new Address(buf); + } + /** + * Returns an address for a given private key. + * @param privateKey A private key must be 256 bits wide + */ + static fromPrivateKey(privateKey) { + if (!Buffer.isBuffer(privateKey)) { + throw new Error('Private key should be Buffer'); + } + const buf = (0, account_1.privateToAddress)(privateKey); + return new Address(buf); + } + /** + * Generates an address for a newly created contract. + * @param from The address which is creating this new address + * @param nonce The nonce of the from account + */ + static generate(from, nonce) { + if (typeof nonce !== 'bigint') { + throw new Error('Expected nonce to be a bigint'); + } + return new Address((0, account_1.generateAddress)(from.buf, (0, bytes_1$3.bigIntToBuffer)(nonce))); + } + /** + * Generates an address for a contract created using CREATE2. + * @param from The address which is creating this new address + * @param salt A salt + * @param initCode The init code of the contract being created + */ + static generate2(from, salt, initCode) { + if (!Buffer.isBuffer(salt)) { + throw new Error('Expected salt to be a Buffer'); + } + if (!Buffer.isBuffer(initCode)) { + throw new Error('Expected initCode to be a Buffer'); + } + return new Address((0, account_1.generateAddress2)(from.buf, salt, initCode)); + } + /** + * Is address equal to another. + */ + equals(address) { + return this.buf.equals(address.buf); + } + /** + * Is address zero. 
+ */ + isZero() { + return this.equals(Address.zero()); + } + /** + * True if address is in the address range defined + * by EIP-1352 + */ + isPrecompileOrSystemAddress() { + const address = (0, bytes_1$3.bufferToBigInt)(this.buf); + const rangeMin = BigInt(0); + const rangeMax = BigInt('0xffff'); + return address >= rangeMin && address <= rangeMax; + } + /** + * Returns hex encoding of address. + */ + toString() { + return '0x' + this.buf.toString('hex'); + } + /** + * Returns Buffer representation of address. + */ + toBuffer() { + return Buffer.from(this.buf); + } +} +address$1.Address = Address; + +var withdrawal = {}; + +var types$1 = {}; + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.toType = exports.TypeOutput = void 0; + const bytes_1 = bytes$2; + const internal_1 = internal; + /** + * Type output options + */ + var TypeOutput; + (function (TypeOutput) { + TypeOutput[TypeOutput["Number"] = 0] = "Number"; + TypeOutput[TypeOutput["BigInt"] = 1] = "BigInt"; + TypeOutput[TypeOutput["Buffer"] = 2] = "Buffer"; + TypeOutput[TypeOutput["PrefixedHexString"] = 3] = "PrefixedHexString"; + })(TypeOutput = exports.TypeOutput || (exports.TypeOutput = {})); + function toType(input, outputType) { + if (input === null) { + return null; + } + if (input === undefined) { + return undefined; + } + if (typeof input === 'string' && !(0, internal_1.isHexString)(input)) { + throw new Error(`A string must be provided with a 0x-prefix, given: ${input}`); + } + else if (typeof input === 'number' && !Number.isSafeInteger(input)) { + throw new Error('The provided number is greater than MAX_SAFE_INTEGER (please use an alternative input type)'); + } + const output = (0, bytes_1.toBuffer)(input); + switch (outputType) { + case TypeOutput.Buffer: + return output; + case TypeOutput.BigInt: + return (0, bytes_1.bufferToBigInt)(output); + case TypeOutput.Number: { + const bigInt = (0, bytes_1.bufferToBigInt)(output); + if (bigInt > 
BigInt(Number.MAX_SAFE_INTEGER)) { + throw new Error('The provided number is greater than MAX_SAFE_INTEGER (please use an alternative output type)'); + } + return Number(bigInt); + } + case TypeOutput.PrefixedHexString: + return (0, bytes_1.bufferToHex)(output); + default: + throw new Error('unknown outputType'); + } + } + exports.toType = toType; + +} (types$1)); + +Object.defineProperty(withdrawal, "__esModule", { value: true }); +withdrawal.Withdrawal = void 0; +const address_1 = address$1; +const bytes_1$2 = bytes$2; +const types_1 = types$1; +/** + * Representation of EIP-4895 withdrawal data + */ +class Withdrawal { + /** + * This constructor assigns and validates the values. + * Use the static factory methods to assist in creating a Withdrawal object from varying data types. + * Its amount is in Gwei to match CL representation and for eventual ssz withdrawalsRoot + */ + constructor(index, validatorIndex, address, + /** + * withdrawal amount in Gwei to match the CL repesentation and eventually ssz withdrawalsRoot + */ + amount) { + this.index = index; + this.validatorIndex = validatorIndex; + this.address = address; + this.amount = amount; + } + static fromWithdrawalData(withdrawalData) { + const { index: indexData, validatorIndex: validatorIndexData, address: addressData, amount: amountData, } = withdrawalData; + const index = (0, types_1.toType)(indexData, types_1.TypeOutput.BigInt); + const validatorIndex = (0, types_1.toType)(validatorIndexData, types_1.TypeOutput.BigInt); + const address = new address_1.Address((0, types_1.toType)(addressData, types_1.TypeOutput.Buffer)); + const amount = (0, types_1.toType)(amountData, types_1.TypeOutput.BigInt); + return new Withdrawal(index, validatorIndex, address, amount); + } + static fromValuesArray(withdrawalArray) { + if (withdrawalArray.length !== 4) { + throw Error(`Invalid withdrawalArray length expected=4 actual=${withdrawalArray.length}`); + } + const [index, validatorIndex, address, amount] = 
withdrawalArray; + return Withdrawal.fromWithdrawalData({ index, validatorIndex, address, amount }); + } + /** + * Convert a withdrawal to a buffer array + * @param withdrawal the withdrawal to convert + * @returns buffer array of the withdrawal + */ + static toBufferArray(withdrawal) { + const { index, validatorIndex, address, amount } = withdrawal; + const indexBuffer = (0, types_1.toType)(index, types_1.TypeOutput.BigInt) === BigInt(0) + ? Buffer.alloc(0) + : (0, types_1.toType)(index, types_1.TypeOutput.Buffer); + const validatorIndexBuffer = (0, types_1.toType)(validatorIndex, types_1.TypeOutput.BigInt) === BigInt(0) + ? Buffer.alloc(0) + : (0, types_1.toType)(validatorIndex, types_1.TypeOutput.Buffer); + let addressBuffer; + if (address instanceof address_1.Address) { + addressBuffer = address.buf; + } + else { + addressBuffer = (0, types_1.toType)(address, types_1.TypeOutput.Buffer); + } + const amountBuffer = (0, types_1.toType)(amount, types_1.TypeOutput.BigInt) === BigInt(0) + ? 
Buffer.alloc(0) + : (0, types_1.toType)(amount, types_1.TypeOutput.Buffer); + return [indexBuffer, validatorIndexBuffer, addressBuffer, amountBuffer]; + } + raw() { + return Withdrawal.toBufferArray(this); + } + toValue() { + return { + index: this.index, + validatorIndex: this.validatorIndex, + address: this.address.buf, + amount: this.amount, + }; + } + toJSON() { + return { + index: (0, bytes_1$2.bigIntToHex)(this.index), + validatorIndex: (0, bytes_1$2.bigIntToHex)(this.validatorIndex), + address: '0x' + this.address.buf.toString('hex'), + amount: (0, bytes_1$2.bigIntToHex)(this.amount), + }; + } +} +withdrawal.Withdrawal = Withdrawal; + +var signature = {}; + +Object.defineProperty(signature, "__esModule", { value: true }); +signature.hashPersonalMessage = signature.isValidSignature = signature.fromRpcSig = signature.toCompactSig = signature.toRpcSig = signature.ecrecover = signature.ecsign = void 0; +const keccak_1 = keccak; +const secp256k1_1 = secp256k1$1; +const bytes_1$1 = bytes$2; +const constants_1 = constants$2; +const helpers_1 = helpers$1; +/** + * Returns the ECDSA signature of a message hash. + * + * If `chainId` is provided assume an EIP-155-style signature and calculate the `v` value + * accordingly, otherwise return a "static" `v` just derived from the `recovery` bit + */ +function ecsign(msgHash, privateKey, chainId) { + const sig = secp256k1_1.secp256k1.sign(msgHash, privateKey); + const buf = sig.toCompactRawBytes(); + const r = Buffer.from(buf.slice(0, 32)); + const s = Buffer.from(buf.slice(32, 64)); + const v = chainId === undefined + ? 
BigInt(sig.recovery + 27) + : BigInt(sig.recovery + 35) + BigInt(chainId) * BigInt(2); + return { r, s, v }; +} +signature.ecsign = ecsign; +function calculateSigRecovery(v, chainId) { + if (v === BigInt(0) || v === BigInt(1)) + return v; + if (chainId === undefined) { + return v - BigInt(27); + } + return v - (chainId * BigInt(2) + BigInt(35)); +} +function isValidSigRecovery(recovery) { + return recovery === BigInt(0) || recovery === BigInt(1); +} +/** + * ECDSA public key recovery from signature. + * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @returns Recovered public key + */ +const ecrecover = function (msgHash, v, r, s, chainId) { + const signature = Buffer.concat([(0, bytes_1$1.setLengthLeft)(r, 32), (0, bytes_1$1.setLengthLeft)(s, 32)], 64); + const recovery = calculateSigRecovery(v, chainId); + if (!isValidSigRecovery(recovery)) { + throw new Error('Invalid signature v value'); + } + const sig = secp256k1_1.secp256k1.Signature.fromCompact(signature).addRecoveryBit(Number(recovery)); + const senderPubKey = sig.recoverPublicKey(msgHash); + return Buffer.from(senderPubKey.toRawBytes(false).slice(1)); +}; +signature.ecrecover = ecrecover; +/** + * Convert signature parameters into the format of `eth_sign` RPC method. + * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @returns Signature + */ +const toRpcSig = function (v, r, s, chainId) { + const recovery = calculateSigRecovery(v, chainId); + if (!isValidSigRecovery(recovery)) { + throw new Error('Invalid signature v value'); + } + // geth (and the RPC eth_sign method) uses the 65 byte format used by Bitcoin + return (0, bytes_1$1.bufferToHex)(Buffer.concat([(0, bytes_1$1.setLengthLeft)(r, 32), (0, bytes_1$1.setLengthLeft)(s, 32), (0, bytes_1$1.toBuffer)(v)])); +}; +signature.toRpcSig = toRpcSig; +/** + * Convert signature parameters into the format of Compact Signature Representation (EIP-2098). 
+ * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @returns Signature + */ +const toCompactSig = function (v, r, s, chainId) { + const recovery = calculateSigRecovery(v, chainId); + if (!isValidSigRecovery(recovery)) { + throw new Error('Invalid signature v value'); + } + let ss = s; + if ((v > BigInt(28) && v % BigInt(2) === BigInt(1)) || v === BigInt(1) || v === BigInt(28)) { + ss = Buffer.from(s); + ss[0] |= 0x80; + } + return (0, bytes_1$1.bufferToHex)(Buffer.concat([(0, bytes_1$1.setLengthLeft)(r, 32), (0, bytes_1$1.setLengthLeft)(ss, 32)])); +}; +signature.toCompactSig = toCompactSig; +/** + * Convert signature format of the `eth_sign` RPC method to signature parameters + * + * NOTE: For an extracted `v` value < 27 (see Geth bug https://github.com/ethereum/go-ethereum/issues/2053) + * `v + 27` is returned for the `v` value + * NOTE: After EIP1559, `v` could be `0` or `1` but this function assumes + * it's a signed message (EIP-191 or EIP-712) adding `27` at the end. Remove if needed. + */ +const fromRpcSig = function (sig) { + const buf = (0, bytes_1$1.toBuffer)(sig); + let r; + let s; + let v; + if (buf.length >= 65) { + r = buf.slice(0, 32); + s = buf.slice(32, 64); + v = (0, bytes_1$1.bufferToBigInt)(buf.slice(64)); + } + else if (buf.length === 64) { + // Compact Signature Representation (https://eips.ethereum.org/EIPS/eip-2098) + r = buf.slice(0, 32); + s = buf.slice(32, 64); + v = BigInt((0, bytes_1$1.bufferToInt)(buf.slice(32, 33)) >> 7); + s[0] &= 0x7f; + } + else { + throw new Error('Invalid signature length'); + } + // support both versions of `eth_sign` responses + if (v < 27) { + v = v + BigInt(27); + } + return { + v, + r, + s, + }; +}; +signature.fromRpcSig = fromRpcSig; +/** + * Validate a ECDSA signature. 
+ * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @param homesteadOrLater Indicates whether this is being used on either the homestead hardfork or a later one + */ +const isValidSignature = function (v, r, s, homesteadOrLater = true, chainId) { + if (r.length !== 32 || s.length !== 32) { + return false; + } + if (!isValidSigRecovery(calculateSigRecovery(v, chainId))) { + return false; + } + const rBigInt = (0, bytes_1$1.bufferToBigInt)(r); + const sBigInt = (0, bytes_1$1.bufferToBigInt)(s); + if (rBigInt === BigInt(0) || + rBigInt >= constants_1.SECP256K1_ORDER || + sBigInt === BigInt(0) || + sBigInt >= constants_1.SECP256K1_ORDER) { + return false; + } + if (homesteadOrLater && sBigInt >= constants_1.SECP256K1_ORDER_DIV_2) { + return false; + } + return true; +}; +signature.isValidSignature = isValidSignature; +/** + * Returns the keccak-256 hash of `message`, prefixed with the header used by the `eth_sign` RPC call. + * The output of this function can be fed into `ecsign` to produce the same signature as the `eth_sign` + * call for a given `message`, or fed to `ecrecover` along with a signature to recover the public key + * used to produce the signature. 
+ */ +const hashPersonalMessage = function (message) { + (0, helpers_1.assertIsBuffer)(message); + const prefix = Buffer.from(`\u0019Ethereum Signed Message:\n${message.length}`, 'utf-8'); + return Buffer.from((0, keccak_1.keccak256)(Buffer.concat([prefix, message]))); +}; +signature.hashPersonalMessage = hashPersonalMessage; + +var encoding = {}; + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.compactBytesToNibbles = exports.bytesToNibbles = exports.nibblesToCompactBytes = exports.nibblesToBytes = exports.hasTerminator = void 0; + // Reference: https://ethereum.org/en/developers/docs/data-structures-and-encoding/patricia-merkle-trie/ + /** + * + * @param s byte sequence + * @returns boolean indicating if input hex nibble sequence has terminator indicating leaf-node + * terminator is represented with 16 because a nibble ranges from 0 - 15(f) + */ + const hasTerminator = (nibbles) => { + return nibbles.length > 0 && nibbles[nibbles.length - 1] === 16; + }; + exports.hasTerminator = hasTerminator; + const nibblesToBytes = (nibbles, bytes) => { + for (let bi = 0, ni = 0; ni < nibbles.length; bi += 1, ni += 2) { + bytes[bi] = (nibbles[ni] << 4) | nibbles[ni + 1]; + } + }; + exports.nibblesToBytes = nibblesToBytes; + const nibblesToCompactBytes = (nibbles) => { + let terminator = 0; + if ((0, exports.hasTerminator)(nibbles)) { + terminator = 1; + // Remove the terminator from the sequence + nibbles = nibbles.subarray(0, nibbles.length - 1); + } + const buf = new Uint8Array(nibbles.length / 2 + 1); + // Shift the terminator info into the first nibble of buf[0] + buf[0] = terminator << 5; + // If odd length, then add that flag into the first nibble and put the odd nibble to + // second part of buf[0] which otherwise will be left padded with a 0 + if ((nibbles.length & 1) === 1) { + buf[0] |= 1 << 4; + buf[0] |= nibbles[0]; + nibbles = nibbles.subarray(1); + } + // create bytes out of the rest even nibbles + (0, 
exports.nibblesToBytes)(nibbles, buf.subarray(1)); + return buf; + }; + exports.nibblesToCompactBytes = nibblesToCompactBytes; + const bytesToNibbles = (str) => { + const l = str.length * 2 + 1; + const nibbles = new Uint8Array(l); + for (let i = 0; i < str.length; i++) { + const b = str[i]; + nibbles[i * 2] = b / 16; + nibbles[i * 2 + 1] = b % 16; + } + // This will get removed from calling function if the first nibble + // indicates that terminator is not present + nibbles[l - 1] = 16; + return nibbles; + }; + exports.bytesToNibbles = bytesToNibbles; + const compactBytesToNibbles = (compact) => { + if (compact.length === 0) { + return compact; + } + let base = (0, exports.bytesToNibbles)(compact); + // delete terminator flag if terminator flag was not in first nibble + if (base[0] < 2) { + base = base.subarray(0, base.length - 1); + } + // chop the terminator nibble and the even padding (if there is one) + // i.e. chop 2 left nibbles when even else 1 when odd + const chop = 2 - (base[0] & 1); + return base.subarray(chop); + }; + exports.compactBytesToNibbles = compactBytesToNibbles; + /** + * A test helper to generates compact path for a subset of key bytes + * + * TODO: Commenting the code for now as this seems to be helper function + * (from geth codebase ) + * + */ + // + // + // export const getPathTo = (tillBytes: number, key: Buffer) => { + // const hexNibbles = bytesToNibbles(key).subarray(0, tillBytes) + // // Remove the terminator if its there, although it would be there only if tillBytes >= key.length + // // This seems to be a test helper to generate paths so correctness of this isn't necessary + // hexNibbles[hexNibbles.length - 1] = 0 + // const compactBytes = nibblesToCompactBytes(hexNibbles) + // return [Buffer.from(compactBytes)] + // } + +} (encoding)); + +var asyncEventEmitter = {}; + +/** + * Ported to Typescript from original implementation below: + * https://github.com/ahultgren/async-eventemitter -- MIT licensed + * + * Type Definitions 
based on work by: patarapolw -- MIT licensed + * that was contributed to Definitely Typed below: + * https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/async-eventemitter + */ +Object.defineProperty(asyncEventEmitter, "__esModule", { value: true }); +asyncEventEmitter.AsyncEventEmitter = void 0; +const events_1 = require$$0$3; +async function runInSeries(context, tasks, data) { + let error; + for await (const task of tasks) { + try { + if (task.length < 2) { + //sync + task.call(context, data); + } + else { + await new Promise((resolve, reject) => { + task.call(context, data, (error) => { + if (error) { + reject(error); + } + else { + resolve(); + } + }); + }); + } + } + catch (e) { + error = e; + } + } + if (error) { + throw error; + } +} +class AsyncEventEmitter extends events_1.EventEmitter { + emit(event, ...args) { + let [data, callback] = args; + const self = this; + let listeners = self._events[event] ?? []; + // Optional data argument + if (callback === undefined && typeof data === 'function') { + callback = data; + data = undefined; + } + // Special treatment of internal newListener and removeListener events + if (event === 'newListener' || event === 'removeListener') { + data = { + event: data, + fn: callback, + }; + callback = undefined; + } + // A single listener is just a function not an array... + listeners = Array.isArray(listeners) ? 
listeners : [listeners]; + runInSeries(self, listeners.slice(), data).then(callback).catch(callback); + return self.listenerCount(event) > 0; + } + once(event, listener) { + const self = this; + let g; + if (typeof listener !== 'function') { + throw new TypeError('listener must be a function'); + } + // Hack to support set arity + if (listener.length >= 2) { + g = function (e, next) { + self.removeListener(event, g); + void listener(e, next); + }; + } + else { + g = function (e) { + self.removeListener(event, g); + void listener(e, g); + }; + } + self.on(event, g); + return self; + } + first(event, listener) { + let listeners = this._events[event] ?? []; + // Contract + if (typeof listener !== 'function') { + throw new TypeError('listener must be a function'); + } + // Listeners are not always an array + if (!Array.isArray(listeners)) { + this._events[event] = listeners = [listeners]; + } + listeners.unshift(listener); + return this; + } + before(event, target, listener) { + return this.beforeOrAfter(event, target, listener); + } + after(event, target, listener) { + return this.beforeOrAfter(event, target, listener, 'after'); + } + beforeOrAfter(event, target, listener, beforeOrAfter) { + let listeners = this._events[event] ?? []; + let i; + let index; + const add = beforeOrAfter === 'after' ? 
1 : 0; + // Contract + if (typeof listener !== 'function') { + throw new TypeError('listener must be a function'); + } + if (typeof target !== 'function') { + throw new TypeError('target must be a function'); + } + // Listeners are not always an array + if (!Array.isArray(listeners)) { + this._events[event] = listeners = [listeners]; + } + index = listeners.length; + for (i = listeners.length; i--;) { + if (listeners[i] === target) { + index = i + add; + break; + } + } + listeners.splice(index, 0, listener); + return this; + } + on(event, listener) { + return super.on(event, listener); + } + addListener(event, listener) { + return super.addListener(event, listener); + } + prependListener(event, listener) { + return super.prependListener(event, listener); + } + prependOnceListener(event, listener) { + return super.prependOnceListener(event, listener); + } + removeAllListeners(event) { + return super.removeAllListeners(event); + } + removeListener(event, listener) { + return super.removeListener(event, listener); + } + eventNames() { + return super.eventNames(); + } + listeners(event) { + return super.listeners(event); + } + listenerCount(event) { + return super.listenerCount(event); + } + getMaxListeners() { + return super.getMaxListeners(); + } + setMaxListeners(maxListeners) { + return super.setMaxListeners(maxListeners); + } +} +asyncEventEmitter.AsyncEventEmitter = AsyncEventEmitter; + +var lock = {}; + +Object.defineProperty(lock, "__esModule", { value: true }); +lock.Lock = void 0; +// Based on https://github.com/jsoendermann/semaphore-async-await/blob/master/src/Semaphore.ts +class Lock { + constructor() { + this.permits = 1; + this.promiseResolverQueue = []; + } + /** + * Returns a promise used to wait for a permit to become available. This method should be awaited on. + * @returns A promise that gets resolved when execution is allowed to proceed. 
+ */ + async acquire() { + if (this.permits > 0) { + this.permits -= 1; + return Promise.resolve(true); + } + // If there is no permit available, we return a promise that resolves once the semaphore gets + // signaled enough times that permits is equal to one. + return new Promise((resolver) => this.promiseResolverQueue.push(resolver)); + } + /** + * Increases the number of permits by one. If there are other functions waiting, one of them will + * continue to execute in a future iteration of the event loop. + */ + release() { + this.permits += 1; + if (this.permits > 1 && this.promiseResolverQueue.length > 0) { + // eslint-disable-next-line no-console + console.warn('Lock.permits should never be > 0 when there is someone waiting.'); + } + else if (this.permits === 1 && this.promiseResolverQueue.length > 0) { + // If there is someone else waiting, immediately consume the permit that was released + // at the beginning of this function and let the waiting function resume. + this.permits -= 1; + const nextResolver = this.promiseResolverQueue.shift(); + if (nextResolver) { + nextResolver(true); + } + } + } +} +lock.Lock = Lock; + +var provider = {}; + +var microFtch = {}; + +Object.defineProperty(microFtch, "__esModule", { value: true }); +microFtch.InvalidStatusCodeError = microFtch.InvalidCertError = void 0; +const DEFAULT_OPT = Object.freeze({ + redirect: true, + expectStatusCode: 200, + headers: {}, + full: false, + keepAlive: true, + cors: false, + referrer: false, + sslAllowSelfSigned: false, + _redirectCount: 0, +}); +class InvalidCertError extends Error { + constructor(msg, fingerprint256) { + super(msg); + this.fingerprint256 = fingerprint256; + } +} +microFtch.InvalidCertError = InvalidCertError; +class InvalidStatusCodeError extends Error { + constructor(statusCode) { + super(`Request Failed. 
Status Code: ${statusCode}`); + this.statusCode = statusCode; + } +} +microFtch.InvalidStatusCodeError = InvalidStatusCodeError; +function detectType(b, type) { + if (!type || type === 'text' || type === 'json') { + try { + let text = new TextDecoder('utf8', { fatal: true }).decode(b); + if (type === 'text') + return text; + try { + return JSON.parse(text); + } + catch (err) { + if (type === 'json') + throw err; + return text; + } + } + catch (err) { + if (type === 'text' || type === 'json') + throw err; + } + } + return b; +} +let agents = {}; +function fetchNode(url, _options) { + let options = { ...DEFAULT_OPT, ..._options }; + const http = http$2; + const https = require$$1$4; + const zlib = zlib$1; + const { promisify } = require$$1$3; + const { resolve: urlResolve } = Url; + const isSecure = !!/^https/.test(url); + let opts = { + method: options.method || 'GET', + headers: { 'Accept-Encoding': 'gzip, deflate, br' }, + }; + const compactFP = (s) => s.replace(/:| /g, '').toLowerCase(); + if (options.keepAlive) { + const agentOpt = { + keepAlive: true, + keepAliveMsecs: 30 * 1000, + maxFreeSockets: 1024, + maxCachedSessions: 1024, + }; + const agentKey = [ + isSecure, + isSecure && options.sslPinnedCertificates?.map((i) => compactFP(i)).sort(), + ].join(); + opts.agent = + agents[agentKey] || (agents[agentKey] = new (isSecure ? https : http).Agent(agentOpt)); + } + if (options.type === 'json') + opts.headers['Content-Type'] = 'application/json'; + if (options.data) { + if (!options.method) + opts.method = 'POST'; + opts.body = options.type === 'json' ? JSON.stringify(options.data) : options.data; + } + opts.headers = { ...opts.headers, ...options.headers }; + if (options.sslAllowSelfSigned) + opts.rejectUnauthorized = false; + const handleRes = async (res) => { + const status = res.statusCode; + if (options.redirect && 300 <= status && status < 400 && res.headers['location']) { + if (options._redirectCount == 10) + throw new Error('Request failed. 
Too much redirects.'); + options._redirectCount += 1; + return await fetchNode(urlResolve(url, res.headers['location']), options); + } + if (options.expectStatusCode && status !== options.expectStatusCode) { + res.resume(); + throw new InvalidStatusCodeError(status); + } + let buf = []; + for await (const chunk of res) + buf.push(chunk); + let bytes = Buffer.concat(buf); + const encoding = res.headers['content-encoding']; + if (encoding === 'br') + bytes = await promisify(zlib.brotliDecompress)(bytes); + if (encoding === 'gzip' || encoding === 'deflate') + bytes = await promisify(zlib.unzip)(bytes); + const body = detectType(bytes, options.type); + if (options.full) + return { headers: res.headers, status, body }; + return body; + }; + return new Promise((resolve, reject) => { + const handleError = async (err) => { + if (err && err.code === 'DEPTH_ZERO_SELF_SIGNED_CERT') { + try { + await fetchNode(url, { ...options, sslAllowSelfSigned: true, sslPinnedCertificates: [] }); + } + catch (e) { + if (e && e.fingerprint256) { + err = new InvalidCertError(`Self-signed SSL certificate: ${e.fingerprint256}`, e.fingerprint256); + } + } + } + reject(err); + }; + const req = (isSecure ? 
https : http).request(url, opts, (res) => { + res.on('error', handleError); + (async () => { + try { + resolve(await handleRes(res)); + } + catch (error) { + reject(error); + } + })(); + }); + req.on('error', handleError); + const pinned = options.sslPinnedCertificates?.map((i) => compactFP(i)); + const mfetchSecureConnect = (socket) => { + const fp256 = compactFP(socket.getPeerCertificate()?.fingerprint256 || ''); + if (!fp256 && socket.isSessionReused()) + return; + if (pinned.includes(fp256)) + return; + req.emit('error', new InvalidCertError(`Invalid SSL certificate: ${fp256} Expected: ${pinned}`, fp256)); + return req.abort(); + }; + if (options.sslPinnedCertificates) { + req.on('socket', (socket) => { + const hasListeners = socket + .listeners('secureConnect') + .map((i) => (i.name || '').replace('bound ', '')) + .includes('mfetchSecureConnect'); + if (hasListeners) + return; + socket.on('secureConnect', mfetchSecureConnect.bind(null, socket)); + }); + } + if (options.keepAlive) + req.setNoDelay(true); + if (opts.body) + req.write(opts.body); + req.end(); + }); +} +const SAFE_HEADERS = new Set(['Accept', 'Accept-Language', 'Content-Language', 'Content-Type'].map((i) => i.toLowerCase())); +const FORBIDDEN_HEADERS = new Set(['Accept-Charset', 'Accept-Encoding', 'Access-Control-Request-Headers', 'Access-Control-Request-Method', + 'Connection', 'Content-Length', 'Cookie', 'Cookie2', 'Date', 'DNT', 'Expect', 'Host', 'Keep-Alive', 'Origin', 'Referer', 'TE', 'Trailer', + 'Transfer-Encoding', 'Upgrade', 'Via'].map((i) => i.toLowerCase())); +async function fetchBrowser(url, _options) { + let options = { ...DEFAULT_OPT, ..._options }; + const headers = new Headers(); + if (options.type === 'json') + headers.set('Content-Type', 'application/json'); + let parsed = new URL(url); + if (parsed.username) { + const auth = btoa(`${parsed.username}:${parsed.password}`); + headers.set('Authorization', `Basic ${auth}`); + parsed.username = ''; + parsed.password = ''; + } + url = 
'' + parsed; + for (let k in options.headers) { + const name = k.toLowerCase(); + if (SAFE_HEADERS.has(name) || (options.cors && !FORBIDDEN_HEADERS.has(name))) + headers.set(k, options.headers[k]); + } + let opts = { headers, redirect: options.redirect ? 'follow' : 'manual' }; + if (!options.referrer) + opts.referrerPolicy = 'no-referrer'; + if (options.cors) + opts.mode = 'cors'; + if (options.data) { + if (!options.method) + opts.method = 'POST'; + opts.body = options.type === 'json' ? JSON.stringify(options.data) : options.data; + } + const res = await fetch(url, opts); + if (options.expectStatusCode && res.status !== options.expectStatusCode) + throw new InvalidStatusCodeError(res.status); + const body = detectType(new Uint8Array(await res.arrayBuffer()), options.type); + if (options.full) + return { headers: Object.fromEntries(res.headers.entries()), status: res.status, body }; + return body; +} +const IS_NODE = !!(typeof process == 'object' && + process.versions && + process.versions.node && + process.versions.v8); +function fetchUrl(url, options) { + const fn = IS_NODE ? 
fetchNode : fetchBrowser; + return fn(url, options); +} +microFtch.default = fetchUrl; + +Object.defineProperty(provider, "__esModule", { value: true }); +provider.getProvider = provider.fetchFromProvider = void 0; +const micro_ftch_1 = microFtch; +const fetchFromProvider = async (url, params) => { + const res = await (0, micro_ftch_1.default)(url, { + headers: { + 'content-type': 'application/json', + }, + type: 'json', + data: { + method: params.method, + params: params.params, + jsonrpc: '2.0', + id: 1, + }, + }); + return res.result; +}; +provider.fetchFromProvider = fetchFromProvider; +const getProvider = (provider) => { + if (typeof provider === 'string') { + return provider; + } + else if (provider?.connection?.url !== undefined) { + return provider.connection.url; + } + else { + throw new Error('Must provide valid provider URL or Web3Provider'); + } +}; +provider.getProvider = getProvider; + +(function (exports) { + var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + })); + var __exportStar = (commonjsGlobal && commonjsGlobal.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); + }; + Object.defineProperty(exports, "__esModule", { value: true }); + exports.toAscii = exports.stripHexPrefix = exports.padToEven = exports.isHexString = exports.isHexPrefixed = exports.getKeys = exports.getBinarySize = exports.fromUtf8 = exports.fromAscii = exports.arrayContainsArray = void 0; + /** + * Constants + */ + __exportStar(constants$2, exports); + /** + * Units helpers + */ + __exportStar(units, exports); + /** + * Account class and helper functions + */ + __exportStar(account, exports); + /** + * Address type + */ + __exportStar(address$1, exports); + /** + * Withdrawal type + */ + __exportStar(withdrawal, exports); + /** + * ECDSA signature + */ + __exportStar(signature, exports); + /** + * Utilities for manipulating Buffers, byte arrays, etc. 
+ */ + __exportStar(bytes$2, exports); + /** + * Helpful TypeScript types + */ + __exportStar(types$1, exports); + /** + * Helper function for working with compact encoding + */ + __exportStar(encoding, exports); + /** + * Export ethjs-util methods + */ + __exportStar(asyncEventEmitter, exports); + var internal_1 = internal; + Object.defineProperty(exports, "arrayContainsArray", { enumerable: true, get: function () { return internal_1.arrayContainsArray; } }); + Object.defineProperty(exports, "fromAscii", { enumerable: true, get: function () { return internal_1.fromAscii; } }); + Object.defineProperty(exports, "fromUtf8", { enumerable: true, get: function () { return internal_1.fromUtf8; } }); + Object.defineProperty(exports, "getBinarySize", { enumerable: true, get: function () { return internal_1.getBinarySize; } }); + Object.defineProperty(exports, "getKeys", { enumerable: true, get: function () { return internal_1.getKeys; } }); + Object.defineProperty(exports, "isHexPrefixed", { enumerable: true, get: function () { return internal_1.isHexPrefixed; } }); + Object.defineProperty(exports, "isHexString", { enumerable: true, get: function () { return internal_1.isHexString; } }); + Object.defineProperty(exports, "padToEven", { enumerable: true, get: function () { return internal_1.padToEven; } }); + Object.defineProperty(exports, "stripHexPrefix", { enumerable: true, get: function () { return internal_1.stripHexPrefix; } }); + Object.defineProperty(exports, "toAscii", { enumerable: true, get: function () { return internal_1.toAscii; } }); + __exportStar(lock, exports); + __exportStar(provider, exports); + +} (dist$4)); + +var utils$3 = {}; + +var dist$2 = {}; + +var chunkVFXTVNXN = {}; + +var chunkQEPVHEP7 = {}; + +var chunk6ZDHSOUV = {}; + +var chunkIZC266HS = {}; + +var chunkQVEKZRZ2 = {}; + +Object.defineProperty(chunkQVEKZRZ2, "__esModule", {value: true}); function _nullishCoalesce$1(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }// 
src/misc.ts +function isNonEmptyArray(value) { + return Array.isArray(value) && value.length > 0; +} +function isNullOrUndefined(value) { + return value === null || value === void 0; +} +function isObject$1(value) { + return Boolean(value) && typeof value === "object" && !Array.isArray(value); +} +var hasProperty = (objectToCheck, name) => Object.hasOwnProperty.call(objectToCheck, name); +function getKnownPropertyNames(object) { + return Object.getOwnPropertyNames(object); +} +var JsonSize = /* @__PURE__ */ ((JsonSize2) => { + JsonSize2[JsonSize2["Null"] = 4] = "Null"; + JsonSize2[JsonSize2["Comma"] = 1] = "Comma"; + JsonSize2[JsonSize2["Wrapper"] = 1] = "Wrapper"; + JsonSize2[JsonSize2["True"] = 4] = "True"; + JsonSize2[JsonSize2["False"] = 5] = "False"; + JsonSize2[JsonSize2["Quote"] = 1] = "Quote"; + JsonSize2[JsonSize2["Colon"] = 1] = "Colon"; + JsonSize2[JsonSize2["Date"] = 24] = "Date"; + return JsonSize2; +})(JsonSize || {}); +var ESCAPE_CHARACTERS_REGEXP = /"|\\|\n|\r|\t/gu; +function isPlainObject$1(value) { + if (typeof value !== "object" || value === null) { + return false; + } + try { + let proto = value; + while (Object.getPrototypeOf(proto) !== null) { + proto = Object.getPrototypeOf(proto); + } + return Object.getPrototypeOf(value) === proto; + } catch (_) { + return false; + } +} +function isASCII(character) { + return character.charCodeAt(0) <= 127; +} +function calculateStringSize(value) { + const size = value.split("").reduce((total, character) => { + if (isASCII(character)) { + return total + 1; + } + return total + 2; + }, 0); + return size + (_nullishCoalesce$1(value.match(ESCAPE_CHARACTERS_REGEXP), () => ( []))).length; +} +function calculateNumberSize(value) { + return value.toString().length; +} + + + + + + + + + + + + + +chunkQVEKZRZ2.isNonEmptyArray = isNonEmptyArray; chunkQVEKZRZ2.isNullOrUndefined = isNullOrUndefined; chunkQVEKZRZ2.isObject = isObject$1; chunkQVEKZRZ2.hasProperty = hasProperty; chunkQVEKZRZ2.getKnownPropertyNames = 
getKnownPropertyNames; chunkQVEKZRZ2.JsonSize = JsonSize; chunkQVEKZRZ2.ESCAPE_CHARACTERS_REGEXP = ESCAPE_CHARACTERS_REGEXP; chunkQVEKZRZ2.isPlainObject = isPlainObject$1; chunkQVEKZRZ2.isASCII = isASCII; chunkQVEKZRZ2.calculateStringSize = calculateStringSize; chunkQVEKZRZ2.calculateNumberSize = calculateNumberSize; + +/** @template [T=undefined] */ +let ErrorWithCause$1 = class ErrorWithCause extends Error { // linemod-prefix-with: export + /** + * @param {string} message + * @param {{ cause?: T }} options + */ + constructor (message, { cause } = {}) { + super(message); + + /** @type {string} */ + this.name = ErrorWithCause.name; + if (cause) { + /** @type {T} */ + this.cause = cause; + } + /** @type {string} */ + this.message = message; + } +}; + +var errorWithCause = { // linemod-remove + ErrorWithCause: ErrorWithCause$1, // linemod-remove +}; // linemod-remove + +/** + * @template {Error} T + * @param {unknown} err + * @param {new(...args: any[]) => T} reference + * @returns {T|undefined} + */ +const findCauseByReference$1 = (err, reference) => { // linemod-prefix-with: export + if (!err || !reference) return; + if (!(err instanceof Error)) return; + if ( + !(reference.prototype instanceof Error) && + // @ts-ignore + reference !== Error + ) return; + + /** + * Ensures we don't go circular + * + * @type {Set} + */ + const seen = new Set(); + + /** @type {Error|undefined} */ + let currentErr = err; + + while (currentErr && !seen.has(currentErr)) { + seen.add(currentErr); + + if (currentErr instanceof reference) { + return currentErr; + } + + currentErr = getErrorCause$1(currentErr); + } +}; + +/** + * @param {Error|{ cause?: unknown|(()=>err)}} err + * @returns {Error|undefined} + */ +const getErrorCause$1 = (err) => { // linemod-prefix-with: export + if (!err || typeof err !== 'object' || !('cause' in err)) { + return; + } + + // VError / NError style causes + if (typeof err.cause === 'function') { + const causeResult = err.cause(); + + return causeResult 
instanceof Error + ? causeResult + : undefined; + } else { + return err.cause instanceof Error + ? err.cause + : undefined; + } +}; + +/** + * Internal method that keeps a track of which error we have already added, to avoid circular recursion + * + * @private + * @param {Error} err + * @param {Set} seen + * @returns {string} + */ +const _stackWithCauses = (err, seen) => { + if (!(err instanceof Error)) return ''; + + const stack = err.stack || ''; + + // Ensure we don't go circular or crazily deep + if (seen.has(err)) { + return stack + '\ncauses have become circular...'; + } + + const cause = getErrorCause$1(err); + + // TODO: Follow up in https://github.com/nodejs/node/issues/38725#issuecomment-920309092 on how to log stuff + + if (cause) { + seen.add(err); + return (stack + '\ncaused by: ' + _stackWithCauses(cause, seen)); + } else { + return stack; + } +}; + +/** + * @param {Error} err + * @returns {string} + */ +const stackWithCauses$1 = (err) => _stackWithCauses(err, new Set()); // linemod-prefix-with: export + +/** + * Internal method that keeps a track of which error we have already added, to avoid circular recursion + * + * @private + * @param {Error} err + * @param {Set} seen + * @param {boolean} [skip] + * @returns {string} + */ +const _messageWithCauses = (err, seen, skip) => { + if (!(err instanceof Error)) return ''; + + const message = skip ? '' : (err.message || ''); + + // Ensure we don't go circular or crazily deep + if (seen.has(err)) { + return message + ': ...'; + } + + const cause = getErrorCause$1(err); + + if (cause) { + seen.add(err); + + const skipIfVErrorStyleCause = 'cause' in err && typeof err.cause === 'function'; + + return (message + + (skipIfVErrorStyleCause ? 
'' : ': ') + + _messageWithCauses(cause, seen, skipIfVErrorStyleCause)); + } else { + return message; + } +}; + +/** + * @param {Error} err + * @returns {string} + */ +const messageWithCauses$1 = (err) => _messageWithCauses(err, new Set()); // linemod-prefix-with: export + +var helpers = { // linemod-remove + findCauseByReference: findCauseByReference$1, // linemod-remove + getErrorCause: getErrorCause$1, // linemod-remove + stackWithCauses: stackWithCauses$1, // linemod-remove + messageWithCauses: messageWithCauses$1, // linemod-remove +}; // linemod-remove + +const { ErrorWithCause } = errorWithCause; // linemod-replace-with: export { ErrorWithCause } from './lib/error-with-cause.mjs'; + +const { // linemod-replace-with: export { + findCauseByReference, + getErrorCause, + messageWithCauses, + stackWithCauses, +} = helpers; // linemod-replace-with: } from './lib/helpers.mjs'; + +var ponyCause = { // linemod-remove + ErrorWithCause, // linemod-remove + findCauseByReference, // linemod-remove + getErrorCause, // linemod-remove + stackWithCauses, // linemod-remove + messageWithCauses, // linemod-remove +}; // linemod-remove + +Object.defineProperty(chunkIZC266HS, "__esModule", {value: true}); + + +var _chunkQVEKZRZ2js$2 = chunkQVEKZRZ2; + +// src/errors.ts +var _ponycause = ponyCause; +function isError(error) { + return error instanceof Error || _chunkQVEKZRZ2js$2.isObject.call(void 0, error) && error.constructor.name === "Error"; +} +function isErrorWithCode(error) { + return typeof error === "object" && error !== null && "code" in error; +} +function isErrorWithMessage(error) { + return typeof error === "object" && error !== null && "message" in error; +} +function isErrorWithStack(error) { + return typeof error === "object" && error !== null && "stack" in error; +} +function getErrorMessage(error) { + if (isErrorWithMessage(error) && typeof error.message === "string") { + return error.message; + } + if (_chunkQVEKZRZ2js$2.isNullOrUndefined.call(void 0, error)) { + 
return ""; + } + return String(error); +} +function wrapError(originalError, message) { + if (isError(originalError)) { + let error; + if (Error.length === 2) { + error = new Error(message, { cause: originalError }); + } else { + error = new (_ponycause.ErrorWithCause)(message, { cause: originalError }); + } + if (isErrorWithCode(originalError)) { + error.code = originalError.code; + } + return error; + } + if (message.length > 0) { + return new Error(`${String(originalError)}: ${message}`); + } + return new Error(String(originalError)); +} + + + + + + + +chunkIZC266HS.isErrorWithCode = isErrorWithCode; chunkIZC266HS.isErrorWithMessage = isErrorWithMessage; chunkIZC266HS.isErrorWithStack = isErrorWithStack; chunkIZC266HS.getErrorMessage = getErrorMessage; chunkIZC266HS.wrapError = wrapError; + +/** + * A `StructFailure` represents a single specific failure in validation. + */ +/** + * `StructError` objects are thrown (or returned) when validation fails. + * + * Validation logic is design to exit early for maximum performance. The error + * represents the first error encountered during validation. For more detail, + * the `error.failures` property is a generator function that can be run to + * continue validation and receive all the failures in the data. + */ +class StructError extends TypeError { + constructor(failure, failures) { + let cached; + const { message, explanation, ...rest } = failure; + const { path } = failure; + const msg = path.length === 0 ? message : `At path: ${path.join('.')} -- ${message}`; + super(explanation ?? msg); + if (explanation != null) + this.cause = msg; + Object.assign(this, rest); + this.name = this.constructor.name; + this.failures = () => { + return (cached ?? (cached = [failure, ...failures()])); + }; + } +} + +/** + * Check if a value is an iterator. + */ +function isIterable(x) { + return isObject(x) && typeof x[Symbol.iterator] === 'function'; +} +/** + * Check if a value is a plain object. 
+ */ +function isObject(x) { + return typeof x === 'object' && x != null; +} +/** + * Check if a value is a plain object. + */ +function isPlainObject(x) { + if (Object.prototype.toString.call(x) !== '[object Object]') { + return false; + } + const prototype = Object.getPrototypeOf(x); + return prototype === null || prototype === Object.prototype; +} +/** + * Return a value as a printable string. + */ +function print(value) { + if (typeof value === 'symbol') { + return value.toString(); + } + return typeof value === 'string' ? JSON.stringify(value) : `${value}`; +} +/** + * Shifts (removes and returns) the first value from the `input` iterator. + * Like `Array.prototype.shift()` but for an `Iterator`. + */ +function shiftIterator(input) { + const { done, value } = input.next(); + return done ? undefined : value; +} +/** + * Convert a single validation result to a failure. + */ +function toFailure(result, context, struct, value) { + if (result === true) { + return; + } + else if (result === false) { + result = {}; + } + else if (typeof result === 'string') { + result = { message: result }; + } + const { path, branch } = context; + const { type } = struct; + const { refinement, message = `Expected a value of type \`${type}\`${refinement ? ` with refinement \`${refinement}\`` : ''}, but received: \`${print(value)}\``, } = result; + return { + value, + type, + refinement, + key: path[path.length - 1], + path, + branch, + ...result, + message, + }; +} +/** + * Convert a validation result to an iterable of failures. + */ +function* toFailures(result, context, struct, value) { + if (!isIterable(result)) { + result = [result]; + } + for (const r of result) { + const failure = toFailure(r, context, struct, value); + if (failure) { + yield failure; + } + } +} +/** + * Check a value against a struct, traversing deeply into nested values, and + * returning an iterator of failures or success. 
+ */ +function* run(value, struct, options = {}) { + const { path = [], branch = [value], coerce = false, mask = false } = options; + const ctx = { path, branch }; + if (coerce) { + value = struct.coercer(value, ctx); + if (mask && + struct.type !== 'type' && + isObject(struct.schema) && + isObject(value) && + !Array.isArray(value)) { + for (const key in value) { + if (struct.schema[key] === undefined) { + delete value[key]; + } + } + } + } + let status = 'valid'; + for (const failure of struct.validator(value, ctx)) { + failure.explanation = options.message; + status = 'not_valid'; + yield [failure, undefined]; + } + for (let [k, v, s] of struct.entries(value, ctx)) { + const ts = run(v, s, { + path: k === undefined ? path : [...path, k], + branch: k === undefined ? branch : [...branch, v], + coerce, + mask, + message: options.message, + }); + for (const t of ts) { + if (t[0]) { + status = t[0].refinement != null ? 'not_refined' : 'not_valid'; + yield [t[0], undefined]; + } + else if (coerce) { + v = t[1]; + if (k === undefined) { + value = v; + } + else if (value instanceof Map) { + value.set(k, v); + } + else if (value instanceof Set) { + value.add(v); + } + else if (isObject(value)) { + if (v !== undefined || k in value) + value[k] = v; + } + } + } + } + if (status !== 'not_valid') { + for (const failure of struct.refiner(value, ctx)) { + failure.explanation = options.message; + status = 'not_refined'; + yield [failure, undefined]; + } + } + if (status === 'valid') { + yield [undefined, value]; + } +} + +/** + * `Struct` objects encapsulate the validation logic for a specific type of + * values. Once constructed, you use the `assert`, `is` or `validate` helpers to + * validate unknown input data against the struct. 
+ */ +class Struct { + constructor(props) { + const { type, schema, validator, refiner, coercer = (value) => value, entries = function* () { }, } = props; + this.type = type; + this.schema = schema; + this.entries = entries; + this.coercer = coercer; + if (validator) { + this.validator = (value, context) => { + const result = validator(value, context); + return toFailures(result, context, this, value); + }; + } + else { + this.validator = () => []; + } + if (refiner) { + this.refiner = (value, context) => { + const result = refiner(value, context); + return toFailures(result, context, this, value); + }; + } + else { + this.refiner = () => []; + } + } + /** + * Assert that a value passes the struct's validation, throwing if it doesn't. + */ + assert(value, message) { + return assert$3(value, this, message); + } + /** + * Create a value with the struct's coercion logic, then validate it. + */ + create(value, message) { + return create(value, this, message); + } + /** + * Check if a value passes the struct's validation. + */ + is(value) { + return is(value, this); + } + /** + * Mask a value, coercing and validating it, but returning only the subset of + * properties defined by the struct's schema. + */ + mask(value, message) { + return mask(value, this, message); + } + /** + * Validate a value with the struct's validation logic, returning a tuple + * representing the result. + * + * You may optionally pass `true` for the `withCoercion` argument to coerce + * the value before attempting to validate it. If you do, the result will + * contain the coerced result when successful. + */ + validate(value, options = {}) { + return validate(value, this, options); + } +} +/** + * Assert that a value passes a struct, throwing if it doesn't. + */ +function assert$3(value, struct, message) { + const result = validate(value, struct, { message }); + if (result[0]) { + throw result[0]; + } +} +/** + * Create a value with the coercion logic of struct and validate it. 
+ */ +function create(value, struct, message) { + const result = validate(value, struct, { coerce: true, message }); + if (result[0]) { + throw result[0]; + } + else { + return result[1]; + } +} +/** + * Mask a value, returning only the subset of properties defined by a struct. + */ +function mask(value, struct, message) { + const result = validate(value, struct, { coerce: true, mask: true, message }); + if (result[0]) { + throw result[0]; + } + else { + return result[1]; + } +} +/** + * Check if a value passes a struct. + */ +function is(value, struct) { + const result = validate(value, struct); + return !result[0]; +} +/** + * Validate a value against a struct, returning an error if invalid, or the + * value (with potential coercion) if valid. + */ +function validate(value, struct, options = {}) { + const tuples = run(value, struct, options); + const tuple = shiftIterator(tuples); + if (tuple[0]) { + const error = new StructError(tuple[0], function* () { + for (const t of tuples) { + if (t[0]) { + yield t[0]; + } + } + }); + return [error, undefined]; + } + else { + const v = tuple[1]; + return [undefined, v]; + } +} + +function assign(...Structs) { + const isType = Structs[0].type === 'type'; + const schemas = Structs.map((s) => s.schema); + const schema = Object.assign({}, ...schemas); + return isType ? type(schema) : object$1(schema); +} +/** + * Define a new struct type with a custom validation function. + */ +function define(name, validator) { + return new Struct({ type: name, schema: null, validator }); +} +/** + * Create a new struct based on an existing struct, but the value is allowed to + * be `undefined`. `log` will be called if the value is not `undefined`. 
+ */ +function deprecated(struct, log) { + return new Struct({ + ...struct, + refiner: (value, ctx) => value === undefined || struct.refiner(value, ctx), + validator(value, ctx) { + if (value === undefined) { + return true; + } + else { + log(value, ctx); + return struct.validator(value, ctx); + } + }, + }); +} +/** + * Create a struct with dynamic validation logic. + * + * The callback will receive the value currently being validated, and must + * return a struct object to validate it with. This can be useful to model + * validation logic that changes based on its input. + */ +function dynamic(fn) { + return new Struct({ + type: 'dynamic', + schema: null, + *entries(value, ctx) { + const struct = fn(value, ctx); + yield* struct.entries(value, ctx); + }, + validator(value, ctx) { + const struct = fn(value, ctx); + return struct.validator(value, ctx); + }, + coercer(value, ctx) { + const struct = fn(value, ctx); + return struct.coercer(value, ctx); + }, + refiner(value, ctx) { + const struct = fn(value, ctx); + return struct.refiner(value, ctx); + }, + }); +} +/** + * Create a struct with lazily evaluated validation logic. + * + * The first time validation is run with the struct, the callback will be called + * and must return a struct object to use. This is useful for cases where you + * want to have self-referential structs for nested data structures to avoid a + * circular definition problem. + */ +function lazy(fn) { + let struct; + return new Struct({ + type: 'lazy', + schema: null, + *entries(value, ctx) { + struct ?? (struct = fn()); + yield* struct.entries(value, ctx); + }, + validator(value, ctx) { + struct ?? (struct = fn()); + return struct.validator(value, ctx); + }, + coercer(value, ctx) { + struct ?? (struct = fn()); + return struct.coercer(value, ctx); + }, + refiner(value, ctx) { + struct ?? 
(struct = fn()); + return struct.refiner(value, ctx); + }, + }); +} +/** + * Create a new struct based on an existing object struct, but excluding + * specific properties. + * + * Like TypeScript's `Omit` utility. + */ +function omit(struct, keys) { + const { schema } = struct; + const subschema = { ...schema }; + for (const key of keys) { + delete subschema[key]; + } + switch (struct.type) { + case 'type': + return type(subschema); + default: + return object$1(subschema); + } +} +/** + * Create a new struct based on an existing object struct, but with all of its + * properties allowed to be `undefined`. + * + * Like TypeScript's `Partial` utility. + */ +function partial(struct) { + const isStruct = struct instanceof Struct; + const schema = isStruct ? { ...struct.schema } : { ...struct }; + for (const key in schema) { + schema[key] = optional(schema[key]); + } + if (isStruct && struct.type === 'type') { + return type(schema); + } + return object$1(schema); +} +/** + * Create a new struct based on an existing object struct, but only including + * specific properties. + * + * Like TypeScript's `Pick` utility. + */ +function pick(struct, keys) { + const { schema } = struct; + const subschema = {}; + for (const key of keys) { + subschema[key] = schema[key]; + } + switch (struct.type) { + case 'type': + return type(subschema); + default: + return object$1(subschema); + } +} +/** + * Define a new struct type with a custom validation function. + * + * @deprecated This function has been renamed to `define`. + */ +function struct(name, validator) { + console.warn('superstruct@0.11 - The `struct` helper has been renamed to `define`.'); + return define(name, validator); +} + +/** + * Ensure that any value passes validation. 
+ */ +function any() { + return define('any', () => true); +} +function array$1(Element) { + return new Struct({ + type: 'array', + schema: Element, + *entries(value) { + if (Element && Array.isArray(value)) { + for (const [i, v] of value.entries()) { + yield [i, v, Element]; + } + } + }, + coercer(value) { + return Array.isArray(value) ? value.slice() : value; + }, + validator(value) { + return (Array.isArray(value) || + `Expected an array value, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a bigint. + */ +function bigint$1() { + return define('bigint', (value) => { + return typeof value === 'bigint'; + }); +} +/** + * Ensure that a value is a boolean. + */ +function boolean() { + return define('boolean', (value) => { + return typeof value === 'boolean'; + }); +} +/** + * Ensure that a value is a valid `Date`. + * + * Note: this also ensures that the value is *not* an invalid `Date` object, + * which can occur when parsing a date fails but still returns a `Date`. + */ +function date() { + return define('date', (value) => { + return ((value instanceof Date && !isNaN(value.getTime())) || + `Expected a valid \`Date\` object, but received: ${print(value)}`); + }); +} +function enums(values) { + const schema = {}; + const description = values.map((v) => print(v)).join(); + for (const key of values) { + schema[key] = key; + } + return new Struct({ + type: 'enums', + schema, + validator(value) { + return (values.includes(value) || + `Expected one of \`${description}\`, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a function. + */ +function func() { + return define('func', (value) => { + return (typeof value === 'function' || + `Expected a function, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value is an instance of a specific class. 
+ */ +function instance(Class) { + return define('instance', (value) => { + return (value instanceof Class || + `Expected a \`${Class.name}\` instance, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value is an integer. + */ +function integer() { + return define('integer', (value) => { + return ((typeof value === 'number' && !isNaN(value) && Number.isInteger(value)) || + `Expected an integer, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value matches all of a set of types. + */ +function intersection(Structs) { + return new Struct({ + type: 'intersection', + schema: null, + *entries(value, ctx) { + for (const S of Structs) { + yield* S.entries(value, ctx); + } + }, + *validator(value, ctx) { + for (const S of Structs) { + yield* S.validator(value, ctx); + } + }, + *refiner(value, ctx) { + for (const S of Structs) { + yield* S.refiner(value, ctx); + } + }, + }); +} +function literal(constant) { + const description = print(constant); + const t = typeof constant; + return new Struct({ + type: 'literal', + schema: t === 'string' || t === 'number' || t === 'boolean' ? constant : null, + validator(value) { + return (value === constant || + `Expected the literal \`${description}\`, but received: ${print(value)}`); + }, + }); +} +function map(Key, Value) { + return new Struct({ + type: 'map', + schema: null, + *entries(value) { + if (Key && Value && value instanceof Map) { + for (const [k, v] of value.entries()) { + yield [k, k, Key]; + yield [k, v, Value]; + } + } + }, + coercer(value) { + return value instanceof Map ? new Map(value) : value; + }, + validator(value) { + return (value instanceof Map || + `Expected a \`Map\` object, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that no value ever passes validation. + */ +function never() { + return define('never', () => false); +} +/** + * Augment an existing struct to allow `null` values. 
+ */ +function nullable(struct) { + return new Struct({ + ...struct, + validator: (value, ctx) => value === null || struct.validator(value, ctx), + refiner: (value, ctx) => value === null || struct.refiner(value, ctx), + }); +} +/** + * Ensure that a value is a number. + */ +function number$2() { + return define('number', (value) => { + return ((typeof value === 'number' && !isNaN(value)) || + `Expected a number, but received: ${print(value)}`); + }); +} +function object$1(schema) { + const knowns = schema ? Object.keys(schema) : []; + const Never = never(); + return new Struct({ + type: 'object', + schema: schema ? schema : null, + *entries(value) { + if (schema && isObject(value)) { + const unknowns = new Set(Object.keys(value)); + for (const key of knowns) { + unknowns.delete(key); + yield [key, value[key], schema[key]]; + } + for (const key of unknowns) { + yield [key, value[key], Never]; + } + } + }, + validator(value) { + return (isObject(value) || `Expected an object, but received: ${print(value)}`); + }, + coercer(value) { + return isObject(value) ? { ...value } : value; + }, + }); +} +/** + * Augment a struct to allow `undefined` values. + */ +function optional(struct) { + return new Struct({ + ...struct, + validator: (value, ctx) => value === undefined || struct.validator(value, ctx), + refiner: (value, ctx) => value === undefined || struct.refiner(value, ctx), + }); +} +/** + * Ensure that a value is an object with keys and values of specific types, but + * without ensuring any specific shape of properties. + * + * Like TypeScript's `Record` utility. + */ +function record(Key, Value) { + return new Struct({ + type: 'record', + schema: null, + *entries(value) { + if (isObject(value)) { + for (const k in value) { + const v = value[k]; + yield [k, k, Key]; + yield [k, v, Value]; + } + } + }, + validator(value) { + return (isObject(value) || `Expected an object, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a `RegExp`. 
+ * + * Note: this does not test the value against the regular expression! For that + * you need to use the `pattern()` refinement. + */ +function regexp() { + return define('regexp', (value) => { + return value instanceof RegExp; + }); +} +function set$1(Element) { + return new Struct({ + type: 'set', + schema: null, + *entries(value) { + if (Element && value instanceof Set) { + for (const v of value) { + yield [v, v, Element]; + } + } + }, + coercer(value) { + return value instanceof Set ? new Set(value) : value; + }, + validator(value) { + return (value instanceof Set || + `Expected a \`Set\` object, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a string. + */ +function string$1() { + return define('string', (value) => { + return (typeof value === 'string' || + `Expected a string, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value is a tuple of a specific length, and that each of its + * elements is of a specific type. + */ +function tuple$1(Structs) { + const Never = never(); + return new Struct({ + type: 'tuple', + schema: null, + *entries(value) { + if (Array.isArray(value)) { + const length = Math.max(Structs.length, value.length); + for (let i = 0; i < length; i++) { + yield [i, value[i], Structs[i] || Never]; + } + } + }, + validator(value) { + return (Array.isArray(value) || + `Expected an array, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value has a set of known properties of specific types. + * + * Note: Unrecognized properties are allowed and untouched. This is similar to + * how TypeScript's structural typing works. 
+ */ +function type(schema) { + const keys = Object.keys(schema); + return new Struct({ + type: 'type', + schema, + *entries(value) { + if (isObject(value)) { + for (const k of keys) { + yield [k, value[k], schema[k]]; + } + } + }, + validator(value) { + return (isObject(value) || `Expected an object, but received: ${print(value)}`); + }, + coercer(value) { + return isObject(value) ? { ...value } : value; + }, + }); +} +/** + * Ensure that a value matches one of a set of types. + */ +function union(Structs) { + const description = Structs.map((s) => s.type).join(' | '); + return new Struct({ + type: 'union', + schema: null, + coercer(value) { + for (const S of Structs) { + const [error, coerced] = S.validate(value, { coerce: true }); + if (!error) { + return coerced; + } + } + return value; + }, + validator(value, ctx) { + const failures = []; + for (const S of Structs) { + const [...tuples] = run(value, S, ctx); + const [first] = tuples; + if (!first[0]) { + return []; + } + else { + for (const [failure] of tuples) { + if (failure) { + failures.push(failure); + } + } + } + } + return [ + `Expected the value to satisfy a union of \`${description}\`, but received: ${print(value)}`, + ...failures, + ]; + }, + }); +} +/** + * Ensure that any value passes validation, without widening its type to `any`. + */ +function unknown() { + return define('unknown', () => true); +} + +/** + * Augment a `Struct` to add an additional coercion step to its input. + * + * This allows you to transform input data before validating it, to increase the + * likelihood that it passes validation—for example for default values, parsing + * different formats, etc. + * + * Note: You must use `create(value, Struct)` on the value to have the coercion + * take effect! Using simply `assert()` or `is()` will not use coercion. + */ +function coerce$2(struct, condition, coercer) { + return new Struct({ + ...struct, + coercer: (value, ctx) => { + return is(value, condition) + ? 
struct.coercer(coercer(value, ctx), ctx) + : struct.coercer(value, ctx); + }, + }); +} +/** + * Augment a struct to replace `undefined` values with a default. + * + * Note: You must use `create(value, Struct)` on the value to have the coercion + * take effect! Using simply `assert()` or `is()` will not use coercion. + */ +function defaulted(struct, fallback, options = {}) { + return coerce$2(struct, unknown(), (x) => { + const f = typeof fallback === 'function' ? fallback() : fallback; + if (x === undefined) { + return f; + } + if (!options.strict && isPlainObject(x) && isPlainObject(f)) { + const ret = { ...x }; + let changed = false; + for (const key in f) { + if (ret[key] === undefined) { + ret[key] = f[key]; + changed = true; + } + } + if (changed) { + return ret; + } + } + return x; + }); +} +/** + * Augment a struct to trim string inputs. + * + * Note: You must use `create(value, Struct)` on the value to have the coercion + * take effect! Using simply `assert()` or `is()` will not use coercion. + */ +function trimmed(struct) { + return coerce$2(struct, string$1(), (x) => x.trim()); +} + +/** + * Ensure that a string, array, map, or set is empty. + */ +function empty(struct) { + return refine(struct, 'empty', (value) => { + const size = getSize(value); + return (size === 0 || + `Expected an empty ${struct.type} but received one with a size of \`${size}\``); + }); +} +function getSize(value) { + if (value instanceof Map || value instanceof Set) { + return value.size; + } + else { + return value.length; + } +} +/** + * Ensure that a number or date is below a threshold. + */ +function max(struct, threshold, options = {}) { + const { exclusive } = options; + return refine(struct, 'max', (value) => { + return exclusive + ? value < threshold + : value <= threshold || + `Expected a ${struct.type} less than ${exclusive ? '' : 'or equal to '}${threshold} but received \`${value}\``; + }); +} +/** + * Ensure that a number or date is above a threshold. 
+ */ +function min(struct, threshold, options = {}) { + const { exclusive } = options; + return refine(struct, 'min', (value) => { + return exclusive + ? value > threshold + : value >= threshold || + `Expected a ${struct.type} greater than ${exclusive ? '' : 'or equal to '}${threshold} but received \`${value}\``; + }); +} +/** + * Ensure that a string, array, map or set is not empty. + */ +function nonempty(struct) { + return refine(struct, 'nonempty', (value) => { + const size = getSize(value); + return (size > 0 || `Expected a nonempty ${struct.type} but received an empty one`); + }); +} +/** + * Ensure that a string matches a regular expression. + */ +function pattern(struct, regexp) { + return refine(struct, 'pattern', (value) => { + return (regexp.test(value) || + `Expected a ${struct.type} matching \`/${regexp.source}/\` but received "${value}"`); + }); +} +/** + * Ensure that a string, array, number, date, map, or set has a size (or length, or time) between `min` and `max`. + */ +function size(struct, min, max = min) { + const expected = `Expected a ${struct.type}`; + const of = min === max ? `of \`${min}\`` : `between \`${min}\` and \`${max}\``; + return refine(struct, 'size', (value) => { + if (typeof value === 'number' || value instanceof Date) { + return ((min <= value && value <= max) || + `${expected} ${of} but received \`${value}\``); + } + else if (value instanceof Map || value instanceof Set) { + const { size } = value; + return ((min <= size && size <= max) || + `${expected} with a size ${of} but received one with a size of \`${size}\``); + } + else { + const { length } = value; + return ((min <= length && length <= max) || + `${expected} with a length ${of} but received one with a length of \`${length}\``); + } + }); +} +/** + * Augment a `Struct` to add an additional refinement to the validation. 
+ * + * The refiner function is guaranteed to receive a value of the struct's type, + * because the struct's existing validation will already have passed. This + * allows you to layer additional validation on top of existing structs. + */ +function refine(struct, name, refiner) { + return new Struct({ + ...struct, + *refiner(value, ctx) { + yield* struct.refiner(value, ctx); + const result = refiner(value, ctx); + const failures = toFailures(result, ctx, struct, value); + for (const failure of failures) { + yield { ...failure, refinement: name }; + } + }, + }); +} + +var dist$1 = /*#__PURE__*/Object.freeze({ + __proto__: null, + Struct: Struct, + StructError: StructError, + any: any, + array: array$1, + assert: assert$3, + assign: assign, + bigint: bigint$1, + boolean: boolean, + coerce: coerce$2, + create: create, + date: date, + defaulted: defaulted, + define: define, + deprecated: deprecated, + dynamic: dynamic, + empty: empty, + enums: enums, + func: func, + instance: instance, + integer: integer, + intersection: intersection, + is: is, + lazy: lazy, + literal: literal, + map: map, + mask: mask, + max: max, + min: min, + never: never, + nonempty: nonempty, + nullable: nullable, + number: number$2, + object: object$1, + omit: omit, + optional: optional, + partial: partial, + pattern: pattern, + pick: pick, + record: record, + refine: refine, + regexp: regexp, + set: set$1, + size: size, + string: string$1, + struct: struct, + trimmed: trimmed, + tuple: tuple$1, + type: type, + union: union, + unknown: unknown, + validate: validate +}); + +var require$$1 = /*@__PURE__*/getAugmentedNamespace(dist$1); + +Object.defineProperty(chunk6ZDHSOUV, "__esModule", {value: true}); function _optionalChain$2(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 
'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } + +var _chunkIZC266HSjs$1 = chunkIZC266HS; + +// src/assert.ts +var _superstruct$7 = require$$1; +function isConstructable(fn) { + return Boolean(typeof _optionalChain$2([fn, 'optionalAccess', _ => _.prototype, 'optionalAccess', _2 => _2.constructor, 'optionalAccess', _3 => _3.name]) === "string"); +} +function getErrorMessageWithoutTrailingPeriod(error) { + return _chunkIZC266HSjs$1.getErrorMessage.call(void 0, error).replace(/\.$/u, ""); +} +function getError(ErrorWrapper, message) { + if (isConstructable(ErrorWrapper)) { + return new ErrorWrapper({ + message + }); + } + return ErrorWrapper({ + message + }); +} +var AssertionError = class extends Error { + constructor(options) { + super(options.message); + this.code = "ERR_ASSERTION"; + } +}; +function assert$2(value, message = "Assertion failed.", ErrorWrapper = AssertionError) { + if (!value) { + if (message instanceof Error) { + throw message; + } + throw getError(ErrorWrapper, message); + } +} +function assertStruct(value, struct, errorPrefix = "Assertion failed", ErrorWrapper = AssertionError) { + try { + _superstruct$7.assert.call(void 0, value, struct); + } catch (error) { + throw getError( + ErrorWrapper, + `${errorPrefix}: ${getErrorMessageWithoutTrailingPeriod(error)}.` + ); + } +} +function assertExhaustive(_object) { + throw new Error( + "Invalid branch reached. Should be detected during compilation." 
+ ); +} + + + + + + +chunk6ZDHSOUV.AssertionError = AssertionError; chunk6ZDHSOUV.assert = assert$2; chunk6ZDHSOUV.assertStruct = assertStruct; chunk6ZDHSOUV.assertExhaustive = assertExhaustive; + +var sha3 = {}; + +var _assert = {}; + +Object.defineProperty(_assert, "__esModule", { value: true }); +_assert.output = _assert.exists = _assert.hash = _assert.bytes = _assert.bool = _assert.number = _assert.isBytes = void 0; +function number$1(n) { + if (!Number.isSafeInteger(n) || n < 0) + throw new Error(`positive integer expected, not ${n}`); +} +_assert.number = number$1; +function bool$1(b) { + if (typeof b !== 'boolean') + throw new Error(`boolean expected, not ${b}`); +} +_assert.bool = bool$1; +// copied from utils +function isBytes$1(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +_assert.isBytes = isBytes$1; +function bytes$1(b, ...lengths) { + if (!isBytes$1(b)) + throw new Error('Uint8Array expected'); + if (lengths.length > 0 && !lengths.includes(b.length)) + throw new Error(`Uint8Array expected of length ${lengths}, not of length=${b.length}`); +} +_assert.bytes = bytes$1; +function hash(h) { + if (typeof h !== 'function' || typeof h.create !== 'function') + throw new Error('Hash should be wrapped by utils.wrapConstructor'); + number$1(h.outputLen); + number$1(h.blockLen); +} +_assert.hash = hash; +function exists(instance, checkFinished = true) { + if (instance.destroyed) + throw new Error('Hash instance has been destroyed'); + if (checkFinished && instance.finished) + throw new Error('Hash#digest() has already been called'); +} +_assert.exists = exists; +function output(out, instance) { + bytes$1(out); + const min = instance.outputLen; + if (out.length < min) { + throw new Error(`digestInto() expects output buffer of length at least ${min}`); + } +} +_assert.output = output; +const assert$1 = { number: number$1, bool: bool$1, bytes: bytes$1, hash, exists, output }; 
+_assert.default = assert$1; + +var _u64 = {}; + +Object.defineProperty(_u64, "__esModule", { value: true }); +_u64.add5L = _u64.add5H = _u64.add4H = _u64.add4L = _u64.add3H = _u64.add3L = _u64.add = _u64.rotlBL = _u64.rotlBH = _u64.rotlSL = _u64.rotlSH = _u64.rotr32L = _u64.rotr32H = _u64.rotrBL = _u64.rotrBH = _u64.rotrSL = _u64.rotrSH = _u64.shrSL = _u64.shrSH = _u64.toBig = _u64.split = _u64.fromBig = void 0; +const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); +const _32n = /* @__PURE__ */ BigInt(32); +// We are not using BigUint64Array, because they are extremely slow as per 2022 +function fromBig(n, le = false) { + if (le) + return { h: Number(n & U32_MASK64), l: Number((n >> _32n) & U32_MASK64) }; + return { h: Number((n >> _32n) & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 }; +} +_u64.fromBig = fromBig; +function split(lst, le = false) { + let Ah = new Uint32Array(lst.length); + let Al = new Uint32Array(lst.length); + for (let i = 0; i < lst.length; i++) { + const { h, l } = fromBig(lst[i], le); + [Ah[i], Al[i]] = [h, l]; + } + return [Ah, Al]; +} +_u64.split = split; +const toBig = (h, l) => (BigInt(h >>> 0) << _32n) | BigInt(l >>> 0); +_u64.toBig = toBig; +// for Shift in [0, 32) +const shrSH = (h, _l, s) => h >>> s; +_u64.shrSH = shrSH; +const shrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +_u64.shrSL = shrSL; +// Right rotate for Shift in [1, 32) +const rotrSH = (h, l, s) => (h >>> s) | (l << (32 - s)); +_u64.rotrSH = rotrSH; +const rotrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +_u64.rotrSL = rotrSL; +// Right rotate for Shift in (32, 64), NOTE: 32 is special case. 
+const rotrBH = (h, l, s) => (h << (64 - s)) | (l >>> (s - 32)); +_u64.rotrBH = rotrBH; +const rotrBL = (h, l, s) => (h >>> (s - 32)) | (l << (64 - s)); +_u64.rotrBL = rotrBL; +// Right rotate for shift===32 (just swaps l&h) +const rotr32H = (_h, l) => l; +_u64.rotr32H = rotr32H; +const rotr32L = (h, _l) => h; +_u64.rotr32L = rotr32L; +// Left rotate for Shift in [1, 32) +const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s)); +_u64.rotlSH = rotlSH; +const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s)); +_u64.rotlSL = rotlSL; +// Left rotate for Shift in (32, 64), NOTE: 32 is special case. +const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); +_u64.rotlBH = rotlBH; +const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); +_u64.rotlBL = rotlBL; +// JS uses 32-bit signed integers for bitwise operations which means we cannot +// simple take carry out of low bit sum by shift, we need to use division. +function add(Ah, Al, Bh, Bl) { + const l = (Al >>> 0) + (Bl >>> 0); + return { h: (Ah + Bh + ((l / 2 ** 32) | 0)) | 0, l: l | 0 }; +} +_u64.add = add; +// Addition with more than 2 elements +const add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0); +_u64.add3L = add3L; +const add3H = (low, Ah, Bh, Ch) => (Ah + Bh + Ch + ((low / 2 ** 32) | 0)) | 0; +_u64.add3H = add3H; +const add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0); +_u64.add4L = add4L; +const add4H = (low, Ah, Bh, Ch, Dh) => (Ah + Bh + Ch + Dh + ((low / 2 ** 32) | 0)) | 0; +_u64.add4H = add4H; +const add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0); +_u64.add5L = add5L; +const add5H = (low, Ah, Bh, Ch, Dh, Eh) => (Ah + Bh + Ch + Dh + Eh + ((low / 2 ** 32) | 0)) | 0; +_u64.add5H = add5H; +// prettier-ignore +const u64 = { + fromBig, split, toBig, + shrSH, shrSL, + rotrSH, rotrSL, rotrBH, rotrBL, + rotr32H, rotr32L, + rotlSH, rotlSL, rotlBH, rotlBL, + add, add3L, add3H, add4L, add4H, add5H, add5L, +}; 
+_u64.default = u64; + +var utils$2 = {}; + +var crypto = {}; + +Object.defineProperty(crypto, "__esModule", { value: true }); +crypto.crypto = void 0; +crypto.crypto = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined; + +(function (exports) { + /*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + Object.defineProperty(exports, "__esModule", { value: true }); + exports.randomBytes = exports.wrapXOFConstructorWithOpts = exports.wrapConstructorWithOpts = exports.wrapConstructor = exports.checkOpts = exports.Hash = exports.concatBytes = exports.toBytes = exports.utf8ToBytes = exports.asyncLoop = exports.nextTick = exports.hexToBytes = exports.bytesToHex = exports.byteSwap32 = exports.byteSwapIfBE = exports.byteSwap = exports.isLE = exports.rotl = exports.rotr = exports.createView = exports.u32 = exports.u8 = exports.isBytes = void 0; + // We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. + // node.js versions earlier than v19 don't declare it in global scope. + // For node.js, package.json#exports field mapping rewrites import + // from `crypto` to `cryptoNode`, which imports native module. + // Makes the utils un-importable in browsers without a bundler. + // Once node.js 18 is deprecated (2025-04-30), we can just drop the import. 
+ const crypto_1 = crypto; + const _assert_js_1 = _assert; + // export { isBytes } from './_assert.js'; + // We can't reuse isBytes from _assert, because somehow this causes huge perf issues + function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); + } + exports.isBytes = isBytes; + // Cast array to different type + const u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); + exports.u8 = u8; + const u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); + exports.u32 = u32; + // Cast array to view + const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); + exports.createView = createView; + // The rotate right (circular right shift) operation for uint32 + const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift); + exports.rotr = rotr; + // The rotate left (circular left shift) operation for uint32 + const rotl = (word, shift) => (word << shift) | ((word >>> (32 - shift)) >>> 0); + exports.rotl = rotl; + exports.isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44; + // The byte swap operation for uint32 + const byteSwap = (word) => ((word << 24) & 0xff000000) | + ((word << 8) & 0xff0000) | + ((word >>> 8) & 0xff00) | + ((word >>> 24) & 0xff); + exports.byteSwap = byteSwap; + // Conditionally byte swap if on a big-endian platform + exports.byteSwapIfBE = exports.isLE ? 
(n) => n : (n) => (0, exports.byteSwap)(n); + // In place byte swap for Uint32Array + function byteSwap32(arr) { + for (let i = 0; i < arr.length; i++) { + arr[i] = (0, exports.byteSwap)(arr[i]); + } + } + exports.byteSwap32 = byteSwap32; + // Array where index 0xf0 (240) is mapped to string 'f0' + const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); + /** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ + function bytesToHex(bytes) { + (0, _assert_js_1.bytes)(bytes); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; + } + exports.bytesToHex = bytesToHex; + // We use optimized technique to convert hex string to byte array + const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 }; + function asciiToBase16(char) { + if (char >= asciis._0 && char <= asciis._9) + return char - asciis._0; + if (char >= asciis._A && char <= asciis._F) + return char - (asciis._A - 10); + if (char >= asciis._a && char <= asciis._f) + return char - (asciis._a - 10); + return; + } + /** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ + function hexToBytes(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const hl = hex.length; + const al = hl / 2; + if (hl % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + hl); + const array = new Uint8Array(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = asciiToBase16(hex.charCodeAt(hi)); + const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); + if (n1 === undefined || n2 === undefined) { + const char = hex[hi] + hex[hi + 1]; + throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); + } + array[ai] = n1 * 16 + n2; + } + return array; + } + exports.hexToBytes = hexToBytes; + // There is no 
setImmediate in browser and setTimeout is slow. + // call of async fn will return Promise, which will be fullfiled only on + // next scheduler queue processing step and this is exactly what we need. + const nextTick = async () => { }; + exports.nextTick = nextTick; + // Returns control to thread each 'tick' ms to avoid blocking + async function asyncLoop(iters, tick, cb) { + let ts = Date.now(); + for (let i = 0; i < iters; i++) { + cb(i); + // Date.now() is not monotonic, so in case if clock goes backwards we return return control too + const diff = Date.now() - ts; + if (diff >= 0 && diff < tick) + continue; + await (0, exports.nextTick)(); + ts += diff; + } + } + exports.asyncLoop = asyncLoop; + /** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ + function utf8ToBytes(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 + } + exports.utf8ToBytes = utf8ToBytes; + /** + * Normalizes (non-hex) string or Uint8Array to Uint8Array. + * Warning: when Uint8Array is passed, it would NOT get copied. + * Keep in mind for future mutable operations. + */ + function toBytes(data) { + if (typeof data === 'string') + data = utf8ToBytes(data); + (0, _assert_js_1.bytes)(data); + return data; + } + exports.toBytes = toBytes; + /** + * Copies several Uint8Arrays into one. 
+ */ + function concatBytes(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + (0, _assert_js_1.bytes)(a); + sum += a.length; + } + const res = new Uint8Array(sum); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; + } + exports.concatBytes = concatBytes; + // For runtime check if class implements interface + class Hash { + // Safe version that clones internal state + clone() { + return this._cloneInto(); + } + } + exports.Hash = Hash; + const toStr = {}.toString; + function checkOpts(defaults, opts) { + if (opts !== undefined && toStr.call(opts) !== '[object Object]') + throw new Error('Options should be object or undefined'); + const merged = Object.assign(defaults, opts); + return merged; + } + exports.checkOpts = checkOpts; + function wrapConstructor(hashCons) { + const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); + const tmp = hashCons(); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = () => hashCons(); + return hashC; + } + exports.wrapConstructor = wrapConstructor; + function wrapConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; + } + exports.wrapConstructorWithOpts = wrapConstructorWithOpts; + function wrapXOFConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; + } + exports.wrapXOFConstructorWithOpts = wrapXOFConstructorWithOpts; + /** + * Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS. 
+ */ + function randomBytes(bytesLength = 32) { + if (crypto_1.crypto && typeof crypto_1.crypto.getRandomValues === 'function') { + return crypto_1.crypto.getRandomValues(new Uint8Array(bytesLength)); + } + throw new Error('crypto.getRandomValues must be defined'); + } + exports.randomBytes = randomBytes; + +} (utils$2)); + +Object.defineProperty(sha3, "__esModule", { value: true }); +sha3.shake256 = sha3.shake128 = sha3.keccak_512 = sha3.keccak_384 = sha3.keccak_256 = sha3.keccak_224 = sha3.sha3_512 = sha3.sha3_384 = sha3.sha3_256 = sha3.sha3_224 = sha3.Keccak = sha3.keccakP = void 0; +const _assert_js_1 = _assert; +const _u64_js_1 = _u64; +const utils_js_1 = utils$2; +// SHA3 (keccak) is based on a new design: basically, the internal state is bigger than output size. +// It's called a sponge function. +// Various per round constants calculations +const SHA3_PI = []; +const SHA3_ROTL = []; +const _SHA3_IOTA = []; +const _0n = /* @__PURE__ */ BigInt(0); +const _1n = /* @__PURE__ */ BigInt(1); +const _2n = /* @__PURE__ */ BigInt(2); +const _7n = /* @__PURE__ */ BigInt(7); +const _256n = /* @__PURE__ */ BigInt(256); +const _0x71n = /* @__PURE__ */ BigInt(0x71); +for (let round = 0, R = _1n, x = 1, y = 0; round < 24; round++) { + // Pi + [x, y] = [y, (2 * x + 3 * y) % 5]; + SHA3_PI.push(2 * (5 * y + x)); + // Rotational + SHA3_ROTL.push((((round + 1) * (round + 2)) / 2) % 64); + // Iota + let t = _0n; + for (let j = 0; j < 7; j++) { + R = ((R << _1n) ^ ((R >> _7n) * _0x71n)) % _256n; + if (R & _2n) + t ^= _1n << ((_1n << /* @__PURE__ */ BigInt(j)) - _1n); + } + _SHA3_IOTA.push(t); +} +const [SHA3_IOTA_H, SHA3_IOTA_L] = /* @__PURE__ */ (0, _u64_js_1.split)(_SHA3_IOTA, true); +// Left rotation (without 0, 32, 64) +const rotlH = (h, l, s) => (s > 32 ? (0, _u64_js_1.rotlBH)(h, l, s) : (0, _u64_js_1.rotlSH)(h, l, s)); +const rotlL = (h, l, s) => (s > 32 ? 
(0, _u64_js_1.rotlBL)(h, l, s) : (0, _u64_js_1.rotlSL)(h, l, s)); +// Same as keccakf1600, but allows to skip some rounds +function keccakP(s, rounds = 24) { + const B = new Uint32Array(5 * 2); + // NOTE: all indices are x2 since we store state as u32 instead of u64 (bigints to slow in js) + for (let round = 24 - rounds; round < 24; round++) { + // Theta θ + for (let x = 0; x < 10; x++) + B[x] = s[x] ^ s[x + 10] ^ s[x + 20] ^ s[x + 30] ^ s[x + 40]; + for (let x = 0; x < 10; x += 2) { + const idx1 = (x + 8) % 10; + const idx0 = (x + 2) % 10; + const B0 = B[idx0]; + const B1 = B[idx0 + 1]; + const Th = rotlH(B0, B1, 1) ^ B[idx1]; + const Tl = rotlL(B0, B1, 1) ^ B[idx1 + 1]; + for (let y = 0; y < 50; y += 10) { + s[x + y] ^= Th; + s[x + y + 1] ^= Tl; + } + } + // Rho (ρ) and Pi (π) + let curH = s[2]; + let curL = s[3]; + for (let t = 0; t < 24; t++) { + const shift = SHA3_ROTL[t]; + const Th = rotlH(curH, curL, shift); + const Tl = rotlL(curH, curL, shift); + const PI = SHA3_PI[t]; + curH = s[PI]; + curL = s[PI + 1]; + s[PI] = Th; + s[PI + 1] = Tl; + } + // Chi (χ) + for (let y = 0; y < 50; y += 10) { + for (let x = 0; x < 10; x++) + B[x] = s[y + x]; + for (let x = 0; x < 10; x++) + s[y + x] ^= ~B[(x + 2) % 10] & B[(x + 4) % 10]; + } + // Iota (ι) + s[0] ^= SHA3_IOTA_H[round]; + s[1] ^= SHA3_IOTA_L[round]; + } + B.fill(0); +} +sha3.keccakP = keccakP; +class Keccak extends utils_js_1.Hash { + // NOTE: we accept arguments in bytes instead of bits here. + constructor(blockLen, suffix, outputLen, enableXOF = false, rounds = 24) { + super(); + this.blockLen = blockLen; + this.suffix = suffix; + this.outputLen = outputLen; + this.enableXOF = enableXOF; + this.rounds = rounds; + this.pos = 0; + this.posOut = 0; + this.finished = false; + this.destroyed = false; + // Can be passed from user as dkLen + (0, _assert_js_1.number)(outputLen); + // 1600 = 5x5 matrix of 64bit. 
1600 bits === 200 bytes + if (0 >= this.blockLen || this.blockLen >= 200) + throw new Error('Sha3 supports only keccak-f1600 function'); + this.state = new Uint8Array(200); + this.state32 = (0, utils_js_1.u32)(this.state); + } + keccak() { + if (!utils_js_1.isLE) + (0, utils_js_1.byteSwap32)(this.state32); + keccakP(this.state32, this.rounds); + if (!utils_js_1.isLE) + (0, utils_js_1.byteSwap32)(this.state32); + this.posOut = 0; + this.pos = 0; + } + update(data) { + (0, _assert_js_1.exists)(this); + const { blockLen, state } = this; + data = (0, utils_js_1.toBytes)(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + for (let i = 0; i < take; i++) + state[this.pos++] ^= data[pos++]; + if (this.pos === blockLen) + this.keccak(); + } + return this; + } + finish() { + if (this.finished) + return; + this.finished = true; + const { state, suffix, pos, blockLen } = this; + // Do the padding + state[pos] ^= suffix; + if ((suffix & 0x80) !== 0 && pos === blockLen - 1) + this.keccak(); + state[blockLen - 1] ^= 0x80; + this.keccak(); + } + writeInto(out) { + (0, _assert_js_1.exists)(this, false); + (0, _assert_js_1.bytes)(out); + this.finish(); + const bufferOut = this.state; + const { blockLen } = this; + for (let pos = 0, len = out.length; pos < len;) { + if (this.posOut >= blockLen) + this.keccak(); + const take = Math.min(blockLen - this.posOut, len - pos); + out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos); + this.posOut += take; + pos += take; + } + return out; + } + xofInto(out) { + // Sha3/Keccak usage with XOF is probably mistake, only SHAKE instances can do XOF + if (!this.enableXOF) + throw new Error('XOF is not possible for this instance'); + return this.writeInto(out); + } + xof(bytes) { + (0, _assert_js_1.number)(bytes); + return this.xofInto(new Uint8Array(bytes)); + } + digestInto(out) { + (0, _assert_js_1.output)(out, this); + if (this.finished) + throw new 
Error('digest() was already called'); + this.writeInto(out); + this.destroy(); + return out; + } + digest() { + return this.digestInto(new Uint8Array(this.outputLen)); + } + destroy() { + this.destroyed = true; + this.state.fill(0); + } + _cloneInto(to) { + const { blockLen, suffix, outputLen, rounds, enableXOF } = this; + to || (to = new Keccak(blockLen, suffix, outputLen, enableXOF, rounds)); + to.state32.set(this.state32); + to.pos = this.pos; + to.posOut = this.posOut; + to.finished = this.finished; + to.rounds = rounds; + // Suffix can change in cSHAKE + to.suffix = suffix; + to.outputLen = outputLen; + to.enableXOF = enableXOF; + to.destroyed = this.destroyed; + return to; + } +} +sha3.Keccak = Keccak; +const gen = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapConstructor)(() => new Keccak(blockLen, suffix, outputLen)); +sha3.sha3_224 = gen(0x06, 144, 224 / 8); +/** + * SHA3-256 hash function + * @param message - that would be hashed + */ +sha3.sha3_256 = gen(0x06, 136, 256 / 8); +sha3.sha3_384 = gen(0x06, 104, 384 / 8); +sha3.sha3_512 = gen(0x06, 72, 512 / 8); +sha3.keccak_224 = gen(0x01, 144, 224 / 8); +/** + * keccak-256 hash function. Different from SHA3-256. + * @param message - that would be hashed + */ +sha3.keccak_256 = gen(0x01, 136, 256 / 8); +sha3.keccak_384 = gen(0x01, 104, 384 / 8); +sha3.keccak_512 = gen(0x01, 72, 512 / 8); +const genShake = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapXOFConstructorWithOpts)((opts = {}) => new Keccak(blockLen, suffix, opts.dkLen === undefined ? outputLen : opts.dkLen, true)); +sha3.shake128 = genShake(0x1f, 168, 128 / 8); +sha3.shake256 = genShake(0x1f, 136, 256 / 8); + +var lib$1 = {}; + +(function (exports) { + /*! 
scure-base - MIT License (c) 2022 Paul Miller (paulmillr.com) */ + Object.defineProperty(exports, "__esModule", { value: true }); + exports.bytes = exports.stringToBytes = exports.str = exports.bytesToString = exports.hex = exports.utf8 = exports.bech32m = exports.bech32 = exports.base58check = exports.createBase58check = exports.base58xmr = exports.base58xrp = exports.base58flickr = exports.base58 = exports.base64urlnopad = exports.base64url = exports.base64nopad = exports.base64 = exports.base32crockford = exports.base32hex = exports.base32 = exports.base16 = exports.utils = exports.assertNumber = void 0; + // Utilities + /** + * @__NO_SIDE_EFFECTS__ + */ + function assertNumber(n) { + if (!Number.isSafeInteger(n)) + throw new Error(`Wrong integer: ${n}`); + } + exports.assertNumber = assertNumber; + function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); + } + /** + * @__NO_SIDE_EFFECTS__ + */ + function chain(...args) { + const id = (a) => a; + // Wrap call in closure so JIT can inline calls + const wrap = (a, b) => (c) => a(b(c)); + // Construct chain of args[-1].encode(args[-2].encode([...])) + const encode = args.map((x) => x.encode).reduceRight(wrap, id); + // Construct chain of args[0].decode(args[1].decode(...)) + const decode = args.map((x) => x.decode).reduce(wrap, id); + return { encode, decode }; + } + /** + * Encodes integer radix representation to array of strings using alphabet and back + * @__NO_SIDE_EFFECTS__ + */ + function alphabet(alphabet) { + return { + encode: (digits) => { + if (!Array.isArray(digits) || (digits.length && typeof digits[0] !== 'number')) + throw new Error('alphabet.encode input should be an array of numbers'); + return digits.map((i) => { + if (i < 0 || i >= alphabet.length) + throw new Error(`Digit index outside alphabet: ${i} (alphabet: ${alphabet.length})`); + return alphabet[i]; + }); + }, + decode: (input) => { + if 
(!Array.isArray(input) || (input.length && typeof input[0] !== 'string')) + throw new Error('alphabet.decode input should be array of strings'); + return input.map((letter) => { + if (typeof letter !== 'string') + throw new Error(`alphabet.decode: not string element=${letter}`); + const index = alphabet.indexOf(letter); + if (index === -1) + throw new Error(`Unknown letter: "${letter}". Allowed: ${alphabet}`); + return index; + }); + }, + }; + } + /** + * @__NO_SIDE_EFFECTS__ + */ + function join(separator = '') { + if (typeof separator !== 'string') + throw new Error('join separator should be string'); + return { + encode: (from) => { + if (!Array.isArray(from) || (from.length && typeof from[0] !== 'string')) + throw new Error('join.encode input should be array of strings'); + for (let i of from) + if (typeof i !== 'string') + throw new Error(`join.encode: non-string input=${i}`); + return from.join(separator); + }, + decode: (to) => { + if (typeof to !== 'string') + throw new Error('join.decode input should be string'); + return to.split(separator); + }, + }; + } + /** + * Pad strings array so it has integer number of bits + * @__NO_SIDE_EFFECTS__ + */ + function padding(bits, chr = '=') { + if (typeof chr !== 'string') + throw new Error('padding chr should be string'); + return { + encode(data) { + if (!Array.isArray(data) || (data.length && typeof data[0] !== 'string')) + throw new Error('padding.encode input should be array of strings'); + for (let i of data) + if (typeof i !== 'string') + throw new Error(`padding.encode: non-string input=${i}`); + while ((data.length * bits) % 8) + data.push(chr); + return data; + }, + decode(input) { + if (!Array.isArray(input) || (input.length && typeof input[0] !== 'string')) + throw new Error('padding.encode input should be array of strings'); + for (let i of input) + if (typeof i !== 'string') + throw new Error(`padding.decode: non-string input=${i}`); + let end = input.length; + if ((end * bits) % 8) + throw new 
Error('Invalid padding: string should have whole number of bytes'); + for (; end > 0 && input[end - 1] === chr; end--) { + if (!(((end - 1) * bits) % 8)) + throw new Error('Invalid padding: string has too much padding'); + } + return input.slice(0, end); + }, + }; + } + /** + * @__NO_SIDE_EFFECTS__ + */ + function normalize(fn) { + if (typeof fn !== 'function') + throw new Error('normalize fn should be function'); + return { encode: (from) => from, decode: (to) => fn(to) }; + } + /** + * Slow: O(n^2) time complexity + * @__NO_SIDE_EFFECTS__ + */ + function convertRadix(data, from, to) { + // base 1 is impossible + if (from < 2) + throw new Error(`convertRadix: wrong from=${from}, base cannot be less than 2`); + if (to < 2) + throw new Error(`convertRadix: wrong to=${to}, base cannot be less than 2`); + if (!Array.isArray(data)) + throw new Error('convertRadix: data should be array'); + if (!data.length) + return []; + let pos = 0; + const res = []; + const digits = Array.from(data); + digits.forEach((d) => { + if (d < 0 || d >= from) + throw new Error(`Wrong integer: ${d}`); + }); + while (true) { + let carry = 0; + let done = true; + for (let i = pos; i < digits.length; i++) { + const digit = digits[i]; + const digitBase = from * carry + digit; + if (!Number.isSafeInteger(digitBase) || + (from * carry) / from !== carry || + digitBase - digit !== from * carry) { + throw new Error('convertRadix: carry overflow'); + } + carry = digitBase % to; + const rounded = Math.floor(digitBase / to); + digits[i] = rounded; + if (!Number.isSafeInteger(rounded) || rounded * to + carry !== digitBase) + throw new Error('convertRadix: carry overflow'); + if (!done) + continue; + else if (!rounded) + pos = i; + else + done = false; + } + res.push(carry); + if (done) + break; + } + for (let i = 0; i < data.length - 1 && data[i] === 0; i++) + res.push(0); + return res.reverse(); + } + const gcd = /* @__NO_SIDE_EFFECTS__ */ (a, b) => (!b ? 
a : gcd(b, a % b)); + const radix2carry = /*@__NO_SIDE_EFFECTS__ */ (from, to) => from + (to - gcd(from, to)); + /** + * Implemented with numbers, because BigInt is 5x slower + * @__NO_SIDE_EFFECTS__ + */ + function convertRadix2(data, from, to, padding) { + if (!Array.isArray(data)) + throw new Error('convertRadix2: data should be array'); + if (from <= 0 || from > 32) + throw new Error(`convertRadix2: wrong from=${from}`); + if (to <= 0 || to > 32) + throw new Error(`convertRadix2: wrong to=${to}`); + if (radix2carry(from, to) > 32) { + throw new Error(`convertRadix2: carry overflow from=${from} to=${to} carryBits=${radix2carry(from, to)}`); + } + let carry = 0; + let pos = 0; // bitwise position in current element + const mask = 2 ** to - 1; + const res = []; + for (const n of data) { + if (n >= 2 ** from) + throw new Error(`convertRadix2: invalid data word=${n} from=${from}`); + carry = (carry << from) | n; + if (pos + from > 32) + throw new Error(`convertRadix2: carry overflow pos=${pos} from=${from}`); + pos += from; + for (; pos >= to; pos -= to) + res.push(((carry >> (pos - to)) & mask) >>> 0); + carry &= 2 ** pos - 1; // clean carry, otherwise it will cause overflow + } + carry = (carry << (to - pos)) & mask; + if (!padding && pos >= from) + throw new Error('Excess padding'); + if (!padding && carry) + throw new Error(`Non-zero padding: ${carry}`); + if (padding && pos > 0) + res.push(carry >>> 0); + return res; + } + /** + * @__NO_SIDE_EFFECTS__ + */ + function radix(num) { + return { + encode: (bytes) => { + if (!isBytes(bytes)) + throw new Error('radix.encode input should be Uint8Array'); + return convertRadix(Array.from(bytes), 2 ** 8, num); + }, + decode: (digits) => { + if (!Array.isArray(digits) || (digits.length && typeof digits[0] !== 'number')) + throw new Error('radix.decode input should be array of numbers'); + return Uint8Array.from(convertRadix(digits, num, 2 ** 8)); + }, + }; + } + /** + * If both bases are power of same number (like `2**8 
<-> 2**64`), + * there is a linear algorithm. For now we have implementation for power-of-two bases only. + * @__NO_SIDE_EFFECTS__ + */ + function radix2(bits, revPadding = false) { + if (bits <= 0 || bits > 32) + throw new Error('radix2: bits should be in (0..32]'); + if (radix2carry(8, bits) > 32 || radix2carry(bits, 8) > 32) + throw new Error('radix2: carry overflow'); + return { + encode: (bytes) => { + if (!isBytes(bytes)) + throw new Error('radix2.encode input should be Uint8Array'); + return convertRadix2(Array.from(bytes), 8, bits, !revPadding); + }, + decode: (digits) => { + if (!Array.isArray(digits) || (digits.length && typeof digits[0] !== 'number')) + throw new Error('radix2.decode input should be array of numbers'); + return Uint8Array.from(convertRadix2(digits, bits, 8, revPadding)); + }, + }; + } + /** + * @__NO_SIDE_EFFECTS__ + */ + function unsafeWrapper(fn) { + if (typeof fn !== 'function') + throw new Error('unsafeWrapper fn should be function'); + return function (...args) { + try { + return fn.apply(null, args); + } + catch (e) { } + }; + } + /** + * @__NO_SIDE_EFFECTS__ + */ + function checksum(len, fn) { + if (typeof fn !== 'function') + throw new Error('checksum fn should be function'); + return { + encode(data) { + if (!isBytes(data)) + throw new Error('checksum.encode: input should be Uint8Array'); + const checksum = fn(data).slice(0, len); + const res = new Uint8Array(data.length + len); + res.set(data); + res.set(checksum, data.length); + return res; + }, + decode(data) { + if (!isBytes(data)) + throw new Error('checksum.decode: input should be Uint8Array'); + const payload = data.slice(0, -len); + const newChecksum = fn(payload).slice(0, len); + const oldChecksum = data.slice(-len); + for (let i = 0; i < len; i++) + if (newChecksum[i] !== oldChecksum[i]) + throw new Error('Invalid checksum'); + return payload; + }, + }; + } + // prettier-ignore + exports.utils = { + alphabet, chain, checksum, convertRadix, convertRadix2, radix, radix2, 
join, padding, + }; + // RFC 4648 aka RFC 3548 + // --------------------- + exports.base16 = chain(radix2(4), alphabet('0123456789ABCDEF'), join('')); + exports.base32 = chain(radix2(5), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'), padding(5), join('')); + exports.base32hex = chain(radix2(5), alphabet('0123456789ABCDEFGHIJKLMNOPQRSTUV'), padding(5), join('')); + exports.base32crockford = chain(radix2(5), alphabet('0123456789ABCDEFGHJKMNPQRSTVWXYZ'), join(''), normalize((s) => s.toUpperCase().replace(/O/g, '0').replace(/[IL]/g, '1'))); + exports.base64 = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'), padding(6), join('')); + exports.base64nopad = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'), join('')); + exports.base64url = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'), padding(6), join('')); + exports.base64urlnopad = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'), join('')); + // base58 code + // ----------- + const genBase58 = (abc) => chain(radix(58), alphabet(abc), join('')); + exports.base58 = genBase58('123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'); + exports.base58flickr = genBase58('123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ'); + exports.base58xrp = genBase58('rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz'); + // xmr ver is done in 8-byte blocks (which equals 11 chars in decoding). Last (non-full) block padded with '1' to size in XMR_BLOCK_LEN. + // Block encoding significantly reduces quadratic complexity of base58. 
+ // Data len (index) -> encoded block len + const XMR_BLOCK_LEN = [0, 2, 3, 5, 6, 7, 9, 10, 11]; + exports.base58xmr = { + encode(data) { + let res = ''; + for (let i = 0; i < data.length; i += 8) { + const block = data.subarray(i, i + 8); + res += exports.base58.encode(block).padStart(XMR_BLOCK_LEN[block.length], '1'); + } + return res; + }, + decode(str) { + let res = []; + for (let i = 0; i < str.length; i += 11) { + const slice = str.slice(i, i + 11); + const blockLen = XMR_BLOCK_LEN.indexOf(slice.length); + const block = exports.base58.decode(slice); + for (let j = 0; j < block.length - blockLen; j++) { + if (block[j] !== 0) + throw new Error('base58xmr: wrong padding'); + } + res = res.concat(Array.from(block.slice(block.length - blockLen))); + } + return Uint8Array.from(res); + }, + }; + const createBase58check = (sha256) => chain(checksum(4, (data) => sha256(sha256(data))), exports.base58); + exports.createBase58check = createBase58check; + // legacy export, bad name + exports.base58check = exports.createBase58check; + const BECH_ALPHABET = /* @__PURE__ */ chain(alphabet('qpzry9x8gf2tvdw0s3jn54khce6mua7l'), join('')); + const POLYMOD_GENERATORS = [0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3]; + /** + * @__NO_SIDE_EFFECTS__ + */ + function bech32Polymod(pre) { + const b = pre >> 25; + let chk = (pre & 0x1ffffff) << 5; + for (let i = 0; i < POLYMOD_GENERATORS.length; i++) { + if (((b >> i) & 1) === 1) + chk ^= POLYMOD_GENERATORS[i]; + } + return chk; + } + /** + * @__NO_SIDE_EFFECTS__ + */ + function bechChecksum(prefix, words, encodingConst = 1) { + const len = prefix.length; + let chk = 1; + for (let i = 0; i < len; i++) { + const c = prefix.charCodeAt(i); + if (c < 33 || c > 126) + throw new Error(`Invalid prefix (${prefix})`); + chk = bech32Polymod(chk) ^ (c >> 5); + } + chk = bech32Polymod(chk); + for (let i = 0; i < len; i++) + chk = bech32Polymod(chk) ^ (prefix.charCodeAt(i) & 0x1f); + for (let v of words) + chk = bech32Polymod(chk) ^ 
v; + for (let i = 0; i < 6; i++) + chk = bech32Polymod(chk); + chk ^= encodingConst; + return BECH_ALPHABET.encode(convertRadix2([chk % 2 ** 30], 30, 5, false)); + } + /** + * @__NO_SIDE_EFFECTS__ + */ + function genBech32(encoding) { + const ENCODING_CONST = encoding === 'bech32' ? 1 : 0x2bc830a3; + const _words = radix2(5); + const fromWords = _words.decode; + const toWords = _words.encode; + const fromWordsUnsafe = unsafeWrapper(fromWords); + function encode(prefix, words, limit = 90) { + if (typeof prefix !== 'string') + throw new Error(`bech32.encode prefix should be string, not ${typeof prefix}`); + if (!Array.isArray(words) || (words.length && typeof words[0] !== 'number')) + throw new Error(`bech32.encode words should be array of numbers, not ${typeof words}`); + if (prefix.length === 0) + throw new TypeError(`Invalid prefix length ${prefix.length}`); + const actualLength = prefix.length + 7 + words.length; + if (limit !== false && actualLength > limit) + throw new TypeError(`Length ${actualLength} exceeds limit ${limit}`); + const lowered = prefix.toLowerCase(); + const sum = bechChecksum(lowered, words, ENCODING_CONST); + return `${lowered}1${BECH_ALPHABET.encode(words)}${sum}`; + } + function decode(str, limit = 90) { + if (typeof str !== 'string') + throw new Error(`bech32.decode input should be string, not ${typeof str}`); + if (str.length < 8 || (limit !== false && str.length > limit)) + throw new TypeError(`Wrong string length: ${str.length} (${str}). 
Expected (8..${limit})`); + // don't allow mixed case + const lowered = str.toLowerCase(); + if (str !== lowered && str !== str.toUpperCase()) + throw new Error(`String must be lowercase or uppercase`); + const sepIndex = lowered.lastIndexOf('1'); + if (sepIndex === 0 || sepIndex === -1) + throw new Error(`Letter "1" must be present between prefix and data only`); + const prefix = lowered.slice(0, sepIndex); + const data = lowered.slice(sepIndex + 1); + if (data.length < 6) + throw new Error('Data must be at least 6 characters long'); + const words = BECH_ALPHABET.decode(data).slice(0, -6); + const sum = bechChecksum(prefix, words, ENCODING_CONST); + if (!data.endsWith(sum)) + throw new Error(`Invalid checksum in ${str}: expected "${sum}"`); + return { prefix, words }; + } + const decodeUnsafe = unsafeWrapper(decode); + function decodeToBytes(str) { + const { prefix, words } = decode(str, false); + return { prefix, words, bytes: fromWords(words) }; + } + return { encode, decode, decodeToBytes, decodeUnsafe, fromWords, fromWordsUnsafe, toWords }; + } + exports.bech32 = genBech32('bech32'); + exports.bech32m = genBech32('bech32m'); + exports.utf8 = { + encode: (data) => new TextDecoder().decode(data), + decode: (str) => new TextEncoder().encode(str), + }; + exports.hex = chain(radix2(4), alphabet('0123456789abcdef'), join(''), normalize((s) => { + if (typeof s !== 'string' || s.length % 2) + throw new TypeError(`hex.decode: expected string, got ${typeof s} with length ${s.length}`); + return s.toLowerCase(); + })); + // prettier-ignore + const CODERS = { + utf8: exports.utf8, hex: exports.hex, base16: exports.base16, base32: exports.base32, base64: exports.base64, base64url: exports.base64url, base58: exports.base58, base58xmr: exports.base58xmr + }; + const coderTypeError = 'Invalid encoding type. 
Available types: utf8, hex, base16, base32, base64, base64url, base58, base58xmr'; + const bytesToString = (type, bytes) => { + if (typeof type !== 'string' || !CODERS.hasOwnProperty(type)) + throw new TypeError(coderTypeError); + if (!isBytes(bytes)) + throw new TypeError('bytesToString() expects Uint8Array'); + return CODERS[type].encode(bytes); + }; + exports.bytesToString = bytesToString; + exports.str = exports.bytesToString; // as in python, but for bytes only + const stringToBytes = (type, str) => { + if (!CODERS.hasOwnProperty(type)) + throw new TypeError(coderTypeError); + if (typeof str !== 'string') + throw new TypeError('stringToBytes() expects string'); + return CODERS[type].decode(str); + }; + exports.stringToBytes = stringToBytes; + exports.bytes = exports.stringToBytes; + +} (lib$1)); + +Object.defineProperty(chunkQEPVHEP7, "__esModule", {value: true}); function _optionalChain$1(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } + +var _chunk6ZDHSOUVjs$6 = chunk6ZDHSOUV; + +// src/hex.ts +var _sha3 = sha3; +var _superstruct$6 = require$$1; + +// src/bytes.ts +var _base = lib$1; +var HEX_MINIMUM_NUMBER_CHARACTER = 48; +var HEX_MAXIMUM_NUMBER_CHARACTER = 58; +var HEX_CHARACTER_OFFSET = 87; +function getPrecomputedHexValuesBuilder() { + const lookupTable = []; + return () => { + if (lookupTable.length === 0) { + for (let i = 0; i < 256; i++) { + lookupTable.push(i.toString(16).padStart(2, "0")); + } + } + return lookupTable; + }; +} +var getPrecomputedHexValues = getPrecomputedHexValuesBuilder(); +function 
isBytes(value) { + return value instanceof Uint8Array; +} +function assertIsBytes(value) { + _chunk6ZDHSOUVjs$6.assert.call(void 0, isBytes(value), "Value must be a Uint8Array."); +} +function bytesToHex(bytes) { + assertIsBytes(bytes); + if (bytes.length === 0) { + return "0x"; + } + const lookupTable = getPrecomputedHexValues(); + const hexadecimal = new Array(bytes.length); + for (let i = 0; i < bytes.length; i++) { + hexadecimal[i] = lookupTable[bytes[i]]; + } + return add0x(hexadecimal.join("")); +} +function bytesToBigInt(bytes) { + assertIsBytes(bytes); + const hexadecimal = bytesToHex(bytes); + return BigInt(hexadecimal); +} +function bytesToSignedBigInt(bytes) { + assertIsBytes(bytes); + let value = BigInt(0); + for (const byte of bytes) { + value = (value << BigInt(8)) + BigInt(byte); + } + return BigInt.asIntN(bytes.length * 8, value); +} +function bytesToNumber(bytes) { + assertIsBytes(bytes); + const bigint = bytesToBigInt(bytes); + _chunk6ZDHSOUVjs$6.assert.call(void 0, + bigint <= BigInt(Number.MAX_SAFE_INTEGER), + "Number is not a safe integer. Use `bytesToBigInt` instead." + ); + return Number(bigint); +} +function bytesToString(bytes) { + assertIsBytes(bytes); + return new TextDecoder().decode(bytes); +} +function bytesToBase64(bytes) { + assertIsBytes(bytes); + return _base.base64.encode(bytes); +} +function hexToBytes(value) { + if (_optionalChain$1([value, 'optionalAccess', _ => _.toLowerCase, 'optionalCall', _2 => _2()]) === "0x") { + return new Uint8Array(); + } + assertIsHexString(value); + const strippedValue = remove0x(value).toLowerCase(); + const normalizedValue = strippedValue.length % 2 === 0 ? strippedValue : `0${strippedValue}`; + const bytes = new Uint8Array(normalizedValue.length / 2); + for (let i = 0; i < bytes.length; i++) { + const c1 = normalizedValue.charCodeAt(i * 2); + const c2 = normalizedValue.charCodeAt(i * 2 + 1); + const n1 = c1 - (c1 < HEX_MAXIMUM_NUMBER_CHARACTER ? 
HEX_MINIMUM_NUMBER_CHARACTER : HEX_CHARACTER_OFFSET); + const n2 = c2 - (c2 < HEX_MAXIMUM_NUMBER_CHARACTER ? HEX_MINIMUM_NUMBER_CHARACTER : HEX_CHARACTER_OFFSET); + bytes[i] = n1 * 16 + n2; + } + return bytes; +} +function bigIntToBytes(value) { + _chunk6ZDHSOUVjs$6.assert.call(void 0, typeof value === "bigint", "Value must be a bigint."); + _chunk6ZDHSOUVjs$6.assert.call(void 0, value >= BigInt(0), "Value must be a non-negative bigint."); + const hexadecimal = value.toString(16); + return hexToBytes(hexadecimal); +} +function bigIntFits(value, bytes) { + _chunk6ZDHSOUVjs$6.assert.call(void 0, bytes > 0); + const mask = value >> BigInt(31); + return !((~value & mask) + (value & ~mask) >> BigInt(bytes * 8 + ~0)); +} +function signedBigIntToBytes(value, byteLength) { + _chunk6ZDHSOUVjs$6.assert.call(void 0, typeof value === "bigint", "Value must be a bigint."); + _chunk6ZDHSOUVjs$6.assert.call(void 0, typeof byteLength === "number", "Byte length must be a number."); + _chunk6ZDHSOUVjs$6.assert.call(void 0, byteLength > 0, "Byte length must be greater than 0."); + _chunk6ZDHSOUVjs$6.assert.call(void 0, + bigIntFits(value, byteLength), + "Byte length is too small to represent the given value." + ); + let numberValue = value; + const bytes = new Uint8Array(byteLength); + for (let i = 0; i < bytes.length; i++) { + bytes[i] = Number(BigInt.asUintN(8, numberValue)); + numberValue >>= BigInt(8); + } + return bytes.reverse(); +} +function numberToBytes(value) { + _chunk6ZDHSOUVjs$6.assert.call(void 0, typeof value === "number", "Value must be a number."); + _chunk6ZDHSOUVjs$6.assert.call(void 0, value >= 0, "Value must be a non-negative number."); + _chunk6ZDHSOUVjs$6.assert.call(void 0, + Number.isSafeInteger(value), + "Value is not a safe integer. Use `bigIntToBytes` instead." 
+ ); + const hexadecimal = value.toString(16); + return hexToBytes(hexadecimal); +} +function stringToBytes(value) { + _chunk6ZDHSOUVjs$6.assert.call(void 0, typeof value === "string", "Value must be a string."); + return new TextEncoder().encode(value); +} +function base64ToBytes(value) { + _chunk6ZDHSOUVjs$6.assert.call(void 0, typeof value === "string", "Value must be a string."); + return _base.base64.decode(value); +} +function valueToBytes(value) { + if (typeof value === "bigint") { + return bigIntToBytes(value); + } + if (typeof value === "number") { + return numberToBytes(value); + } + if (typeof value === "string") { + if (value.startsWith("0x")) { + return hexToBytes(value); + } + return stringToBytes(value); + } + if (isBytes(value)) { + return value; + } + throw new TypeError(`Unsupported value type: "${typeof value}".`); +} +function concatBytes(values) { + const normalizedValues = new Array(values.length); + let byteLength = 0; + for (let i = 0; i < values.length; i++) { + const value = valueToBytes(values[i]); + normalizedValues[i] = value; + byteLength += value.length; + } + const bytes = new Uint8Array(byteLength); + for (let i = 0, offset = 0; i < normalizedValues.length; i++) { + bytes.set(normalizedValues[i], offset); + offset += normalizedValues[i].length; + } + return bytes; +} +function createDataView(bytes) { + if (typeof Buffer !== "undefined" && bytes instanceof Buffer) { + const buffer = bytes.buffer.slice( + bytes.byteOffset, + bytes.byteOffset + bytes.byteLength + ); + return new DataView(buffer); + } + return new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength); +} + +// src/hex.ts +var HexStruct = _superstruct$6.pattern.call(void 0, _superstruct$6.string.call(void 0, ), /^(?:0x)?[0-9a-f]+$/iu); +var StrictHexStruct = _superstruct$6.pattern.call(void 0, _superstruct$6.string.call(void 0, ), /^0x[0-9a-f]+$/iu); +var HexAddressStruct = _superstruct$6.pattern.call(void 0, + _superstruct$6.string.call(void 0, ), + 
/^0x[0-9a-f]{40}$/u +); +var HexChecksumAddressStruct = _superstruct$6.pattern.call(void 0, + _superstruct$6.string.call(void 0, ), + /^0x[0-9a-fA-F]{40}$/u +); +function isHexString(value) { + return _superstruct$6.is.call(void 0, value, HexStruct); +} +function isStrictHexString(value) { + return _superstruct$6.is.call(void 0, value, StrictHexStruct); +} +function assertIsHexString(value) { + _chunk6ZDHSOUVjs$6.assert.call(void 0, isHexString(value), "Value must be a hexadecimal string."); +} +function assertIsStrictHexString(value) { + _chunk6ZDHSOUVjs$6.assert.call(void 0, + isStrictHexString(value), + 'Value must be a hexadecimal string, starting with "0x".' + ); +} +function isValidHexAddress(possibleAddress) { + return _superstruct$6.is.call(void 0, possibleAddress, HexAddressStruct) || isValidChecksumAddress(possibleAddress); +} +function getChecksumAddress(address) { + _chunk6ZDHSOUVjs$6.assert.call(void 0, _superstruct$6.is.call(void 0, address, HexChecksumAddressStruct), "Invalid hex address."); + const unPrefixed = remove0x(address.toLowerCase()); + const unPrefixedHash = remove0x(bytesToHex(_sha3.keccak_256.call(void 0, unPrefixed))); + return `0x${unPrefixed.split("").map((character, nibbleIndex) => { + const hashCharacter = unPrefixedHash[nibbleIndex]; + _chunk6ZDHSOUVjs$6.assert.call(void 0, _superstruct$6.is.call(void 0, hashCharacter, _superstruct$6.string.call(void 0, )), "Hash shorter than address."); + return parseInt(hashCharacter, 16) > 7 ? 
character.toUpperCase() : character; + }).join("")}`; +} +function isValidChecksumAddress(possibleChecksum) { + if (!_superstruct$6.is.call(void 0, possibleChecksum, HexChecksumAddressStruct)) { + return false; + } + return getChecksumAddress(possibleChecksum) === possibleChecksum; +} +function add0x(hexadecimal) { + if (hexadecimal.startsWith("0x")) { + return hexadecimal; + } + if (hexadecimal.startsWith("0X")) { + return `0x${hexadecimal.substring(2)}`; + } + return `0x${hexadecimal}`; +} +function remove0x(hexadecimal) { + if (hexadecimal.startsWith("0x") || hexadecimal.startsWith("0X")) { + return hexadecimal.substring(2); + } + return hexadecimal; +} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +chunkQEPVHEP7.HexStruct = HexStruct; chunkQEPVHEP7.StrictHexStruct = StrictHexStruct; chunkQEPVHEP7.HexAddressStruct = HexAddressStruct; chunkQEPVHEP7.HexChecksumAddressStruct = HexChecksumAddressStruct; chunkQEPVHEP7.isHexString = isHexString; chunkQEPVHEP7.isStrictHexString = isStrictHexString; chunkQEPVHEP7.assertIsHexString = assertIsHexString; chunkQEPVHEP7.assertIsStrictHexString = assertIsStrictHexString; chunkQEPVHEP7.isValidHexAddress = isValidHexAddress; chunkQEPVHEP7.getChecksumAddress = getChecksumAddress; chunkQEPVHEP7.isValidChecksumAddress = isValidChecksumAddress; chunkQEPVHEP7.add0x = add0x; chunkQEPVHEP7.remove0x = remove0x; chunkQEPVHEP7.isBytes = isBytes; chunkQEPVHEP7.assertIsBytes = assertIsBytes; chunkQEPVHEP7.bytesToHex = bytesToHex; chunkQEPVHEP7.bytesToBigInt = bytesToBigInt; chunkQEPVHEP7.bytesToSignedBigInt = bytesToSignedBigInt; chunkQEPVHEP7.bytesToNumber = bytesToNumber; chunkQEPVHEP7.bytesToString = bytesToString; chunkQEPVHEP7.bytesToBase64 = bytesToBase64; chunkQEPVHEP7.hexToBytes = hexToBytes; chunkQEPVHEP7.bigIntToBytes = bigIntToBytes; chunkQEPVHEP7.signedBigIntToBytes = signedBigIntToBytes; chunkQEPVHEP7.numberToBytes = numberToBytes; chunkQEPVHEP7.stringToBytes = stringToBytes; chunkQEPVHEP7.base64ToBytes = 
base64ToBytes; chunkQEPVHEP7.valueToBytes = valueToBytes; chunkQEPVHEP7.concatBytes = concatBytes; chunkQEPVHEP7.createDataView = createDataView; + +Object.defineProperty(chunkVFXTVNXN, "__esModule", {value: true}); + + +var _chunkQEPVHEP7js$2 = chunkQEPVHEP7; + + +var _chunk6ZDHSOUVjs$5 = chunk6ZDHSOUV; + +// src/number.ts +var numberToHex = (value) => { + _chunk6ZDHSOUVjs$5.assert.call(void 0, typeof value === "number", "Value must be a number."); + _chunk6ZDHSOUVjs$5.assert.call(void 0, value >= 0, "Value must be a non-negative number."); + _chunk6ZDHSOUVjs$5.assert.call(void 0, + Number.isSafeInteger(value), + "Value is not a safe integer. Use `bigIntToHex` instead." + ); + return _chunkQEPVHEP7js$2.add0x.call(void 0, value.toString(16)); +}; +var bigIntToHex = (value) => { + _chunk6ZDHSOUVjs$5.assert.call(void 0, typeof value === "bigint", "Value must be a bigint."); + _chunk6ZDHSOUVjs$5.assert.call(void 0, value >= 0, "Value must be a non-negative bigint."); + return _chunkQEPVHEP7js$2.add0x.call(void 0, value.toString(16)); +}; +var hexToNumber = (value) => { + _chunkQEPVHEP7js$2.assertIsHexString.call(void 0, value); + const numberValue = parseInt(value, 16); + _chunk6ZDHSOUVjs$5.assert.call(void 0, + Number.isSafeInteger(numberValue), + "Value is not a safe integer. Use `hexToBigInt` instead." 
+ ); + return numberValue; +}; +var hexToBigInt = (value) => { + _chunkQEPVHEP7js$2.assertIsHexString.call(void 0, value); + return BigInt(_chunkQEPVHEP7js$2.add0x.call(void 0, value)); +}; + + + + + + +chunkVFXTVNXN.numberToHex = numberToHex; chunkVFXTVNXN.bigIntToHex = bigIntToHex; chunkVFXTVNXN.hexToNumber = hexToNumber; chunkVFXTVNXN.hexToBigInt = hexToBigInt; + +var chunkC6HGFEYL = {}; + +Object.defineProperty(chunkC6HGFEYL, "__esModule", {value: true});// src/promise.ts +function createDeferredPromise({ + suppressUnhandledRejection = false +} = {}) { + let resolve; + let reject; + const promise = new Promise( + (innerResolve, innerReject) => { + resolve = innerResolve; + reject = innerReject; + } + ); + if (suppressUnhandledRejection) { + promise.catch((_error) => { + }); + } + return { promise, resolve, reject }; +} + + + +chunkC6HGFEYL.createDeferredPromise = createDeferredPromise; + +var chunk4RMX5YWE = {}; + +Object.defineProperty(chunk4RMX5YWE, "__esModule", {value: true});// src/time.ts +var Duration = /* @__PURE__ */ ((Duration2) => { + Duration2[Duration2["Millisecond"] = 1] = "Millisecond"; + Duration2[Duration2["Second"] = 1e3] = "Second"; + Duration2[Duration2["Minute"] = 6e4] = "Minute"; + Duration2[Duration2["Hour"] = 36e5] = "Hour"; + Duration2[Duration2["Day"] = 864e5] = "Day"; + Duration2[Duration2["Week"] = 6048e5] = "Week"; + Duration2[Duration2["Year"] = 31536e6] = "Year"; + return Duration2; +})(Duration || {}); +var isNonNegativeInteger = (number) => Number.isInteger(number) && number >= 0; +var assertIsNonNegativeInteger = (number, name) => { + if (!isNonNegativeInteger(number)) { + throw new Error( + `"${name}" must be a non-negative integer. 
Received: "${number}".` + ); + } +}; +function inMilliseconds(count, duration) { + assertIsNonNegativeInteger(count, "count"); + return count * duration; +} +function timeSince(timestamp) { + assertIsNonNegativeInteger(timestamp, "timestamp"); + return Date.now() - timestamp; +} + + + + + +chunk4RMX5YWE.Duration = Duration; chunk4RMX5YWE.inMilliseconds = inMilliseconds; chunk4RMX5YWE.timeSince = timeSince; + +var chunk4D6XQBHA = {}; + +var re$2 = {exports: {}}; + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0'; + +const MAX_LENGTH$1 = 256; +const MAX_SAFE_INTEGER$1 = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991; + +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16; + +// Max safe length for a build identifier. The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. +const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH$1 - 6; + +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +]; + +var constants$1 = { + MAX_LENGTH: MAX_LENGTH$1, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER: MAX_SAFE_INTEGER$1, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +}; + +const debug$1 = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? 
(...args) => console.error('SEMVER', ...args) + : () => {}; + +var debug_1 = debug$1; + +(function (module, exports) { + const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, + } = constants$1; + const debug = debug_1; + exports = module.exports = {}; + + // The actual regexps go on exports.re + const re = exports.re = []; + const safeRe = exports.safeRe = []; + const src = exports.src = []; + const t = exports.t = {}; + let R = 0; + + const LETTERDASHNUMBER = '[a-zA-Z0-9-]'; + + // Replace some greedy regex tokens to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], + ]; + + const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`); + } + return value + }; + + const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value); + const index = R++; + debug(name, index, value); + t[name] = index; + src[index] = value; + re[index] = new RegExp(value, isGlobal ? 'g' : undefined); + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined); + }; + + // The following Regular Expressions can be used for tokenizing, + // validating, and parsing SemVer version strings. + + // ## Numeric Identifier + // A single `0`, or a non-zero digit followed by zero or more digits. 
+ + createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*'); + createToken('NUMERICIDENTIFIERLOOSE', '\\d+'); + + // ## Non-numeric Identifier + // Zero or more digits, followed by a letter or hyphen, and then zero or + // more letters, digits, or hyphens. + + createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`); + + // ## Main Version + // Three dot-separated numeric identifiers. + + createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`); + + createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`); + + // ## Pre-release Version Identifier + // A numeric identifier, or a non-numeric identifier. + + createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] + }|${src[t.NONNUMERICIDENTIFIER]})`); + + createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] + }|${src[t.NONNUMERICIDENTIFIER]})`); + + // ## Pre-release Version + // Hyphen, followed by one or more dot-separated pre-release version + // identifiers. + + createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] + }(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`); + + createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] + }(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`); + + // ## Build Metadata Identifier + // Any combination of digits, letters, or hyphens. + + createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`); + + // ## Build Metadata + // Plus sign, followed by one or more period-separated build metadata + // identifiers. + + createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] + }(?:\\.${src[t.BUILDIDENTIFIER]})*))`); + + // ## Full Version String + // A main version, followed optionally by a pre-release version and + // build metadata. + + // Note that the only major, minor, patch, and pre-release sections of + // the version string are capturing groups. 
The build metadata is not a + // capturing group, because it should not ever be used in version + // comparison. + + createToken('FULLPLAIN', `v?${src[t.MAINVERSION] + }${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`); + + createToken('FULL', `^${src[t.FULLPLAIN]}$`); + + // like full, but allows v1.2.3 and =1.2.3, which people do sometimes. + // also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty + // common in the npm registry. + createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] + }${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`); + + createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`); + + createToken('GTLT', '((?:<|>)?=?)'); + + // Something like "2.*" or "1.2.x". + // Note that "x.x" is a valid xRange identifer, meaning "any version" + // Only the first item is strictly required. + createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`); + createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`); + + createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`); + + createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`); + + createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`); + createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`); + + // Coercion. 
+ // Extract anything that could conceivably be a part of a valid semver + createToken('COERCEPLAIN', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`); + createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`); + createToken('COERCEFULL', src[t.COERCEPLAIN] + + `(?:${src[t.PRERELEASE]})?` + + `(?:${src[t.BUILD]})?` + + `(?:$|[^\\d])`); + createToken('COERCERTL', src[t.COERCE], true); + createToken('COERCERTLFULL', src[t.COERCEFULL], true); + + // Tilde ranges. + // Meaning is "reasonably at or greater than" + createToken('LONETILDE', '(?:~>?)'); + + createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true); + exports.tildeTrimReplace = '$1~'; + + createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`); + createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`); + + // Caret ranges. + // Meaning is "at least and backwards compatible with" + createToken('LONECARET', '(?:\\^)'); + + createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true); + exports.caretTrimReplace = '$1^'; + + createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`); + createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`); + + // A simple gt/lt/eq thing, or just "" to indicate "any version" + createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`); + createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`); + + // An expression to strip any whitespace between the gtlt and the thing + // it modifies, so that `> 1.2.3` ==> `>1.2.3` + createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] + }\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true); + exports.comparatorTrimReplace = '$1$2$3'; + + // Something like `1.2.3 - 1.2.4` + // Note that these all use the loose form, because they'll be + // checked against either the strict or loose comparator form + // later. 
+ createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`); + + createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`); + + // Star ranges basically just allow anything at all. + createToken('STAR', '(<|>)?=?\\s*\\*'); + // >=0.0.0 is like a star + createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$'); + createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$'); +} (re$2, re$2.exports)); + +var reExports = re$2.exports; + +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }); +const emptyOpts = Object.freeze({ }); +const parseOptions$1 = options => { + if (!options) { + return emptyOpts + } + + if (typeof options !== 'object') { + return looseOption + } + + return options +}; +var parseOptions_1 = parseOptions$1; + +const numeric = /^[0-9]+$/; +const compareIdentifiers$1 = (a, b) => { + const anum = numeric.test(a); + const bnum = numeric.test(b); + + if (anum && bnum) { + a = +a; + b = +b; + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +}; + +const rcompareIdentifiers = (a, b) => compareIdentifiers$1(b, a); + +var identifiers$1 = { + compareIdentifiers: compareIdentifiers$1, + rcompareIdentifiers, +}; + +const debug = debug_1; +const { MAX_LENGTH, MAX_SAFE_INTEGER } = constants$1; +const { safeRe: re$1, t: t$1 } = reExports; + +const parseOptions = parseOptions_1; +const { compareIdentifiers } = identifiers$1; +let SemVer$d = class SemVer { + constructor (version, options) { + options = parseOptions(options); + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version; + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. 
Got type "${typeof version}".`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options); + this.options = options; + this.loose = !!options.loose; + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease; + + const m = version.trim().match(options.loose ? re$1[t$1.LOOSE] : re$1[t$1.FULL]); + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version; + + // these are actually numbers + this.major = +m[1]; + this.minor = +m[2]; + this.patch = +m[3]; + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = []; + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id; + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }); + } + + this.build = m[5] ? 
m[5].split('.') : []; + this.format(); + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}`; + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}`; + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other); + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options); + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); + } + + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0; + do { + const a = this.prerelease[i]; + const b = other.prerelease[i]; + debug('prerelease compare', i, a, b); + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); + } + + let i = 0; + do { + const a = this.build[i]; + const b = other.build[i]; + debug('prerelease compare', i, a, b); + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + 
return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier, identifierBase) { + switch (release) { + case 'premajor': + this.prerelease.length = 0; + this.patch = 0; + this.minor = 0; + this.major++; + this.inc('pre', identifier, identifierBase); + break + case 'preminor': + this.prerelease.length = 0; + this.patch = 0; + this.minor++; + this.inc('pre', identifier, identifierBase); + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0; + this.inc('patch', identifier, identifierBase); + this.inc('pre', identifier, identifierBase); + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase); + } + this.inc('pre', identifier, identifierBase); + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++; + } + this.minor = 0; + this.patch = 0; + this.prerelease = []; + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++; + } + this.patch = 0; + this.prerelease = []; + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. 
+ // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++; + } + this.prerelease = []; + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 1 : 0; + + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } + + if (this.prerelease.length === 0) { + this.prerelease = [base]; + } else { + let i = this.prerelease.length; + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++; + i = -2; + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base); + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base]; + if (identifierBase === false) { + prerelease = [identifier]; + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease; + } + } else { + this.prerelease = prerelease; + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format(); + if (this.build.length) { + this.raw += `+${this.build.join('.')}`; + } + return this + } +}; + +var semver$1 = SemVer$d; + +const SemVer$c = semver$1; +const parse$6 = (version, options, throwErrors = false) => { + if (version instanceof SemVer$c) { + return version + } + try { + return new SemVer$c(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er + } +}; + +var parse_1 = parse$6; + +const parse$5 = parse_1; +const valid$2 = (version, 
options) => { + const v = parse$5(version, options); + return v ? v.version : null +}; +var valid_1 = valid$2; + +const parse$4 = parse_1; +const clean$1 = (version, options) => { + const s = parse$4(version.trim().replace(/^[=v]+/, ''), options); + return s ? s.version : null +}; +var clean_1 = clean$1; + +const SemVer$b = semver$1; + +const inc$1 = (version, release, options, identifier, identifierBase) => { + if (typeof (options) === 'string') { + identifierBase = identifier; + identifier = options; + options = undefined; + } + + try { + return new SemVer$b( + version instanceof SemVer$b ? version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null + } +}; +var inc_1 = inc$1; + +const parse$3 = parse_1; + +const diff$1 = (version1, version2) => { + const v1 = parse$3(version1, null, true); + const v2 = parse$3(version2, null, true); + const comparison = v1.compare(v2); + + if (comparison === 0) { + return null + } + + const v1Higher = comparison > 0; + const highVersion = v1Higher ? v1 : v2; + const lowVersion = v1Higher ? 
v2 : v1; + const highHasPre = !!highVersion.prerelease.length; + const lowHasPre = !!lowVersion.prerelease.length; + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // Otherwise it can be determined by checking the high version + + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' + } + + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 'pre' : ''; + + if (v1.major !== v2.major) { + return prefix + 'major' + } + + if (v1.minor !== v2.minor) { + return prefix + 'minor' + } + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' +}; + +var diff_1 = diff$1; + +const SemVer$a = semver$1; +const major$1 = (a, loose) => new SemVer$a(a, loose).major; +var major_1 = major$1; + +const SemVer$9 = semver$1; +const minor$1 = (a, loose) => new SemVer$9(a, loose).minor; +var minor_1 = minor$1; + +const SemVer$8 = semver$1; +const patch$1 = (a, loose) => new SemVer$8(a, loose).patch; +var patch_1 = patch$1; + +const parse$2 = parse_1; +const prerelease$1 = (version, options) => { + const parsed = parse$2(version, options); + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null +}; +var prerelease_1 = prerelease$1; + +const SemVer$7 = semver$1; +const compare$b = (a, b, loose) => + new SemVer$7(a, loose).compare(new SemVer$7(b, loose)); + +var compare_1 = compare$b; + +const compare$a = compare_1; +const rcompare$1 = (a, b, loose) => compare$a(b, a, loose); +var rcompare_1 = rcompare$1; + +const compare$9 = compare_1; +const compareLoose$1 = (a, b) => compare$9(a, b, true); +var compareLoose_1 = compareLoose$1; + +const SemVer$6 = semver$1; +const compareBuild$3 = (a, b, loose) => { + const versionA = new SemVer$6(a, loose); + const versionB = new SemVer$6(b, loose); + return versionA.compare(versionB) || versionA.compareBuild(versionB) +}; +var compareBuild_1 = compareBuild$3; + +const compareBuild$2 = compareBuild_1; +const sort$1 = (list, loose) => list.sort((a, b) => compareBuild$2(a, b, loose)); +var sort_1 = sort$1; + +const compareBuild$1 = compareBuild_1; +const rsort$1 = (list, loose) => list.sort((a, b) => compareBuild$1(b, a, loose)); +var rsort_1 = rsort$1; + +const compare$8 = compare_1; +const gt$4 = (a, b, loose) => compare$8(a, b, loose) > 0; +var gt_1 = gt$4; + +const compare$7 = compare_1; +const lt$3 = (a, b, loose) => compare$7(a, b, loose) < 0; +var lt_1 = lt$3; + +const compare$6 = compare_1; +const eq$2 = (a, b, loose) => compare$6(a, b, loose) === 0; +var eq_1 = eq$2; + +const compare$5 = compare_1; +const neq$2 = (a, b, loose) => compare$5(a, b, loose) !== 0; +var neq_1 = neq$2; + +const compare$4 = compare_1; +const gte$3 = (a, b, loose) => compare$4(a, b, loose) >= 0; +var gte_1 = gte$3; + +const compare$3 = compare_1; +const lte$3 = (a, b, loose) => compare$3(a, b, loose) <= 0; +var lte_1 = lte$3; + +const eq$1 = eq_1; +const neq$1 = neq_1; +const gt$3 = gt_1; +const gte$2 = gte_1; +const lt$2 = lt_1; +const lte$2 = lte_1; + +const cmp$1 = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version; + } + if (typeof b === 'object') { + b = 
b.version; + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version; + } + if (typeof b === 'object') { + b = b.version; + } + return a !== b + + case '': + case '=': + case '==': + return eq$1(a, b, loose) + + case '!=': + return neq$1(a, b, loose) + + case '>': + return gt$3(a, b, loose) + + case '>=': + return gte$2(a, b, loose) + + case '<': + return lt$2(a, b, loose) + + case '<=': + return lte$2(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +}; +var cmp_1 = cmp$1; + +const SemVer$5 = semver$1; +const parse$1 = parse_1; +const { safeRe: re, t } = reExports; + +const coerce$1 = (version, options) => { + if (version instanceof SemVer$5) { + return version + } + + if (typeof version === 'number') { + version = String(version); + } + + if (typeof version !== 'string') { + return null + } + + options = options || {}; + + let match = null; + if (!options.rtl) { + match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE]); + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + const coerceRtlRegex = options.includePrerelease ? 
re[t.COERCERTLFULL] : re[t.COERCERTL]; + let next; + while ((next = coerceRtlRegex.exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next; + } + coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length; + } + // leave it in a clean state + coerceRtlRegex.lastIndex = -1; + } + + if (match === null) { + return null + } + + const major = match[2]; + const minor = match[3] || '0'; + const patch = match[4] || '0'; + const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : ''; + const build = options.includePrerelease && match[6] ? `+${match[6]}` : ''; + + return parse$1(`${major}.${minor}.${patch}${prerelease}${build}`, options) +}; +var coerce_1 = coerce$1; + +var iterator$1; +var hasRequiredIterator; + +function requireIterator () { + if (hasRequiredIterator) return iterator$1; + hasRequiredIterator = 1; + iterator$1 = function (Yallist) { + Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value; + } + }; + }; + return iterator$1; +} + +var yallist; +var hasRequiredYallist; + +function requireYallist () { + if (hasRequiredYallist) return yallist; + hasRequiredYallist = 1; + yallist = Yallist; + + Yallist.Node = Node; + Yallist.create = Yallist; + + function Yallist (list) { + var self = this; + if (!(self instanceof Yallist)) { + self = new Yallist(); + } + + self.tail = null; + self.head = null; + self.length = 0; + + if (list && typeof list.forEach === 'function') { + list.forEach(function (item) { + self.push(item); + }); + } else if (arguments.length > 0) { + for (var i = 0, l = arguments.length; i < l; i++) { + self.push(arguments[i]); + } + } + + return self + } + + Yallist.prototype.removeNode = function (node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list') + } + + var 
next = node.next; + var prev = node.prev; + + if (next) { + next.prev = prev; + } + + if (prev) { + prev.next = next; + } + + if (node === this.head) { + this.head = next; + } + if (node === this.tail) { + this.tail = prev; + } + + node.list.length--; + node.next = null; + node.prev = null; + node.list = null; + + return next + }; + + Yallist.prototype.unshiftNode = function (node) { + if (node === this.head) { + return + } + + if (node.list) { + node.list.removeNode(node); + } + + var head = this.head; + node.list = this; + node.next = head; + if (head) { + head.prev = node; + } + + this.head = node; + if (!this.tail) { + this.tail = node; + } + this.length++; + }; + + Yallist.prototype.pushNode = function (node) { + if (node === this.tail) { + return + } + + if (node.list) { + node.list.removeNode(node); + } + + var tail = this.tail; + node.list = this; + node.prev = tail; + if (tail) { + tail.next = node; + } + + this.tail = node; + if (!this.head) { + this.head = node; + } + this.length++; + }; + + Yallist.prototype.push = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + push(this, arguments[i]); + } + return this.length + }; + + Yallist.prototype.unshift = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + unshift(this, arguments[i]); + } + return this.length + }; + + Yallist.prototype.pop = function () { + if (!this.tail) { + return undefined + } + + var res = this.tail.value; + this.tail = this.tail.prev; + if (this.tail) { + this.tail.next = null; + } else { + this.head = null; + } + this.length--; + return res + }; + + Yallist.prototype.shift = function () { + if (!this.head) { + return undefined + } + + var res = this.head.value; + this.head = this.head.next; + if (this.head) { + this.head.prev = null; + } else { + this.tail = null; + } + this.length--; + return res + }; + + Yallist.prototype.forEach = function (fn, thisp) { + thisp = thisp || this; + for (var walker = this.head, i = 0; walker !== null; i++) { + 
fn.call(thisp, walker.value, i, this); + walker = walker.next; + } + }; + + Yallist.prototype.forEachReverse = function (fn, thisp) { + thisp = thisp || this; + for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { + fn.call(thisp, walker.value, i, this); + walker = walker.prev; + } + }; + + Yallist.prototype.get = function (n) { + for (var i = 0, walker = this.head; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.next; + } + if (i === n && walker !== null) { + return walker.value + } + }; + + Yallist.prototype.getReverse = function (n) { + for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.prev; + } + if (i === n && walker !== null) { + return walker.value + } + }; + + Yallist.prototype.map = function (fn, thisp) { + thisp = thisp || this; + var res = new Yallist(); + for (var walker = this.head; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)); + walker = walker.next; + } + return res + }; + + Yallist.prototype.mapReverse = function (fn, thisp) { + thisp = thisp || this; + var res = new Yallist(); + for (var walker = this.tail; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)); + walker = walker.prev; + } + return res + }; + + Yallist.prototype.reduce = function (fn, initial) { + var acc; + var walker = this.head; + if (arguments.length > 1) { + acc = initial; + } else if (this.head) { + walker = this.head.next; + acc = this.head.value; + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = 0; walker !== null; i++) { + acc = fn(acc, walker.value, i); + walker = walker.next; + } + + return acc + }; + + Yallist.prototype.reduceReverse = function (fn, initial) { + var acc; + var walker = this.tail; + if (arguments.length > 1) { + acc = initial; + } else if (this.tail) { + walker = this.tail.prev; + acc = this.tail.value; + } else 
{ + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = this.length - 1; walker !== null; i--) { + acc = fn(acc, walker.value, i); + walker = walker.prev; + } + + return acc + }; + + Yallist.prototype.toArray = function () { + var arr = new Array(this.length); + for (var i = 0, walker = this.head; walker !== null; i++) { + arr[i] = walker.value; + walker = walker.next; + } + return arr + }; + + Yallist.prototype.toArrayReverse = function () { + var arr = new Array(this.length); + for (var i = 0, walker = this.tail; walker !== null; i++) { + arr[i] = walker.value; + walker = walker.prev; + } + return arr + }; + + Yallist.prototype.slice = function (from, to) { + to = to || this.length; + if (to < 0) { + to += this.length; + } + from = from || 0; + if (from < 0) { + from += this.length; + } + var ret = new Yallist(); + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0; + } + if (to > this.length) { + to = this.length; + } + for (var i = 0, walker = this.head; walker !== null && i < from; i++) { + walker = walker.next; + } + for (; walker !== null && i < to; i++, walker = walker.next) { + ret.push(walker.value); + } + return ret + }; + + Yallist.prototype.sliceReverse = function (from, to) { + to = to || this.length; + if (to < 0) { + to += this.length; + } + from = from || 0; + if (from < 0) { + from += this.length; + } + var ret = new Yallist(); + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0; + } + if (to > this.length) { + to = this.length; + } + for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { + walker = walker.prev; + } + for (; walker !== null && i > from; i--, walker = walker.prev) { + ret.push(walker.value); + } + return ret + }; + + Yallist.prototype.splice = function (start, deleteCount, ...nodes) { + if (start > this.length) { + start = this.length - 1; + } + if (start < 0) { + start = this.length + start; + } + + for (var i = 0, walker = 
this.head; walker !== null && i < start; i++) { + walker = walker.next; + } + + var ret = []; + for (var i = 0; walker && i < deleteCount; i++) { + ret.push(walker.value); + walker = this.removeNode(walker); + } + if (walker === null) { + walker = this.tail; + } + + if (walker !== this.head && walker !== this.tail) { + walker = walker.prev; + } + + for (var i = 0; i < nodes.length; i++) { + walker = insert(this, walker, nodes[i]); + } + return ret; + }; + + Yallist.prototype.reverse = function () { + var head = this.head; + var tail = this.tail; + for (var walker = head; walker !== null; walker = walker.prev) { + var p = walker.prev; + walker.prev = walker.next; + walker.next = p; + } + this.head = tail; + this.tail = head; + return this + }; + + function insert (self, node, value) { + var inserted = node === self.head ? + new Node(value, null, node, self) : + new Node(value, node, node.next, self); + + if (inserted.next === null) { + self.tail = inserted; + } + if (inserted.prev === null) { + self.head = inserted; + } + + self.length++; + + return inserted + } + + function push (self, item) { + self.tail = new Node(item, self.tail, null, self); + if (!self.head) { + self.head = self.tail; + } + self.length++; + } + + function unshift (self, item) { + self.head = new Node(item, null, self.head, self); + if (!self.tail) { + self.tail = self.head; + } + self.length++; + } + + function Node (value, prev, next, list) { + if (!(this instanceof Node)) { + return new Node(value, prev, next, list) + } + + this.list = list; + this.value = value; + + if (prev) { + prev.next = this; + this.prev = prev; + } else { + this.prev = null; + } + + if (next) { + next.prev = this; + this.next = next; + } else { + this.next = null; + } + } + + try { + // add if support for Symbol.iterator is present + requireIterator()(Yallist); + } catch (er) {} + return yallist; +} + +var lruCache; +var hasRequiredLruCache; + +function requireLruCache () { + if (hasRequiredLruCache) return lruCache; 
+ hasRequiredLruCache = 1; + + // A linked list to keep track of recently-used-ness + const Yallist = requireYallist(); + + const MAX = Symbol('max'); + const LENGTH = Symbol('length'); + const LENGTH_CALCULATOR = Symbol('lengthCalculator'); + const ALLOW_STALE = Symbol('allowStale'); + const MAX_AGE = Symbol('maxAge'); + const DISPOSE = Symbol('dispose'); + const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet'); + const LRU_LIST = Symbol('lruList'); + const CACHE = Symbol('cache'); + const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet'); + + const naiveLength = () => 1; + + // lruList is a yallist where the head is the youngest + // item, and the tail is the oldest. the list contains the Hit + // objects as the entries. + // Each Hit object has a reference to its Yallist.Node. This + // never changes. + // + // cache is a Map (or PseudoMap) that matches the keys to + // the Yallist.Node object. + class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options }; + + if (!options) + options = {}; + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + this[MAX] = options.max || Infinity; + + const lc = options.length || naiveLength; + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc; + this[ALLOW_STALE] = options.stale || false; + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0; + this[DISPOSE] = options.dispose; + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false; + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false; + this.reset(); + } + + // resize the cache when the max changes. 
+ set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity; + trim(this); + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale; + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA; + trim(this); + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. + set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength; + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC; + this[LENGTH] = 0; + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key); + this[LENGTH] += hit.length; + }); + } + trim(this); + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this; + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev; + forEachStep(this, fn, walker, thisp); + walker = prev; + } + } + + forEach (fn, thisp) { + thisp = thisp || this; + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next; + forEachStep(this, fn, walker, thisp); + walker = next; + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)); + } + + this[CACHE] = new Map(); // hash of items by key + this[LRU_LIST] = new Yallist(); // list of items in order of use recency + this[LENGTH] = 0; // length of items in the list + } + + dump () { + return 
this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE]; + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? Date.now() : 0; + const len = this[LENGTH_CALCULATOR](value, key); + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)); + return false + } + + const node = this[CACHE].get(key); + const item = node.value; + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value); + } + + item.now = now; + item.maxAge = maxAge; + item.value = value; + this[LENGTH] += len - item.length; + item.length = len; + this.get(key); + trim(this); + return true + } + + const hit = new Entry(key, value, len, now, maxAge); + + // oversized objects fall out of cache automatically. 
+ if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value); + + return false + } + + this[LENGTH] += hit.length; + this[LRU_LIST].unshift(hit); + this[CACHE].set(key, this[LRU_LIST].head); + trim(this); + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value; + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail; + if (!node) + return null + + del(this, node); + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)); + } + + load (arr) { + // reset the cache + this.reset(); + + const now = Date.now(); + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l]; + const expiresAt = hit.e || 0; + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v); + else { + const maxAge = expiresAt - now; + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge); + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)); + } + } + + const get = (self, key, doUse) => { + const node = self[CACHE].get(key); + if (node) { + const hit = node.value; + if (isStale(self, hit)) { + del(self, node); + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now(); + self[LRU_LIST].unshiftNode(node); + } + } + return hit.value + } + }; + + const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now; + return hit.maxAge ? 
diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) + }; + + const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. + const prev = walker.prev; + del(self, walker); + walker = prev; + } + } + }; + + const del = (self, node) => { + if (node) { + const hit = node.value; + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value); + + self[LENGTH] -= hit.length; + self[CACHE].delete(hit.key); + self[LRU_LIST].removeNode(node); + } + }; + + class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key; + this.value = value; + this.length = length; + this.now = now; + this.maxAge = maxAge || 0; + } + } + + const forEachStep = (self, fn, node, thisp) => { + let hit = node.value; + if (isStale(self, hit)) { + del(self, node); + if (!self[ALLOW_STALE]) + hit = undefined; + } + if (hit) + fn.call(thisp, hit.value, hit.key, self); + }; + + lruCache = LRUCache; + return lruCache; +} + +var range; +var hasRequiredRange; + +function requireRange () { + if (hasRequiredRange) return range; + hasRequiredRange = 1; + // hoisted class for cyclic dependency + class Range { + constructor (range, options) { + options = parseOptions(options); + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value; + this.set = [[range]]; + this.format(); + return this + } + + this.options = options; + this.loose = !!options.loose; + this.includePrerelease = !!options.includePrerelease; + + // First reduce all whitespace as much as possible so we do not have to rely + // on 
potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range + .trim() + .split(/\s+/) + .join(' '); + + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length); + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } + + // if we have any that are not the null set, throw out null sets. + if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0]; + this.set = this.set.filter(c => !isNullSet(c[0])); + if (this.set.length === 0) { + this.set = [first]; + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c]; + break + } + } + } + } + + this.format(); + } + + format () { + this.range = this.set + .map((comps) => comps.join(' ').trim()) + .join('||') + .trim(); + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE); + const memoKey = memoOpts + ':' + range; + const cached = cache.get(memoKey); + if (cached) { + return cached + } + + const loose = this.options.loose; + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? 
re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]; + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)); + debug('hyphen replace', range); + + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace); + debug('comparator trim', range); + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace); + debug('tilde trim', range); + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace); + debug('caret trim', range); + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)); + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options); + return !!comp.match(re[t.COMPARATORLOOSE]) + }); + } + debug('range list', rangeList); + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map(); + const comparators = rangeList.map(comp => new Comparator(comp, this.options)); + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp); + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete(''); + } + + const result = [...rangeMap.values()]; + cache.set(memoKey, result); + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + 
isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options); + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } + } + + range = Range; + + const LRU = requireLruCache(); + const cache = new LRU({ max: 1000 }); + + const parseOptions = parseOptions_1; + const Comparator = requireComparator(); + const debug = debug_1; + const SemVer = semver$1; + const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, + } = reExports; + const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = constants$1; + + const isNullSet = c => c.value === '<0.0.0-0'; + const isAny = c => c.value === ''; + + // take a set of comparators and determine whether there + // exists a version which can satisfy it + const isSatisfiable = (comparators, options) => { + let result = true; + const remainingComparators = comparators.slice(); + let testComparator = remainingComparators.pop(); + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }); + + testComparator = remainingComparators.pop(); + } + + return result + }; + + // comprised of xranges, tildes, stars, and gtlt's at this point. + // already replaced the hyphen ranges + // turn into a set of JUST comparators. 
+ const parseComparator = (comp, options) => { + debug('comp', comp, options); + comp = replaceCarets(comp, options); + debug('caret', comp); + comp = replaceTildes(comp, options); + debug('tildes', comp); + comp = replaceXRanges(comp, options); + debug('xrange', comp); + comp = replaceStars(comp, options); + debug('stars', comp); + return comp + }; + + const isX = id => !id || id.toLowerCase() === 'x' || id === '*'; + + // ~, ~> --> * (any, kinda silly) + // ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 + // ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 + // ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 + // ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 + // ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 + // ~0.0.1 --> >=0.0.1 <0.1.0-0 + const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') + }; + + const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]; + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr); + let ret; + + if (isX(M)) { + ret = ''; + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0`; + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0`; + } else if (pr) { + debug('replaceTilde pr', pr); + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0`; + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0`; + } + + debug('tilde return', ret); + return ret + }) + }; + + // ^ --> * (any, kinda silly) + // ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 + // ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 + // ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 + // ^1.2.3 --> >=1.2.3 <2.0.0-0 + // ^1.2.0 --> >=1.2.0 <2.0.0-0 + // ^0.0.1 --> >=0.0.1 <0.0.2-0 + // ^0.1.0 --> >=0.1.0 <0.2.0-0 + const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') + }; + + const replaceCaret 
= (comp, options) => { + debug('caret', comp, options); + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]; + const z = options.includePrerelease ? '-0' : ''; + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr); + let ret; + + if (isX(M)) { + ret = ''; + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0`; + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0`; + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0`; + } + } else if (pr) { + debug('replaceCaret pr', pr); + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0`; + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0`; + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0`; + } + } else { + debug('no pr'); + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0`; + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0`; + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0`; + } + } + + debug('caret return', ret); + return ret + }) + }; + + const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options); + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') + }; + + const replaceXRange = (comp, options) => { + comp = comp.trim(); + const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]; + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr); + const xM = isX(M); + const xm = xM || isX(m); + const xp = xm || isX(p); + const anyX = xp; + + if (gtlt === '=' && anyX) { + gtlt = ''; + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? 
'-0' : ''; + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0'; + } else { + // nothing is forbidden + ret = '*'; + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0; + } + p = 0; + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>='; + if (xm) { + M = +M + 1; + m = 0; + p = 0; + } else { + m = +m + 1; + p = 0; + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<'; + if (xm) { + M = +M + 1; + } else { + m = +m + 1; + } + } + + if (gtlt === '<') { + pr = '-0'; + } + + ret = `${gtlt + M}.${m}.${p}${pr}`; + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0`; + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0`; + } + + debug('xRange return', ret); + + return ret + }) + }; + + // Because * is AND-ed with everything else in the comparator, + // and '' means "any version", just remove the *s entirely. + const replaceStars = (comp, options) => { + debug('replaceStars', comp, options); + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') + }; + + const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options); + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') + }; + + // This function is passed to string.replace(re[t.HYPHENRANGE]) + // M, m, patch, prerelease, build + // 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 + // 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do + // 1.2 - 3.4 => >=1.2.0 <3.5.0-0 + const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) => { + if (isX(fM)) { + from = ''; + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}`; + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? 
'-0' : ''}`; + } else if (fpr) { + from = `>=${from}`; + } else { + from = `>=${from}${incPr ? '-0' : ''}`; + } + + if (isX(tM)) { + to = ''; + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0`; + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0`; + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}`; + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0`; + } else { + to = `<=${to}`; + } + + return `${from} ${to}`.trim() + }; + + const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (let i = 0; i < set.length; i++) { + debug(set[i].semver); + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver; + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. 
+ return false + } + + return true + }; + return range; +} + +var comparator; +var hasRequiredComparator; + +function requireComparator () { + if (hasRequiredComparator) return comparator; + hasRequiredComparator = 1; + const ANY = Symbol('SemVer ANY'); + // hoisted class for cyclic dependency + class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options); + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value; + } + } + + comp = comp.trim().split(/\s+/).join(' '); + debug('comparator', comp, options); + this.options = options; + this.loose = !!options.loose; + this.parse(comp); + + if (this.semver === ANY) { + this.value = ''; + } else { + this.value = this.operator + this.semver.version; + } + + debug('comp', this); + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]; + const m = comp.match(r); + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : ''; + if (this.operator === '=') { + this.operator = ''; + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY; + } else { + this.semver = new SemVer(m[2], this.options.loose); + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose); + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options); + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + options = parseOptions(options); + + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } + + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + 
this.operator.startsWith('<') && comp.operator.startsWith('>')) { + return true + } + return false + } + } + + comparator = Comparator; + + const parseOptions = parseOptions_1; + const { safeRe: re, t } = reExports; + const cmp = cmp_1; + const debug = debug_1; + const SemVer = semver$1; + const Range = requireRange(); + return comparator; +} + +const Range$9 = requireRange(); +const satisfies$4 = (version, range, options) => { + try { + range = new Range$9(range, options); + } catch (er) { + return false + } + return range.test(version) +}; +var satisfies_1 = satisfies$4; + +const Range$8 = requireRange(); + +// Mostly just for testing and legacy API reasons +const toComparators$1 = (range, options) => + new Range$8(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')); + +var toComparators_1 = toComparators$1; + +const SemVer$4 = semver$1; +const Range$7 = requireRange(); + +const maxSatisfying$1 = (versions, range, options) => { + let max = null; + let maxSV = null; + let rangeObj = null; + try { + rangeObj = new Range$7(range, options); + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v; + maxSV = new SemVer$4(max, options); + } + } + }); + return max +}; +var maxSatisfying_1 = maxSatisfying$1; + +const SemVer$3 = semver$1; +const Range$6 = requireRange(); +const minSatisfying$1 = (versions, range, options) => { + let min = null; + let minSV = null; + let rangeObj = null; + try { + rangeObj = new Range$6(range, options); + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v; + minSV = new SemVer$3(min, options); + } + } + }); + return min +}; +var minSatisfying_1 = minSatisfying$1; + +const SemVer$2 = semver$1; +const Range$5 = 
requireRange(); +const gt$2 = gt_1; + +const minVersion$1 = (range, loose) => { + range = new Range$5(range, loose); + + let minver = new SemVer$2('0.0.0'); + if (range.test(minver)) { + return minver + } + + minver = new SemVer$2('0.0.0-0'); + if (range.test(minver)) { + return minver + } + + minver = null; + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i]; + + let setMin = null; + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. + const compver = new SemVer$2(comparator.semver.version); + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++; + } else { + compver.prerelease.push(0); + } + compver.raw = compver.format(); + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt$2(compver, setMin)) { + setMin = compver; + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }); + if (setMin && (!minver || gt$2(minver, setMin))) { + minver = setMin; + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +}; +var minVersion_1 = minVersion$1; + +const Range$4 = requireRange(); +const validRange$1 = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
+ // This will throw if it's invalid anyway + return new Range$4(range, options).range || '*' + } catch (er) { + return null + } +}; +var valid$1 = validRange$1; + +const SemVer$1 = semver$1; +const Comparator$2 = requireComparator(); +const { ANY: ANY$1 } = Comparator$2; +const Range$3 = requireRange(); +const satisfies$3 = satisfies_1; +const gt$1 = gt_1; +const lt$1 = lt_1; +const lte$1 = lte_1; +const gte$1 = gte_1; + +const outside$3 = (version, range, hilo, options) => { + version = new SemVer$1(version, options); + range = new Range$3(range, options); + + let gtfn, ltefn, ltfn, comp, ecomp; + switch (hilo) { + case '>': + gtfn = gt$1; + ltefn = lte$1; + ltfn = lt$1; + comp = '>'; + ecomp = '>='; + break + case '<': + gtfn = lt$1; + ltefn = gte$1; + ltfn = gt$1; + comp = '<'; + ecomp = '<='; + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies$3(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. 
+ + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i]; + + let high = null; + let low = null; + + comparators.forEach((comparator) => { + if (comparator.semver === ANY$1) { + comparator = new Comparator$2('>=0.0.0'); + } + high = high || comparator; + low = low || comparator; + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator; + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator; + } + }); + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +}; + +var outside_1 = outside$3; + +// Determine if version is greater than all the versions possible in the range. +const outside$2 = outside_1; +const gtr$1 = (version, range, options) => outside$2(version, range, '>', options); +var gtr_1 = gtr$1; + +const outside$1 = outside_1; +// Determine if version is less than all the versions possible in the range +const ltr$1 = (version, range, options) => outside$1(version, range, '<', options); +var ltr_1 = ltr$1; + +const Range$2 = requireRange(); +const intersects$1 = (r1, r2, options) => { + r1 = new Range$2(r1, options); + r2 = new Range$2(r2, options); + return r1.intersects(r2, options) +}; +var intersects_1 = intersects$1; + +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. 
+const satisfies$2 = satisfies_1; +const compare$2 = compare_1; +var simplify = (versions, range, options) => { + const set = []; + let first = null; + let prev = null; + const v = versions.sort((a, b) => compare$2(a, b, options)); + for (const version of v) { + const included = satisfies$2(version, range, options); + if (included) { + prev = version; + if (!first) { + first = version; + } + } else { + if (prev) { + set.push([first, prev]); + } + prev = null; + first = null; + } + } + if (first) { + set.push([first, null]); + } + + const ranges = []; + for (const [min, max] of set) { + if (min === max) { + ranges.push(min); + } else if (!max && min === v[0]) { + ranges.push('*'); + } else if (!max) { + ranges.push(`>=${min}`); + } else if (min === v[0]) { + ranges.push(`<=${max}`); + } else { + ranges.push(`${min} - ${max}`); + } + } + const simplified = ranges.join(' || '); + const original = typeof range.raw === 'string' ? range.raw : String(range); + return simplified.length < original.length ? 
simplified : range +}; + +const Range$1 = requireRange(); +const Comparator$1 = requireComparator(); +const { ANY } = Comparator$1; +const satisfies$1 = satisfies_1; +const compare$1 = compare_1; + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset$1 = (sub, dom, options = {}) => { + if (sub 
=== dom) { + return true + } + + sub = new Range$1(sub, options); + dom = new Range$1(dom, options); + let sawNonNull = false; + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, simpleDom, options); + sawNonNull = sawNonNull || isSub !== null; + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. + if (sawNonNull) { + return false + } + } + return true +}; + +const minimumVersionWithPreRelease = [new Comparator$1('>=0.0.0-0')]; +const minimumVersion = [new Comparator$1('>=0.0.0')]; + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease; + } else { + sub = minimumVersion; + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = minimumVersion; + } + } + + const eqSet = new Set(); + let gt, lt; + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options); + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options); + } else { + eqSet.add(c.semver); + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp; + if (gt && lt) { + gtltComp = compare$1(gt.semver, lt.semver, options); + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies$1(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies$1(eq, String(lt), 
options)) { + return null + } + + for (const c of dom) { + if (!satisfies$1(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower; + let hasDomLT, hasDomGT; + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false; + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? gt.semver : false; + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false; + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>='; + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<='; + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false; + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options); + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies$1(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false; + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options); + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies$1(lt.semver, String(c), options)) { + return false + } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } 
+ + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. + // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +}; + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare$1(a.semver, b.semver, options); + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +}; + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare$1(a.semver, b.semver, options); + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? 
b + : a +}; + +var subset_1 = subset$1; + +// just pre-load all the stuff that index.js lazily exports +const internalRe = reExports; +const constants = constants$1; +const SemVer = semver$1; +const identifiers = identifiers$1; +const parse = parse_1; +const valid = valid_1; +const clean = clean_1; +const inc = inc_1; +const diff = diff_1; +const major = major_1; +const minor = minor_1; +const patch = patch_1; +const prerelease = prerelease_1; +const compare = compare_1; +const rcompare = rcompare_1; +const compareLoose = compareLoose_1; +const compareBuild = compareBuild_1; +const sort = sort_1; +const rsort = rsort_1; +const gt = gt_1; +const lt = lt_1; +const eq = eq_1; +const neq = neq_1; +const gte = gte_1; +const lte = lte_1; +const cmp = cmp_1; +const coerce = coerce_1; +const Comparator = requireComparator(); +const Range = requireRange(); +const satisfies = satisfies_1; +const toComparators = toComparators_1; +const maxSatisfying = maxSatisfying_1; +const minSatisfying = minSatisfying_1; +const minVersion = minVersion_1; +const validRange = valid$1; +const outside = outside_1; +const gtr = gtr_1; +const ltr = ltr_1; +const intersects = intersects_1; +const simplifyRange = simplify; +const subset = subset_1; +var semver = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +}; + +Object.defineProperty(chunk4D6XQBHA, "__esModule", {value: 
true}); + +var _chunk6ZDHSOUVjs$4 = chunk6ZDHSOUV; + +// src/versions.ts + + + + + + +var _semver = semver; +var _superstruct$5 = require$$1; +var VersionStruct = _superstruct$5.refine.call(void 0, + _superstruct$5.string.call(void 0, ), + "Version", + (value) => { + if (_semver.valid.call(void 0, value) === null) { + return `Expected SemVer version, got "${value}"`; + } + return true; + } +); +var VersionRangeStruct = _superstruct$5.refine.call(void 0, + _superstruct$5.string.call(void 0, ), + "Version range", + (value) => { + if (_semver.validRange.call(void 0, value) === null) { + return `Expected SemVer range, got "${value}"`; + } + return true; + } +); +function isValidSemVerVersion(version) { + return _superstruct$5.is.call(void 0, version, VersionStruct); +} +function isValidSemVerRange(versionRange) { + return _superstruct$5.is.call(void 0, versionRange, VersionRangeStruct); +} +function assertIsSemVerVersion(version) { + _chunk6ZDHSOUVjs$4.assertStruct.call(void 0, version, VersionStruct); +} +function assertIsSemVerRange(range) { + _chunk6ZDHSOUVjs$4.assertStruct.call(void 0, range, VersionRangeStruct); +} +function gtVersion(version1, version2) { + return _semver.gt.call(void 0, version1, version2); +} +function gtRange(version, range) { + return _semver.gtr.call(void 0, version, range); +} +function satisfiesVersionRange(version, versionRange) { + return _semver.satisfies.call(void 0, version, versionRange, { + includePrerelease: true + }); +} + + + + + + + + + + + +chunk4D6XQBHA.VersionStruct = VersionStruct; chunk4D6XQBHA.VersionRangeStruct = VersionRangeStruct; chunk4D6XQBHA.isValidSemVerVersion = isValidSemVerVersion; chunk4D6XQBHA.isValidSemVerRange = isValidSemVerRange; chunk4D6XQBHA.assertIsSemVerVersion = assertIsSemVerVersion; chunk4D6XQBHA.assertIsSemVerRange = assertIsSemVerRange; chunk4D6XQBHA.gtVersion = gtVersion; chunk4D6XQBHA.gtRange = gtRange; chunk4D6XQBHA.satisfiesVersionRange = satisfiesVersionRange; + +var chunkOLLG4H35 = {}; + 
+Object.defineProperty(chunkOLLG4H35, "__esModule", {value: true}); + +var _chunk6ZDHSOUVjs$3 = chunk6ZDHSOUV; + + +var _chunkQVEKZRZ2js$1 = chunkQVEKZRZ2; + +// src/json.ts + + + + + + + + + + + + + + + + + + + + +var _superstruct$4 = require$$1; +var object = (schema) => ( + // The type is slightly different from a regular object struct, because we + // want to make properties with `undefined` in their type optional, but not + // `undefined` itself. This means that we need a type cast. + _superstruct$4.object.call(void 0, schema) +); +function hasOptional({ path, branch }) { + const field = path[path.length - 1]; + return _chunkQVEKZRZ2js$1.hasProperty.call(void 0, branch[branch.length - 2], field); +} +function exactOptional(struct) { + return new (_superstruct$4.Struct)({ + ...struct, + type: `optional ${struct.type}`, + validator: (value, context) => !hasOptional(context) || struct.validator(value, context), + refiner: (value, context) => !hasOptional(context) || struct.refiner(value, context) + }); +} +var finiteNumber = () => _superstruct$4.define.call(void 0, "finite number", (value) => { + return _superstruct$4.is.call(void 0, value, _superstruct$4.number.call(void 0, )) && Number.isFinite(value); +}); +var UnsafeJsonStruct = _superstruct$4.union.call(void 0, [ + _superstruct$4.literal.call(void 0, null), + _superstruct$4.boolean.call(void 0, ), + finiteNumber(), + _superstruct$4.string.call(void 0, ), + _superstruct$4.array.call(void 0, _superstruct$4.lazy.call(void 0, () => UnsafeJsonStruct)), + _superstruct$4.record.call(void 0, + _superstruct$4.string.call(void 0, ), + _superstruct$4.lazy.call(void 0, () => UnsafeJsonStruct) + ) +]); +var JsonStruct = _superstruct$4.coerce.call(void 0, UnsafeJsonStruct, _superstruct$4.any.call(void 0, ), (value) => { + _chunk6ZDHSOUVjs$3.assertStruct.call(void 0, value, UnsafeJsonStruct); + return JSON.parse( + JSON.stringify(value, (propKey, propValue) => { + if (propKey === "__proto__" || propKey === "constructor") { 
+ return void 0; + } + return propValue; + }) + ); +}); +function isValidJson(value) { + try { + getSafeJson(value); + return true; + } catch (e) { + return false; + } +} +function getSafeJson(value) { + return _superstruct$4.create.call(void 0, value, JsonStruct); +} +function getJsonSize(value) { + _chunk6ZDHSOUVjs$3.assertStruct.call(void 0, value, JsonStruct, "Invalid JSON value"); + const json = JSON.stringify(value); + return new TextEncoder().encode(json).byteLength; +} +var jsonrpc2 = "2.0"; +var JsonRpcVersionStruct = _superstruct$4.literal.call(void 0, jsonrpc2); +var JsonRpcIdStruct = _superstruct$4.nullable.call(void 0, _superstruct$4.union.call(void 0, [_superstruct$4.number.call(void 0, ), _superstruct$4.string.call(void 0, )])); +var JsonRpcErrorStruct = object({ + code: _superstruct$4.integer.call(void 0, ), + message: _superstruct$4.string.call(void 0, ), + data: exactOptional(JsonStruct), + stack: exactOptional(_superstruct$4.string.call(void 0, )) +}); +var JsonRpcParamsStruct = _superstruct$4.union.call(void 0, [_superstruct$4.record.call(void 0, _superstruct$4.string.call(void 0, ), JsonStruct), _superstruct$4.array.call(void 0, JsonStruct)]); +var JsonRpcRequestStruct = object({ + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + method: _superstruct$4.string.call(void 0, ), + params: exactOptional(JsonRpcParamsStruct) +}); +var JsonRpcNotificationStruct = object({ + jsonrpc: JsonRpcVersionStruct, + method: _superstruct$4.string.call(void 0, ), + params: exactOptional(JsonRpcParamsStruct) +}); +function isJsonRpcNotification(value) { + return _superstruct$4.is.call(void 0, value, JsonRpcNotificationStruct); +} +function assertIsJsonRpcNotification(value, ErrorWrapper) { + _chunk6ZDHSOUVjs$3.assertStruct.call(void 0, + value, + JsonRpcNotificationStruct, + "Invalid JSON-RPC notification", + ErrorWrapper + ); +} +function isJsonRpcRequest(value) { + return _superstruct$4.is.call(void 0, value, JsonRpcRequestStruct); +} +function 
assertIsJsonRpcRequest(value, ErrorWrapper) { + _chunk6ZDHSOUVjs$3.assertStruct.call(void 0, + value, + JsonRpcRequestStruct, + "Invalid JSON-RPC request", + ErrorWrapper + ); +} +var PendingJsonRpcResponseStruct = _superstruct$4.object.call(void 0, { + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + result: _superstruct$4.optional.call(void 0, _superstruct$4.unknown.call(void 0, )), + error: _superstruct$4.optional.call(void 0, JsonRpcErrorStruct) +}); +var JsonRpcSuccessStruct = object({ + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + result: JsonStruct +}); +var JsonRpcFailureStruct = object({ + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + error: JsonRpcErrorStruct +}); +var JsonRpcResponseStruct = _superstruct$4.union.call(void 0, [ + JsonRpcSuccessStruct, + JsonRpcFailureStruct +]); +function isPendingJsonRpcResponse(response) { + return _superstruct$4.is.call(void 0, response, PendingJsonRpcResponseStruct); +} +function assertIsPendingJsonRpcResponse(response, ErrorWrapper) { + _chunk6ZDHSOUVjs$3.assertStruct.call(void 0, + response, + PendingJsonRpcResponseStruct, + "Invalid pending JSON-RPC response", + ErrorWrapper + ); +} +function isJsonRpcResponse(response) { + return _superstruct$4.is.call(void 0, response, JsonRpcResponseStruct); +} +function assertIsJsonRpcResponse(value, ErrorWrapper) { + _chunk6ZDHSOUVjs$3.assertStruct.call(void 0, + value, + JsonRpcResponseStruct, + "Invalid JSON-RPC response", + ErrorWrapper + ); +} +function isJsonRpcSuccess(value) { + return _superstruct$4.is.call(void 0, value, JsonRpcSuccessStruct); +} +function assertIsJsonRpcSuccess(value, ErrorWrapper) { + _chunk6ZDHSOUVjs$3.assertStruct.call(void 0, + value, + JsonRpcSuccessStruct, + "Invalid JSON-RPC success response", + ErrorWrapper + ); +} +function isJsonRpcFailure(value) { + return _superstruct$4.is.call(void 0, value, JsonRpcFailureStruct); +} +function assertIsJsonRpcFailure(value, ErrorWrapper) { + 
_chunk6ZDHSOUVjs$3.assertStruct.call(void 0, + value, + JsonRpcFailureStruct, + "Invalid JSON-RPC failure response", + ErrorWrapper + ); +} +function isJsonRpcError(value) { + return _superstruct$4.is.call(void 0, value, JsonRpcErrorStruct); +} +function assertIsJsonRpcError(value, ErrorWrapper) { + _chunk6ZDHSOUVjs$3.assertStruct.call(void 0, + value, + JsonRpcErrorStruct, + "Invalid JSON-RPC error", + ErrorWrapper + ); +} +function getJsonRpcIdValidator(options) { + const { permitEmptyString, permitFractions, permitNull } = { + permitEmptyString: true, + permitFractions: false, + permitNull: true, + ...options + }; + const isValidJsonRpcId = (id) => { + return Boolean( + typeof id === "number" && (permitFractions || Number.isInteger(id)) || typeof id === "string" && (permitEmptyString || id.length > 0) || permitNull && id === null + ); + }; + return isValidJsonRpcId; +} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +chunkOLLG4H35.object = object; chunkOLLG4H35.exactOptional = exactOptional; chunkOLLG4H35.UnsafeJsonStruct = UnsafeJsonStruct; chunkOLLG4H35.JsonStruct = JsonStruct; chunkOLLG4H35.isValidJson = isValidJson; chunkOLLG4H35.getSafeJson = getSafeJson; chunkOLLG4H35.getJsonSize = getJsonSize; chunkOLLG4H35.jsonrpc2 = jsonrpc2; chunkOLLG4H35.JsonRpcVersionStruct = JsonRpcVersionStruct; chunkOLLG4H35.JsonRpcIdStruct = JsonRpcIdStruct; chunkOLLG4H35.JsonRpcErrorStruct = JsonRpcErrorStruct; chunkOLLG4H35.JsonRpcParamsStruct = JsonRpcParamsStruct; chunkOLLG4H35.JsonRpcRequestStruct = JsonRpcRequestStruct; chunkOLLG4H35.JsonRpcNotificationStruct = JsonRpcNotificationStruct; chunkOLLG4H35.isJsonRpcNotification = isJsonRpcNotification; chunkOLLG4H35.assertIsJsonRpcNotification = assertIsJsonRpcNotification; chunkOLLG4H35.isJsonRpcRequest = isJsonRpcRequest; chunkOLLG4H35.assertIsJsonRpcRequest = assertIsJsonRpcRequest; chunkOLLG4H35.PendingJsonRpcResponseStruct = PendingJsonRpcResponseStruct; chunkOLLG4H35.JsonRpcSuccessStruct = 
JsonRpcSuccessStruct; chunkOLLG4H35.JsonRpcFailureStruct = JsonRpcFailureStruct; chunkOLLG4H35.JsonRpcResponseStruct = JsonRpcResponseStruct; chunkOLLG4H35.isPendingJsonRpcResponse = isPendingJsonRpcResponse; chunkOLLG4H35.assertIsPendingJsonRpcResponse = assertIsPendingJsonRpcResponse; chunkOLLG4H35.isJsonRpcResponse = isJsonRpcResponse; chunkOLLG4H35.assertIsJsonRpcResponse = assertIsJsonRpcResponse; chunkOLLG4H35.isJsonRpcSuccess = isJsonRpcSuccess; chunkOLLG4H35.assertIsJsonRpcSuccess = assertIsJsonRpcSuccess; chunkOLLG4H35.isJsonRpcFailure = isJsonRpcFailure; chunkOLLG4H35.assertIsJsonRpcFailure = assertIsJsonRpcFailure; chunkOLLG4H35.isJsonRpcError = isJsonRpcError; chunkOLLG4H35.assertIsJsonRpcError = assertIsJsonRpcError; chunkOLLG4H35.getJsonRpcIdValidator = getJsonRpcIdValidator; + +var chunk2LBGT4GH = {}; + +Object.defineProperty(chunk2LBGT4GH, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }// src/logging.ts +var _debug = srcExports; var _debug2 = _interopRequireDefault(_debug); +var globalLogger = _debug2.default.call(void 0, "metamask"); +function createProjectLogger(projectName) { + return globalLogger.extend(projectName); +} +function createModuleLogger(projectLogger, moduleName) { + return projectLogger.extend(moduleName); +} + + + + +chunk2LBGT4GH.createProjectLogger = createProjectLogger; chunk2LBGT4GH.createModuleLogger = createModuleLogger; + +var chunkYWAID473 = {}; + +Object.defineProperty(chunkYWAID473, "__esModule", {value: true}); function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => 
value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }// src/caip-types.ts +var _superstruct$3 = require$$1; +var CAIP_CHAIN_ID_REGEX = /^(?[-a-z0-9]{3,8}):(?[-_a-zA-Z0-9]{1,32})$/u; +var CAIP_NAMESPACE_REGEX = /^[-a-z0-9]{3,8}$/u; +var CAIP_REFERENCE_REGEX = /^[-_a-zA-Z0-9]{1,32}$/u; +var CAIP_ACCOUNT_ID_REGEX = /^(?(?[-a-z0-9]{3,8}):(?[-_a-zA-Z0-9]{1,32})):(?[-.%a-zA-Z0-9]{1,128})$/u; +var CAIP_ACCOUNT_ADDRESS_REGEX = /^[-.%a-zA-Z0-9]{1,128}$/u; +var CaipChainIdStruct = _superstruct$3.pattern.call(void 0, _superstruct$3.string.call(void 0, ), CAIP_CHAIN_ID_REGEX); +var CaipNamespaceStruct = _superstruct$3.pattern.call(void 0, _superstruct$3.string.call(void 0, ), CAIP_NAMESPACE_REGEX); +var CaipReferenceStruct = _superstruct$3.pattern.call(void 0, _superstruct$3.string.call(void 0, ), CAIP_REFERENCE_REGEX); +var CaipAccountIdStruct = _superstruct$3.pattern.call(void 0, _superstruct$3.string.call(void 0, ), CAIP_ACCOUNT_ID_REGEX); +var CaipAccountAddressStruct = _superstruct$3.pattern.call(void 0, + _superstruct$3.string.call(void 0, ), + CAIP_ACCOUNT_ADDRESS_REGEX +); +var KnownCaipNamespace = /* @__PURE__ */ ((KnownCaipNamespace2) => { + KnownCaipNamespace2["Eip155"] = "eip155"; + return KnownCaipNamespace2; +})(KnownCaipNamespace || {}); +function isCaipChainId(value) { + return _superstruct$3.is.call(void 0, value, CaipChainIdStruct); +} +function isCaipNamespace(value) { + return _superstruct$3.is.call(void 0, value, CaipNamespaceStruct); +} +function isCaipReference(value) { + return _superstruct$3.is.call(void 0, value, CaipReferenceStruct); +} +function isCaipAccountId(value) { + return _superstruct$3.is.call(void 0, value, CaipAccountIdStruct); +} +function isCaipAccountAddress(value) { + return _superstruct$3.is.call(void 0, value, CaipAccountAddressStruct); +} +function parseCaipChainId(caipChainId) { + const match = CAIP_CHAIN_ID_REGEX.exec(caipChainId); + if (!_optionalChain([match, 'optionalAccess', _ => _.groups])) { + 
throw new Error("Invalid CAIP chain ID."); + } + return { + namespace: match.groups.namespace, + reference: match.groups.reference + }; +} +function parseCaipAccountId(caipAccountId) { + const match = CAIP_ACCOUNT_ID_REGEX.exec(caipAccountId); + if (!_optionalChain([match, 'optionalAccess', _2 => _2.groups])) { + throw new Error("Invalid CAIP account ID."); + } + return { + address: match.groups.accountAddress, + chainId: match.groups.chainId, + chain: { + namespace: match.groups.namespace, + reference: match.groups.reference + } + }; +} +function toCaipChainId(namespace, reference) { + if (!isCaipNamespace(namespace)) { + throw new Error( + `Invalid "namespace", must match: ${CAIP_NAMESPACE_REGEX.toString()}` + ); + } + if (!isCaipReference(reference)) { + throw new Error( + `Invalid "reference", must match: ${CAIP_REFERENCE_REGEX.toString()}` + ); + } + return `${namespace}:${reference}`; +} + + + + + + + + + + + + + + + + + + + + + +chunkYWAID473.CAIP_CHAIN_ID_REGEX = CAIP_CHAIN_ID_REGEX; chunkYWAID473.CAIP_NAMESPACE_REGEX = CAIP_NAMESPACE_REGEX; chunkYWAID473.CAIP_REFERENCE_REGEX = CAIP_REFERENCE_REGEX; chunkYWAID473.CAIP_ACCOUNT_ID_REGEX = CAIP_ACCOUNT_ID_REGEX; chunkYWAID473.CAIP_ACCOUNT_ADDRESS_REGEX = CAIP_ACCOUNT_ADDRESS_REGEX; chunkYWAID473.CaipChainIdStruct = CaipChainIdStruct; chunkYWAID473.CaipNamespaceStruct = CaipNamespaceStruct; chunkYWAID473.CaipReferenceStruct = CaipReferenceStruct; chunkYWAID473.CaipAccountIdStruct = CaipAccountIdStruct; chunkYWAID473.CaipAccountAddressStruct = CaipAccountAddressStruct; chunkYWAID473.KnownCaipNamespace = KnownCaipNamespace; chunkYWAID473.isCaipChainId = isCaipChainId; chunkYWAID473.isCaipNamespace = isCaipNamespace; chunkYWAID473.isCaipReference = isCaipReference; chunkYWAID473.isCaipAccountId = isCaipAccountId; chunkYWAID473.isCaipAccountAddress = isCaipAccountAddress; chunkYWAID473.parseCaipChainId = parseCaipChainId; chunkYWAID473.parseCaipAccountId = parseCaipAccountId; chunkYWAID473.toCaipChainId = 
toCaipChainId; + +var chunkE4C7EW4R = {}; + +var chunk6NZW4WK4 = {}; + +Object.defineProperty(chunk6NZW4WK4, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } + +var _chunk6ZDHSOUVjs$2 = chunk6ZDHSOUV; + +// src/base64.ts +var _superstruct$2 = require$$1; +var base64 = (struct, options = {}) => { + const paddingRequired = _nullishCoalesce(options.paddingRequired, () => ( false)); + const characterSet = _nullishCoalesce(options.characterSet, () => ( "base64")); + let letters; + if (characterSet === "base64") { + letters = String.raw`[A-Za-z0-9+\/]`; + } else { + _chunk6ZDHSOUVjs$2.assert.call(void 0, characterSet === "base64url"); + letters = String.raw`[-_A-Za-z0-9]`; + } + let re; + if (paddingRequired) { + re = new RegExp( + `^(?:${letters}{4})*(?:${letters}{3}=|${letters}{2}==)?$`, + "u" + ); + } else { + re = new RegExp( + `^(?:${letters}{4})*(?:${letters}{2,3}|${letters}{3}=|${letters}{2}==)?$`, + "u" + ); + } + return _superstruct$2.pattern.call(void 0, struct, re); +}; + + + +chunk6NZW4WK4.base64 = base64; + +Object.defineProperty(chunkE4C7EW4R, "__esModule", {value: true}); + +var _chunk6NZW4WK4js$1 = chunk6NZW4WK4; + +// src/checksum.ts +var _superstruct$1 = require$$1; +var ChecksumStruct = _superstruct$1.size.call(void 0, + _chunk6NZW4WK4js$1.base64.call(void 0, _superstruct$1.string.call(void 0, ), { paddingRequired: true }), + 44, + 44 +); + + + +chunkE4C7EW4R.ChecksumStruct = ChecksumStruct; + +var chunkDHVKFDHQ = {}; + +Object.defineProperty(chunkDHVKFDHQ, "__esModule", {value: true}); + + + +var _chunkQEPVHEP7js$1 = chunkQEPVHEP7; + + +var _chunk6ZDHSOUVjs$1 = chunk6ZDHSOUV; + +// src/coercers.ts + + + + + + + + + +var _superstruct = require$$1; +var NumberLikeStruct = _superstruct.union.call(void 0, [_superstruct.number.call(void 0, ), _superstruct.bigint.call(void 0, ), _superstruct.string.call(void 0, ), _chunkQEPVHEP7js$1.StrictHexStruct]); +var NumberCoercer = 
_superstruct.coerce.call(void 0, _superstruct.number.call(void 0, ), NumberLikeStruct, Number); +var BigIntCoercer = _superstruct.coerce.call(void 0, _superstruct.bigint.call(void 0, ), NumberLikeStruct, BigInt); +_superstruct.union.call(void 0, [_chunkQEPVHEP7js$1.StrictHexStruct, _superstruct.instance.call(void 0, Uint8Array)]); +var BytesCoercer = _superstruct.coerce.call(void 0, + _superstruct.instance.call(void 0, Uint8Array), + _superstruct.union.call(void 0, [_chunkQEPVHEP7js$1.StrictHexStruct]), + _chunkQEPVHEP7js$1.hexToBytes +); +var HexCoercer = _superstruct.coerce.call(void 0, _chunkQEPVHEP7js$1.StrictHexStruct, _superstruct.instance.call(void 0, Uint8Array), _chunkQEPVHEP7js$1.bytesToHex); +function createNumber(value) { + try { + const result = _superstruct.create.call(void 0, value, NumberCoercer); + _chunk6ZDHSOUVjs$1.assert.call(void 0, + Number.isFinite(result), + `Expected a number-like value, got "${value}".` + ); + return result; + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error(`Expected a number-like value, got "${value}".`); + } + throw error; + } +} +function createBigInt(value) { + try { + return _superstruct.create.call(void 0, value, BigIntCoercer); + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error( + `Expected a number-like value, got "${String(error.value)}".` + ); + } + throw error; + } +} +function createBytes(value) { + if (typeof value === "string" && value.toLowerCase() === "0x") { + return new Uint8Array(); + } + try { + return _superstruct.create.call(void 0, value, BytesCoercer); + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error( + `Expected a bytes-like value, got "${String(error.value)}".` + ); + } + throw error; + } +} +function createHex(value) { + if (value instanceof Uint8Array && value.length === 0 || typeof value === "string" && value.toLowerCase() === "0x") { + return "0x"; + } + try { + return 
_superstruct.create.call(void 0, value, HexCoercer); + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error( + `Expected a bytes-like value, got "${String(error.value)}".` + ); + } + throw error; + } +} + + + + + + +chunkDHVKFDHQ.createNumber = createNumber; chunkDHVKFDHQ.createBigInt = createBigInt; chunkDHVKFDHQ.createBytes = createBytes; chunkDHVKFDHQ.createHex = createHex; + +var chunkZ2RGWDD7 = {}; + +var chunk3W5G4CYI = {}; + +Object.defineProperty(chunk3W5G4CYI, "__esModule", {value: true});var __accessCheck = (obj, member, msg) => { + if (!member.has(obj)) + throw TypeError("Cannot " + msg); +}; +var __privateGet = (obj, member, getter) => { + __accessCheck(obj, member, "read from private field"); + return getter ? getter.call(obj) : member.get(obj); +}; +var __privateAdd = (obj, member, value) => { + if (member.has(obj)) + throw TypeError("Cannot add the same private member more than once"); + member instanceof WeakSet ? member.add(obj) : member.set(obj, value); +}; +var __privateSet = (obj, member, value, setter) => { + __accessCheck(obj, member, "write to private field"); + setter ? 
setter.call(obj, value) : member.set(obj, value); + return value; +}; + + + + + +chunk3W5G4CYI.__privateGet = __privateGet; chunk3W5G4CYI.__privateAdd = __privateAdd; chunk3W5G4CYI.__privateSet = __privateSet; + +Object.defineProperty(chunkZ2RGWDD7, "__esModule", {value: true}); + + + +var _chunk3W5G4CYIjs = chunk3W5G4CYI; + +// src/collections.ts +var _map; +var FrozenMap = class { + constructor(entries) { + _chunk3W5G4CYIjs.__privateAdd.call(void 0, this, _map, void 0); + _chunk3W5G4CYIjs.__privateSet.call(void 0, this, _map, new Map(entries)); + Object.freeze(this); + } + get size() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).size; + } + [Symbol.iterator]() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map)[Symbol.iterator](); + } + entries() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).entries(); + } + forEach(callbackfn, thisArg) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).forEach( + (value, key, _map2) => callbackfn.call(thisArg, value, key, this) + ); + } + get(key) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).get(key); + } + has(key) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).has(key); + } + keys() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).keys(); + } + values() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).values(); + } + toString() { + return `FrozenMap(${this.size}) {${this.size > 0 ? 
` ${[...this.entries()].map(([key, value]) => `${String(key)} => ${String(value)}`).join(", ")} ` : ""}}`; + } +}; +_map = new WeakMap(); +var _set; +var FrozenSet = class { + constructor(values) { + _chunk3W5G4CYIjs.__privateAdd.call(void 0, this, _set, void 0); + _chunk3W5G4CYIjs.__privateSet.call(void 0, this, _set, new Set(values)); + Object.freeze(this); + } + get size() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).size; + } + [Symbol.iterator]() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set)[Symbol.iterator](); + } + entries() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).entries(); + } + forEach(callbackfn, thisArg) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).forEach( + (value, value2, _set2) => callbackfn.call(thisArg, value, value2, this) + ); + } + has(value) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).has(value); + } + keys() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).keys(); + } + values() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).values(); + } + toString() { + return `FrozenSet(${this.size}) {${this.size > 0 ? 
` ${[...this.values()].map((member) => String(member)).join(", ")} ` : ""}}`; + } +}; +_set = new WeakMap(); +Object.freeze(FrozenMap); +Object.freeze(FrozenMap.prototype); +Object.freeze(FrozenSet); +Object.freeze(FrozenSet.prototype); + + + + +chunkZ2RGWDD7.FrozenMap = FrozenMap; chunkZ2RGWDD7.FrozenSet = FrozenSet; + +Object.defineProperty(dist$2, "__esModule", {value: true}); + + + + + +var _chunkVFXTVNXNjs = chunkVFXTVNXN; + + + +var _chunkC6HGFEYLjs = chunkC6HGFEYL; + + + + +var _chunk4RMX5YWEjs = chunk4RMX5YWE; + + + + + + + + + + + +var _chunk4D6XQBHAjs = chunk4D6XQBHA; + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +var _chunkOLLG4H35js = chunkOLLG4H35; + + + + +var _chunk2LBGT4GHjs = chunk2LBGT4GH; + + + + + + + + + + + + + + + + + + + + +var _chunkYWAID473js = chunkYWAID473; + + +var _chunkE4C7EW4Rjs = chunkE4C7EW4R; + + +var _chunk6NZW4WK4js = chunk6NZW4WK4; + + + + + +var _chunkDHVKFDHQjs = chunkDHVKFDHQ; + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +var _chunkQEPVHEP7js = chunkQEPVHEP7; + + + + + +var _chunk6ZDHSOUVjs = chunk6ZDHSOUV; + + + + + + +var _chunkIZC266HSjs = chunkIZC266HS; + + + + + + + + + + + + +var _chunkQVEKZRZ2js = chunkQVEKZRZ2; + + + +var _chunkZ2RGWDD7js = chunkZ2RGWDD7; + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +dist$2.AssertionError = _chunk6ZDHSOUVjs.AssertionError; dist$2.CAIP_ACCOUNT_ADDRESS_REGEX = _chunkYWAID473js.CAIP_ACCOUNT_ADDRESS_REGEX; dist$2.CAIP_ACCOUNT_ID_REGEX = _chunkYWAID473js.CAIP_ACCOUNT_ID_REGEX; dist$2.CAIP_CHAIN_ID_REGEX = _chunkYWAID473js.CAIP_CHAIN_ID_REGEX; dist$2.CAIP_NAMESPACE_REGEX = _chunkYWAID473js.CAIP_NAMESPACE_REGEX; dist$2.CAIP_REFERENCE_REGEX = _chunkYWAID473js.CAIP_REFERENCE_REGEX; dist$2.CaipAccountAddressStruct = 
_chunkYWAID473js.CaipAccountAddressStruct; dist$2.CaipAccountIdStruct = _chunkYWAID473js.CaipAccountIdStruct; dist$2.CaipChainIdStruct = _chunkYWAID473js.CaipChainIdStruct; dist$2.CaipNamespaceStruct = _chunkYWAID473js.CaipNamespaceStruct; dist$2.CaipReferenceStruct = _chunkYWAID473js.CaipReferenceStruct; dist$2.ChecksumStruct = _chunkE4C7EW4Rjs.ChecksumStruct; dist$2.Duration = _chunk4RMX5YWEjs.Duration; dist$2.ESCAPE_CHARACTERS_REGEXP = _chunkQVEKZRZ2js.ESCAPE_CHARACTERS_REGEXP; dist$2.FrozenMap = _chunkZ2RGWDD7js.FrozenMap; dist$2.FrozenSet = _chunkZ2RGWDD7js.FrozenSet; dist$2.HexAddressStruct = _chunkQEPVHEP7js.HexAddressStruct; dist$2.HexChecksumAddressStruct = _chunkQEPVHEP7js.HexChecksumAddressStruct; dist$2.HexStruct = _chunkQEPVHEP7js.HexStruct; dist$2.JsonRpcErrorStruct = _chunkOLLG4H35js.JsonRpcErrorStruct; dist$2.JsonRpcFailureStruct = _chunkOLLG4H35js.JsonRpcFailureStruct; dist$2.JsonRpcIdStruct = _chunkOLLG4H35js.JsonRpcIdStruct; dist$2.JsonRpcNotificationStruct = _chunkOLLG4H35js.JsonRpcNotificationStruct; dist$2.JsonRpcParamsStruct = _chunkOLLG4H35js.JsonRpcParamsStruct; dist$2.JsonRpcRequestStruct = _chunkOLLG4H35js.JsonRpcRequestStruct; dist$2.JsonRpcResponseStruct = _chunkOLLG4H35js.JsonRpcResponseStruct; dist$2.JsonRpcSuccessStruct = _chunkOLLG4H35js.JsonRpcSuccessStruct; dist$2.JsonRpcVersionStruct = _chunkOLLG4H35js.JsonRpcVersionStruct; dist$2.JsonSize = _chunkQVEKZRZ2js.JsonSize; dist$2.JsonStruct = _chunkOLLG4H35js.JsonStruct; dist$2.KnownCaipNamespace = _chunkYWAID473js.KnownCaipNamespace; dist$2.PendingJsonRpcResponseStruct = _chunkOLLG4H35js.PendingJsonRpcResponseStruct; dist$2.StrictHexStruct = _chunkQEPVHEP7js.StrictHexStruct; dist$2.UnsafeJsonStruct = _chunkOLLG4H35js.UnsafeJsonStruct; dist$2.VersionRangeStruct = _chunk4D6XQBHAjs.VersionRangeStruct; dist$2.VersionStruct = _chunk4D6XQBHAjs.VersionStruct; dist$2.add0x = _chunkQEPVHEP7js.add0x; dist$2.assert = _chunk6ZDHSOUVjs.assert; dist$2.assertExhaustive = 
_chunk6ZDHSOUVjs.assertExhaustive; dist$2.assertIsBytes = _chunkQEPVHEP7js.assertIsBytes; dist$2.assertIsHexString = _chunkQEPVHEP7js.assertIsHexString; dist$2.assertIsJsonRpcError = _chunkOLLG4H35js.assertIsJsonRpcError; dist$2.assertIsJsonRpcFailure = _chunkOLLG4H35js.assertIsJsonRpcFailure; dist$2.assertIsJsonRpcNotification = _chunkOLLG4H35js.assertIsJsonRpcNotification; dist$2.assertIsJsonRpcRequest = _chunkOLLG4H35js.assertIsJsonRpcRequest; dist$2.assertIsJsonRpcResponse = _chunkOLLG4H35js.assertIsJsonRpcResponse; dist$2.assertIsJsonRpcSuccess = _chunkOLLG4H35js.assertIsJsonRpcSuccess; dist$2.assertIsPendingJsonRpcResponse = _chunkOLLG4H35js.assertIsPendingJsonRpcResponse; dist$2.assertIsSemVerRange = _chunk4D6XQBHAjs.assertIsSemVerRange; dist$2.assertIsSemVerVersion = _chunk4D6XQBHAjs.assertIsSemVerVersion; dist$2.assertIsStrictHexString = _chunkQEPVHEP7js.assertIsStrictHexString; dist$2.assertStruct = _chunk6ZDHSOUVjs.assertStruct; dist$2.base64 = _chunk6NZW4WK4js.base64; dist$2.base64ToBytes = _chunkQEPVHEP7js.base64ToBytes; dist$2.bigIntToBytes = _chunkQEPVHEP7js.bigIntToBytes; dist$2.bigIntToHex = _chunkVFXTVNXNjs.bigIntToHex; dist$2.bytesToBase64 = _chunkQEPVHEP7js.bytesToBase64; dist$2.bytesToBigInt = _chunkQEPVHEP7js.bytesToBigInt; dist$2.bytesToHex = _chunkQEPVHEP7js.bytesToHex; dist$2.bytesToNumber = _chunkQEPVHEP7js.bytesToNumber; dist$2.bytesToSignedBigInt = _chunkQEPVHEP7js.bytesToSignedBigInt; dist$2.bytesToString = _chunkQEPVHEP7js.bytesToString; dist$2.calculateNumberSize = _chunkQVEKZRZ2js.calculateNumberSize; dist$2.calculateStringSize = _chunkQVEKZRZ2js.calculateStringSize; dist$2.concatBytes = _chunkQEPVHEP7js.concatBytes; dist$2.createBigInt = _chunkDHVKFDHQjs.createBigInt; dist$2.createBytes = _chunkDHVKFDHQjs.createBytes; dist$2.createDataView = _chunkQEPVHEP7js.createDataView; dist$2.createDeferredPromise = _chunkC6HGFEYLjs.createDeferredPromise; dist$2.createHex = _chunkDHVKFDHQjs.createHex; dist$2.createModuleLogger = 
_chunk2LBGT4GHjs.createModuleLogger; dist$2.createNumber = _chunkDHVKFDHQjs.createNumber; dist$2.createProjectLogger = _chunk2LBGT4GHjs.createProjectLogger; dist$2.exactOptional = _chunkOLLG4H35js.exactOptional; dist$2.getChecksumAddress = _chunkQEPVHEP7js.getChecksumAddress; dist$2.getErrorMessage = _chunkIZC266HSjs.getErrorMessage; dist$2.getJsonRpcIdValidator = _chunkOLLG4H35js.getJsonRpcIdValidator; dist$2.getJsonSize = _chunkOLLG4H35js.getJsonSize; dist$2.getKnownPropertyNames = _chunkQVEKZRZ2js.getKnownPropertyNames; dist$2.getSafeJson = _chunkOLLG4H35js.getSafeJson; dist$2.gtRange = _chunk4D6XQBHAjs.gtRange; dist$2.gtVersion = _chunk4D6XQBHAjs.gtVersion; dist$2.hasProperty = _chunkQVEKZRZ2js.hasProperty; dist$2.hexToBigInt = _chunkVFXTVNXNjs.hexToBigInt; dist$2.hexToBytes = _chunkQEPVHEP7js.hexToBytes; dist$2.hexToNumber = _chunkVFXTVNXNjs.hexToNumber; dist$2.inMilliseconds = _chunk4RMX5YWEjs.inMilliseconds; dist$2.isASCII = _chunkQVEKZRZ2js.isASCII; dist$2.isBytes = _chunkQEPVHEP7js.isBytes; dist$2.isCaipAccountAddress = _chunkYWAID473js.isCaipAccountAddress; dist$2.isCaipAccountId = _chunkYWAID473js.isCaipAccountId; dist$2.isCaipChainId = _chunkYWAID473js.isCaipChainId; dist$2.isCaipNamespace = _chunkYWAID473js.isCaipNamespace; dist$2.isCaipReference = _chunkYWAID473js.isCaipReference; dist$2.isErrorWithCode = _chunkIZC266HSjs.isErrorWithCode; dist$2.isErrorWithMessage = _chunkIZC266HSjs.isErrorWithMessage; dist$2.isErrorWithStack = _chunkIZC266HSjs.isErrorWithStack; dist$2.isHexString = _chunkQEPVHEP7js.isHexString; dist$2.isJsonRpcError = _chunkOLLG4H35js.isJsonRpcError; dist$2.isJsonRpcFailure = _chunkOLLG4H35js.isJsonRpcFailure; dist$2.isJsonRpcNotification = _chunkOLLG4H35js.isJsonRpcNotification; dist$2.isJsonRpcRequest = _chunkOLLG4H35js.isJsonRpcRequest; dist$2.isJsonRpcResponse = _chunkOLLG4H35js.isJsonRpcResponse; dist$2.isJsonRpcSuccess = _chunkOLLG4H35js.isJsonRpcSuccess; dist$2.isNonEmptyArray = _chunkQVEKZRZ2js.isNonEmptyArray; 
dist$2.isNullOrUndefined = _chunkQVEKZRZ2js.isNullOrUndefined; dist$2.isObject = _chunkQVEKZRZ2js.isObject; dist$2.isPendingJsonRpcResponse = _chunkOLLG4H35js.isPendingJsonRpcResponse; dist$2.isPlainObject = _chunkQVEKZRZ2js.isPlainObject; dist$2.isStrictHexString = _chunkQEPVHEP7js.isStrictHexString; dist$2.isValidChecksumAddress = _chunkQEPVHEP7js.isValidChecksumAddress; dist$2.isValidHexAddress = _chunkQEPVHEP7js.isValidHexAddress; dist$2.isValidJson = _chunkOLLG4H35js.isValidJson; dist$2.isValidSemVerRange = _chunk4D6XQBHAjs.isValidSemVerRange; dist$2.isValidSemVerVersion = _chunk4D6XQBHAjs.isValidSemVerVersion; dist$2.jsonrpc2 = _chunkOLLG4H35js.jsonrpc2; dist$2.numberToBytes = _chunkQEPVHEP7js.numberToBytes; dist$2.numberToHex = _chunkVFXTVNXNjs.numberToHex; dist$2.object = _chunkOLLG4H35js.object; dist$2.parseCaipAccountId = _chunkYWAID473js.parseCaipAccountId; dist$2.parseCaipChainId = _chunkYWAID473js.parseCaipChainId; dist$2.remove0x = _chunkQEPVHEP7js.remove0x; dist$2.satisfiesVersionRange = _chunk4D6XQBHAjs.satisfiesVersionRange; dist$2.signedBigIntToBytes = _chunkQEPVHEP7js.signedBigIntToBytes; dist$2.stringToBytes = _chunkQEPVHEP7js.stringToBytes; dist$2.timeSince = _chunk4RMX5YWEjs.timeSince; dist$2.toCaipChainId = _chunkYWAID473js.toCaipChainId; dist$2.valueToBytes = _chunkQEPVHEP7js.valueToBytes; dist$2.wrapError = _chunkIZC266HSjs.wrapError; + +Object.defineProperty(utils$3, "__esModule", { value: true }); +utils$3.normalize = utils$3.recoverPublicKey = utils$3.concatSig = utils$3.legacyToBuffer = utils$3.isNullish = utils$3.padWithZeroes = void 0; +const util_1$1 = dist$4; +const utils_1$6 = dist$2; +/** + * Pads the front of the given hex string with zeroes until it reaches the + * target length. If the input string is already longer than or equal to the + * target length, it is returned unmodified. + * + * If the input string is "0x"-prefixed or not a hex string, an error will be + * thrown. 
+ * + * @param hexString - The hexadecimal string to pad with zeroes. + * @param targetLength - The target length of the hexadecimal string. + * @returns The input string front-padded with zeroes, or the original string + * if it was already greater than or equal to to the target length. + */ +function padWithZeroes(hexString, targetLength) { + if (hexString !== '' && !/^[a-f0-9]+$/iu.test(hexString)) { + throw new Error(`Expected an unprefixed hex string. Received: ${hexString}`); + } + if (targetLength < 0) { + throw new Error(`Expected a non-negative integer target length. Received: ${targetLength}`); + } + return String.prototype.padStart.call(hexString, targetLength, '0'); +} +utils$3.padWithZeroes = padWithZeroes; +/** + * Returns `true` if the given value is nullish. + * + * @param value - The value being checked. + * @returns Whether the value is nullish. + */ +function isNullish(value) { + return value === null || value === undefined; +} +utils$3.isNullish = isNullish; +/** + * Convert a value to a Buffer. This function should be equivalent to the `toBuffer` function in + * `ethereumjs-util@5.2.1`. + * + * @param value - The value to convert to a Buffer. + * @returns The given value as a Buffer. + */ +function legacyToBuffer(value) { + return typeof value === 'string' && !(0, util_1$1.isHexString)(value) + ? Buffer.from(value) + : (0, util_1$1.toBuffer)(value); +} +utils$3.legacyToBuffer = legacyToBuffer; +/** + * Concatenate an extended ECDSA signature into a single '0x'-prefixed hex string. + * + * @param v - The 'v' portion of the signature. + * @param r - The 'r' portion of the signature. + * @param s - The 's' portion of the signature. + * @returns The concatenated ECDSA signature as a '0x'-prefixed string. 
+ */ +function concatSig(v, r, s) { + const rSig = (0, util_1$1.fromSigned)(r); + const sSig = (0, util_1$1.fromSigned)(s); + const vSig = (0, util_1$1.bufferToInt)(v); + const rStr = padWithZeroes((0, util_1$1.toUnsigned)(rSig).toString('hex'), 64); + const sStr = padWithZeroes((0, util_1$1.toUnsigned)(sSig).toString('hex'), 64); + const vStr = (0, utils_1$6.remove0x)((0, utils_1$6.numberToHex)(vSig)); + return (0, utils_1$6.add0x)(rStr.concat(sStr, vStr)); +} +utils$3.concatSig = concatSig; +/** + * Recover the public key from the given signature and message hash. + * + * @param messageHash - The hash of the signed message. + * @param signature - The signature. + * @returns The public key of the signer. + */ +function recoverPublicKey(messageHash, signature) { + const sigParams = (0, util_1$1.fromRpcSig)(signature); + return (0, util_1$1.ecrecover)(messageHash, sigParams.v, sigParams.r, sigParams.s); +} +utils$3.recoverPublicKey = recoverPublicKey; +/** + * Normalize the input to a lower-cased '0x'-prefixed hex string. + * + * @param input - The value to normalize. + * @returns The normalized value. + */ +function normalize(input) { + if (isNullish(input)) { + return undefined; + } + if (typeof input === 'number') { + if (input < 0) { + return '0x'; + } + const buffer = (0, utils_1$6.numberToBytes)(input); + input = (0, utils_1$6.bytesToHex)(buffer); + } + if (typeof input !== 'string') { + let msg = 'eth-sig-util.normalize() requires hex string or integer input.'; + msg += ` received ${typeof input}: ${input}`; + throw new Error(msg); + } + return (0, utils_1$6.add0x)(input.toLowerCase()); +} +utils$3.normalize = normalize; + +Object.defineProperty(personalSign$1, "__esModule", { value: true }); +personalSign$1.extractPublicKey = personalSign$1.recoverPersonalSignature = personalSign$1.personalSign = void 0; +const util_1 = dist$4; +const utils_1$5 = utils$3; +/** + * Create an Ethereum-specific signature for a message. 
+ * + * This function is equivalent to the `eth_sign` Ethereum JSON-RPC method as specified in EIP-1417, + * as well as the MetaMask's `personal_sign` method. + * + * @param options - The personal sign options. + * @param options.privateKey - The key to sign with. + * @param options.data - The hex data to sign. + * @returns The '0x'-prefixed hex encoded signature. + */ +function personalSign({ privateKey, data, }) { + if ((0, utils_1$5.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1$5.isNullish)(privateKey)) { + throw new Error('Missing privateKey parameter'); + } + const message = (0, utils_1$5.legacyToBuffer)(data); + const msgHash = (0, util_1.hashPersonalMessage)(message); + const sig = (0, util_1.ecsign)(msgHash, privateKey); + const serialized = (0, utils_1$5.concatSig)((0, util_1.toBuffer)(sig.v), sig.r, sig.s); + return serialized; +} +personalSign$1.personalSign = personalSign; +/** + * Recover the address of the account used to create the given Ethereum signature. The message + * must have been signed using the `personalSign` function, or an equivalent function. + * + * @param options - The signature recovery options. + * @param options.data - The hex data that was signed. + * @param options.signature - The '0x'-prefixed hex encoded message signature. + * @returns The '0x'-prefixed hex encoded address of the message signer. 
+ */ +function recoverPersonalSignature({ data, signature, }) { + if ((0, utils_1$5.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1$5.isNullish)(signature)) { + throw new Error('Missing signature parameter'); + } + const publicKey = getPublicKeyFor(data, signature); + const sender = (0, util_1.publicToAddress)(publicKey); + const senderHex = (0, util_1.bufferToHex)(sender); + return senderHex; +} +personalSign$1.recoverPersonalSignature = recoverPersonalSignature; +/** + * Recover the public key of the account used to create the given Ethereum signature. The message + * must have been signed using the `personalSign` function, or an equivalent function. + * + * @param options - The public key recovery options. + * @param options.data - The hex data that was signed. + * @param options.signature - The '0x'-prefixed hex encoded message signature. + * @returns The '0x'-prefixed hex encoded public key of the message signer. + */ +function extractPublicKey({ data, signature, }) { + if ((0, utils_1$5.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1$5.isNullish)(signature)) { + throw new Error('Missing signature parameter'); + } + const publicKey = getPublicKeyFor(data, signature); + return `0x${publicKey.toString('hex')}`; +} +personalSign$1.extractPublicKey = extractPublicKey; +/** + * Get the public key for the given signature and message. + * + * @param message - The message that was signed. + * @param signature - The '0x'-prefixed hex encoded message signature. + * @returns The public key of the signer. 
+ */ +function getPublicKeyFor(message, signature) { + const messageHash = (0, util_1.hashPersonalMessage)((0, utils_1$5.legacyToBuffer)(message)); + return (0, utils_1$5.recoverPublicKey)(messageHash, signature); +} + +var signTypedData = {}; + +var dist = {}; + +var abi$1 = {}; + +var errors = {}; + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.ParserError = exports.getErrorStack = exports.getErrorMessage = void 0; + const utils_1 = dist$2; + /** + * Attempt to get an error message from a value. + * + * - If the value is an error, the error's message is returned. + * - If the value is an object with a `message` property, the value of that + * property is returned. + * - If the value is a string, the value is returned. + * - Otherwise, "Unknown error." is returned. + * + * @param error - The value to get an error message from. + * @returns The error message. + * @internal + */ + const getErrorMessage = (error) => { + if (typeof error === 'string') { + return error; + } + if (error instanceof Error) { + return error.message; + } + if ((0, utils_1.isObject)(error) && + (0, utils_1.hasProperty)(error, 'message') && + typeof error.message === 'string') { + return error.message; + } + return 'Unknown error.'; + }; + exports.getErrorMessage = getErrorMessage; + /** + * Get the error stack from a value. If the value is an error, the error's stack + * is returned. Otherwise, it returns `undefined`. + * + * @param error - The value to get an error stack from. + * @returns The error stack, or `undefined` if the value is not an error. + * @internal + */ + const getErrorStack = (error) => { + if (error instanceof Error) { + return error.stack; + } + return undefined; + }; + exports.getErrorStack = getErrorStack; + /** + * An error that is thrown when the ABI encoder or decoder encounters an + * issue. 
+ */ + class ParserError extends Error { + constructor(message, originalError) { + super(message); + this.name = 'ParserError'; + const originalStack = (0, exports.getErrorStack)(originalError); + if (originalStack) { + this.stack = originalStack; + } + } + } + exports.ParserError = ParserError; + +} (errors)); + +var packer = {}; + +var iterator = {}; + +Object.defineProperty(iterator, "__esModule", { value: true }); +iterator.iterate = void 0; +const utils_1$4 = dist$2; +/** + * Iterate over a buffer with the specified size. This will yield a part of the + * buffer starting at an increment of the specified size, until the end of the + * buffer is reached. + * + * Calling the `skip` function will make it skip the specified number of bytes. + * + * @param buffer - The buffer to iterate over. + * @param size - The number of bytes to iterate with. + * @returns An iterator that yields the parts of the byte array. + * @yields The parts of the byte array. + */ +const iterate = function* (buffer, size = 32) { + for (let pointer = 0; pointer < buffer.length; pointer += size) { + const skip = (length) => { + (0, utils_1$4.assert)(length >= 0, 'Cannot skip a negative number of bytes.'); + (0, utils_1$4.assert)(length % size === 0, 'Length must be a multiple of the size.'); + pointer += length; + }; + const value = buffer.subarray(pointer); + yield { skip, value }; + } + return { + skip: () => undefined, + value: new Uint8Array(), + }; +}; +iterator.iterate = iterate; + +var parsers = {}; + +var address = {}; + +var utils$1 = {}; + +var buffer = {}; + +Object.defineProperty(buffer, "__esModule", { value: true }); +buffer.padEnd = buffer.padStart = buffer.set = void 0; +const utils_1$3 = dist$2; +const BUFFER_WIDTH = 32; +/** + * Set `buffer` in `target` at the specified position. + * + * @param target - The buffer to set to. + * @param buffer - The buffer to set in the target. + * @param position - The position at which to set the target. + * @returns The combined buffer. 
+ */ +const set = (target, buffer, position) => { + return (0, utils_1$3.concatBytes)([ + target.subarray(0, position), + buffer, + target.subarray(position + buffer.length), + ]); +}; +buffer.set = set; +/** + * Add padding to a buffer. If the buffer is larger than `length`, this function won't do anything. If it's smaller, the + * buffer will be padded to the specified length, with extra zeroes at the start. + * + * @param buffer - The buffer to add padding to. + * @param length - The number of bytes to pad the buffer to. + * @returns The padded buffer. + */ +const padStart = (buffer, length = BUFFER_WIDTH) => { + const padding = new Uint8Array(Math.max(length - buffer.length, 0)).fill(0x00); + return (0, utils_1$3.concatBytes)([padding, buffer]); +}; +buffer.padStart = padStart; +/** + * Add padding to a buffer. If the buffer is larger than `length`, this function won't do anything. If it's smaller, the + * buffer will be padded to the specified length, with extra zeroes at the end. + * + * @param buffer - The buffer to add padding to. + * @param length - The number of bytes to pad the buffer to. + * @returns The padded buffer. + */ +const padEnd = (buffer, length = BUFFER_WIDTH) => { + const padding = new Uint8Array(Math.max(length - buffer.length, 0)).fill(0x00); + return (0, utils_1$3.concatBytes)([buffer, padding]); +}; +buffer.padEnd = padEnd; + +(function (exports) { + var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + })); + var __exportStar = (commonjsGlobal && commonjsGlobal.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); + }; + Object.defineProperty(exports, "__esModule", { value: true }); + __exportStar(buffer, exports); + +} (utils$1)); + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.address = exports.getAddress = void 0; + const utils_1 = dist$2; + const errors_1 = errors; + const utils_2 = utils$1; + /** + * Normalize an address value. This accepts the address as: + * + * - A hex string starting with the `0x` prefix. + * - A byte array (`Uint8Array` or `Buffer`). + * + * It checks that the address is 20 bytes long. + * + * @param value - The value to normalize. + * @returns The normalized address as `Uint8Array`. + */ + const getAddress = (value) => { + const bytesValue = (0, utils_1.createBytes)(value); + (0, utils_1.assert)(bytesValue.length <= 20, new errors_1.ParserError(`Invalid address value. Expected address to be 20 bytes long, but received ${bytesValue.length} bytes.`)); + return (0, utils_2.padStart)(bytesValue, 20); + }; + exports.getAddress = getAddress; + exports.address = { + isDynamic: false, + /** + * Get if the given value is a valid address type. Since `address` is a simple + * type, this is just a check that the value is "address". + * + * @param type - The type to check. + * @returns Whether the type is a valid address type. + */ + isType: (type) => type === 'address', + /** + * Get the byte length of an encoded address. Since `address` is a simple + * type, this always returns 32. 
+ * + * Note that actual addresses are only 20 bytes long, but the encoding of + * the `address` type is always 32 bytes long. + * + * @returns The byte length of an encoded address. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given address to a 32-byte-long byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The address to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded address added to it. + */ + encode({ buffer, value, packed }) { + const addressValue = (0, exports.getAddress)(value); + // If we're using packed encoding, we can just add the address bytes to the + // byte array, without adding any padding. + if (packed) { + return (0, utils_1.concatBytes)([buffer, addressValue]); + } + const addressBuffer = (0, utils_2.padStart)(addressValue); + return (0, utils_1.concatBytes)([buffer, addressBuffer]); + }, + /** + * Decode the given byte array to an address. + * + * @param args - The decoding arguments. + * @param args.value - The byte array to decode. + * @returns The decoded address as a hexadecimal string, starting with the + * "0x"-prefix. + */ + decode({ value }) { + return (0, utils_1.add0x)((0, utils_1.bytesToHex)(value.slice(12, 32))); + }, + }; + +} (address)); + +var array = {}; + +var fixedBytes = {}; + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.fixedBytes = exports.getByteLength = void 0; + const utils_1 = dist$2; + const errors_1 = errors; + const utils_2 = utils$1; + const BYTES_REGEX = /^bytes([0-9]{1,2})$/u; + /** + * Get the length of the specified type. If a length is not specified, or if the + * length is out of range (0 < n <= 32), this will throw an error. + * + * @param type - The type to get the length for. + * @returns The byte length of the type. 
+ */ + const getByteLength = (type) => { + const bytes = type.match(BYTES_REGEX)?.[1]; + (0, utils_1.assert)(bytes, `Invalid byte length. Expected a number between 1 and 32, but received "${type}".`); + const length = Number(bytes); + (0, utils_1.assert)(length > 0 && length <= 32, new errors_1.ParserError(`Invalid byte length. Expected a number between 1 and 32, but received "${type}".`)); + return length; + }; + exports.getByteLength = getByteLength; + exports.fixedBytes = { + isDynamic: false, + /** + * Check if a type is a fixed bytes type. + * + * @param type - The type to check. + * @returns Whether the type is a fixed bytes type. + */ + isType(type) { + return BYTES_REGEX.test(type); + }, + /** + * Get the byte length of an encoded fixed bytes type. + * + * @returns The byte length of the type. + */ + getByteLength() { + return 32; + }, + /** + * Encode a fixed bytes value. + * + * @param args - The arguments to encode. + * @param args.type - The type of the value. + * @param args.buffer - The byte array to add to. + * @param args.value - The value to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded value added to it. + */ + encode({ type, buffer, value, packed }) { + const length = (0, exports.getByteLength)(type); + const bufferValue = (0, utils_1.createBytes)(value); + (0, utils_1.assert)(bufferValue.length <= length, new errors_1.ParserError(`Expected a value of length ${length}, but received a value of length ${bufferValue.length}.`)); + // For packed encoding, the value is padded to the length of the type, and + // then added to the byte array. + if (packed) { + return (0, utils_1.concatBytes)([buffer, (0, utils_2.padEnd)(bufferValue, length)]); + } + return (0, utils_1.concatBytes)([buffer, (0, utils_2.padEnd)(bufferValue)]); + }, + /** + * Decode a fixed bytes value. + * + * @param args - The arguments to decode. + * @param args.type - The type of the value. 
+ * @param args.value - The value to decode. + * @returns The decoded value as a `Uint8Array`. + */ + decode({ type, value }) { + const length = (0, exports.getByteLength)(type); + // Since we're returning a `Uint8Array`, we use `slice` to copy the bytes + // into a new array. + return value.slice(0, length); + }, + }; + +} (fixedBytes)); + +var tuple = {}; + +var hasRequiredTuple; + +function requireTuple () { + if (hasRequiredTuple) return tuple; + hasRequiredTuple = 1; + (function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.tuple = exports.getTupleElements = void 0; + const utils_1 = dist$2; + const errors_1 = errors; + const packer_1 = requirePacker(); + const TUPLE_REGEX = /^\((.+)\)$/u; + const isTupleType = (type) => TUPLE_REGEX.test(type); + /** + * Get elements from a tuple type. + * + * @param type - The tuple type to get the types for. + * @returns The elements of the tuple as string array. + */ + const getTupleElements = (type) => { + (0, utils_1.assert)(type.startsWith('(') && type.endsWith(')'), new errors_1.ParserError(`Invalid tuple type. Expected tuple type, but received "${type}".`)); + const elements = []; + let current = ''; + let depth = 0; + for (let i = 1; i < type.length - 1; i++) { + const char = type[i]; + if (char === ',' && depth === 0) { + elements.push(current.trim()); + current = ''; + } + else { + current += char; + if (char === '(') { + depth += 1; + } + else if (char === ')') { + depth -= 1; + } + } + } + if (current.trim()) { + elements.push(current.trim()); + } + return elements; + }; + exports.getTupleElements = getTupleElements; + exports.tuple = { + /** + * Check if the tuple is dynamic. Tuples are dynamic if one or more elements + * of the tuple are dynamic. + * + * @param type - The type to check. + * @returns Whether the tuple is dynamic. 
+ */ + isDynamic(type) { + const elements = (0, exports.getTupleElements)(type); + return elements.some((element) => { + const parser = (0, packer_1.getParser)(element); + return (0, packer_1.isDynamicParser)(parser, element); + }); + }, + /** + * Check if a type is a tuple type. + * + * @param type - The type to check. + * @returns Whether the type is a tuple type. + */ + isType(type) { + return isTupleType(type); + }, + /** + * Get the byte length of a tuple type. If the tuple is dynamic, this will + * always return 32. If the tuple is static, this will return the sum of the + * byte lengths of the tuple elements. + * + * @param type - The type to get the byte length for. + * @returns The byte length of the tuple type. + */ + getByteLength(type) { + if ((0, packer_1.isDynamicParser)(this, type)) { + return 32; + } + const elements = (0, exports.getTupleElements)(type); + return elements.reduce((total, element) => { + return total + (0, packer_1.getParser)(element).getByteLength(element); + }, 0); + }, + /** + * Encode a tuple value. + * + * @param args - The encoding arguments. + * @param args.type - The type of the value. + * @param args.buffer - The byte array to add to. + * @param args.value - The value to encode. + * @param args.packed - Whether to use non-standard packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded value added to it. + */ + encode({ type, buffer, value, packed, tight }) { + const elements = (0, exports.getTupleElements)(type); + return (0, packer_1.pack)({ + types: elements, + values: value, + byteArray: buffer, + packed, + tight, + }); + }, + /** + * Decode a tuple value. + * + * @param args - The decoding arguments. + * @param args.type - The type of the value. + * @param args.value - The value to decode. + * @param args.skip - A function to skip a number of bytes. + * @returns The decoded value. 
+ */ + decode({ type, value, skip }) { + const elements = (0, exports.getTupleElements)(type); + const length = this.getByteLength(type) - 32; + skip(length); + return (0, packer_1.unpack)(elements, value); + }, + }; + + } (tuple)); + return tuple; +} + +var hasRequiredArray; + +function requireArray () { + if (hasRequiredArray) return array; + hasRequiredArray = 1; + (function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.array = exports.getTupleType = exports.getArrayType = exports.isArrayType = void 0; + const utils_1 = dist$2; + const errors_1 = errors; + const packer_1 = requirePacker(); + const utils_2 = utils$1; + const fixed_bytes_1 = fixedBytes; + const tuple_1 = requireTuple(); + const ARRAY_REGEX = /^(?.*)\[(?\d*?)\]$/u; + const isArrayType = (type) => ARRAY_REGEX.test(type); + exports.isArrayType = isArrayType; + /** + * Get the type of the array. + * + * @param type - The type to get the array type for. + * @returns The array type. + */ + const getArrayType = (type) => { + const match = type.match(ARRAY_REGEX); + (0, utils_1.assert)(match?.groups?.type, new errors_1.ParserError(`Invalid array type. Expected an array type, but received "${type}".`)); + return [ + match.groups.type, + match.groups.length ? parseInt(match.groups.length, 10) : undefined, + ]; + }; + exports.getArrayType = getArrayType; + /** + * Get the type of the array as a tuple type. This is used for encoding fixed + * length arrays, which are encoded as tuples. + * + * @param innerType - The type of the array. + * @param length - The length of the array. + * @returns The tuple type. + */ + const getTupleType = (innerType, length) => { + return `(${new Array(length).fill(innerType).join(',')})`; + }; + exports.getTupleType = getTupleType; + exports.array = { + /** + * Check if the array is dynamic. Arrays are dynamic if the array does not + * have a fixed length, or if the array type is dynamic. + * + * @param type - The type to check. 
+ * @returns Whether the array is dynamic. + */ + isDynamic(type) { + const [innerType, length] = (0, exports.getArrayType)(type); + return ( + // `T[]` is dynamic for any `T`. `T[k]` is dynamic for any dynamic `T` and + // any `k >= 0`. + length === undefined || (0, packer_1.isDynamicParser)((0, packer_1.getParser)(innerType), innerType)); + }, + /** + * Check if a type is an array type. + * + * @param type - The type to check. + * @returns Whether the type is an array type. + */ + isType(type) { + return (0, exports.isArrayType)(type); + }, + /** + * Get the byte length of an encoded array. If the array is dynamic, this + * returns 32, i.e., the length of the pointer to the array. If the array is + * static, this returns the byte length of the resulting tuple type. + * + * @param type - The type to get the byte length for. + * @returns The byte length of an encoded array. + */ + getByteLength(type) { + (0, utils_1.assert)((0, exports.isArrayType)(type), new errors_1.ParserError(`Expected an array type, but received "${type}".`)); + const [innerType, length] = (0, exports.getArrayType)(type); + if (!(0, packer_1.isDynamicParser)(this, type) && length !== undefined) { + return tuple_1.tuple.getByteLength((0, exports.getTupleType)(innerType, length)); + } + return 32; + }, + /** + * Encode the given array to a byte array. If the array is static, this uses + * the tuple encoder. + * + * @param args - The encoding arguments. + * @param args.type - The type of the array. + * @param args.buffer - The byte array to add to. + * @param args.value - The array to encode. + * @param args.packed - Whether to use non-standard packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded array added to it. + */ + encode({ type, buffer, value, packed, tight }) { + const [arrayType, fixedLength] = (0, exports.getArrayType)(type); + // Packed encoding does not support nested arrays. 
+ (0, utils_1.assert)(!packed || !(0, exports.isArrayType)(arrayType), new errors_1.ParserError(`Cannot pack nested arrays.`)); + // Tightly pack `T[]` where `T` is a dynamic type. This is not supported in + // Solidity, but is commonly used in the Ethereum ecosystem. + if (packed && (0, packer_1.isDynamicParser)((0, packer_1.getParser)(arrayType), arrayType)) { + return (0, packer_1.pack)({ + types: new Array(value.length).fill(arrayType), + values: value, + byteArray: buffer, + packed, + arrayPacked: true, + tight, + }); + } + if (fixedLength) { + (0, utils_1.assert)(fixedLength === value.length, new errors_1.ParserError(`Array length does not match type length. Expected a length of ${fixedLength}, but received ${value.length}.`)); + // `T[k]` for any `T` and `k` is encoded as `(T[0], ..., T[k - 1])`. + return tuple_1.tuple.encode({ + type: (0, exports.getTupleType)(arrayType, fixedLength), + buffer, + value, + // In "tight" mode, we don't pad the values to 32 bytes if the value is + // of type `bytesN`. This is an edge case in `ethereumjs-abi` that we + // support to provide compatibility with it. + packed: fixed_bytes_1.fixedBytes.isType(arrayType) && tight, + tight, + }); + } + // For packed encoding, we don't need to encode the length of the array, + // so we can just encode the values. + if (packed) { + return (0, packer_1.pack)({ + types: new Array(value.length).fill(arrayType), + values: value, + byteArray: buffer, + // In "tight" mode, we don't pad the values to 32 bytes if the value is + // of type `bytesN`. This is an edge case in `ethereumjs-abi` that we + // support to provide compatibility with it. + packed: fixed_bytes_1.fixedBytes.isType(arrayType) && tight, + arrayPacked: true, + tight, + }); + } + // `T[]` with `k` elements is encoded as `k (T[0], ..., T[k - 1])`. That + // means that we just need to encode the length of the array, and then the + // array itself. The pointer is encoded by the {@link pack} function. 
+ const arrayLength = (0, utils_2.padStart)((0, utils_1.numberToBytes)(value.length)); + return (0, packer_1.pack)({ + types: new Array(value.length).fill(arrayType), + values: value, + byteArray: (0, utils_1.concatBytes)([buffer, arrayLength]), + packed, + tight, + }); + }, + /** + * Decode an array from the given byte array. + * + * @param args - The decoding arguments. + * @param args.type - The type of the array. + * @param args.value - The byte array to decode. + * @returns The decoded array. + */ + decode({ type, value, ...rest }) { + const [arrayType, fixedLength] = (0, exports.getArrayType)(type); + if (fixedLength) { + const result = tuple_1.tuple.decode({ + type: (0, exports.getTupleType)(arrayType, fixedLength), + value, + ...rest, + }); + (0, utils_1.assert)(result.length === fixedLength, new errors_1.ParserError(`Array length does not match type length. Expected a length of ${fixedLength}, but received ${result.length}.`)); + return result; + } + const arrayLength = (0, utils_1.bytesToNumber)(value.subarray(0, 32)); + return (0, packer_1.unpack)(new Array(arrayLength).fill(arrayType), value.subarray(32)); + }, + }; + + } (array)); + return array; +} + +var bool = {}; + +var number = {}; + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.number = exports.getBigInt = exports.assertNumberLength = exports.getLength = exports.isSigned = void 0; + const utils_1 = dist$2; + const errors_1 = errors; + const utils_2 = utils$1; + const NUMBER_REGEX = /^u?int(?[0-9]*)?$/u; + /** + * Check if a number type is signed. + * + * @param type - The type to check. + * @returns Whether the type is signed. + */ + const isSigned = (type) => { + return !type.startsWith('u'); + }; + exports.isSigned = isSigned; + /** + * Get the length of the specified type. If a length is not specified, if the + * length is out of range (8 <= n <= 256), or if the length is not a multiple of + * 8, this will throw an error. 
+ * + * @param type - The type to get the length for. + * @returns The bit length of the type. + */ + const getLength = (type) => { + if (type === 'int' || type === 'uint') { + return 256; + } + const match = type.match(NUMBER_REGEX); + (0, utils_1.assert)(match?.groups?.length, new errors_1.ParserError(`Invalid number type. Expected a number type, but received "${type}".`)); + const length = parseInt(match.groups.length, 10); + (0, utils_1.assert)(length >= 8 && length <= 256, new errors_1.ParserError(`Invalid number length. Expected a number between 8 and 256, but received "${type}".`)); + (0, utils_1.assert)(length % 8 === 0, new errors_1.ParserError(`Invalid number length. Expected a multiple of 8, but received "${type}".`)); + return length; + }; + exports.getLength = getLength; + /** + * Assert that the byte length of the given value is in range for the given + * number type. + * + * @param value - The value to check. + * @param type - The type of the value. + * @throws If the value is out of range for the type. + */ + const assertNumberLength = (value, type) => { + const length = (0, exports.getLength)(type); + const maxValue = BigInt(2) ** BigInt(length - ((0, exports.isSigned)(type) ? 1 : 0)) - BigInt(1); + if ((0, exports.isSigned)(type)) { + // Signed types must be in the range of `-(2^(length - 1))` to + // `2^(length - 1) - 1`. + (0, utils_1.assert)(value >= -(maxValue + BigInt(1)) && value <= maxValue, new errors_1.ParserError(`Number "${value}" is out of range for type "${type}".`)); + return; + } + // Unsigned types must be in the range of `0` to `2^length - 1`. + (0, utils_1.assert)(value <= maxValue, new errors_1.ParserError(`Number "${value}" is out of range for type "${type}".`)); + }; + exports.assertNumberLength = assertNumberLength; + /** + * Normalize a `bigint` value. This accepts the value as: + * + * - A `bigint`. + * - A `number`. + * - A decimal string, i.e., a string that does not start with "0x". 
+ * - A hexadecimal string, i.e., a string that starts with "0x". + * + * @param value - The number-like value to parse. + * @returns The value parsed as bigint. + */ + const getBigInt = (value) => { + try { + return (0, utils_1.createBigInt)(value); + } + catch { + throw new errors_1.ParserError(`Invalid number. Expected a valid number value, but received "${value}".`); + } + }; + exports.getBigInt = getBigInt; + exports.number = { + isDynamic: false, + /** + * Check if a type is a number type. + * + * @param type - The type to check. + * @returns Whether the type is a number type. + */ + isType(type) { + return NUMBER_REGEX.test(type); + }, + /** + * Get the byte length of an encoded number type. Since `int` and `uint` are + * simple types, this will always return 32. + * + * @returns The byte length of the type. + */ + getByteLength() { + return 32; + }, + /** + * Encode a number value. + * + * @param args - The arguments to encode. + * @param args.type - The type of the value. + * @param args.buffer - The byte array to add to. + * @param args.value - The value to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded value added to it. + */ + encode({ type, buffer, value, packed }) { + const bigIntValue = (0, exports.getBigInt)(value); + (0, exports.assertNumberLength)(bigIntValue, type); + if ((0, exports.isSigned)(type)) { + // For packed encoding, the value is padded to the length of the type, and + // then added to the byte array. + if (packed) { + const length = (0, exports.getLength)(type) / 8; + return (0, utils_1.concatBytes)([buffer, (0, utils_1.signedBigIntToBytes)(bigIntValue, length)]); + } + return (0, utils_1.concatBytes)([ + buffer, + (0, utils_2.padStart)((0, utils_1.signedBigIntToBytes)(bigIntValue, 32)), + ]); + } + // For packed encoding, the value is padded to the length of the type, and + // then added to the byte array. 
+ if (packed) { + const length = (0, exports.getLength)(type) / 8; + return (0, utils_1.concatBytes)([ + buffer, + (0, utils_2.padStart)((0, utils_1.bigIntToBytes)(bigIntValue), length), + ]); + } + return (0, utils_1.concatBytes)([buffer, (0, utils_2.padStart)((0, utils_1.bigIntToBytes)(bigIntValue))]); + }, + /** + * Decode a number value. + * + * @param args - The decoding arguments. + * @param args.type - The type of the value. + * @param args.value - The value to decode. + * @returns The decoded value. + */ + decode({ type, value }) { + const buffer = value.subarray(0, 32); + if ((0, exports.isSigned)(type)) { + const numberValue = (0, utils_1.bytesToSignedBigInt)(buffer); + (0, exports.assertNumberLength)(numberValue, type); + return numberValue; + } + const numberValue = (0, utils_1.bytesToBigInt)(buffer); + (0, exports.assertNumberLength)(numberValue, type); + return numberValue; + }, + }; + +} (number)); + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.bool = exports.getBooleanValue = void 0; + const utils_1 = dist$2; + const superstruct_1 = require$$1; + const errors_1 = errors; + const number_1 = number; + const BooleanCoercer = (0, superstruct_1.coerce)((0, superstruct_1.boolean)(), (0, superstruct_1.union)([(0, superstruct_1.literal)('true'), (0, superstruct_1.literal)('false')]), (value) => value === 'true'); + /** + * Normalize a boolean value. This accepts the boolean as: + * + * - A boolean literal. + * - The string "true" or "false". + * + * @param value - The value to get a boolean for. + * @returns The parsed boolean value. This is `BigInt(1)` for truthy values, or + * `BigInt(0)` for falsy values. + */ + const getBooleanValue = (value) => { + try { + const booleanValue = (0, superstruct_1.create)(value, BooleanCoercer); + if (booleanValue) { + return BigInt(1); + } + return BigInt(0); + } + catch { + throw new errors_1.ParserError(`Invalid boolean value. 
Expected a boolean literal, or the string "true" or "false", but received "${value}".`); + } + }; + exports.getBooleanValue = getBooleanValue; + exports.bool = { + isDynamic: false, + /** + * Get if the given value is a valid boolean type. Since `bool` is a simple + * type, this is just a check that the value is "bool". + * + * @param type - The type to check. + * @returns Whether the type is a valid boolean type. + */ + isType: (type) => type === 'bool', + /** + * Get the byte length of an encoded boolean. Since `bool` is a simple + * type, this always returns 32. + * + * Note that actual booleans are only 1 byte long, but the encoding of + * the `bool` type is always 32 bytes long. + * + * @returns The byte length of an encoded boolean. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given boolean to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The boolean to encode. + * @param args.packed - Whether the value is packed. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded boolean added to it. + */ + encode({ buffer, value, packed, tight }) { + const booleanValue = (0, exports.getBooleanValue)(value); + // For packed encoding, we add a single byte (`0x00` or `0x01`) to the byte + // array. + if (packed) { + return (0, utils_1.concatBytes)([buffer, (0, utils_1.bigIntToBytes)(booleanValue)]); + } + // Booleans are encoded as 32-byte integers, so we use the number parser + // to encode the boolean value. + return number_1.number.encode({ + type: 'uint256', + buffer, + value: booleanValue, + packed, + tight, + }); + }, + /** + * Decode the given byte array to a boolean. + * + * @param args - The decoding arguments. + * @returns The decoded boolean. + */ + decode(args) { + // Booleans are encoded as 32-byte integers, so we use the number parser + // to decode the boolean value. 
+ return number_1.number.decode({ ...args, type: 'uint256' }) === BigInt(1); + }, + }; + +} (bool)); + +var bytes = {}; + +Object.defineProperty(bytes, "__esModule", { value: true }); +bytes.bytes = void 0; +const utils_1$2 = dist$2; +const utils_2 = utils$1; +bytes.bytes = { + isDynamic: true, + /** + * Check if a type is a bytes type. Since `bytes` is a simple type, this is + * just a check that the type is "bytes". + * + * @param type - The type to check. + * @returns Whether the type is a bytes type. + */ + isType: (type) => type === 'bytes', + /** + * Get the byte length of an encoded bytes value. Since `bytes` is a simple + * type, this always returns 32. + * + * Note that actual length of a bytes value is variable, but the encoded + * static value (pointer) is always 32 bytes long. + * + * @returns The byte length of an encoded bytes value. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given bytes value to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The bytes value to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded bytes value added to it. + */ + encode({ buffer, value, packed }) { + const bufferValue = (0, utils_1$2.createBytes)(value); + // For packed encoding, we can just add the bytes value to the byte array, + // without adding any padding or alignment. There is also no need to + // encode the length of the bytes. + if (packed) { + return (0, utils_1$2.concatBytes)([buffer, bufferValue]); + } + const paddedSize = Math.ceil(bufferValue.byteLength / 32) * 32; + // Bytes of length `k` are encoded as `k pad_right(bytes)`. + return (0, utils_1$2.concatBytes)([ + buffer, + (0, utils_2.padStart)((0, utils_1$2.numberToBytes)(bufferValue.byteLength)), + (0, utils_2.padEnd)(bufferValue, paddedSize), + ]); + }, + /** + * Decode the given byte array to a bytes value. 
+ * + * @param args - The decoding arguments. + * @param args.value - The byte array to decode. + * @returns The decoded bytes value as a `Uint8Array`. + */ + decode({ value }) { + const bytesValue = value.subarray(0, 32); + const length = (0, utils_1$2.bytesToNumber)(bytesValue); + // Since we're returning a `Uint8Array`, we use `slice` to copy the bytes + // into a new array. + return value.slice(32, 32 + length); + }, +}; + +var _function = {}; + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.fn = exports.getFunction = void 0; + const utils_1 = dist$2; + const superstruct_1 = require$$1; + const errors_1 = errors; + const fixed_bytes_1 = fixedBytes; + /** + * A struct that represents a Solidity function. The value must be a hex string + * or a byte array. The created value will always be an object with an `address` + * and `selector` property. + */ + const FunctionStruct = (0, superstruct_1.coerce)((0, superstruct_1.object)({ + address: utils_1.StrictHexStruct, + selector: utils_1.StrictHexStruct, + }), (0, superstruct_1.union)([utils_1.StrictHexStruct, (0, superstruct_1.instance)(Uint8Array)]), (value) => { + const bytes = (0, utils_1.createBytes)(value); + (0, utils_1.assert)(bytes.length === 24, new errors_1.ParserError(`Invalid Solidity function. Expected function to be 24 bytes long, but received ${bytes.length} bytes.`)); + return { + address: (0, utils_1.bytesToHex)(bytes.subarray(0, 20)), + selector: (0, utils_1.bytesToHex)(bytes.subarray(20, 24)), + }; + }); + /** + * Normalize a function. This accepts the function as: + * + * - A {@link SolidityFunction} object. + * - A hexadecimal string. + * - A byte array. + * + * @param input - The function-like input. + * @returns The function as buffer. 
+ */ + const getFunction = (input) => { + const value = (0, superstruct_1.create)(input, FunctionStruct); + return (0, utils_1.concatBytes)([(0, utils_1.hexToBytes)(value.address), (0, utils_1.hexToBytes)(value.selector)]); + }; + exports.getFunction = getFunction; + exports.fn = { + isDynamic: false, + /** + * Check if a type is a function type. Since `function` is a simple type, this + * is just a check that the type is "function". + * + * @param type - The type to check. + * @returns Whether the type is a function type. + */ + isType: (type) => type === 'function', + /** + * Get the byte length of an encoded function. Since `function` is a simple + * type, this always returns 32. + * + * Note that actual functions are only 24 bytes long, but the encoding of + * the `function` type is always 32 bytes long. + * + * @returns The byte length of an encoded function. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given function to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The function to encode. + * @param args.packed - Whether to use packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded function added to it. + */ + encode({ buffer, value, packed, tight }) { + const fnValue = (0, exports.getFunction)(value); + // Functions are encoded as `bytes24`, so we use the fixedBytes parser to + // encode the function. + return fixed_bytes_1.fixedBytes.encode({ + type: 'bytes24', + buffer, + value: fnValue, + packed, + tight, + }); + }, + /** + * Decode the given byte array to a function. + * + * @param args - The decoding arguments. + * @param args.value - The byte array to decode. + * @returns The decoded function as a {@link SolidityFunction} object. 
+ */ + decode({ value }) { + return { + address: (0, utils_1.bytesToHex)(value.slice(0, 20)), + selector: (0, utils_1.bytesToHex)(value.slice(20, 24)), + }; + }, + }; + +} (_function)); + +var parser = {}; + +Object.defineProperty(parser, "__esModule", { value: true }); + +var string = {}; + +Object.defineProperty(string, "__esModule", { value: true }); +string.string = void 0; +const utils_1$1 = dist$2; +const bytes_1 = bytes; +string.string = { + isDynamic: true, + /** + * Check if a type is a string type. Since `string` is a simple type, this + * is just a check if the type is "string". + * + * @param type - The type to check. + * @returns Whether the type is a string type. + */ + isType: (type) => type === 'string', + /** + * Get the byte length of an encoded string type. Since `string` is a simple + * type, this will always return 32. + * + * Note that actual strings are variable in length, but the encoded static + * value (pointer) is always 32 bytes long. + * + * @returns The byte length of an encoded string. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given string value to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The string value to encode. + * @param args.packed - Whether to use packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded string value added to it. + */ + encode({ buffer, value, packed, tight }) { + // Strings are encoded as UTF-8 bytes, so we use the bytes parser to encode + // the string as bytes. + return bytes_1.bytes.encode({ + type: 'bytes', + buffer, + value: (0, utils_1$1.stringToBytes)(value), + packed, + tight, + }); + }, + /** + * Decode the given byte array to a string value. + * + * @param args - The decoding arguments. + * @returns The decoded string value. 
+ */ + decode(args) { + // Strings are encoded as UTF-8 bytes, so we use the bytes parser to decode + // the bytes, and convert them to a string. + return (0, utils_1$1.bytesToString)(bytes_1.bytes.decode(args)); + }, +}; + +var hasRequiredParsers; + +function requireParsers () { + if (hasRequiredParsers) return parsers; + hasRequiredParsers = 1; + (function (exports) { + var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + })); + var __exportStar = (commonjsGlobal && commonjsGlobal.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); + }; + Object.defineProperty(exports, "__esModule", { value: true }); + __exportStar(address, exports); + __exportStar(requireArray(), exports); + __exportStar(bool, exports); + __exportStar(bytes, exports); + __exportStar(fixedBytes, exports); + __exportStar(_function, exports); + __exportStar(number, exports); + __exportStar(parser, exports); + __exportStar(string, exports); + __exportStar(requireTuple(), exports); + + } (parsers)); + return parsers; +} + +var hasRequiredPacker; + +function requirePacker () { + if (hasRequiredPacker) return packer; + hasRequiredPacker = 1; + (function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.unpack = exports.pack = exports.isDynamicParser = exports.getParser = void 0; + const utils_1 = dist$2; + const errors_1 = errors; + const iterator_1 = iterator; + const parsers_1 = requireParsers(); + const utils_2 = utils$1; + /** + * Get 
the parser for the specified type. + * + * @param type - The type to get a parser for. + * @returns The parser. + * @throws If there is no parser for the specified type. + */ + const getParser = (type) => { + const parsers = { + address: parsers_1.address, + array: parsers_1.array, + bool: parsers_1.bool, + bytes: parsers_1.bytes, + fixedBytes: parsers_1.fixedBytes, + function: parsers_1.fn, + number: parsers_1.number, + string: parsers_1.string, + tuple: parsers_1.tuple, + }; + const staticParser = parsers[type]; + if (staticParser) { + return staticParser; + } + const parser = Object.values(parsers).find((value) => value.isType(type)); + if (parser) { + return parser; + } + throw new errors_1.ParserError(`The type "${type}" is not supported.`); + }; + exports.getParser = getParser; + /** + * Check if the specified parser is dynamic, for the provided types. This is + * primarily used for parsing tuples, where a tuple can be dynamic based on the + * types. For other parsers, it will simply use the set `isDynamic` value. + * + * @param parser - The parser to check. + * @param type - The type to check the parser with. + * @returns Whether the parser is dynamic. + */ + const isDynamicParser = (parser, type) => { + const { isDynamic } = parser; + if (typeof isDynamic === 'function') { + return isDynamic(type); + } + return isDynamic; + }; + exports.isDynamicParser = isDynamicParser; + /** + * Pack the provided values in a buffer, encoded with the specified types. If a + * buffer is specified, the resulting value will be concatenated with the + * buffer. + * + * @param args - The arguments object. + * @param args.types - The types of the values to pack. + * @param args.values - The values to pack. + * @param args.packed - Whether to use the non-standard packed mode. Defaults to + * `false`. + * @param args.arrayPacked - Whether to use the non-standard packed mode for + * arrays. Defaults to `false`. + * @param args.byteArray - The byte array to encode the values into. 
Defaults to + * an empty array. + * @param args.tight - Whether to use tight packing mode. Only applicable when + * `packed` is true. When true, the packed mode will not add any padding bytes. + * This matches the packing behaviour of `ethereumjs-abi`, but is not standard. + * @returns The resulting encoded buffer. + */ + const pack = ({ types, values, packed = false, tight = false, arrayPacked = false, byteArray = new Uint8Array(), }) => { + (0, utils_1.assert)(types.length === values.length, new errors_1.ParserError(`The number of types (${types.length}) does not match the number of values (${values.length}).`)); + const { staticBuffer, dynamicBuffer, pointers } = types.reduce( + // eslint-disable-next-line @typescript-eslint/no-shadow + ({ staticBuffer, dynamicBuffer, pointers }, type, index) => { + const parser = (0, exports.getParser)(type); + const value = values[index]; + // If packed mode is enabled, we can skip the dynamic check, as all + // values are encoded in the static buffer. + if (packed || arrayPacked || !(0, exports.isDynamicParser)(parser, type)) { + return { + staticBuffer: parser.encode({ + buffer: staticBuffer, + value, + type, + packed, + tight, + }), + dynamicBuffer, + pointers, + }; + } + const newStaticBuffer = (0, utils_1.concatBytes)([staticBuffer, new Uint8Array(32)]); + const newDynamicBuffer = parser.encode({ + buffer: dynamicBuffer, + value, + type, + packed, + tight, + }); + return { + staticBuffer: newStaticBuffer, + dynamicBuffer: newDynamicBuffer, + pointers: [ + ...pointers, + { position: staticBuffer.length, pointer: dynamicBuffer.length }, + ], + }; + }, { + staticBuffer: new Uint8Array(), + dynamicBuffer: new Uint8Array(), + pointers: [], + }); + // If packed mode is enabled, there shouldn't be any dynamic values. 
+ (0, utils_1.assert)((!packed && !arrayPacked) || dynamicBuffer.length === 0, new errors_1.ParserError('Invalid pack state.')); + const dynamicStart = staticBuffer.length; + const updatedBuffer = pointers.reduce((target, { pointer, position }) => { + const offset = (0, utils_2.padStart)((0, utils_1.numberToBytes)(dynamicStart + pointer)); + return (0, utils_2.set)(target, offset, position); + }, staticBuffer); + return (0, utils_1.concatBytes)([byteArray, updatedBuffer, dynamicBuffer]); + }; + exports.pack = pack; + const unpack = (types, buffer) => { + const iterator = (0, iterator_1.iterate)(buffer); + return types.map((type) => { + const { value: { value, skip }, done, } = iterator.next(); + (0, utils_1.assert)(!done, new errors_1.ParserError(`The encoded value is invalid for the provided types. Reached end of buffer while attempting to parse "${type}".`)); + const parser = (0, exports.getParser)(type); + const isDynamic = (0, exports.isDynamicParser)(parser, type); + if (isDynamic) { + const pointer = (0, utils_1.bytesToNumber)(value.subarray(0, 32)); + const target = buffer.subarray(pointer); + return parser.decode({ type, value: target, skip }); + } + return parser.decode({ type, value, skip }); + }); + }; + exports.unpack = unpack; + + } (packer)); + return packer; +} + +(function (exports) { + // ESLint gets confused by the nested list and tables in the docs, so we disable + // the rule for this file. + /* eslint-disable jsdoc/check-indentation, jsdoc/match-description */ + Object.defineProperty(exports, "__esModule", { value: true }); + exports.decodeSingle = exports.decode = exports.encodePacked = exports.encodeSingle = exports.encode = void 0; + const utils_1 = dist$2; + const errors_1 = errors; + const packer_1 = requirePacker(); + /** + * Encode the data with the provided types. The types must be valid Solidity + * ABI types. + * + * This will attempt to parse the values into the correct types. 
For example, + * if you pass in a hex string for a `uint256`, it will be parsed into a + * `bigint`. Regular strings are interpreted as UTF-8 strings. If you want to + * pass in a hex string, you must pass it in as a `Uint8Array`, or use the + * "0x"-prefix. + * + * It will also attempt to infer the types of the values. For example, if you + * pass in a string for a `uint256`, it will result in a TypeScript compile-time + * error. This does not work for all types, however. For example, if you use + * nested arrays or tuples, the type will be inferred as `unknown`. + * + * The following types are supported: + * + * - `address`: A 20-byte Ethereum address. + * - As a 40-character-long hexadecimal string, starting with "0x". + * - As a 20-byte-long byte array, i.e., `Uint8Array`. + * - `bool`: A boolean value. + * - As a boolean literal, i.e., `true` or `false`. + * - As the strings "true" or "false". + * - `bytes(n)`: A dynamic byte array. + * - As a hexadecimal string, starting with "0x". + * - As a byte array, i.e., `Uint8Array`. + * - As a regular string, which will be interpreted as UTF-8. + * - `function`: A Solidity function. + * - As a 48-character-long hexadecimal string, starting with "0x". + * - As a 24-byte-long byte array, i.e., `Uint8Array`. + * - As a {@link SolidityFunction} object. + * - `int(n)`: A signed integer. + * - As a number. + * - As a `bigint`. + * - As a hexadecimal string, starting with "0x". + * - `string`: A dynamic UTF-8 string. + * - As a regular string. + * - As a hexadecimal string, starting with "0x". + * - As a byte array, i.e., `Uint8Array`. + * - `tuple`: A tuple of values. + * - As an array of values. + * - `uint(n)`: An unsigned integer. + * - As a number. + * - As a `bigint`. + * - As a hexadecimal string, starting with "0x". 
+ * + * @example + * ```typescript + * import { encode, decode } from '@metamask/abi-utils'; + * + * const types = ['uint256', 'string']; + * const encoded = encode(types, [42, 'Hello, world!']); + * const decoded = decode(types, encoded); + * + * console.log(decoded); // [42n, 'Hello, world!'] + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html + * @param types - The types to encode. + * @param values - The values to encode. This array must have the same length as + * the types array. + * @param packed - Whether to use the non-standard packed mode. Defaults to + * `false`. + * @param tight - Whether to pack the values tightly. When enabled, the values + * will be packed without any padding. This matches the behaviour of + * `ethereumjs-abi`. Defaults to `false`. + * @returns The ABI encoded bytes. + */ + const encode = (types, values, packed, tight) => { + try { + return (0, packer_1.pack)({ types, values, packed, tight }); + } + catch (error) { + if (error instanceof errors_1.ParserError) { + throw new errors_1.ParserError(`Unable to encode value: ${error.message}`, error); + } + throw new errors_1.ParserError(`An unexpected error occurred: ${(0, errors_1.getErrorMessage)(error)}`, error); + } + }; + exports.encode = encode; + /** + * Encode the data with the provided type. The type must be a valid Solidity + * ABI type. + * + * See {@link encode} for more information on how values are parsed. + * + * @example + * ```typescript + * import { encodeSingle, decodeSingle } from '@metamask/abi-utils'; + * + * const encoded = encodeSingle('uint256', 42); + * const decoded = decodeSingle('uint256', encoded); + * + * console.log(decoded); // 42n + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @param type - The type to encode. + * @param value - The value to encode. + * @returns The ABI encoded bytes. 
+ */ + const encodeSingle = (type, value) => { + return (0, exports.encode)([type], [value]); + }; + exports.encodeSingle = encodeSingle; + /** + * Encode the data with the provided types. The types must be valid Solidity + * ABI types. This is similar to {@link encode}, but the values are encoded in + * the non-standard packed mode. This differs from the standard encoding in the + * following ways: + * + * - Most values are packed tightly, without alignment padding. + * - The exception is array values, which are padded to 32 bytes. + * - Values are still padded to their full size, i.e., `uint16` values are still + * padded to 2 bytes, regardless of the length of the value. + * - The encoding of dynamic types (`bytes`, `string`) is different. The length + * of the dynamic type is not included in the encoding, and the dynamic type is + * not padded to a multiple of 32 bytes. + * - All values are encoded in-place, without any offsets. + * + * The encoding of this is ambiguous as soon as there is more than one dynamic + * type. That means that these values cannot be decoded with {@link decode} or + * Solidity's `abi.decode` function. + * + * See {@link encode} for more information on how values are parsed. + * + * @example + * ```typescript + * import { encodePacked } from '@metamask/abi-utils'; + * + * const encoded = encodePacked(['uint8'], [42]); + * + * console.log(encoded); // `Uint8Array [ 42 ]` + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#non-standard-packed-mode + * @param types - The types to encode. + * @param values - The values to encode. + * @param tight - Whether to pack the values tightly. When enabled, `bytesN` + * values in arrays will be packed without any padding. This matches the + * behaviour of `ethereumjs-abi`. Defaults to `false`. + * @returns The ABI encoded bytes. 
+ */ + const encodePacked = (types, values, tight) => { + return (0, exports.encode)(types, values, true, tight); + }; + exports.encodePacked = encodePacked; + /** + * Decode an ABI encoded buffer with the specified types. The types must be + * valid Solidity ABI types. + * + * This will attempt to infer the output types from the input types. For + * example, if you use `uint256` as an input type, the output type will be + * `bigint`. This does not work for all types, however. For example, if you use + * nested array types or tuple types, the output type will be `unknown`. + * + * The resulting types of the values will be as follows: + * + * | Contract ABI Type | Resulting JavaScript Type | + * | ----------------- | ------------------------- | + * | `address` | `string` | + * | `bool` | `boolean` | + * | `bytes(n)` | `Uint8Array` | + * | `function` | {@link SolidityFunction} | + * | `int(n)` | `bigint` | + * | `string` | `string` | + * | `tuple` | `Array` | + * | `array` | `Array` | + * | `uint(n)` | `bigint` | + * + * @example + * ```typescript + * import { encode, decode } from '@metamask/abi-utils'; + * + * const types = ['uint256', 'string']; + * const encoded = encode(types, [42, 'Hello, world!']); + * const decoded = decode(types, encoded); + * + * console.log(decoded); // [42n, 'Hello, world!'] + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @param types - The types to decode the bytes with. + * @param value - The bytes-like value to decode. + * @returns The decoded values as array. 
+ */ + const decode = (types, value) => { + const bytes = (0, utils_1.createBytes)(value); + try { + return (0, packer_1.unpack)(types, bytes); + } + catch (error) { + if (error instanceof errors_1.ParserError) { + throw new errors_1.ParserError(`Unable to decode value: ${error.message}`, error); + } + throw new errors_1.ParserError(`An unexpected error occurred: ${(0, errors_1.getErrorMessage)(error)}`, error); + } + }; + exports.decode = decode; + /** + * Decode the data with the provided type. The type must be a valid Solidity + * ABI type. + * + * See {@link decode} for more information on how values are parsed. + * + * @example + * ```typescript + * import { encodeSingle, decodeSingle } from '@metamask/abi-utils'; + * + * const encoded = encodeSingle('uint256', 42); + * const decoded = decodeSingle('uint256', encoded); + * + * console.log(decoded); // 42n + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @param type - The type to decode. + * @param value - The bytes-like value to decode. + * @returns The decoded value. + */ + const decodeSingle = (type, value) => { + const result = (0, exports.decode)([type], value); + (0, utils_1.assert)(result.length === 1, new errors_1.ParserError('Decoded value array has unexpected length.')); + return result[0]; + }; + exports.decodeSingle = decodeSingle; + +} (abi$1)); + +var types = {}; + +var abi = {}; + +Object.defineProperty(abi, "__esModule", { value: true }); + +(function (exports) { + var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + })); + var __exportStar = (commonjsGlobal && commonjsGlobal.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); + }; + Object.defineProperty(exports, "__esModule", { value: true }); + __exportStar(abi, exports); + +} (types)); + +(function (exports) { + var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + })); + var __exportStar = (commonjsGlobal && commonjsGlobal.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); + }; + Object.defineProperty(exports, "__esModule", { value: true }); + __exportStar(abi$1, exports); + __exportStar(errors, exports); + __exportStar(types, exports); + +} (dist)); + +(function (exports) { + Object.defineProperty(exports, "__esModule", { value: true }); + exports.recoverTypedSignature = exports.signTypedData = exports.typedSignatureHash = exports.TypedDataUtils = exports.TYPED_MESSAGE_SCHEMA = exports.SignTypedDataVersion = void 0; + const util_1 = dist$4; + const abi_utils_1 = dist; + const parsers_1 = requireParsers(); + const utils_1 = utils$1; + const utils_2 = dist$2; + const keccak_1 = keccak; + const utils_3 = utils$3; + /** + * Represents the version 
of `signTypedData` being used. + * + * V1 is based upon [an early version of + * EIP-712](https://github.com/ethereum/EIPs/pull/712/commits/21abe254fe0452d8583d5b132b1d7be87c0439ca) + * that lacked some later security improvements, and should generally be neglected in favor of + * later versions. + * + * V3 is based on EIP-712, except that arrays and recursive data structures are not supported. + * + * V4 is based on EIP-712, and includes full support of arrays and recursive data structures. + */ + var SignTypedDataVersion; + (function (SignTypedDataVersion) { + SignTypedDataVersion["V1"] = "V1"; + SignTypedDataVersion["V3"] = "V3"; + SignTypedDataVersion["V4"] = "V4"; + })(SignTypedDataVersion = exports.SignTypedDataVersion || (exports.SignTypedDataVersion = {})); + exports.TYPED_MESSAGE_SCHEMA = { + type: 'object', + properties: { + types: { + type: 'object', + additionalProperties: { + type: 'array', + items: { + type: 'object', + properties: { + name: { type: 'string' }, + type: { type: 'string' }, + }, + required: ['name', 'type'], + }, + }, + }, + primaryType: { type: 'string' }, + domain: { type: 'object' }, + message: { type: 'object' }, + }, + required: ['types', 'primaryType', 'domain', 'message'], + }; + /** + * Validate that the given value is a valid version string. + * + * @param version - The version value to validate. + * @param allowedVersions - A list of allowed versions. If omitted, all versions are assumed to be + * allowed. + */ + function validateVersion(version, allowedVersions) { + if (!Object.keys(SignTypedDataVersion).includes(version)) { + throw new Error(`Invalid version: '${version}'`); + } + else if (allowedVersions && !allowedVersions.includes(version)) { + throw new Error(`SignTypedDataVersion not allowed: '${version}'. Allowed versions are: ${allowedVersions.join(', ')}`); + } + } + /** + * Parse a string, number, or bigint value into a `Uint8Array`. + * + * @param type - The type of the value. + * @param value - The value to parse. 
+ * @returns The parsed value. + */ + function parseNumber(type, value) { + (0, utils_2.assert)(value !== null, `Unable to encode value: Invalid number. Expected a valid number value, but received "${value}".`); + const bigIntValue = BigInt(value); + const length = (0, parsers_1.getLength)(type); + const maxValue = BigInt(2) ** BigInt(length) - BigInt(1); + // Note that this is not accurate, since the actual maximum value for unsigned + // integers is `2 ^ (length - 1) - 1`, but this is required for backwards + // compatibility with the old implementation. + (0, utils_2.assert)(bigIntValue >= -maxValue && bigIntValue <= maxValue, `Unable to encode value: Number "${value}" is out of range for type "${type}".`); + return bigIntValue; + } + /** + * Parse an address string to a `Uint8Array`. The behaviour of this is quite + * strange, in that it does not parse the address as hexadecimal string, nor as + * UTF-8. It does some weird stuff with the string and char codes, and then + * returns the result as a `Uint8Array`. + * + * This is based on the old `ethereumjs-abi` implementation, which essentially + * calls `new BN(address, 10)` on the address string, the equivalent of calling + * `parseInt(address, 10)` in JavaScript. This is not a valid way to parse an + * address and would result in `NaN` in plain JavaScript, but it is the + * behaviour of the old implementation, and so we must preserve it for backwards + * compatibility. + * + * @param address - The address to parse. + * @returns The parsed address. 
+ */ + function reallyStrangeAddressToBytes(address) { + let addressValue = BigInt(0); + for (let i = 0; i < address.length; i++) { + const character = BigInt(address.charCodeAt(i) - 48); + addressValue *= BigInt(10); + // 'a' + if (character >= 49) { + addressValue += character - BigInt(49) + BigInt(0xa); + // 'A' + } + else if (character >= 17) { + addressValue += character - BigInt(17) + BigInt(0xa); + // '0' - '9' + } + else { + addressValue += character; + } + } + return (0, utils_1.padStart)((0, utils_2.bigIntToBytes)(addressValue), 20); + } + /** + * Encode a single field. + * + * @param types - All type definitions. + * @param name - The name of the field to encode. + * @param type - The type of the field being encoded. + * @param value - The value to encode. + * @param version - The EIP-712 version the encoding should comply with. + * @returns Encoded representation of the field. + */ + function encodeField(types, name, type, + // TODO: constrain type on `value` + value, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + if (types[type] !== undefined) { + return [ + 'bytes32', + // TODO: return Buffer, remove string from return type + version === SignTypedDataVersion.V4 && value == null // eslint-disable-line no-eq-null + ? '0x0000000000000000000000000000000000000000000000000000000000000000' + : (0, util_1.arrToBufArr)((0, keccak_1.keccak256)(encodeData(type, value, types, version))), + ]; + } + // `function` is supported in `@metamask/abi-utils`, but not allowed by + // EIP-712, so we throw an error here. 
+ if (type === 'function') { + throw new Error('Unsupported or invalid type: "function"'); + } + if (value === undefined) { + throw new Error(`missing value for field ${name} of type ${type}`); + } + if (type === 'address') { + if (typeof value === 'number') { + return ['address', (0, utils_1.padStart)((0, utils_2.numberToBytes)(value), 20)]; + } + else if ((0, utils_2.isStrictHexString)(value)) { + return ['address', (0, utils_2.add0x)(value)]; + } + else if (typeof value === 'string') { + return ['address', reallyStrangeAddressToBytes(value).subarray(0, 20)]; + } + } + if (type === 'bool') { + return ['bool', Boolean(value)]; + } + if (type === 'bytes') { + if (typeof value === 'number') { + value = (0, utils_2.numberToBytes)(value); + } + else if ((0, utils_2.isStrictHexString)(value) || value === '0x') { + value = (0, utils_2.hexToBytes)(value); + } + else if (typeof value === 'string') { + value = (0, utils_2.stringToBytes)(value); + } + return ['bytes32', (0, util_1.arrToBufArr)((0, keccak_1.keccak256)(value))]; + } + if (type.startsWith('bytes') && type !== 'bytes' && !type.includes('[')) { + if (typeof value === 'number') { + if (value < 0) { + return ['bytes32', new Uint8Array(32)]; + } + return ['bytes32', (0, utils_2.bigIntToBytes)(BigInt(value))]; + } + else if ((0, utils_2.isStrictHexString)(value)) { + return ['bytes32', (0, utils_2.hexToBytes)(value)]; + } + return ['bytes32', value]; + } + if (type.startsWith('int') && !type.includes('[')) { + const bigIntValue = parseNumber(type, value); + if (bigIntValue >= BigInt(0)) { + return ['uint256', bigIntValue]; + } + return ['int256', bigIntValue]; + } + if (type === 'string') { + if (typeof value === 'number') { + value = (0, utils_2.numberToBytes)(value); + } + else { + value = (0, utils_2.stringToBytes)(value !== null && value !== void 0 ? 
value : ''); + } + return ['bytes32', (0, util_1.arrToBufArr)((0, keccak_1.keccak256)(value))]; + } + if (type.endsWith(']')) { + if (version === SignTypedDataVersion.V3) { + throw new Error('Arrays are unimplemented in encodeData; use V4 extension'); + } + const parsedType = type.slice(0, type.lastIndexOf('[')); + const typeValuePairs = value.map((item) => encodeField(types, name, parsedType, item, version)); + return [ + 'bytes32', + (0, util_1.arrToBufArr)((0, keccak_1.keccak256)((0, abi_utils_1.encode)(typeValuePairs.map(([t]) => t), typeValuePairs.map(([, v]) => v)))), + ]; + } + return [type, value]; + } + /** + * Encodes an object by encoding and concatenating each of its members. + * + * @param primaryType - The root type. + * @param data - The object to encode. + * @param types - Type definitions for all types included in the message. + * @param version - The EIP-712 version the encoding should comply with. + * @returns An encoded representation of an object. + */ + function encodeData(primaryType, data, types, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const encodedTypes = ['bytes32']; + const encodedValues = [ + hashType(primaryType, types), + ]; + for (const field of types[primaryType]) { + if (version === SignTypedDataVersion.V3 && data[field.name] === undefined) { + continue; + } + const [type, value] = encodeField(types, field.name, field.type, data[field.name], version); + encodedTypes.push(type); + encodedValues.push(value); + } + return (0, util_1.arrToBufArr)((0, abi_utils_1.encode)(encodedTypes, encodedValues)); + } + /** + * Encodes the type of an object by encoding a comma delimited list of its members. + * + * @param primaryType - The root type to encode. + * @param types - Type definitions for all types included in the message. + * @returns An encoded representation of the primary type. 
+ */ + function encodeType(primaryType, types) { + let result = ''; + const unsortedDeps = findTypeDependencies(primaryType, types); + unsortedDeps.delete(primaryType); + const deps = [primaryType, ...Array.from(unsortedDeps).sort()]; + for (const type of deps) { + const children = types[type]; + if (!children) { + throw new Error(`No type definition specified: ${type}`); + } + result += `${type}(${types[type] + .map(({ name, type: t }) => `${t} ${name}`) + .join(',')})`; + } + return result; + } + /** + * Finds all types within a type definition object. + * + * @param primaryType - The root type. + * @param types - Type definitions for all types included in the message. + * @param results - The current set of accumulated types. + * @returns The set of all types found in the type definition. + */ + function findTypeDependencies(primaryType, types, results = new Set()) { + if (typeof primaryType !== 'string') { + throw new Error(`Invalid findTypeDependencies input ${JSON.stringify(primaryType)}`); + } + const match = primaryType.match(/^\w*/u); + [primaryType] = match; + if (results.has(primaryType) || types[primaryType] === undefined) { + return results; + } + results.add(primaryType); + for (const field of types[primaryType]) { + findTypeDependencies(field.type, types, results); + } + return results; + } + /** + * Hashes an object. + * + * @param primaryType - The root type. + * @param data - The object to hash. + * @param types - Type definitions for all types included in the message. + * @param version - The EIP-712 version the encoding should comply with. + * @returns The hash of the object. + */ + function hashStruct(primaryType, data, types, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const encoded = encodeData(primaryType, data, types, version); + const hashed = (0, keccak_1.keccak256)(encoded); + const buf = (0, util_1.arrToBufArr)(hashed); + return buf; + } + /** + * Hashes the type of an object. 
+ * + * @param primaryType - The root type to hash. + * @param types - Type definitions for all types included in the message. + * @returns The hash of the object type. + */ + function hashType(primaryType, types) { + const encodedHashType = (0, utils_2.stringToBytes)(encodeType(primaryType, types)); + return (0, util_1.arrToBufArr)((0, keccak_1.keccak256)(encodedHashType)); + } + /** + * Removes properties from a message object that are not defined per EIP-712. + * + * @param data - The typed message object. + * @returns The typed message object with only allowed fields. + */ + function sanitizeData(data) { + const sanitizedData = {}; + for (const key in exports.TYPED_MESSAGE_SCHEMA.properties) { + if (data[key]) { + sanitizedData[key] = data[key]; + } + } + if ('types' in sanitizedData) { + // TODO: Fix types + sanitizedData.types = Object.assign({ EIP712Domain: [] }, sanitizedData.types); + } + return sanitizedData; + } + /** + * Create a EIP-712 Domain Hash. + * This hash is used at the top of the EIP-712 encoding. + * + * @param typedData - The typed message to hash. + * @param version - The EIP-712 version the encoding should comply with. + * @returns The hash of the domain object. + */ + function eip712DomainHash(typedData, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const sanitizedData = sanitizeData(typedData); + const { domain } = sanitizedData; + const domainType = { EIP712Domain: sanitizedData.types.EIP712Domain }; + return hashStruct('EIP712Domain', domain, domainType, version); + } + /** + * Hash a typed message according to EIP-712. The returned message starts with the EIP-712 prefix, + * which is "1901", followed by the hash of the domain separator, then the data (if any). + * The result is hashed again and returned. + * + * This function does not sign the message. The resulting hash must still be signed to create an + * EIP-712 signature. + * + * @param typedData - The typed message to hash. 
+ * @param version - The EIP-712 version the encoding should comply with. + * @returns The hash of the typed message. + */ + function eip712Hash(typedData, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const sanitizedData = sanitizeData(typedData); + const parts = [(0, utils_2.hexToBytes)('1901')]; + parts.push(eip712DomainHash(typedData, version)); + if (sanitizedData.primaryType !== 'EIP712Domain') { + parts.push(hashStruct( + // TODO: Validate that this is a string, so this type cast can be removed. + sanitizedData.primaryType, sanitizedData.message, sanitizedData.types, version)); + } + return (0, util_1.arrToBufArr)((0, keccak_1.keccak256)((0, utils_2.concatBytes)(parts))); + } + /** + * A collection of utility functions used for signing typed data. + */ + exports.TypedDataUtils = { + encodeData, + encodeType, + findTypeDependencies, + hashStruct, + hashType, + sanitizeData, + eip712Hash, + eip712DomainHash, + }; + /** + * Generate the "V1" hash for the provided typed message. + * + * The hash will be generated in accordance with an earlier version of the EIP-712 + * specification. This hash is used in `signTypedData_v1`. + * + * @param typedData - The typed message. + * @returns The '0x'-prefixed hex encoded hash representing the type of the provided message. + */ + function typedSignatureHash(typedData) { + const hashBuffer = _typedSignatureHash(typedData); + return (0, utils_2.bytesToHex)(hashBuffer); + } + exports.typedSignatureHash = typedSignatureHash; + /** + * Normalize a value, so that `@metamask/abi-utils` can handle it. This + * matches the behaviour of the `ethereumjs-abi` library. + * + * @param type - The type of the value to normalize. + * @param value - The value to normalize. + * @returns The normalized value. 
+ */ + function normalizeValue(type, value) { + if ((0, parsers_1.isArrayType)(type) && Array.isArray(value)) { + const [innerType] = (0, parsers_1.getArrayType)(type); + return value.map((item) => normalizeValue(innerType, item)); + } + if (type === 'address') { + if (typeof value === 'number') { + return (0, utils_1.padStart)((0, utils_2.numberToBytes)(value), 20); + } + if ((0, utils_2.isStrictHexString)(value)) { + return (0, utils_1.padStart)((0, utils_2.hexToBytes)(value).subarray(0, 20), 20); + } + if (value instanceof Uint8Array) { + return (0, utils_1.padStart)(value.subarray(0, 20), 20); + } + } + if (type === 'bool') { + return Boolean(value); + } + if (type.startsWith('bytes') && type !== 'bytes') { + const length = (0, parsers_1.getByteLength)(type); + if (typeof value === 'number') { + if (value < 0) { + // `solidityPack(['bytesN'], [-1])` returns `0x00..00`. + return new Uint8Array(); + } + return (0, utils_2.numberToBytes)(value).subarray(0, length); + } + if ((0, utils_2.isStrictHexString)(value)) { + return (0, utils_2.hexToBytes)(value).subarray(0, length); + } + if (value instanceof Uint8Array) { + return value.subarray(0, length); + } + } + if (type.startsWith('uint')) { + if (typeof value === 'number') { + return Math.abs(value); + } + } + if (type.startsWith('int')) { + if (typeof value === 'number') { + const length = (0, parsers_1.getLength)(type); + return BigInt.asIntN(length, BigInt(value)); + } + } + return value; + } + /** + * For some reason `ethereumjs-abi` treats `address` and `address[]` differently + * so we need to normalize `address[]` differently. + * + * @param values - The values to normalize. + * @returns The normalized values. 
+ */ + function normalizeAddresses(values) { + return values.map((value) => { + if (typeof value === 'number') { + return (0, utils_1.padStart)((0, utils_2.numberToBytes)(value), 32); + } + if ((0, utils_2.isStrictHexString)(value)) { + return (0, utils_1.padStart)((0, utils_2.hexToBytes)(value).subarray(0, 32), 32); + } + if (value instanceof Uint8Array) { + return (0, utils_1.padStart)(value.subarray(0, 32), 32); + } + return value; + }); + } + /** + * For some reason `ethereumjs-abi` treats `intN` and `intN[]` differently + * so we need to normalize `intN[]` differently. + * + * @param type - The type of the value to normalize. + * @param values - The values to normalize. + * @returns The normalized values. + */ + function normalizeIntegers(type, values) { + return values.map((value) => { + if (typeof value === 'string' || + typeof value === 'number' || + typeof value === 'bigint') { + const bigIntValue = parseNumber(type, value); + if (bigIntValue >= BigInt(0)) { + return (0, utils_1.padStart)((0, utils_2.bigIntToBytes)(bigIntValue), 32); + } + const length = (0, parsers_1.getLength)(type); + const asIntN = BigInt.asIntN(length, bigIntValue); + return (0, utils_2.signedBigIntToBytes)(asIntN, 32); + } + return value; + }); + } + /** + * Generate the "V1" hash for the provided typed message. + * + * The hash will be generated in accordance with an earlier version of the EIP-712 + * specification. This hash is used in `signTypedData_v1`. + * + * @param typedData - The typed message. + * @returns The hash representing the type of the provided message. + */ + function _typedSignatureHash(typedData) { + const error = new Error('Expect argument to be non-empty array'); + if (typeof typedData !== 'object' || + !('length' in typedData) || + !typedData.length) { + throw error; + } + const normalizedData = typedData.map(({ name, type, value }) => { + // Handle an edge case with `address[]` types. 
+ if (type === 'address[]') { + return { + name, + type: 'bytes32[]', + value: normalizeAddresses(value), + }; + } + // Handle an edge case with `intN[]` types. + if (type.startsWith('int') && (0, parsers_1.isArrayType)(type)) { + const [innerType, length] = (0, parsers_1.getArrayType)(type); + return { + name, + type: `bytes32[${length !== null && length !== void 0 ? length : ''}]`, + value: normalizeIntegers(innerType, value), + }; + } + return { + name, + type, + value: normalizeValue(type, value), + }; + }); + const data = normalizedData.map((e) => { + if (e.type !== 'bytes') { + return e.value; + } + return (0, utils_3.legacyToBuffer)(e.value); + }); + const types = normalizedData.map((e) => { + if (e.type === 'function') { + throw new Error('Unsupported or invalid type: "function"'); + } + return e.type; + }); + const schema = typedData.map((e) => { + if (!e.name) { + throw error; + } + return `${e.type} ${e.name}`; + }); + return (0, util_1.arrToBufArr)((0, keccak_1.keccak256)((0, abi_utils_1.encodePacked)(['bytes32', 'bytes32'], [ + (0, keccak_1.keccak256)((0, abi_utils_1.encodePacked)(['string[]'], [schema], true)), + (0, keccak_1.keccak256)((0, abi_utils_1.encodePacked)(types, data, true)), + ]))); + } + /** + * Sign typed data according to EIP-712. The signing differs based upon the `version`. + * + * V1 is based upon [an early version of + * EIP-712](https://github.com/ethereum/EIPs/pull/712/commits/21abe254fe0452d8583d5b132b1d7be87c0439ca) + * that lacked some later security improvements, and should generally be neglected in favor of + * later versions. + * + * V3 is based on [EIP-712](https://eips.ethereum.org/EIPS/eip-712), except that arrays and + * recursive data structures are not supported. + * + * V4 is based on [EIP-712](https://eips.ethereum.org/EIPS/eip-712), and includes full support of + * arrays and recursive data structures. + * + * @param options - The signing options. + * @param options.privateKey - The private key to sign with. 
+ * @param options.data - The typed data to sign. + * @param options.version - The signing version to use. + * @returns The '0x'-prefixed hex encoded signature. + */ + function signTypedData({ privateKey, data, version, }) { + validateVersion(version); + if ((0, utils_3.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_3.isNullish)(privateKey)) { + throw new Error('Missing private key parameter'); + } + const messageHash = version === SignTypedDataVersion.V1 + ? _typedSignatureHash(data) + : exports.TypedDataUtils.eip712Hash(data, version); + const sig = (0, util_1.ecsign)(messageHash, privateKey); + return (0, utils_3.concatSig)((0, util_1.arrToBufArr)((0, utils_2.bigIntToBytes)(sig.v)), sig.r, sig.s); + } + exports.signTypedData = signTypedData; + /** + * Recover the address of the account that created the given EIP-712 + * signature. The version provided must match the version used to + * create the signature. + * + * @param options - The signature recovery options. + * @param options.data - The typed data that was signed. + * @param options.signature - The '0x-prefixed hex encoded message signature. + * @param options.version - The signing version to use. + * @returns The '0x'-prefixed hex address of the signer. + */ + function recoverTypedSignature({ data, signature, version, }) { + validateVersion(version); + if ((0, utils_3.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_3.isNullish)(signature)) { + throw new Error('Missing signature parameter'); + } + const messageHash = version === SignTypedDataVersion.V1 + ? 
_typedSignatureHash(data) + : exports.TypedDataUtils.eip712Hash(data, version); + const publicKey = (0, utils_3.recoverPublicKey)(messageHash, signature); + const sender = (0, util_1.publicToAddress)(publicKey); + return (0, utils_2.bytesToHex)(sender); + } + exports.recoverTypedSignature = recoverTypedSignature; + +} (signTypedData)); + +var encryption = {}; + +var naclFast = {exports: {}}; + +(function (module) { + (function(nacl) { + + // Ported in 2014 by Dmitry Chestnykh and Devi Mandiri. + // Public domain. + // + // Implementation derived from TweetNaCl version 20140427. + // See for details: http://tweetnacl.cr.yp.to/ + + var gf = function(init) { + var i, r = new Float64Array(16); + if (init) for (i = 0; i < init.length; i++) r[i] = init[i]; + return r; + }; + + // Pluggable, initialized in high-level API below. + var randombytes = function(/* x, n */) { throw new Error('no PRNG'); }; + + var _0 = new Uint8Array(16); + var _9 = new Uint8Array(32); _9[0] = 9; + + var gf0 = gf(), + gf1 = gf([1]), + _121665 = gf([0xdb41, 1]), + D = gf([0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]), + D2 = gf([0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]), + X = gf([0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]), + Y = gf([0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]), + I = gf([0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]); + + function ts64(x, i, h, l) { + x[i] = (h >> 24) & 0xff; + x[i+1] = (h >> 16) & 0xff; + x[i+2] = (h >> 8) & 0xff; + x[i+3] = h & 0xff; + x[i+4] = (l >> 24) & 0xff; + x[i+5] = (l >> 16) & 0xff; + x[i+6] = (l >> 8) & 0xff; + x[i+7] = l & 0xff; + } + 
+ function vn(x, xi, y, yi, n) { + var i,d = 0; + for (i = 0; i < n; i++) d |= x[xi+i]^y[yi+i]; + return (1 & ((d - 1) >>> 8)) - 1; + } + + function crypto_verify_16(x, xi, y, yi) { + return vn(x,xi,y,yi,16); + } + + function crypto_verify_32(x, xi, y, yi) { + return vn(x,xi,y,yi,32); + } + + function core_salsa20(o, p, k, c) { + var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24, + j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24, + j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24, + j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24, + j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24, + j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24, + j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24, + j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24, + j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24, + j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24, + j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24, + j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24, + j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24, + j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24, + j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24, + j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24; + + var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7, + x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14, + x15 = j15, u; + + for (var i = 0; i < 20; i += 2) { + u = x0 + x12 | 0; + x4 ^= u<<7 | u>>>(32-7); + u = x4 + x0 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x4 | 0; + x12 ^= 
u<<13 | u>>>(32-13); + u = x12 + x8 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x1 | 0; + x9 ^= u<<7 | u>>>(32-7); + u = x9 + x5 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x9 | 0; + x1 ^= u<<13 | u>>>(32-13); + u = x1 + x13 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x6 | 0; + x14 ^= u<<7 | u>>>(32-7); + u = x14 + x10 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x14 | 0; + x6 ^= u<<13 | u>>>(32-13); + u = x6 + x2 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x11 | 0; + x3 ^= u<<7 | u>>>(32-7); + u = x3 + x15 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x3 | 0; + x11 ^= u<<13 | u>>>(32-13); + u = x11 + x7 | 0; + x15 ^= u<<18 | u>>>(32-18); + + u = x0 + x3 | 0; + x1 ^= u<<7 | u>>>(32-7); + u = x1 + x0 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x1 | 0; + x3 ^= u<<13 | u>>>(32-13); + u = x3 + x2 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x4 | 0; + x6 ^= u<<7 | u>>>(32-7); + u = x6 + x5 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x6 | 0; + x4 ^= u<<13 | u>>>(32-13); + u = x4 + x7 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x9 | 0; + x11 ^= u<<7 | u>>>(32-7); + u = x11 + x10 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x11 | 0; + x9 ^= u<<13 | u>>>(32-13); + u = x9 + x8 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x14 | 0; + x12 ^= u<<7 | u>>>(32-7); + u = x12 + x15 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x12 | 0; + x14 ^= u<<13 | u>>>(32-13); + u = x14 + x13 | 0; + x15 ^= u<<18 | u>>>(32-18); + } + x0 = x0 + j0 | 0; + x1 = x1 + j1 | 0; + x2 = x2 + j2 | 0; + x3 = x3 + j3 | 0; + x4 = x4 + j4 | 0; + x5 = x5 + j5 | 0; + x6 = x6 + j6 | 0; + x7 = x7 + j7 | 0; + x8 = x8 + j8 | 0; + x9 = x9 + j9 | 0; + x10 = x10 + j10 | 0; + x11 = x11 + j11 | 0; + x12 = x12 + j12 | 0; + x13 = x13 + j13 | 0; + x14 = x14 + j14 | 0; + x15 = x15 + j15 | 0; + + o[ 0] = x0 >>> 0 & 0xff; + o[ 1] = x0 >>> 8 & 0xff; + o[ 2] = x0 >>> 16 & 0xff; + o[ 3] = x0 >>> 24 & 0xff; + + o[ 4] = x1 >>> 0 & 0xff; + o[ 5] = x1 >>> 8 & 0xff; + o[ 6] = x1 >>> 16 & 0xff; + o[ 7] = x1 >>> 24 & 0xff; 
+ + o[ 8] = x2 >>> 0 & 0xff; + o[ 9] = x2 >>> 8 & 0xff; + o[10] = x2 >>> 16 & 0xff; + o[11] = x2 >>> 24 & 0xff; + + o[12] = x3 >>> 0 & 0xff; + o[13] = x3 >>> 8 & 0xff; + o[14] = x3 >>> 16 & 0xff; + o[15] = x3 >>> 24 & 0xff; + + o[16] = x4 >>> 0 & 0xff; + o[17] = x4 >>> 8 & 0xff; + o[18] = x4 >>> 16 & 0xff; + o[19] = x4 >>> 24 & 0xff; + + o[20] = x5 >>> 0 & 0xff; + o[21] = x5 >>> 8 & 0xff; + o[22] = x5 >>> 16 & 0xff; + o[23] = x5 >>> 24 & 0xff; + + o[24] = x6 >>> 0 & 0xff; + o[25] = x6 >>> 8 & 0xff; + o[26] = x6 >>> 16 & 0xff; + o[27] = x6 >>> 24 & 0xff; + + o[28] = x7 >>> 0 & 0xff; + o[29] = x7 >>> 8 & 0xff; + o[30] = x7 >>> 16 & 0xff; + o[31] = x7 >>> 24 & 0xff; + + o[32] = x8 >>> 0 & 0xff; + o[33] = x8 >>> 8 & 0xff; + o[34] = x8 >>> 16 & 0xff; + o[35] = x8 >>> 24 & 0xff; + + o[36] = x9 >>> 0 & 0xff; + o[37] = x9 >>> 8 & 0xff; + o[38] = x9 >>> 16 & 0xff; + o[39] = x9 >>> 24 & 0xff; + + o[40] = x10 >>> 0 & 0xff; + o[41] = x10 >>> 8 & 0xff; + o[42] = x10 >>> 16 & 0xff; + o[43] = x10 >>> 24 & 0xff; + + o[44] = x11 >>> 0 & 0xff; + o[45] = x11 >>> 8 & 0xff; + o[46] = x11 >>> 16 & 0xff; + o[47] = x11 >>> 24 & 0xff; + + o[48] = x12 >>> 0 & 0xff; + o[49] = x12 >>> 8 & 0xff; + o[50] = x12 >>> 16 & 0xff; + o[51] = x12 >>> 24 & 0xff; + + o[52] = x13 >>> 0 & 0xff; + o[53] = x13 >>> 8 & 0xff; + o[54] = x13 >>> 16 & 0xff; + o[55] = x13 >>> 24 & 0xff; + + o[56] = x14 >>> 0 & 0xff; + o[57] = x14 >>> 8 & 0xff; + o[58] = x14 >>> 16 & 0xff; + o[59] = x14 >>> 24 & 0xff; + + o[60] = x15 >>> 0 & 0xff; + o[61] = x15 >>> 8 & 0xff; + o[62] = x15 >>> 16 & 0xff; + o[63] = x15 >>> 24 & 0xff; + } + + function core_hsalsa20(o,p,k,c) { + var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24, + j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24, + j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24, + j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24, + j4 = k[12] & 0xff | (k[13] 
& 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24, + j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24, + j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24, + j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24, + j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24, + j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24, + j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24, + j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24, + j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24, + j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24, + j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24, + j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24; + + var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7, + x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14, + x15 = j15, u; + + for (var i = 0; i < 20; i += 2) { + u = x0 + x12 | 0; + x4 ^= u<<7 | u>>>(32-7); + u = x4 + x0 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x4 | 0; + x12 ^= u<<13 | u>>>(32-13); + u = x12 + x8 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x1 | 0; + x9 ^= u<<7 | u>>>(32-7); + u = x9 + x5 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x9 | 0; + x1 ^= u<<13 | u>>>(32-13); + u = x1 + x13 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x6 | 0; + x14 ^= u<<7 | u>>>(32-7); + u = x14 + x10 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x14 | 0; + x6 ^= u<<13 | u>>>(32-13); + u = x6 + x2 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x11 | 0; + x3 ^= u<<7 | u>>>(32-7); + u = x3 + x15 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x3 | 0; + x11 ^= u<<13 | u>>>(32-13); + u = x11 + x7 | 0; + x15 ^= u<<18 | u>>>(32-18); + + u = x0 + x3 | 0; + x1 ^= u<<7 | 
u>>>(32-7); + u = x1 + x0 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x1 | 0; + x3 ^= u<<13 | u>>>(32-13); + u = x3 + x2 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x4 | 0; + x6 ^= u<<7 | u>>>(32-7); + u = x6 + x5 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x6 | 0; + x4 ^= u<<13 | u>>>(32-13); + u = x4 + x7 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x9 | 0; + x11 ^= u<<7 | u>>>(32-7); + u = x11 + x10 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x11 | 0; + x9 ^= u<<13 | u>>>(32-13); + u = x9 + x8 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x14 | 0; + x12 ^= u<<7 | u>>>(32-7); + u = x12 + x15 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x12 | 0; + x14 ^= u<<13 | u>>>(32-13); + u = x14 + x13 | 0; + x15 ^= u<<18 | u>>>(32-18); + } + + o[ 0] = x0 >>> 0 & 0xff; + o[ 1] = x0 >>> 8 & 0xff; + o[ 2] = x0 >>> 16 & 0xff; + o[ 3] = x0 >>> 24 & 0xff; + + o[ 4] = x5 >>> 0 & 0xff; + o[ 5] = x5 >>> 8 & 0xff; + o[ 6] = x5 >>> 16 & 0xff; + o[ 7] = x5 >>> 24 & 0xff; + + o[ 8] = x10 >>> 0 & 0xff; + o[ 9] = x10 >>> 8 & 0xff; + o[10] = x10 >>> 16 & 0xff; + o[11] = x10 >>> 24 & 0xff; + + o[12] = x15 >>> 0 & 0xff; + o[13] = x15 >>> 8 & 0xff; + o[14] = x15 >>> 16 & 0xff; + o[15] = x15 >>> 24 & 0xff; + + o[16] = x6 >>> 0 & 0xff; + o[17] = x6 >>> 8 & 0xff; + o[18] = x6 >>> 16 & 0xff; + o[19] = x6 >>> 24 & 0xff; + + o[20] = x7 >>> 0 & 0xff; + o[21] = x7 >>> 8 & 0xff; + o[22] = x7 >>> 16 & 0xff; + o[23] = x7 >>> 24 & 0xff; + + o[24] = x8 >>> 0 & 0xff; + o[25] = x8 >>> 8 & 0xff; + o[26] = x8 >>> 16 & 0xff; + o[27] = x8 >>> 24 & 0xff; + + o[28] = x9 >>> 0 & 0xff; + o[29] = x9 >>> 8 & 0xff; + o[30] = x9 >>> 16 & 0xff; + o[31] = x9 >>> 24 & 0xff; + } + + function crypto_core_salsa20(out,inp,k,c) { + core_salsa20(out,inp,k,c); + } + + function crypto_core_hsalsa20(out,inp,k,c) { + core_hsalsa20(out,inp,k,c); + } + + var sigma = new Uint8Array([101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107]); + // "expand 32-byte k" + + function 
crypto_stream_salsa20_xor(c,cpos,m,mpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = m[mpos+i] ^ x[i]; + u = 1; + for (i = 8; i < 16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + mpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = m[mpos+i] ^ x[i]; + } + return 0; + } + + function crypto_stream_salsa20(c,cpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = x[i]; + u = 1; + for (i = 8; i < 16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = x[i]; + } + return 0; + } + + function crypto_stream(c,cpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + var sn = new Uint8Array(8); + for (var i = 0; i < 8; i++) sn[i] = n[i+16]; + return crypto_stream_salsa20(c,cpos,d,sn,s); + } + + function crypto_stream_xor(c,cpos,m,mpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + var sn = new Uint8Array(8); + for (var i = 0; i < 8; i++) sn[i] = n[i+16]; + return crypto_stream_salsa20_xor(c,cpos,m,mpos,d,sn,s); + } + + /* + * Port of Andrew Moon's Poly1305-donna-16. Public domain. 
+ * https://github.com/floodyberry/poly1305-donna + */ + + var poly1305 = function(key) { + this.buffer = new Uint8Array(16); + this.r = new Uint16Array(10); + this.h = new Uint16Array(10); + this.pad = new Uint16Array(8); + this.leftover = 0; + this.fin = 0; + + var t0, t1, t2, t3, t4, t5, t6, t7; + + t0 = key[ 0] & 0xff | (key[ 1] & 0xff) << 8; this.r[0] = ( t0 ) & 0x1fff; + t1 = key[ 2] & 0xff | (key[ 3] & 0xff) << 8; this.r[1] = ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + t2 = key[ 4] & 0xff | (key[ 5] & 0xff) << 8; this.r[2] = ((t1 >>> 10) | (t2 << 6)) & 0x1f03; + t3 = key[ 6] & 0xff | (key[ 7] & 0xff) << 8; this.r[3] = ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + t4 = key[ 8] & 0xff | (key[ 9] & 0xff) << 8; this.r[4] = ((t3 >>> 4) | (t4 << 12)) & 0x00ff; + this.r[5] = ((t4 >>> 1)) & 0x1ffe; + t5 = key[10] & 0xff | (key[11] & 0xff) << 8; this.r[6] = ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + t6 = key[12] & 0xff | (key[13] & 0xff) << 8; this.r[7] = ((t5 >>> 11) | (t6 << 5)) & 0x1f81; + t7 = key[14] & 0xff | (key[15] & 0xff) << 8; this.r[8] = ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + this.r[9] = ((t7 >>> 5)) & 0x007f; + + this.pad[0] = key[16] & 0xff | (key[17] & 0xff) << 8; + this.pad[1] = key[18] & 0xff | (key[19] & 0xff) << 8; + this.pad[2] = key[20] & 0xff | (key[21] & 0xff) << 8; + this.pad[3] = key[22] & 0xff | (key[23] & 0xff) << 8; + this.pad[4] = key[24] & 0xff | (key[25] & 0xff) << 8; + this.pad[5] = key[26] & 0xff | (key[27] & 0xff) << 8; + this.pad[6] = key[28] & 0xff | (key[29] & 0xff) << 8; + this.pad[7] = key[30] & 0xff | (key[31] & 0xff) << 8; + }; + + poly1305.prototype.blocks = function(m, mpos, bytes) { + var hibit = this.fin ? 
0 : (1 << 11); + var t0, t1, t2, t3, t4, t5, t6, t7, c; + var d0, d1, d2, d3, d4, d5, d6, d7, d8, d9; + + var h0 = this.h[0], + h1 = this.h[1], + h2 = this.h[2], + h3 = this.h[3], + h4 = this.h[4], + h5 = this.h[5], + h6 = this.h[6], + h7 = this.h[7], + h8 = this.h[8], + h9 = this.h[9]; + + var r0 = this.r[0], + r1 = this.r[1], + r2 = this.r[2], + r3 = this.r[3], + r4 = this.r[4], + r5 = this.r[5], + r6 = this.r[6], + r7 = this.r[7], + r8 = this.r[8], + r9 = this.r[9]; + + while (bytes >= 16) { + t0 = m[mpos+ 0] & 0xff | (m[mpos+ 1] & 0xff) << 8; h0 += ( t0 ) & 0x1fff; + t1 = m[mpos+ 2] & 0xff | (m[mpos+ 3] & 0xff) << 8; h1 += ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + t2 = m[mpos+ 4] & 0xff | (m[mpos+ 5] & 0xff) << 8; h2 += ((t1 >>> 10) | (t2 << 6)) & 0x1fff; + t3 = m[mpos+ 6] & 0xff | (m[mpos+ 7] & 0xff) << 8; h3 += ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + t4 = m[mpos+ 8] & 0xff | (m[mpos+ 9] & 0xff) << 8; h4 += ((t3 >>> 4) | (t4 << 12)) & 0x1fff; + h5 += ((t4 >>> 1)) & 0x1fff; + t5 = m[mpos+10] & 0xff | (m[mpos+11] & 0xff) << 8; h6 += ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + t6 = m[mpos+12] & 0xff | (m[mpos+13] & 0xff) << 8; h7 += ((t5 >>> 11) | (t6 << 5)) & 0x1fff; + t7 = m[mpos+14] & 0xff | (m[mpos+15] & 0xff) << 8; h8 += ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + h9 += ((t7 >>> 5)) | hibit; + + c = 0; + + d0 = c; + d0 += h0 * r0; + d0 += h1 * (5 * r9); + d0 += h2 * (5 * r8); + d0 += h3 * (5 * r7); + d0 += h4 * (5 * r6); + c = (d0 >>> 13); d0 &= 0x1fff; + d0 += h5 * (5 * r5); + d0 += h6 * (5 * r4); + d0 += h7 * (5 * r3); + d0 += h8 * (5 * r2); + d0 += h9 * (5 * r1); + c += (d0 >>> 13); d0 &= 0x1fff; + + d1 = c; + d1 += h0 * r1; + d1 += h1 * r0; + d1 += h2 * (5 * r9); + d1 += h3 * (5 * r8); + d1 += h4 * (5 * r7); + c = (d1 >>> 13); d1 &= 0x1fff; + d1 += h5 * (5 * r6); + d1 += h6 * (5 * r5); + d1 += h7 * (5 * r4); + d1 += h8 * (5 * r3); + d1 += h9 * (5 * r2); + c += (d1 >>> 13); d1 &= 0x1fff; + + d2 = c; + d2 += h0 * r2; + d2 += h1 * r1; + d2 += h2 * r0; + d2 += h3 * (5 * r9); + 
d2 += h4 * (5 * r8); + c = (d2 >>> 13); d2 &= 0x1fff; + d2 += h5 * (5 * r7); + d2 += h6 * (5 * r6); + d2 += h7 * (5 * r5); + d2 += h8 * (5 * r4); + d2 += h9 * (5 * r3); + c += (d2 >>> 13); d2 &= 0x1fff; + + d3 = c; + d3 += h0 * r3; + d3 += h1 * r2; + d3 += h2 * r1; + d3 += h3 * r0; + d3 += h4 * (5 * r9); + c = (d3 >>> 13); d3 &= 0x1fff; + d3 += h5 * (5 * r8); + d3 += h6 * (5 * r7); + d3 += h7 * (5 * r6); + d3 += h8 * (5 * r5); + d3 += h9 * (5 * r4); + c += (d3 >>> 13); d3 &= 0x1fff; + + d4 = c; + d4 += h0 * r4; + d4 += h1 * r3; + d4 += h2 * r2; + d4 += h3 * r1; + d4 += h4 * r0; + c = (d4 >>> 13); d4 &= 0x1fff; + d4 += h5 * (5 * r9); + d4 += h6 * (5 * r8); + d4 += h7 * (5 * r7); + d4 += h8 * (5 * r6); + d4 += h9 * (5 * r5); + c += (d4 >>> 13); d4 &= 0x1fff; + + d5 = c; + d5 += h0 * r5; + d5 += h1 * r4; + d5 += h2 * r3; + d5 += h3 * r2; + d5 += h4 * r1; + c = (d5 >>> 13); d5 &= 0x1fff; + d5 += h5 * r0; + d5 += h6 * (5 * r9); + d5 += h7 * (5 * r8); + d5 += h8 * (5 * r7); + d5 += h9 * (5 * r6); + c += (d5 >>> 13); d5 &= 0x1fff; + + d6 = c; + d6 += h0 * r6; + d6 += h1 * r5; + d6 += h2 * r4; + d6 += h3 * r3; + d6 += h4 * r2; + c = (d6 >>> 13); d6 &= 0x1fff; + d6 += h5 * r1; + d6 += h6 * r0; + d6 += h7 * (5 * r9); + d6 += h8 * (5 * r8); + d6 += h9 * (5 * r7); + c += (d6 >>> 13); d6 &= 0x1fff; + + d7 = c; + d7 += h0 * r7; + d7 += h1 * r6; + d7 += h2 * r5; + d7 += h3 * r4; + d7 += h4 * r3; + c = (d7 >>> 13); d7 &= 0x1fff; + d7 += h5 * r2; + d7 += h6 * r1; + d7 += h7 * r0; + d7 += h8 * (5 * r9); + d7 += h9 * (5 * r8); + c += (d7 >>> 13); d7 &= 0x1fff; + + d8 = c; + d8 += h0 * r8; + d8 += h1 * r7; + d8 += h2 * r6; + d8 += h3 * r5; + d8 += h4 * r4; + c = (d8 >>> 13); d8 &= 0x1fff; + d8 += h5 * r3; + d8 += h6 * r2; + d8 += h7 * r1; + d8 += h8 * r0; + d8 += h9 * (5 * r9); + c += (d8 >>> 13); d8 &= 0x1fff; + + d9 = c; + d9 += h0 * r9; + d9 += h1 * r8; + d9 += h2 * r7; + d9 += h3 * r6; + d9 += h4 * r5; + c = (d9 >>> 13); d9 &= 0x1fff; + d9 += h5 * r4; + d9 += h6 * r3; + d9 += h7 * 
r2; + d9 += h8 * r1; + d9 += h9 * r0; + c += (d9 >>> 13); d9 &= 0x1fff; + + c = (((c << 2) + c)) | 0; + c = (c + d0) | 0; + d0 = c & 0x1fff; + c = (c >>> 13); + d1 += c; + + h0 = d0; + h1 = d1; + h2 = d2; + h3 = d3; + h4 = d4; + h5 = d5; + h6 = d6; + h7 = d7; + h8 = d8; + h9 = d9; + + mpos += 16; + bytes -= 16; + } + this.h[0] = h0; + this.h[1] = h1; + this.h[2] = h2; + this.h[3] = h3; + this.h[4] = h4; + this.h[5] = h5; + this.h[6] = h6; + this.h[7] = h7; + this.h[8] = h8; + this.h[9] = h9; + }; + + poly1305.prototype.finish = function(mac, macpos) { + var g = new Uint16Array(10); + var c, mask, f, i; + + if (this.leftover) { + i = this.leftover; + this.buffer[i++] = 1; + for (; i < 16; i++) this.buffer[i] = 0; + this.fin = 1; + this.blocks(this.buffer, 0, 16); + } + + c = this.h[1] >>> 13; + this.h[1] &= 0x1fff; + for (i = 2; i < 10; i++) { + this.h[i] += c; + c = this.h[i] >>> 13; + this.h[i] &= 0x1fff; + } + this.h[0] += (c * 5); + c = this.h[0] >>> 13; + this.h[0] &= 0x1fff; + this.h[1] += c; + c = this.h[1] >>> 13; + this.h[1] &= 0x1fff; + this.h[2] += c; + + g[0] = this.h[0] + 5; + c = g[0] >>> 13; + g[0] &= 0x1fff; + for (i = 1; i < 10; i++) { + g[i] = this.h[i] + c; + c = g[i] >>> 13; + g[i] &= 0x1fff; + } + g[9] -= (1 << 13); + + mask = (c ^ 1) - 1; + for (i = 0; i < 10; i++) g[i] &= mask; + mask = ~mask; + for (i = 0; i < 10; i++) this.h[i] = (this.h[i] & mask) | g[i]; + + this.h[0] = ((this.h[0] ) | (this.h[1] << 13) ) & 0xffff; + this.h[1] = ((this.h[1] >>> 3) | (this.h[2] << 10) ) & 0xffff; + this.h[2] = ((this.h[2] >>> 6) | (this.h[3] << 7) ) & 0xffff; + this.h[3] = ((this.h[3] >>> 9) | (this.h[4] << 4) ) & 0xffff; + this.h[4] = ((this.h[4] >>> 12) | (this.h[5] << 1) | (this.h[6] << 14)) & 0xffff; + this.h[5] = ((this.h[6] >>> 2) | (this.h[7] << 11) ) & 0xffff; + this.h[6] = ((this.h[7] >>> 5) | (this.h[8] << 8) ) & 0xffff; + this.h[7] = ((this.h[8] >>> 8) | (this.h[9] << 5) ) & 0xffff; + + f = this.h[0] + this.pad[0]; + this.h[0] = f & 0xffff; + for 
(i = 1; i < 8; i++) { + f = (((this.h[i] + this.pad[i]) | 0) + (f >>> 16)) | 0; + this.h[i] = f & 0xffff; + } + + mac[macpos+ 0] = (this.h[0] >>> 0) & 0xff; + mac[macpos+ 1] = (this.h[0] >>> 8) & 0xff; + mac[macpos+ 2] = (this.h[1] >>> 0) & 0xff; + mac[macpos+ 3] = (this.h[1] >>> 8) & 0xff; + mac[macpos+ 4] = (this.h[2] >>> 0) & 0xff; + mac[macpos+ 5] = (this.h[2] >>> 8) & 0xff; + mac[macpos+ 6] = (this.h[3] >>> 0) & 0xff; + mac[macpos+ 7] = (this.h[3] >>> 8) & 0xff; + mac[macpos+ 8] = (this.h[4] >>> 0) & 0xff; + mac[macpos+ 9] = (this.h[4] >>> 8) & 0xff; + mac[macpos+10] = (this.h[5] >>> 0) & 0xff; + mac[macpos+11] = (this.h[5] >>> 8) & 0xff; + mac[macpos+12] = (this.h[6] >>> 0) & 0xff; + mac[macpos+13] = (this.h[6] >>> 8) & 0xff; + mac[macpos+14] = (this.h[7] >>> 0) & 0xff; + mac[macpos+15] = (this.h[7] >>> 8) & 0xff; + }; + + poly1305.prototype.update = function(m, mpos, bytes) { + var i, want; + + if (this.leftover) { + want = (16 - this.leftover); + if (want > bytes) + want = bytes; + for (i = 0; i < want; i++) + this.buffer[this.leftover + i] = m[mpos+i]; + bytes -= want; + mpos += want; + this.leftover += want; + if (this.leftover < 16) + return; + this.blocks(this.buffer, 0, 16); + this.leftover = 0; + } + + if (bytes >= 16) { + want = bytes - (bytes % 16); + this.blocks(m, mpos, want); + mpos += want; + bytes -= want; + } + + if (bytes) { + for (i = 0; i < bytes; i++) + this.buffer[this.leftover + i] = m[mpos+i]; + this.leftover += bytes; + } + }; + + function crypto_onetimeauth(out, outpos, m, mpos, n, k) { + var s = new poly1305(k); + s.update(m, mpos, n); + s.finish(out, outpos); + return 0; + } + + function crypto_onetimeauth_verify(h, hpos, m, mpos, n, k) { + var x = new Uint8Array(16); + crypto_onetimeauth(x,0,m,mpos,n,k); + return crypto_verify_16(h,hpos,x,0); + } + + function crypto_secretbox(c,m,d,n,k) { + var i; + if (d < 32) return -1; + crypto_stream_xor(c,0,m,0,d,n,k); + crypto_onetimeauth(c, 16, c, 32, d - 32, c); + for (i = 0; i < 16; i++) 
c[i] = 0; + return 0; + } + + function crypto_secretbox_open(m,c,d,n,k) { + var i; + var x = new Uint8Array(32); + if (d < 32) return -1; + crypto_stream(x,0,32,n,k); + if (crypto_onetimeauth_verify(c, 16,c, 32,d - 32,x) !== 0) return -1; + crypto_stream_xor(m,0,c,0,d,n,k); + for (i = 0; i < 32; i++) m[i] = 0; + return 0; + } + + function set25519(r, a) { + var i; + for (i = 0; i < 16; i++) r[i] = a[i]|0; + } + + function car25519(o) { + var i, v, c = 1; + for (i = 0; i < 16; i++) { + v = o[i] + c + 65535; + c = Math.floor(v / 65536); + o[i] = v - c * 65536; + } + o[0] += c-1 + 37 * (c-1); + } + + function sel25519(p, q, b) { + var t, c = ~(b-1); + for (var i = 0; i < 16; i++) { + t = c & (p[i] ^ q[i]); + p[i] ^= t; + q[i] ^= t; + } + } + + function pack25519(o, n) { + var i, j, b; + var m = gf(), t = gf(); + for (i = 0; i < 16; i++) t[i] = n[i]; + car25519(t); + car25519(t); + car25519(t); + for (j = 0; j < 2; j++) { + m[0] = t[0] - 0xffed; + for (i = 1; i < 15; i++) { + m[i] = t[i] - 0xffff - ((m[i-1]>>16) & 1); + m[i-1] &= 0xffff; + } + m[15] = t[15] - 0x7fff - ((m[14]>>16) & 1); + b = (m[15]>>16) & 1; + m[14] &= 0xffff; + sel25519(t, m, 1-b); + } + for (i = 0; i < 16; i++) { + o[2*i] = t[i] & 0xff; + o[2*i+1] = t[i]>>8; + } + } + + function neq25519(a, b) { + var c = new Uint8Array(32), d = new Uint8Array(32); + pack25519(c, a); + pack25519(d, b); + return crypto_verify_32(c, 0, d, 0); + } + + function par25519(a) { + var d = new Uint8Array(32); + pack25519(d, a); + return d[0] & 1; + } + + function unpack25519(o, n) { + var i; + for (i = 0; i < 16; i++) o[i] = n[2*i] + (n[2*i+1] << 8); + o[15] &= 0x7fff; + } + + function A(o, a, b) { + for (var i = 0; i < 16; i++) o[i] = a[i] + b[i]; + } + + function Z(o, a, b) { + for (var i = 0; i < 16; i++) o[i] = a[i] - b[i]; + } + + function M(o, a, b) { + var v, c, + t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0, + t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0, + t16 = 0, t17 = 0, 
t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0, + t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0, + b0 = b[0], + b1 = b[1], + b2 = b[2], + b3 = b[3], + b4 = b[4], + b5 = b[5], + b6 = b[6], + b7 = b[7], + b8 = b[8], + b9 = b[9], + b10 = b[10], + b11 = b[11], + b12 = b[12], + b13 = b[13], + b14 = b[14], + b15 = b[15]; + + v = a[0]; + t0 += v * b0; + t1 += v * b1; + t2 += v * b2; + t3 += v * b3; + t4 += v * b4; + t5 += v * b5; + t6 += v * b6; + t7 += v * b7; + t8 += v * b8; + t9 += v * b9; + t10 += v * b10; + t11 += v * b11; + t12 += v * b12; + t13 += v * b13; + t14 += v * b14; + t15 += v * b15; + v = a[1]; + t1 += v * b0; + t2 += v * b1; + t3 += v * b2; + t4 += v * b3; + t5 += v * b4; + t6 += v * b5; + t7 += v * b6; + t8 += v * b7; + t9 += v * b8; + t10 += v * b9; + t11 += v * b10; + t12 += v * b11; + t13 += v * b12; + t14 += v * b13; + t15 += v * b14; + t16 += v * b15; + v = a[2]; + t2 += v * b0; + t3 += v * b1; + t4 += v * b2; + t5 += v * b3; + t6 += v * b4; + t7 += v * b5; + t8 += v * b6; + t9 += v * b7; + t10 += v * b8; + t11 += v * b9; + t12 += v * b10; + t13 += v * b11; + t14 += v * b12; + t15 += v * b13; + t16 += v * b14; + t17 += v * b15; + v = a[3]; + t3 += v * b0; + t4 += v * b1; + t5 += v * b2; + t6 += v * b3; + t7 += v * b4; + t8 += v * b5; + t9 += v * b6; + t10 += v * b7; + t11 += v * b8; + t12 += v * b9; + t13 += v * b10; + t14 += v * b11; + t15 += v * b12; + t16 += v * b13; + t17 += v * b14; + t18 += v * b15; + v = a[4]; + t4 += v * b0; + t5 += v * b1; + t6 += v * b2; + t7 += v * b3; + t8 += v * b4; + t9 += v * b5; + t10 += v * b6; + t11 += v * b7; + t12 += v * b8; + t13 += v * b9; + t14 += v * b10; + t15 += v * b11; + t16 += v * b12; + t17 += v * b13; + t18 += v * b14; + t19 += v * b15; + v = a[5]; + t5 += v * b0; + t6 += v * b1; + t7 += v * b2; + t8 += v * b3; + t9 += v * b4; + t10 += v * b5; + t11 += v * b6; + t12 += v * b7; + t13 += v * b8; + t14 += v * b9; + t15 += v * b10; + t16 += v * b11; + t17 += v * b12; + t18 += v * b13; 
+ t19 += v * b14; + t20 += v * b15; + v = a[6]; + t6 += v * b0; + t7 += v * b1; + t8 += v * b2; + t9 += v * b3; + t10 += v * b4; + t11 += v * b5; + t12 += v * b6; + t13 += v * b7; + t14 += v * b8; + t15 += v * b9; + t16 += v * b10; + t17 += v * b11; + t18 += v * b12; + t19 += v * b13; + t20 += v * b14; + t21 += v * b15; + v = a[7]; + t7 += v * b0; + t8 += v * b1; + t9 += v * b2; + t10 += v * b3; + t11 += v * b4; + t12 += v * b5; + t13 += v * b6; + t14 += v * b7; + t15 += v * b8; + t16 += v * b9; + t17 += v * b10; + t18 += v * b11; + t19 += v * b12; + t20 += v * b13; + t21 += v * b14; + t22 += v * b15; + v = a[8]; + t8 += v * b0; + t9 += v * b1; + t10 += v * b2; + t11 += v * b3; + t12 += v * b4; + t13 += v * b5; + t14 += v * b6; + t15 += v * b7; + t16 += v * b8; + t17 += v * b9; + t18 += v * b10; + t19 += v * b11; + t20 += v * b12; + t21 += v * b13; + t22 += v * b14; + t23 += v * b15; + v = a[9]; + t9 += v * b0; + t10 += v * b1; + t11 += v * b2; + t12 += v * b3; + t13 += v * b4; + t14 += v * b5; + t15 += v * b6; + t16 += v * b7; + t17 += v * b8; + t18 += v * b9; + t19 += v * b10; + t20 += v * b11; + t21 += v * b12; + t22 += v * b13; + t23 += v * b14; + t24 += v * b15; + v = a[10]; + t10 += v * b0; + t11 += v * b1; + t12 += v * b2; + t13 += v * b3; + t14 += v * b4; + t15 += v * b5; + t16 += v * b6; + t17 += v * b7; + t18 += v * b8; + t19 += v * b9; + t20 += v * b10; + t21 += v * b11; + t22 += v * b12; + t23 += v * b13; + t24 += v * b14; + t25 += v * b15; + v = a[11]; + t11 += v * b0; + t12 += v * b1; + t13 += v * b2; + t14 += v * b3; + t15 += v * b4; + t16 += v * b5; + t17 += v * b6; + t18 += v * b7; + t19 += v * b8; + t20 += v * b9; + t21 += v * b10; + t22 += v * b11; + t23 += v * b12; + t24 += v * b13; + t25 += v * b14; + t26 += v * b15; + v = a[12]; + t12 += v * b0; + t13 += v * b1; + t14 += v * b2; + t15 += v * b3; + t16 += v * b4; + t17 += v * b5; + t18 += v * b6; + t19 += v * b7; + t20 += v * b8; + t21 += v * b9; + t22 += v * b10; + t23 += v * b11; + t24 += v * 
b12; + t25 += v * b13; + t26 += v * b14; + t27 += v * b15; + v = a[13]; + t13 += v * b0; + t14 += v * b1; + t15 += v * b2; + t16 += v * b3; + t17 += v * b4; + t18 += v * b5; + t19 += v * b6; + t20 += v * b7; + t21 += v * b8; + t22 += v * b9; + t23 += v * b10; + t24 += v * b11; + t25 += v * b12; + t26 += v * b13; + t27 += v * b14; + t28 += v * b15; + v = a[14]; + t14 += v * b0; + t15 += v * b1; + t16 += v * b2; + t17 += v * b3; + t18 += v * b4; + t19 += v * b5; + t20 += v * b6; + t21 += v * b7; + t22 += v * b8; + t23 += v * b9; + t24 += v * b10; + t25 += v * b11; + t26 += v * b12; + t27 += v * b13; + t28 += v * b14; + t29 += v * b15; + v = a[15]; + t15 += v * b0; + t16 += v * b1; + t17 += v * b2; + t18 += v * b3; + t19 += v * b4; + t20 += v * b5; + t21 += v * b6; + t22 += v * b7; + t23 += v * b8; + t24 += v * b9; + t25 += v * b10; + t26 += v * b11; + t27 += v * b12; + t28 += v * b13; + t29 += v * b14; + t30 += v * b15; + + t0 += 38 * t16; + t1 += 38 * t17; + t2 += 38 * t18; + t3 += 38 * t19; + t4 += 38 * t20; + t5 += 38 * t21; + t6 += 38 * t22; + t7 += 38 * t23; + t8 += 38 * t24; + t9 += 38 * t25; + t10 += 38 * t26; + t11 += 38 * t27; + t12 += 38 * t28; + t13 += 38 * t29; + t14 += 38 * t30; + // t15 left as is + + // first car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 
= v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + // second car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + o[ 0] = t0; + o[ 1] = t1; + o[ 2] = t2; + o[ 3] = t3; + o[ 4] = t4; + o[ 5] = t5; + o[ 6] = t6; + o[ 7] = t7; + o[ 8] = t8; + o[ 9] = t9; + o[10] = t10; + o[11] = t11; + o[12] = t12; + o[13] = t13; + o[14] = t14; + o[15] = t15; + } + + function S(o, a) { + M(o, a, a); + } + + function inv25519(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 253; a >= 0; a--) { + S(c, c); + if(a !== 2 && a 
!== 4) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; + } + + function pow2523(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 250; a >= 0; a--) { + S(c, c); + if(a !== 1) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; + } + + function crypto_scalarmult(q, n, p) { + var z = new Uint8Array(32); + var x = new Float64Array(80), r, i; + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(); + for (i = 0; i < 31; i++) z[i] = n[i]; + z[31]=(n[31]&127)|64; + z[0]&=248; + unpack25519(x,p); + for (i = 0; i < 16; i++) { + b[i]=x[i]; + d[i]=a[i]=c[i]=0; + } + a[0]=d[0]=1; + for (i=254; i>=0; --i) { + r=(z[i>>>3]>>>(i&7))&1; + sel25519(a,b,r); + sel25519(c,d,r); + A(e,a,c); + Z(a,a,c); + A(c,b,d); + Z(b,b,d); + S(d,e); + S(f,a); + M(a,c,a); + M(c,b,e); + A(e,a,c); + Z(a,a,c); + S(b,a); + Z(c,d,f); + M(a,c,_121665); + A(a,a,d); + M(c,c,a); + M(a,d,f); + M(d,b,x); + S(b,e); + sel25519(a,b,r); + sel25519(c,d,r); + } + for (i = 0; i < 16; i++) { + x[i+16]=a[i]; + x[i+32]=c[i]; + x[i+48]=b[i]; + x[i+64]=d[i]; + } + var x32 = x.subarray(32); + var x16 = x.subarray(16); + inv25519(x32,x32); + M(x16,x16,x32); + pack25519(q,x16); + return 0; + } + + function crypto_scalarmult_base(q, n) { + return crypto_scalarmult(q, n, _9); + } + + function crypto_box_keypair(y, x) { + randombytes(x, 32); + return crypto_scalarmult_base(y, x); + } + + function crypto_box_beforenm(k, y, x) { + var s = new Uint8Array(32); + crypto_scalarmult(s, x, y); + return crypto_core_hsalsa20(k, _0, s, sigma); + } + + var crypto_box_afternm = crypto_secretbox; + var crypto_box_open_afternm = crypto_secretbox_open; + + function crypto_box(c, m, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_afternm(c, m, d, n, k); + } + + function crypto_box_open(m, c, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_open_afternm(m, c, d, n, k); + } + + var K = [ + 
0x428a2f98, 0xd728ae22, 0x71374491, 0x23ef65cd, + 0xb5c0fbcf, 0xec4d3b2f, 0xe9b5dba5, 0x8189dbbc, + 0x3956c25b, 0xf348b538, 0x59f111f1, 0xb605d019, + 0x923f82a4, 0xaf194f9b, 0xab1c5ed5, 0xda6d8118, + 0xd807aa98, 0xa3030242, 0x12835b01, 0x45706fbe, + 0x243185be, 0x4ee4b28c, 0x550c7dc3, 0xd5ffb4e2, + 0x72be5d74, 0xf27b896f, 0x80deb1fe, 0x3b1696b1, + 0x9bdc06a7, 0x25c71235, 0xc19bf174, 0xcf692694, + 0xe49b69c1, 0x9ef14ad2, 0xefbe4786, 0x384f25e3, + 0x0fc19dc6, 0x8b8cd5b5, 0x240ca1cc, 0x77ac9c65, + 0x2de92c6f, 0x592b0275, 0x4a7484aa, 0x6ea6e483, + 0x5cb0a9dc, 0xbd41fbd4, 0x76f988da, 0x831153b5, + 0x983e5152, 0xee66dfab, 0xa831c66d, 0x2db43210, + 0xb00327c8, 0x98fb213f, 0xbf597fc7, 0xbeef0ee4, + 0xc6e00bf3, 0x3da88fc2, 0xd5a79147, 0x930aa725, + 0x06ca6351, 0xe003826f, 0x14292967, 0x0a0e6e70, + 0x27b70a85, 0x46d22ffc, 0x2e1b2138, 0x5c26c926, + 0x4d2c6dfc, 0x5ac42aed, 0x53380d13, 0x9d95b3df, + 0x650a7354, 0x8baf63de, 0x766a0abb, 0x3c77b2a8, + 0x81c2c92e, 0x47edaee6, 0x92722c85, 0x1482353b, + 0xa2bfe8a1, 0x4cf10364, 0xa81a664b, 0xbc423001, + 0xc24b8b70, 0xd0f89791, 0xc76c51a3, 0x0654be30, + 0xd192e819, 0xd6ef5218, 0xd6990624, 0x5565a910, + 0xf40e3585, 0x5771202a, 0x106aa070, 0x32bbd1b8, + 0x19a4c116, 0xb8d2d0c8, 0x1e376c08, 0x5141ab53, + 0x2748774c, 0xdf8eeb99, 0x34b0bcb5, 0xe19b48a8, + 0x391c0cb3, 0xc5c95a63, 0x4ed8aa4a, 0xe3418acb, + 0x5b9cca4f, 0x7763e373, 0x682e6ff3, 0xd6b2b8a3, + 0x748f82ee, 0x5defb2fc, 0x78a5636f, 0x43172f60, + 0x84c87814, 0xa1f0ab72, 0x8cc70208, 0x1a6439ec, + 0x90befffa, 0x23631e28, 0xa4506ceb, 0xde82bde9, + 0xbef9a3f7, 0xb2c67915, 0xc67178f2, 0xe372532b, + 0xca273ece, 0xea26619c, 0xd186b8c7, 0x21c0c207, + 0xeada7dd6, 0xcde0eb1e, 0xf57d4f7f, 0xee6ed178, + 0x06f067aa, 0x72176fba, 0x0a637dc5, 0xa2c898a6, + 0x113f9804, 0xbef90dae, 0x1b710b35, 0x131c471b, + 0x28db77f5, 0x23047d84, 0x32caab7b, 0x40c72493, + 0x3c9ebe0a, 0x15c9bebc, 0x431d67c4, 0x9c100d4c, + 0x4cc5d4be, 0xcb3e42b6, 0x597f299c, 0xfc657e2a, + 0x5fcb6fab, 0x3ad6faec, 0x6c44198c, 0x4a475817 + 
]; + + function crypto_hashblocks_hl(hh, hl, m, n) { + var wh = new Int32Array(16), wl = new Int32Array(16), + bh0, bh1, bh2, bh3, bh4, bh5, bh6, bh7, + bl0, bl1, bl2, bl3, bl4, bl5, bl6, bl7, + th, tl, i, j, h, l, a, b, c, d; + + var ah0 = hh[0], + ah1 = hh[1], + ah2 = hh[2], + ah3 = hh[3], + ah4 = hh[4], + ah5 = hh[5], + ah6 = hh[6], + ah7 = hh[7], + + al0 = hl[0], + al1 = hl[1], + al2 = hl[2], + al3 = hl[3], + al4 = hl[4], + al5 = hl[5], + al6 = hl[6], + al7 = hl[7]; + + var pos = 0; + while (n >= 128) { + for (i = 0; i < 16; i++) { + j = 8 * i + pos; + wh[i] = (m[j+0] << 24) | (m[j+1] << 16) | (m[j+2] << 8) | m[j+3]; + wl[i] = (m[j+4] << 24) | (m[j+5] << 16) | (m[j+6] << 8) | m[j+7]; + } + for (i = 0; i < 80; i++) { + bh0 = ah0; + bh1 = ah1; + bh2 = ah2; + bh3 = ah3; + bh4 = ah4; + bh5 = ah5; + bh6 = ah6; + bh7 = ah7; + + bl0 = al0; + bl1 = al1; + bl2 = al2; + bl3 = al3; + bl4 = al4; + bl5 = al5; + bl6 = al6; + bl7 = al7; + + // add + h = ah7; + l = al7; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + // Sigma1 + h = ((ah4 >>> 14) | (al4 << (32-14))) ^ ((ah4 >>> 18) | (al4 << (32-18))) ^ ((al4 >>> (41-32)) | (ah4 << (32-(41-32)))); + l = ((al4 >>> 14) | (ah4 << (32-14))) ^ ((al4 >>> 18) | (ah4 << (32-18))) ^ ((ah4 >>> (41-32)) | (al4 << (32-(41-32)))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // Ch + h = (ah4 & ah5) ^ (~ah4 & ah6); + l = (al4 & al5) ^ (~al4 & al6); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // K + h = K[i*2]; + l = K[i*2+1]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // w + h = wh[i%16]; + l = wl[i%16]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + th = c & 0xffff | d << 16; + tl = a & 0xffff | b << 16; + + // add + h = th; + l = tl; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + // Sigma0 + h = ((ah0 >>> 28) | (al0 << 
(32-28))) ^ ((al0 >>> (34-32)) | (ah0 << (32-(34-32)))) ^ ((al0 >>> (39-32)) | (ah0 << (32-(39-32)))); + l = ((al0 >>> 28) | (ah0 << (32-28))) ^ ((ah0 >>> (34-32)) | (al0 << (32-(34-32)))) ^ ((ah0 >>> (39-32)) | (al0 << (32-(39-32)))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // Maj + h = (ah0 & ah1) ^ (ah0 & ah2) ^ (ah1 & ah2); + l = (al0 & al1) ^ (al0 & al2) ^ (al1 & al2); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + bh7 = (c & 0xffff) | (d << 16); + bl7 = (a & 0xffff) | (b << 16); + + // add + h = bh3; + l = bl3; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = th; + l = tl; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + bh3 = (c & 0xffff) | (d << 16); + bl3 = (a & 0xffff) | (b << 16); + + ah1 = bh0; + ah2 = bh1; + ah3 = bh2; + ah4 = bh3; + ah5 = bh4; + ah6 = bh5; + ah7 = bh6; + ah0 = bh7; + + al1 = bl0; + al2 = bl1; + al3 = bl2; + al4 = bl3; + al5 = bl4; + al6 = bl5; + al7 = bl6; + al0 = bl7; + + if (i%16 === 15) { + for (j = 0; j < 16; j++) { + // add + h = wh[j]; + l = wl[j]; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = wh[(j+9)%16]; + l = wl[(j+9)%16]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // sigma0 + th = wh[(j+1)%16]; + tl = wl[(j+1)%16]; + h = ((th >>> 1) | (tl << (32-1))) ^ ((th >>> 8) | (tl << (32-8))) ^ (th >>> 7); + l = ((tl >>> 1) | (th << (32-1))) ^ ((tl >>> 8) | (th << (32-8))) ^ ((tl >>> 7) | (th << (32-7))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // sigma1 + th = wh[(j+14)%16]; + tl = wl[(j+14)%16]; + h = ((th >>> 19) | (tl << (32-19))) ^ ((tl >>> (61-32)) | (th << (32-(61-32)))) ^ (th >>> 6); + l = ((tl >>> 19) | (th << (32-19))) ^ ((th >>> (61-32)) | (tl << (32-(61-32)))) ^ ((tl >>> 6) | (th << (32-6))); + + a += l & 0xffff; b += 
l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + wh[j] = (c & 0xffff) | (d << 16); + wl[j] = (a & 0xffff) | (b << 16); + } + } + } + + // add + h = ah0; + l = al0; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[0]; + l = hl[0]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[0] = ah0 = (c & 0xffff) | (d << 16); + hl[0] = al0 = (a & 0xffff) | (b << 16); + + h = ah1; + l = al1; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[1]; + l = hl[1]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[1] = ah1 = (c & 0xffff) | (d << 16); + hl[1] = al1 = (a & 0xffff) | (b << 16); + + h = ah2; + l = al2; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[2]; + l = hl[2]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[2] = ah2 = (c & 0xffff) | (d << 16); + hl[2] = al2 = (a & 0xffff) | (b << 16); + + h = ah3; + l = al3; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[3]; + l = hl[3]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[3] = ah3 = (c & 0xffff) | (d << 16); + hl[3] = al3 = (a & 0xffff) | (b << 16); + + h = ah4; + l = al4; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[4]; + l = hl[4]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[4] = ah4 = (c & 0xffff) | (d << 16); + hl[4] = al4 = (a & 0xffff) | (b << 16); + + h = ah5; + l = al5; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[5]; + l = hl[5]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; 
d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[5] = ah5 = (c & 0xffff) | (d << 16); + hl[5] = al5 = (a & 0xffff) | (b << 16); + + h = ah6; + l = al6; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[6]; + l = hl[6]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[6] = ah6 = (c & 0xffff) | (d << 16); + hl[6] = al6 = (a & 0xffff) | (b << 16); + + h = ah7; + l = al7; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[7]; + l = hl[7]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[7] = ah7 = (c & 0xffff) | (d << 16); + hl[7] = al7 = (a & 0xffff) | (b << 16); + + pos += 128; + n -= 128; + } + + return n; + } + + function crypto_hash(out, m, n) { + var hh = new Int32Array(8), + hl = new Int32Array(8), + x = new Uint8Array(256), + i, b = n; + + hh[0] = 0x6a09e667; + hh[1] = 0xbb67ae85; + hh[2] = 0x3c6ef372; + hh[3] = 0xa54ff53a; + hh[4] = 0x510e527f; + hh[5] = 0x9b05688c; + hh[6] = 0x1f83d9ab; + hh[7] = 0x5be0cd19; + + hl[0] = 0xf3bcc908; + hl[1] = 0x84caa73b; + hl[2] = 0xfe94f82b; + hl[3] = 0x5f1d36f1; + hl[4] = 0xade682d1; + hl[5] = 0x2b3e6c1f; + hl[6] = 0xfb41bd6b; + hl[7] = 0x137e2179; + + crypto_hashblocks_hl(hh, hl, m, n); + n %= 128; + + for (i = 0; i < n; i++) x[i] = m[b-n+i]; + x[n] = 128; + + n = 256-128*(n<112?1:0); + x[n-9] = 0; + ts64(x, n-8, (b / 0x20000000) | 0, b << 3); + crypto_hashblocks_hl(hh, hl, x, n); + + for (i = 0; i < 8; i++) ts64(out, 8*i, hh[i], hl[i]); + + return 0; + } + + function add(p, q) { + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(), + g = gf(), h = gf(), t = gf(); + + Z(a, p[1], p[0]); + Z(t, q[1], q[0]); + M(a, a, t); + A(b, p[0], p[1]); + A(t, q[0], q[1]); + M(b, b, t); + M(c, p[3], q[3]); + M(c, c, D2); + M(d, p[2], q[2]); + A(d, d, d); + Z(e, b, a); + Z(f, d, c); + A(g, 
d, c); + A(h, b, a); + + M(p[0], e, f); + M(p[1], h, g); + M(p[2], g, f); + M(p[3], e, h); + } + + function cswap(p, q, b) { + var i; + for (i = 0; i < 4; i++) { + sel25519(p[i], q[i], b); + } + } + + function pack(r, p) { + var tx = gf(), ty = gf(), zi = gf(); + inv25519(zi, p[2]); + M(tx, p[0], zi); + M(ty, p[1], zi); + pack25519(r, ty); + r[31] ^= par25519(tx) << 7; + } + + function scalarmult(p, q, s) { + var b, i; + set25519(p[0], gf0); + set25519(p[1], gf1); + set25519(p[2], gf1); + set25519(p[3], gf0); + for (i = 255; i >= 0; --i) { + b = (s[(i/8)|0] >> (i&7)) & 1; + cswap(p, q, b); + add(q, p); + add(p, p); + cswap(p, q, b); + } + } + + function scalarbase(p, s) { + var q = [gf(), gf(), gf(), gf()]; + set25519(q[0], X); + set25519(q[1], Y); + set25519(q[2], gf1); + M(q[3], X, Y); + scalarmult(p, q, s); + } + + function crypto_sign_keypair(pk, sk, seeded) { + var d = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()]; + var i; + + if (!seeded) randombytes(sk, 32); + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + scalarbase(p, d); + pack(pk, p); + + for (i = 0; i < 32; i++) sk[i+32] = pk[i]; + return 0; + } + + var L = new Float64Array([0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]); + + function modL(r, x) { + var carry, i, j, k; + for (i = 63; i >= 32; --i) { + carry = 0; + for (j = i - 32, k = i - 12; j < k; ++j) { + x[j] += carry - 16 * x[i] * L[j - (i - 32)]; + carry = Math.floor((x[j] + 128) / 256); + x[j] -= carry * 256; + } + x[j] += carry; + x[i] = 0; + } + carry = 0; + for (j = 0; j < 32; j++) { + x[j] += carry - (x[31] >> 4) * L[j]; + carry = x[j] >> 8; + x[j] &= 255; + } + for (j = 0; j < 32; j++) x[j] -= carry * L[j]; + for (i = 0; i < 32; i++) { + x[i+1] += x[i] >> 8; + r[i] = x[i] & 255; + } + } + + function reduce(r) { + var x = new Float64Array(64), i; + for (i = 0; i < 64; i++) x[i] = r[i]; + for (i = 0; i < 64; 
i++) r[i] = 0; + modL(r, x); + } + + // Note: difference from C - smlen returned, not passed as argument. + function crypto_sign(sm, m, n, sk) { + var d = new Uint8Array(64), h = new Uint8Array(64), r = new Uint8Array(64); + var i, j, x = new Float64Array(64); + var p = [gf(), gf(), gf(), gf()]; + + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + var smlen = n + 64; + for (i = 0; i < n; i++) sm[64 + i] = m[i]; + for (i = 0; i < 32; i++) sm[32 + i] = d[32 + i]; + + crypto_hash(r, sm.subarray(32), n+32); + reduce(r); + scalarbase(p, r); + pack(sm, p); + + for (i = 32; i < 64; i++) sm[i] = sk[i]; + crypto_hash(h, sm, n + 64); + reduce(h); + + for (i = 0; i < 64; i++) x[i] = 0; + for (i = 0; i < 32; i++) x[i] = r[i]; + for (i = 0; i < 32; i++) { + for (j = 0; j < 32; j++) { + x[i+j] += h[i] * d[j]; + } + } + + modL(sm.subarray(32), x); + return smlen; + } + + function unpackneg(r, p) { + var t = gf(), chk = gf(), num = gf(), + den = gf(), den2 = gf(), den4 = gf(), + den6 = gf(); + + set25519(r[2], gf1); + unpack25519(r[1], p); + S(num, r[1]); + M(den, num, D); + Z(num, num, r[2]); + A(den, r[2], den); + + S(den2, den); + S(den4, den2); + M(den6, den4, den2); + M(t, den6, num); + M(t, t, den); + + pow2523(t, t); + M(t, t, num); + M(t, t, den); + M(t, t, den); + M(r[0], t, den); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) M(r[0], r[0], I); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) return -1; + + if (par25519(r[0]) === (p[31]>>7)) Z(r[0], gf0, r[0]); + + M(r[3], r[0], r[1]); + return 0; + } + + function crypto_sign_open(m, sm, n, pk) { + var i; + var t = new Uint8Array(32), h = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()], + q = [gf(), gf(), gf(), gf()]; + + if (n < 64) return -1; + + if (unpackneg(q, pk)) return -1; + + for (i = 0; i < n; i++) m[i] = sm[i]; + for (i = 0; i < 32; i++) m[i+32] = pk[i]; + crypto_hash(h, m, n); + reduce(h); + scalarmult(p, q, h); + + scalarbase(q, sm.subarray(32)); 
+ add(p, q); + pack(t, p); + + n -= 64; + if (crypto_verify_32(sm, 0, t, 0)) { + for (i = 0; i < n; i++) m[i] = 0; + return -1; + } + + for (i = 0; i < n; i++) m[i] = sm[i + 64]; + return n; + } + + var crypto_secretbox_KEYBYTES = 32, + crypto_secretbox_NONCEBYTES = 24, + crypto_secretbox_ZEROBYTES = 32, + crypto_secretbox_BOXZEROBYTES = 16, + crypto_scalarmult_BYTES = 32, + crypto_scalarmult_SCALARBYTES = 32, + crypto_box_PUBLICKEYBYTES = 32, + crypto_box_SECRETKEYBYTES = 32, + crypto_box_BEFORENMBYTES = 32, + crypto_box_NONCEBYTES = crypto_secretbox_NONCEBYTES, + crypto_box_ZEROBYTES = crypto_secretbox_ZEROBYTES, + crypto_box_BOXZEROBYTES = crypto_secretbox_BOXZEROBYTES, + crypto_sign_BYTES = 64, + crypto_sign_PUBLICKEYBYTES = 32, + crypto_sign_SECRETKEYBYTES = 64, + crypto_sign_SEEDBYTES = 32, + crypto_hash_BYTES = 64; + + nacl.lowlevel = { + crypto_core_hsalsa20: crypto_core_hsalsa20, + crypto_stream_xor: crypto_stream_xor, + crypto_stream: crypto_stream, + crypto_stream_salsa20_xor: crypto_stream_salsa20_xor, + crypto_stream_salsa20: crypto_stream_salsa20, + crypto_onetimeauth: crypto_onetimeauth, + crypto_onetimeauth_verify: crypto_onetimeauth_verify, + crypto_verify_16: crypto_verify_16, + crypto_verify_32: crypto_verify_32, + crypto_secretbox: crypto_secretbox, + crypto_secretbox_open: crypto_secretbox_open, + crypto_scalarmult: crypto_scalarmult, + crypto_scalarmult_base: crypto_scalarmult_base, + crypto_box_beforenm: crypto_box_beforenm, + crypto_box_afternm: crypto_box_afternm, + crypto_box: crypto_box, + crypto_box_open: crypto_box_open, + crypto_box_keypair: crypto_box_keypair, + crypto_hash: crypto_hash, + crypto_sign: crypto_sign, + crypto_sign_keypair: crypto_sign_keypair, + crypto_sign_open: crypto_sign_open, + + crypto_secretbox_KEYBYTES: crypto_secretbox_KEYBYTES, + crypto_secretbox_NONCEBYTES: crypto_secretbox_NONCEBYTES, + crypto_secretbox_ZEROBYTES: crypto_secretbox_ZEROBYTES, + crypto_secretbox_BOXZEROBYTES: crypto_secretbox_BOXZEROBYTES, + 
crypto_scalarmult_BYTES: crypto_scalarmult_BYTES, + crypto_scalarmult_SCALARBYTES: crypto_scalarmult_SCALARBYTES, + crypto_box_PUBLICKEYBYTES: crypto_box_PUBLICKEYBYTES, + crypto_box_SECRETKEYBYTES: crypto_box_SECRETKEYBYTES, + crypto_box_BEFORENMBYTES: crypto_box_BEFORENMBYTES, + crypto_box_NONCEBYTES: crypto_box_NONCEBYTES, + crypto_box_ZEROBYTES: crypto_box_ZEROBYTES, + crypto_box_BOXZEROBYTES: crypto_box_BOXZEROBYTES, + crypto_sign_BYTES: crypto_sign_BYTES, + crypto_sign_PUBLICKEYBYTES: crypto_sign_PUBLICKEYBYTES, + crypto_sign_SECRETKEYBYTES: crypto_sign_SECRETKEYBYTES, + crypto_sign_SEEDBYTES: crypto_sign_SEEDBYTES, + crypto_hash_BYTES: crypto_hash_BYTES, + + gf: gf, + D: D, + L: L, + pack25519: pack25519, + unpack25519: unpack25519, + M: M, + A: A, + S: S, + Z: Z, + pow2523: pow2523, + add: add, + set25519: set25519, + modL: modL, + scalarmult: scalarmult, + scalarbase: scalarbase, + }; + + /* High-level API */ + + function checkLengths(k, n) { + if (k.length !== crypto_secretbox_KEYBYTES) throw new Error('bad key size'); + if (n.length !== crypto_secretbox_NONCEBYTES) throw new Error('bad nonce size'); + } + + function checkBoxLengths(pk, sk) { + if (pk.length !== crypto_box_PUBLICKEYBYTES) throw new Error('bad public key size'); + if (sk.length !== crypto_box_SECRETKEYBYTES) throw new Error('bad secret key size'); + } + + function checkArrayTypes() { + for (var i = 0; i < arguments.length; i++) { + if (!(arguments[i] instanceof Uint8Array)) + throw new TypeError('unexpected type, use Uint8Array'); + } + } + + function cleanup(arr) { + for (var i = 0; i < arr.length; i++) arr[i] = 0; + } + + nacl.randomBytes = function(n) { + var b = new Uint8Array(n); + randombytes(b, n); + return b; + }; + + nacl.secretbox = function(msg, nonce, key) { + checkArrayTypes(msg, nonce, key); + checkLengths(key, nonce); + var m = new Uint8Array(crypto_secretbox_ZEROBYTES + msg.length); + var c = new Uint8Array(m.length); + for (var i = 0; i < msg.length; i++) 
m[i+crypto_secretbox_ZEROBYTES] = msg[i]; + crypto_secretbox(c, m, m.length, nonce, key); + return c.subarray(crypto_secretbox_BOXZEROBYTES); + }; + + nacl.secretbox.open = function(box, nonce, key) { + checkArrayTypes(box, nonce, key); + checkLengths(key, nonce); + var c = new Uint8Array(crypto_secretbox_BOXZEROBYTES + box.length); + var m = new Uint8Array(c.length); + for (var i = 0; i < box.length; i++) c[i+crypto_secretbox_BOXZEROBYTES] = box[i]; + if (c.length < 32) return null; + if (crypto_secretbox_open(m, c, c.length, nonce, key) !== 0) return null; + return m.subarray(crypto_secretbox_ZEROBYTES); + }; + + nacl.secretbox.keyLength = crypto_secretbox_KEYBYTES; + nacl.secretbox.nonceLength = crypto_secretbox_NONCEBYTES; + nacl.secretbox.overheadLength = crypto_secretbox_BOXZEROBYTES; + + nacl.scalarMult = function(n, p) { + checkArrayTypes(n, p); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + if (p.length !== crypto_scalarmult_BYTES) throw new Error('bad p size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult(q, n, p); + return q; + }; + + nacl.scalarMult.base = function(n) { + checkArrayTypes(n); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult_base(q, n); + return q; + }; + + nacl.scalarMult.scalarLength = crypto_scalarmult_SCALARBYTES; + nacl.scalarMult.groupElementLength = crypto_scalarmult_BYTES; + + nacl.box = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox(msg, nonce, k); + }; + + nacl.box.before = function(publicKey, secretKey) { + checkArrayTypes(publicKey, secretKey); + checkBoxLengths(publicKey, secretKey); + var k = new Uint8Array(crypto_box_BEFORENMBYTES); + crypto_box_beforenm(k, publicKey, secretKey); + return k; + }; + + nacl.box.after = nacl.secretbox; + + nacl.box.open = function(msg, nonce, publicKey, 
secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox.open(msg, nonce, k); + }; + + nacl.box.open.after = nacl.secretbox.open; + + nacl.box.keyPair = function() { + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_box_SECRETKEYBYTES); + crypto_box_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; + }; + + nacl.box.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_box_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + crypto_scalarmult_base(pk, secretKey); + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; + }; + + nacl.box.publicKeyLength = crypto_box_PUBLICKEYBYTES; + nacl.box.secretKeyLength = crypto_box_SECRETKEYBYTES; + nacl.box.sharedKeyLength = crypto_box_BEFORENMBYTES; + nacl.box.nonceLength = crypto_box_NONCEBYTES; + nacl.box.overheadLength = nacl.secretbox.overheadLength; + + nacl.sign = function(msg, secretKey) { + checkArrayTypes(msg, secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var signedMsg = new Uint8Array(crypto_sign_BYTES+msg.length); + crypto_sign(signedMsg, msg, msg.length, secretKey); + return signedMsg; + }; + + nacl.sign.open = function(signedMsg, publicKey) { + checkArrayTypes(signedMsg, publicKey); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var tmp = new Uint8Array(signedMsg.length); + var mlen = crypto_sign_open(tmp, signedMsg, signedMsg.length, publicKey); + if (mlen < 0) return null; + var m = new Uint8Array(mlen); + for (var i = 0; i < m.length; i++) m[i] = tmp[i]; + return m; + }; + + nacl.sign.detached = function(msg, secretKey) { + var signedMsg = nacl.sign(msg, secretKey); + var sig = new Uint8Array(crypto_sign_BYTES); + for (var i = 0; i < sig.length; i++) sig[i] = signedMsg[i]; + return sig; + }; + + 
nacl.sign.detached.verify = function(msg, sig, publicKey) { + checkArrayTypes(msg, sig, publicKey); + if (sig.length !== crypto_sign_BYTES) + throw new Error('bad signature size'); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var sm = new Uint8Array(crypto_sign_BYTES + msg.length); + var m = new Uint8Array(crypto_sign_BYTES + msg.length); + var i; + for (i = 0; i < crypto_sign_BYTES; i++) sm[i] = sig[i]; + for (i = 0; i < msg.length; i++) sm[i+crypto_sign_BYTES] = msg[i]; + return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0); + }; + + nacl.sign.keyPair = function() { + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + crypto_sign_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; + }; + + nacl.sign.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + for (var i = 0; i < pk.length; i++) pk[i] = secretKey[32+i]; + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; + }; + + nacl.sign.keyPair.fromSeed = function(seed) { + checkArrayTypes(seed); + if (seed.length !== crypto_sign_SEEDBYTES) + throw new Error('bad seed size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + for (var i = 0; i < 32; i++) sk[i] = seed[i]; + crypto_sign_keypair(pk, sk, true); + return {publicKey: pk, secretKey: sk}; + }; + + nacl.sign.publicKeyLength = crypto_sign_PUBLICKEYBYTES; + nacl.sign.secretKeyLength = crypto_sign_SECRETKEYBYTES; + nacl.sign.seedLength = crypto_sign_SEEDBYTES; + nacl.sign.signatureLength = crypto_sign_BYTES; + + nacl.hash = function(msg) { + checkArrayTypes(msg); + var h = new Uint8Array(crypto_hash_BYTES); + crypto_hash(h, msg, msg.length); + return h; + }; + + nacl.hash.hashLength = 
crypto_hash_BYTES; + + nacl.verify = function(x, y) { + checkArrayTypes(x, y); + // Zero length arguments are considered not equal. + if (x.length === 0 || y.length === 0) return false; + if (x.length !== y.length) return false; + return (vn(x, 0, y, 0, x.length) === 0) ? true : false; + }; + + nacl.setPRNG = function(fn) { + randombytes = fn; + }; + + (function() { + // Initialize PRNG if environment provides CSPRNG. + // If not, methods calling randombytes will throw. + var crypto = typeof self !== 'undefined' ? (self.crypto || self.msCrypto) : null; + if (crypto && crypto.getRandomValues) { + // Browsers. + var QUOTA = 65536; + nacl.setPRNG(function(x, n) { + var i, v = new Uint8Array(n); + for (i = 0; i < n; i += QUOTA) { + crypto.getRandomValues(v.subarray(i, i + Math.min(n - i, QUOTA))); + } + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } else if (typeof commonjsRequire !== 'undefined') { + // Node.js. + crypto = require$$5; + if (crypto && crypto.randomBytes) { + nacl.setPRNG(function(x, n) { + var i, v = crypto.randomBytes(n); + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } + } + })(); + + })(module.exports ? module.exports : (self.nacl = self.nacl || {})); +} (naclFast)); + +var naclFastExports = naclFast.exports; + +var naclUtil$1 = {exports: {}}; + +(function (module) { + // Written in 2014-2016 by Dmitry Chestnykh and Devi Mandiri. + // Public domain. 
+ (function(root, f) { + if (module.exports) module.exports = f(); + else if (root.nacl) root.nacl.util = f(); + else { + root.nacl = {}; + root.nacl.util = f(); + } + }(commonjsGlobal, function() { + + var util = {}; + + function validateBase64(s) { + if (!(/^(?:[A-Za-z0-9+\/]{2}[A-Za-z0-9+\/]{2})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/.test(s))) { + throw new TypeError('invalid encoding'); + } + } + + util.decodeUTF8 = function(s) { + if (typeof s !== 'string') throw new TypeError('expected string'); + var i, d = unescape(encodeURIComponent(s)), b = new Uint8Array(d.length); + for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i); + return b; + }; + + util.encodeUTF8 = function(arr) { + var i, s = []; + for (i = 0; i < arr.length; i++) s.push(String.fromCharCode(arr[i])); + return decodeURIComponent(escape(s.join(''))); + }; + + if (typeof atob === 'undefined') { + // Node.js + + if (typeof Buffer.from !== 'undefined') { + // Node v6 and later + util.encodeBase64 = function (arr) { // v6 and later + return Buffer.from(arr).toString('base64'); + }; + + util.decodeBase64 = function (s) { + validateBase64(s); + return new Uint8Array(Array.prototype.slice.call(Buffer.from(s, 'base64'), 0)); + }; + + } else { + // Node earlier than v6 + util.encodeBase64 = function (arr) { // v6 and later + return (new Buffer(arr)).toString('base64'); + }; + + util.decodeBase64 = function(s) { + validateBase64(s); + return new Uint8Array(Array.prototype.slice.call(new Buffer(s, 'base64'), 0)); + }; + } + + } else { + // Browsers + + util.encodeBase64 = function(arr) { + var i, s = [], len = arr.length; + for (i = 0; i < len; i++) s.push(String.fromCharCode(arr[i])); + return btoa(s.join('')); + }; + + util.decodeBase64 = function(s) { + validateBase64(s); + var i, d = atob(s), b = new Uint8Array(d.length); + for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i); + return b; + }; + + } + + return util; + + })); +} (naclUtil$1)); + +var naclUtilExports = naclUtil$1.exports; + +var 
__createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (commonjsGlobal && commonjsGlobal.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (commonjsGlobal && commonjsGlobal.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(encryption, "__esModule", { value: true }); +encryption.getEncryptionPublicKey = encryption.decryptSafely = encryption.decrypt = encryption.encryptSafely = encryption.encrypt = void 0; +const nacl = __importStar(naclFastExports); +const naclUtil = __importStar(naclUtilExports); +const utils_1 = utils$3; +/** + * Encrypt a message. + * + * @param options - The encryption options. + * @param options.publicKey - The public key of the message recipient. + * @param options.data - The message data. + * @param options.version - The type of encryption to use. + * @returns The encrypted data. 
+ */ +function encrypt({ publicKey, data, version, }) { + if ((0, utils_1.isNullish)(publicKey)) { + throw new Error('Missing publicKey parameter'); + } + else if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(version)) { + throw new Error('Missing version parameter'); + } + switch (version) { + case 'x25519-xsalsa20-poly1305': { + if (typeof data !== 'string') { + throw new Error('Message data must be given as a string'); + } + // generate ephemeral keypair + const ephemeralKeyPair = nacl.box.keyPair(); + // assemble encryption parameters - from string to UInt8 + let pubKeyUInt8Array; + try { + pubKeyUInt8Array = naclUtil.decodeBase64(publicKey); + } + catch (err) { + throw new Error('Bad public key'); + } + const msgParamsUInt8Array = naclUtil.decodeUTF8(data); + const nonce = nacl.randomBytes(nacl.box.nonceLength); + // encrypt + const encryptedMessage = nacl.box(msgParamsUInt8Array, nonce, pubKeyUInt8Array, ephemeralKeyPair.secretKey); + // handle encrypted data + const output = { + version: 'x25519-xsalsa20-poly1305', + nonce: naclUtil.encodeBase64(nonce), + ephemPublicKey: naclUtil.encodeBase64(ephemeralKeyPair.publicKey), + ciphertext: naclUtil.encodeBase64(encryptedMessage), + }; + // return encrypted msg data + return output; + } + default: + throw new Error('Encryption type/version not supported'); + } +} +encryption.encrypt = encrypt; +/** + * Encrypt a message in a way that obscures the message length. + * + * The message is padded to a multiple of 2048 before being encrypted so that the length of the + * resulting encrypted message can't be used to guess the exact length of the original message. + * + * @param options - The encryption options. + * @param options.publicKey - The public key of the message recipient. + * @param options.data - The message data. + * @param options.version - The type of encryption to use. + * @returns The encrypted data. 
+ */ +function encryptSafely({ publicKey, data, version, }) { + if ((0, utils_1.isNullish)(publicKey)) { + throw new Error('Missing publicKey parameter'); + } + else if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(version)) { + throw new Error('Missing version parameter'); + } + const DEFAULT_PADDING_LENGTH = 2 ** 11; + const NACL_EXTRA_BYTES = 16; + if (typeof data === 'object' && data && 'toJSON' in data) { + // remove toJSON attack vector + // TODO, check all possible children + throw new Error('Cannot encrypt with toJSON property. Please remove toJSON property'); + } + // add padding + const dataWithPadding = { + data, + padding: '', + }; + // calculate padding + const dataLength = Buffer.byteLength(JSON.stringify(dataWithPadding), 'utf-8'); + const modVal = dataLength % DEFAULT_PADDING_LENGTH; + let padLength = 0; + // Only pad if necessary + if (modVal > 0) { + padLength = DEFAULT_PADDING_LENGTH - modVal - NACL_EXTRA_BYTES; // nacl extra bytes + } + dataWithPadding.padding = '0'.repeat(padLength); + const paddedMessage = JSON.stringify(dataWithPadding); + return encrypt({ publicKey, data: paddedMessage, version }); +} +encryption.encryptSafely = encryptSafely; +/** + * Decrypt a message. + * + * @param options - The decryption options. + * @param options.encryptedData - The encrypted data. + * @param options.privateKey - The private key to decrypt with. + * @returns The decrypted message. 
+ */ +function decrypt({ encryptedData, privateKey, }) { + if ((0, utils_1.isNullish)(encryptedData)) { + throw new Error('Missing encryptedData parameter'); + } + else if ((0, utils_1.isNullish)(privateKey)) { + throw new Error('Missing privateKey parameter'); + } + switch (encryptedData.version) { + case 'x25519-xsalsa20-poly1305': { + // string to buffer to UInt8Array + const receiverPrivateKeyUint8Array = naclDecodeHex(privateKey); + const receiverEncryptionPrivateKey = nacl.box.keyPair.fromSecretKey(receiverPrivateKeyUint8Array).secretKey; + // assemble decryption parameters + const nonce = naclUtil.decodeBase64(encryptedData.nonce); + const ciphertext = naclUtil.decodeBase64(encryptedData.ciphertext); + const ephemPublicKey = naclUtil.decodeBase64(encryptedData.ephemPublicKey); + // decrypt + const decryptedMessage = nacl.box.open(ciphertext, nonce, ephemPublicKey, receiverEncryptionPrivateKey); + // return decrypted msg data + try { + if (!decryptedMessage) { + throw new Error(); + } + const output = naclUtil.encodeUTF8(decryptedMessage); + // TODO: This is probably extraneous but was kept to minimize changes during refactor + if (!output) { + throw new Error(); + } + return output; + } + catch (err) { + if (err && typeof err.message === 'string' && err.message.length) { + throw new Error(`Decryption failed: ${err.message}`); + } + throw new Error(`Decryption failed.`); + } + } + default: + throw new Error('Encryption type/version not supported.'); + } +} +encryption.decrypt = decrypt; +/** + * Decrypt a message that has been encrypted using `encryptSafely`. + * + * @param options - The decryption options. + * @param options.encryptedData - The encrypted data. + * @param options.privateKey - The private key to decrypt with. + * @returns The decrypted message. 
+ */ +function decryptSafely({ encryptedData, privateKey, }) { + if ((0, utils_1.isNullish)(encryptedData)) { + throw new Error('Missing encryptedData parameter'); + } + else if ((0, utils_1.isNullish)(privateKey)) { + throw new Error('Missing privateKey parameter'); + } + const dataWithPadding = JSON.parse(decrypt({ encryptedData, privateKey })); + return dataWithPadding.data; +} +encryption.decryptSafely = decryptSafely; +/** + * Get the encryption public key for the given key. + * + * @param privateKey - The private key to generate the encryption public key with. + * @returns The encryption public key. + */ +function getEncryptionPublicKey(privateKey) { + const privateKeyUint8Array = naclDecodeHex(privateKey); + const encryptionPublicKey = nacl.box.keyPair.fromSecretKey(privateKeyUint8Array).publicKey; + return naclUtil.encodeBase64(encryptionPublicKey); +} +encryption.getEncryptionPublicKey = getEncryptionPublicKey; +/** + * Convert a hex string to the UInt8Array format used by nacl. + * + * @param msgHex - The string to convert. + * @returns The converted string. + */ +function naclDecodeHex(msgHex) { + const msgBase64 = Buffer.from(msgHex, 'hex').toString('base64'); + return naclUtil.decodeBase64(msgBase64); +} + +(function (exports) { + var __createBinding = (commonjsGlobal && commonjsGlobal.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + })); + var __exportStar = (commonjsGlobal && commonjsGlobal.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); + }; + Object.defineProperty(exports, "__esModule", { value: true }); + exports.normalize = exports.concatSig = void 0; + __exportStar(personalSign$1, exports); + __exportStar(signTypedData, exports); + __exportStar(encryption, exports); + var utils_1 = utils$3; + Object.defineProperty(exports, "concatSig", { enumerable: true, get: function () { return utils_1.concatSig; } }); + Object.defineProperty(exports, "normalize", { enumerable: true, get: function () { return utils_1.normalize; } }); + +} (dist$5)); + +function packEncryptedMessage({ nonce, ephemPublicKey, ciphertext }) { + const nonceBuf = toFixedHex(bytesToHex$3(base64ToBytes$1(nonce)), 24); + const ephemPublicKeyBuf = toFixedHex(bytesToHex$3(base64ToBytes$1(ephemPublicKey)), 32); + const ciphertextBuf = bytesToHex$3(base64ToBytes$1(ciphertext)); + const messageBuff = concatBytes$3(hexToBytes$3(nonceBuf), hexToBytes$3(ephemPublicKeyBuf), hexToBytes$3(ciphertextBuf)); + return bytesToHex$3(messageBuff); +} +function unpackEncryptedMessage(encryptedMessage) { + const messageBuff = hexToBytes$3(encryptedMessage); + const nonceBuf = bytesToBase64$1(messageBuff.slice(0, 24)); + const ephemPublicKeyBuf = bytesToBase64$1(messageBuff.slice(24, 56)); + const ciphertextBuf = bytesToBase64$1(messageBuff.slice(56)); + return { + messageBuff: bytesToHex$3(messageBuff), + version: "x25519-xsalsa20-poly1305", + nonce: nonceBuf, + ephemPublicKey: ephemPublicKeyBuf, + ciphertext: ciphertextBuf + }; +} +class NoteAccount { + constructor({ netId, blockNumber, 
recoveryKey, Echoer: Echoer2 }) { + if (!recoveryKey) { + recoveryKey = bytesToHex$3(crypto$2.getRandomValues(new Uint8Array(32))).slice(2); + } + this.netId = Math.floor(Number(netId)); + this.blockNumber = blockNumber; + this.recoveryKey = recoveryKey; + this.recoveryAddress = computeAddress("0x" + recoveryKey); + this.recoveryPublicKey = dist$5.getEncryptionPublicKey(recoveryKey); + this.Echoer = Echoer2; + } + /** + * Intends to mock eth_getEncryptionPublicKey behavior from MetaMask + * In order to make the recoveryKey retrival from Echoer possible from the bare private key + */ + static getWalletPublicKey(wallet) { + let { privateKey } = wallet; + if (privateKey.startsWith("0x")) { + privateKey = privateKey.replace("0x", ""); + } + return dist$5.getEncryptionPublicKey(privateKey); + } + // This function intends to provide an encrypted value of recoveryKey for an on-chain Echoer backup purpose + // Thus, the pubKey should be derived by a Wallet instance or from Web3 wallets + // pubKey: base64 encoded 32 bytes key from https://docs.metamask.io/wallet/reference/eth_getencryptionpublickey/ + getEncryptedAccount(walletPublicKey) { + const encryptedData = dist$5.encrypt({ + publicKey: walletPublicKey, + data: this.recoveryKey, + version: "x25519-xsalsa20-poly1305" + }); + const data = packEncryptedMessage(encryptedData); + return { + // Use this later to save hexPrivateKey generated with + // Buffer.from(JSON.stringify(encryptedData)).toString('hex') + // As we don't use buffer with this library we should leave UI to do the rest + encryptedData, + // Data that could be used as an echo(data) params + data + }; + } + /** + * Decrypt Echoer backuped note encryption account with private keys + */ + decryptAccountsWithWallet(wallet, events) { + let { privateKey } = wallet; + if (privateKey.startsWith("0x")) { + privateKey = privateKey.replace("0x", ""); + } + const decryptedEvents = []; + for (const event of events) { + try { + const unpackedMessage = 
unpackEncryptedMessage(event.encryptedAccount); + const recoveryKey = dist$5.decrypt({ + encryptedData: unpackedMessage, + privateKey + }); + decryptedEvents.push( + new NoteAccount({ + netId: this.netId, + blockNumber: event.blockNumber, + recoveryKey, + Echoer: this.Echoer + }) + ); + } catch (e) { + continue; + } + } + return decryptedEvents; + } + decryptNotes(events) { + const decryptedEvents = []; + for (const event of events) { + try { + const unpackedMessage = unpackEncryptedMessage(event.encryptedNote); + const [address, noteHex] = dist$5.decrypt({ + encryptedData: unpackedMessage, + privateKey: this.recoveryKey + }).split("-"); + decryptedEvents.push({ + blockNumber: event.blockNumber, + address: getAddress(address), + noteHex + }); + } catch (e) { + continue; + } + } + return decryptedEvents; + } + encryptNote({ address, noteHex }) { + const encryptedData = dist$5.encrypt({ + publicKey: this.recoveryPublicKey, + data: `${address}-${noteHex}`, + version: "x25519-xsalsa20-poly1305" + }); + return packEncryptedMessage(encryptedData); + } +} + const DUMMY_ADDRESS = "0x1111111111111111111111111111111111111111"; const DUMMY_NONCE = "0x1111111111111111111111111111111111111111111111111111111111111111"; const DUMMY_WITHDRAW_DATA = 
"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"; @@ -198632,7 +217753,7 @@ PartialMerkleTree$1.PartialMerkleTree = PartialMerkleTree; exports.default = FixedMerkleTree_1.default; } (lib)); -var __async$5 = (__this, __arguments, generator) => { +var __async$6 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -198657,7 +217778,7 @@ class Mimc { this.mimcPromise = this.initMimc(); } initMimc() { - return __async$5(this, null, function* () { + return __async$6(this, null, function* () { this.sponge = yield buildMimcSponge(); this.hash = (left, right) => { var _a, _b; @@ -198666,7 +217787,7 @@ class Mimc { }); } getHash() { - return __async$5(this, null, function* () { + return __async$6(this, null, function* () { yield this.mimcPromise; return { sponge: this.sponge, @@ -198677,7 +217798,7 @@ class Mimc { } const mimc = new Mimc(); -var __async$4 = (__this, 
__arguments, generator) => { +var __async$5 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -198703,7 +217824,7 @@ class MerkleTreeService { amount, currency, Tornado, - commitment, + commitmentHex, merkleTreeHeight = 20, emptyElement = "21663839004416932945382355908790599225266501822907911457504978515578255421292", merkleWorkerPath @@ -198714,13 +217835,13 @@ class MerkleTreeService { this.netId = Number(netId); this.Tornado = Tornado; this.instanceName = instanceName; - this.commitment = commitment; + this.commitmentHex = commitmentHex; this.merkleTreeHeight = merkleTreeHeight; this.emptyElement = emptyElement; this.merkleWorkerPath = merkleWorkerPath; } - createTree(_0) { - return __async$4(this, arguments, function* ({ events }) { + createTree(events) { + return __async$5(this, null, function* () { const { hash: hashFunction } = yield mimc.getHash(); if (this.merkleWorkerPath) { console.log("Using merkleWorker\n"); @@ -198771,15 +217892,69 @@ class MerkleTreeService { }); }); } - verifyTree(_0) { - return __async$4(this, arguments, function* ({ events }) { + createPartialTree(_0) { + return __async$5(this, arguments, function* ({ edge, elements }) { + const { hash: hashFunction } = yield mimc.getHash(); + if (this.merkleWorkerPath) { + console.log("Using merkleWorker\n"); + try { + if (isNode) { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new threads.Worker(this.merkleWorkerPath, { + workerData: { + merkleTreeHeight: this.merkleTreeHeight, + edge, + elements, + zeroElement: this.emptyElement + } + }); + worker.on("message", resolve); + worker.on("error", reject); + worker.on("exit", (code) => { + if (code !== 0) { + reject(new Error(`Worker stopped with exit code ${code}`)); + } + }); + }); + return lib.PartialMerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } else { + const merkleWorkerPromise = new Promise((resolve, 
reject) => { + const worker = new Worker(this.merkleWorkerPath); + worker.onmessage = (e) => { + resolve(e.data); + }; + worker.onerror = (e) => { + reject(e); + }; + worker.postMessage({ + merkleTreeHeight: this.merkleTreeHeight, + edge, + elements, + zeroElement: this.emptyElement + }); + }); + return lib.PartialMerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } + } catch (err) { + console.log("merkleWorker failed, falling back to synchronous merkle tree"); + console.log(err); + } + } + return new lib.PartialMerkleTree(this.merkleTreeHeight, edge, elements, { + zeroElement: this.emptyElement, + hashFunction + }); + }); + } + verifyTree(events) { + return __async$5(this, null, function* () { console.log( ` Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCase()} would take a while ` ); console.time("Created tree in"); - const tree = yield this.createTree({ events: events.map(({ commitment }) => BigInt(commitment).toString()) }); + const tree = yield this.createTree(events.map(({ commitment }) => commitment)); console.timeEnd("Created tree in"); console.log(""); const isKnownRoot = yield this.Tornado.isKnownRoot(toFixedHex(BigInt(tree.root))); @@ -198792,648 +217967,6 @@ Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCa } } -const enabledChains = ["1", "10", "56", "100", "137", "42161", "43114", "11155111"]; -const theGraph = { - name: "Hosted Graph", - url: "https://api.thegraph.com" -}; -const tornado = { - name: "Tornado Subgraphs", - url: "https://tornadocash-rpc.com" -}; -const networkConfig = { - netId1: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 80, - fast: 50, - standard: 25, - low: 8 - }, - nativeCurrency: "eth", - currencyName: "ETH", - explorerUrl: { - tx: "https://etherscan.io/tx/", - address: "https://etherscan.io/address/", - block: "https://etherscan.io/block/" - }, - merkleTreeHeight: 20, - emptyElement: 
"21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Ethereum Mainnet", - deployedBlock: 9116966, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com" - }, - chainnodes: { - name: "Tornado RPC", - url: "https://mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - mevblockerRPC: { - name: "MevblockerRPC", - url: "https://rpc.mevblocker.io" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/ethereum-mainnet" - }, - noderealRPC: { - name: "NodeReal RPC", - url: "https://eth-mainnet.nodereal.io/v1/1659dfb40aa24bbb8153a677b98064d7" - }, - notadegenRPC: { - name: "NotADegen RPC", - url: "https://rpc.notadegen.com/eth" - }, - keydonixRPC: { - name: "Keydonix RPC", - url: "https://ethereum.keydonix.com/v1/mainnet" - }, - oneRPC: { - name: "1RPC", - url: "https://1rpc.io/eth" - } - }, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - routerContract: "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", - registryContract: "0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2", - echoContract: "0x9B27DD5Bb15d42DC224FCD0B7caEbBe16161Df42", - aggregatorContract: "0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49", - reverseRecordsContract: "0x3671aE578E63FdF66ad4F3E12CC0c0d71Ac7510C", - tornadoSubgraph: "tornadocash/mainnet-tornado-subgraph", - registrySubgraph: "tornadocash/tornado-relayer-registry", - subgraphs: { - tornado, - theGraph - }, - tokens: { - eth: { - instanceAddress: { - "0.1": "0x12D66f87A04A9E220743712cE6d9bB1B5616B8Fc", - "1": "0x47CE0C6eD5B0Ce3d3A51fdb1C52DC66a7c3c2936", - "10": "0x910Cbd523D972eb0a6f4cAe4618aD62622b39DbF", - "100": "0xA160cdAB225685dA1d56aa342Ad8841c3b53f291" - }, - symbol: "ETH", - decimals: 18 - }, - dai: { - instanceAddress: { - "100": "0xD4B88Df4D29F5CedD6857912842cff3b20C8Cfa3", - "1000": "0xFD8610d20aA15b7B2E3Be39B396a1bC3516c7144", - "10000": "0x07687e702b410Fa43f4cB4Af7FA097918ffD2730", - "100000": 
"0x23773E65ed146A459791799d01336DB287f25334" - }, - tokenAddress: "0x6B175474E89094C44Da98b954EedeAC495271d0F", - tokenGasLimit: 7e4, - symbol: "DAI", - decimals: 18, - gasLimit: 7e5 - }, - cdai: { - instanceAddress: { - "5000": "0x22aaA7720ddd5388A3c0A3333430953C68f1849b", - "50000": "0x03893a7c7463AE47D46bc7f091665f1893656003", - "500000": "0x2717c5e28cf931547B621a5dddb772Ab6A35B701", - "5000000": "0xD21be7248e0197Ee08E0c20D4a96DEBdaC3D20Af" - }, - tokenAddress: "0x5d3a536E4D6DbD6114cc1Ead35777bAB948E3643", - tokenGasLimit: 2e5, - symbol: "cDAI", - decimals: 8, - gasLimit: 7e5 - }, - usdc: { - instanceAddress: { - "100": "0xd96f2B1c14Db8458374d9Aca76E26c3D18364307", - "1000": "0x4736dCf1b7A3d580672CcE6E7c65cd5cc9cFBa9D" - }, - tokenAddress: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", - tokenGasLimit: 7e4, - symbol: "USDC", - decimals: 6, - gasLimit: 7e5 - }, - usdt: { - instanceAddress: { - "100": "0x169AD27A470D064DEDE56a2D3ff727986b15D52B", - "1000": "0x0836222F2B2B24A3F36f98668Ed8F0B38D1a872f" - }, - tokenAddress: "0xdAC17F958D2ee523a2206206994597C13D831ec7", - tokenGasLimit: 7e4, - symbol: "USDT", - decimals: 6, - gasLimit: 7e5 - }, - wbtc: { - instanceAddress: { - "0.1": "0x178169B423a011fff22B9e3F3abeA13414dDD0F1", - "1": "0x610B717796ad172B316836AC95a2ffad065CeaB4", - "10": "0xbB93e510BbCD0B7beb5A853875f9eC60275CF498" - }, - tokenAddress: "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", - tokenGasLimit: 7e4, - symbol: "WBTC", - decimals: 8, - gasLimit: 7e5 - } - }, - ensSubdomainKey: "mainnet-tornado", - pollInterval: 15, - constants: { - GOVERNANCE_BLOCK: 11474695, - NOTE_ACCOUNT_BLOCK: 11842486, - ENCRYPTED_NOTES_BLOCK: 14248730, - REGISTRY_BLOCK: 14173129, - MINING_BLOCK_TIME: 15 - }, - "torn.contract.tornadocash.eth": "0x77777FeDdddFfC19Ff86DB637967013e6C6A116C", - "governance.contract.tornadocash.eth": "0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce", - "tornado-router.contract.tornadocash.eth": "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", - 
"staking-rewards.contract.tornadocash.eth": "0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29" - }, - netId56: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 5, - fast: 5, - standard: 5, - low: 5 - }, - nativeCurrency: "bnb", - currencyName: "BNB", - explorerUrl: { - tx: "https://bscscan.com/tx/", - address: "https://bscscan.com/address/", - block: "https://bscscan.com/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Binance Smart Chain", - deployedBlock: 8158799, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - tornadoSubgraph: "tornadocash/bsc-tornado-subgraph", - subgraphs: { - tornado, - theGraph - }, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com/bsc" - }, - chainnodes: { - name: "Tornado RPC", - url: "https://bsc-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/bsc-mainnet" - }, - noderealRPC: { - name: "NodeReal RPC", - url: "https://bsc-mainnet.nodereal.io/v1/64a9df0874fb4a93b9d0a3849de012d3" - }, - oneRPC: { - name: "1RPC", - url: "https://1rpc.io/bnb" - } - }, - tokens: { - bnb: { - instanceAddress: { - "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", - "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", - "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" - }, - symbol: "BNB", - decimals: 18 - } - }, - ensSubdomainKey: "bsc-tornado", - pollInterval: 10, - constants: { - NOTE_ACCOUNT_BLOCK: 8159269, - ENCRYPTED_NOTES_BLOCK: 8159269 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId137: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 100, - fast: 75, - 
standard: 50, - low: 30 - }, - nativeCurrency: "matic", - currencyName: "MATIC", - explorerUrl: { - tx: "https://polygonscan.com/tx/", - address: "https://polygonscan.com/address/", - block: "https://polygonscan.com/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Polygon (Matic) Network", - deployedBlock: 16257962, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - gasPriceOracleContract: "0xF81A8D8D3581985D3969fe53bFA67074aDFa8F3C", - tornadoSubgraph: "tornadocash/matic-tornado-subgraph", - subgraphs: { - tornado, - theGraph - }, - rpcUrls: { - chainnodes: { - name: "Tornado RPC", - url: "https://polygon-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/polygon-mainnet" - }, - oneRpc: { - name: "1RPC", - url: "https://1rpc.io/matic" - } - }, - tokens: { - matic: { - instanceAddress: { - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", - "1000": "0xdf231d99Ff8b6c6CBF4E9B9a945CBAcEF9339178", - "10000": "0xaf4c0B70B2Ea9FB7487C7CbB37aDa259579fe040", - "100000": "0xa5C2254e4253490C54cef0a4347fddb8f75A4998" - }, - symbol: "MATIC", - decimals: 18 - } - }, - ensSubdomainKey: "polygon-tornado", - pollInterval: 10, - constants: { - NOTE_ACCOUNT_BLOCK: 16257996, - ENCRYPTED_NOTES_BLOCK: 16257996 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId10: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 1e-3, - fast: 1e-3, - standard: 1e-3, - low: 1e-3 - }, - nativeCurrency: "eth", - currencyName: "ETH", - explorerUrl: { - tx: "https://optimistic.etherscan.io/tx/", - address: "https://optimistic.etherscan.io/address/", - block: "https://optimistic.etherscan.io/block/" - }, - 
merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Optimism", - deployedBlock: 2243689, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - ovmGasPriceOracleContract: "0x420000000000000000000000000000000000000F", - tornadoSubgraph: "tornadocash/optimism-tornado-subgraph", - subgraphs: { - tornado, - theGraph - }, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com/op" - }, - chainnodes: { - name: "Tornado RPC", - url: "https://optimism-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - optimism: { - name: "Optimism RPC", - url: "https://mainnet.optimism.io" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/optimism-mainnet" - }, - oneRpc: { - name: "1RPC", - url: "https://1rpc.io/op" - } - }, - tokens: { - eth: { - instanceAddress: { - "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", - "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", - "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" - }, - symbol: "ETH", - decimals: 18 - } - }, - ensSubdomainKey: "optimism-tornado", - pollInterval: 15, - constants: { - NOTE_ACCOUNT_BLOCK: 2243694, - ENCRYPTED_NOTES_BLOCK: 2243694 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId42161: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 4, - fast: 3, - standard: 2.52, - low: 2.29 - }, - nativeCurrency: "eth", - currencyName: "ETH", - explorerUrl: { - tx: "https://arbiscan.io/tx/", - address: "https://arbiscan.io/address/", - block: "https://arbiscan.io/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: 
"Arbitrum One", - deployedBlock: 3430648, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - tornadoSubgraph: "tornadocash/arbitrum-tornado-subgraph", - subgraphs: { - tornado, - theGraph - }, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com/arbitrum" - }, - chainnodes: { - name: "Tornado RPC", - url: "https://arbitrum-one.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - arbitrum: { - name: "Arbitrum RPC", - url: "https://arb1.arbitrum.io/rpc" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/arbitrum-one" - }, - oneRpc: { - name: "1rpc", - url: "https://1rpc.io/arb" - } - }, - tokens: { - eth: { - instanceAddress: { - "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", - "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", - "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" - }, - symbol: "ETH", - decimals: 18 - } - }, - ensSubdomainKey: "arbitrum-tornado", - pollInterval: 15, - constants: { - NOTE_ACCOUNT_BLOCK: 3430605, - ENCRYPTED_NOTES_BLOCK: 3430605 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId100: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 6, - fast: 5, - standard: 4, - low: 1 - }, - nativeCurrency: "xdai", - currencyName: "xDAI", - explorerUrl: { - tx: "https://blockscout.com/xdai/mainnet/tx/", - address: "https://blockscout.com/xdai/mainnet/address/", - block: "https://blockscout.com/xdai/mainnet/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Gnosis Chain", - deployedBlock: 17754561, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - 
routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - tornadoSubgraph: "tornadocash/xdai-tornado-subgraph", - subgraphs: { - tornado, - theGraph - }, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com/gnosis" - }, - chainnodes: { - name: "Tornado RPC", - url: "https://gnosis-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - gnosis: { - name: "Gnosis RPC", - url: "https://rpc.gnosischain.com" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/arbitrum-one" - }, - blockPi: { - name: "BlockPi", - url: "https://gnosis.blockpi.network/v1/rpc/public" - } - }, - tokens: { - xdai: { - instanceAddress: { - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", - "1000": "0xdf231d99Ff8b6c6CBF4E9B9a945CBAcEF9339178", - "10000": "0xaf4c0B70B2Ea9FB7487C7CbB37aDa259579fe040", - "100000": "0xa5C2254e4253490C54cef0a4347fddb8f75A4998" - }, - symbol: "xDAI", - decimals: 18 - } - }, - ensSubdomainKey: "gnosis-tornado", - pollInterval: 15, - constants: { - NOTE_ACCOUNT_BLOCK: 17754564, - ENCRYPTED_NOTES_BLOCK: 17754564 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId43114: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 225, - fast: 35, - standard: 25, - low: 25 - }, - nativeCurrency: "avax", - currencyName: "AVAX", - explorerUrl: { - tx: "https://snowtrace.io/tx/", - address: "https://snowtrace.io/address/", - block: "https://snowtrace.io/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Avalanche Mainnet", - deployedBlock: 4429818, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - tornadoSubgraph: "tornadocash/avalanche-tornado-subgraph", - subgraphs: { - theGraph - }, - rpcUrls: { - 
oneRPC: { - name: "OneRPC", - url: "https://1rpc.io/avax/c" - }, - avalancheRPC: { - name: "Avalanche RPC", - url: "https://api.avax.network/ext/bc/C/rpc" - }, - meowRPC: { - name: "Meow RPC", - url: "https://avax.meowrpc.com" - } - }, - tokens: { - avax: { - instanceAddress: { - "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", - "500": "0xaf8d1839c3c67cf571aa74B5c12398d4901147B3" - }, - symbol: "AVAX", - decimals: 18 - } - }, - ensSubdomainKey: "avalanche-tornado", - pollInterval: 10, - constants: { - NOTE_ACCOUNT_BLOCK: 4429813, - ENCRYPTED_NOTES_BLOCK: 4429813 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId11155111: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 2, - fast: 2, - standard: 2, - low: 2 - }, - nativeCurrency: "eth", - currencyName: "SepoliaETH", - explorerUrl: { - tx: "https://sepolia.etherscan.io/tx/", - address: "https://sepolia.etherscan.io/address/", - block: "https://sepolia.etherscan.io/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Ethereum Sepolia", - deployedBlock: 5594395, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - routerContract: "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee", - registryContract: "0x1428e5d2356b13778A13108b10c440C83011dfB8", - echoContract: "0xcDD1fc3F5ac2782D83449d3AbE80D6b7B273B0e5", - aggregatorContract: "0x4088712AC9fad39ea133cdb9130E465d235e9642", - reverseRecordsContract: "0xEc29700C0283e5Be64AcdFe8077d6cC95dE23C23", - tornadoSubgraph: "tornadocash/sepolia-tornado-subgraph", - subgraphs: { - tornado - }, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com/sepolia" - }, - sepolia: { - name: "Sepolia RPC", - url: "https://rpc.sepolia.org" - }, - chainnodes: { - name: "Chainnodes RPC", - url: 
"https://sepolia.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - } - }, - tokens: { - eth: { - instanceAddress: { - "0.1": "0x8C4A04d872a6C1BE37964A21ba3a138525dFF50b", - "1": "0x8cc930096B4Df705A007c4A039BDFA1320Ed2508", - "10": "0x8D10d506D29Fc62ABb8A290B99F66dB27Fc43585", - "100": "0x44c5C92ed73dB43888210264f0C8b36Fd68D8379" - }, - symbol: "ETH", - decimals: 18 - }, - dai: { - instanceAddress: { - "100": "0x6921fd1a97441dd603a997ED6DDF388658daf754", - "1000": "0x50a637770F5d161999420F7d70d888DE47207145", - "10000": "0xecD649870407cD43923A816Cc6334a5bdf113621", - "100000": "0x73B4BD04bF83206B6e979BE2507098F92EDf4F90" - }, - tokenAddress: "0xFF34B3d4Aee8ddCd6F9AFFFB6Fe49bD371b8a357", - tokenGasLimit: 7e4, - symbol: "DAI", - decimals: 18, - gasLimit: 7e5 - } - }, - ensSubdomainKey: "sepolia-tornado", - pollInterval: 15, - constants: { - GOVERNANCE_BLOCK: 5594395, - NOTE_ACCOUNT_BLOCK: 5594395, - ENCRYPTED_NOTES_BLOCK: 5594395, - MINING_BLOCK_TIME: 15 - }, - "torn.contract.tornadocash.eth": "0x3AE6667167C0f44394106E197904519D808323cA", - "governance.contract.tornadocash.eth": "0xe5324cD7602eeb387418e594B87aCADee08aeCAD", - "tornado-router.contract.tornadocash.eth": "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee" - } -}; -const subdomains = enabledChains.map((chain) => networkConfig[`netId${chain}`].ensSubdomainKey); - function parseNumber(value) { if (!value || isNaN(Number(value))) { throw new InvalidArgumentError("Invalid Number"); @@ -199487,6 +218020,17 @@ function parseKey(value) { } return value; } +function parseRecoveryKey(value) { + if (!value) { + throw new InvalidArgumentError("Invalid Recovery Key"); + } + try { + computeAddress("0x" + value); + } catch (e) { + throw new InvalidArgumentError("Invalid Recovery Key"); + } + return value; +} class TokenPriceOracle { constructor(provider, multicall2, oracle) { @@ -199509,26 +218053,26 @@ class TokenPriceOracle { } } -var __defProp$1 = Object.defineProperty; -var __defProps$1 = Object.defineProperties; 
-var __getOwnPropDescs$1 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols; -var __hasOwnProp$1 = Object.prototype.hasOwnProperty; -var __propIsEnum$1 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$1 = (a, b) => { +var __defProp$2 = Object.defineProperty; +var __defProps$2 = Object.defineProperties; +var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols; +var __hasOwnProp$2 = Object.prototype.hasOwnProperty; +var __propIsEnum$2 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$2 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$1.call(b, prop)) - __defNormalProp$1(a, prop, b[prop]); - if (__getOwnPropSymbols$1) - for (var prop of __getOwnPropSymbols$1(b)) { - if (__propIsEnum$1.call(b, prop)) - __defNormalProp$1(a, prop, b[prop]); + if (__hasOwnProp$2.call(b, prop)) + __defNormalProp$2(a, prop, b[prop]); + if (__getOwnPropSymbols$2) + for (var prop of __getOwnPropSymbols$2(b)) { + if (__propIsEnum$2.call(b, prop)) + __defNormalProp$2(a, prop, b[prop]); } return a; }; -var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b)); -var __async$3 = (__this, __arguments, generator) => { +var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b)); +var __async$4 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -199556,11 +218100,11 @@ function parseSemanticVersion(version) { } function isRelayerUpdated(relayerVersion, netId) { const { major, patch, prerelease } = parseSemanticVersion(relayerVersion); - 
const requiredMajor = netId === 1 ? "4" : "5"; + const requiredMajor = netId === NetId.MAINNET ? "4" : "5"; const isUpdatedMajor = major === requiredMajor; if (prerelease) return false; - return isUpdatedMajor && (Number(patch) >= 5 || Number(netId) !== 1); + return isUpdatedMajor && (Number(patch) >= 5 || netId !== NetId.MAINNET); } function calculateScore({ stakeBalance, tornadoServiceFee }, minFee = 0.33, maxFee = 0.53) { if (tornadoServiceFee < minFee) { @@ -199582,9 +218126,15 @@ function getWeightRandom(weightsScores, random) { } return Math.floor(Math.random() * weightsScores.length); } +function getSupportedInstances(instanceList) { + const rawList = Object.values(instanceList).map(({ instanceAddress }) => { + return Object.values(instanceAddress); + }).flat(); + return rawList.map((l) => getAddress(l)); +} function pickWeightedRandomRelayer(relayers, netId) { let minFee, maxFee; - if (Number(netId) !== 1) { + if (netId !== NetId.MAINNET) { minFee = 0.01; maxFee = 0.3; } @@ -199598,19 +218148,19 @@ function pickWeightedRandomRelayer(relayers, netId) { } class RelayerClient { constructor({ netId, config, Aggregator, fetchDataOptions: fetchDataOptions2 }) { - this.netId = Number(netId); + this.netId = netId; this.config = config; this.Aggregator = Aggregator; this.fetchDataOptions = fetchDataOptions2; } askRelayerStatus(_0) { - return __async$3(this, arguments, function* ({ + return __async$4(this, arguments, function* ({ hostname, relayerAddress }) { var _a, _b; const url = `https://${!hostname.endsWith("/") ? 
hostname + "/" : hostname}`; - const rawStatus = yield fetchData(`${url}status`, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), { + const rawStatus = yield fetchData(`${url}status`, __spreadProps$2(__spreadValues$2({}, this.fetchDataOptions), { headers: { "Content-Type": "application/json, application/x-www-form-urlencoded" }, @@ -199621,7 +218171,7 @@ class RelayerClient { if (!statusValidator(rawStatus)) { throw new Error("Invalid status schema"); } - const status = __spreadProps$1(__spreadValues$1({}, rawStatus), { + const status = __spreadProps$2(__spreadValues$2({}, rawStatus), { url }); if (status.currentQueue > 5) { @@ -199630,7 +218180,7 @@ class RelayerClient { if (status.netId !== this.netId) { throw new Error("This relayer serves a different network"); } - if (relayerAddress && this.netId === 1 && status.rewardAccount !== relayerAddress) { + if (relayerAddress && this.netId === NetId.MAINNET && status.rewardAccount !== relayerAddress) { throw new Error("The Relayer reward address must match registered address"); } if (!isRelayerUpdated(status.version, this.netId)) { @@ -199640,7 +218190,8 @@ class RelayerClient { }); } filterRelayer(curr, relayer, subdomains, debugRelayer = false) { - return __async$3(this, null, function* () { + return __async$4(this, null, function* () { + var _a; const { ensSubdomainKey } = this.config; const subdomainIndex = subdomains.indexOf(ensSubdomainKey); const mainnetSubdomain = curr.records[0]; @@ -199661,7 +218212,9 @@ class RelayerClient { ensName, stakeBalance, relayerAddress, - rewardAccount: status.rewardAccount, + rewardAccount: getAddress(status.rewardAccount), + instances: getSupportedInstances(status.instances), + gasPrice: (_a = status.gasPrices) == null ? 
void 0 : _a.fast, ethPrices: status.ethPrices, currentQueue: status.currentQueue, tornadoServiceFee: status.tornadoServiceFee @@ -199690,7 +218243,7 @@ class RelayerClient { }); } getValidRelayers(relayers, subdomains, debugRelayer = false) { - return __async$3(this, null, function* () { + return __async$4(this, null, function* () { const relayersSet = /* @__PURE__ */ new Set(); const uniqueRelayers = relayers.reverse().filter(({ ensName }) => { if (!relayersSet.has(ensName)) { @@ -199721,9 +218274,9 @@ class RelayerClient { return pickWeightedRandomRelayer(relayers, this.netId); } tornadoWithdraw(_0) { - return __async$3(this, arguments, function* ({ contract, proof, args }) { + return __async$4(this, arguments, function* ({ contract, proof, args }) { const { url } = this.selectedRelayer; - const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), { + const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps$2(__spreadValues$2({}, this.fetchDataOptions), { method: "POST", headers: { "Content-Type": "application/json" @@ -199743,7 +218296,7 @@ class RelayerClient { console.log(`Job submitted: ${jobUrl} `); while (!relayerStatus || !["FAILED", "CONFIRMED"].includes(relayerStatus)) { - const jobResponse = yield fetchData(jobUrl, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), { + const jobResponse = yield fetchData(jobUrl, __spreadProps$2(__spreadValues$2({}, this.fetchDataOptions), { method: "GET", headers: { "Content-Type": "application/json" @@ -199783,7 +218336,7 @@ class RelayerClient { } } -var __async$2 = (__this, __arguments, generator) => { +var __async$3 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -199804,7 +218357,7 @@ var __async$2 = (__this, __arguments, generator) => { }); }; function getTokenBalances(_0) { - return __async$2(this, arguments, function* ({ + return 
__async$3(this, arguments, function* ({ provider, Multicall: Multicall2, currencyName, @@ -199867,6 +218420,331 @@ function getTokenBalances(_0) { }); } +/** +* A simple bitview for Array buffer. +* @author: Joy Ghosh. +* @version: 0.0.1 +*/ + +var BitView$1 = function(buffer){ + this.buffer = buffer; + this.unit8 = new Uint8Array(this.buffer); +}; + +/** +* Returns the bit value at position 'index'. +*/ +BitView$1.prototype.get = function(index){ + var value = this.unit8[index >> 3]; + var offset = index & 0x7; + return ((value >> (7-offset)) & 1); +}; + +/** +* Sets the bit value at specified position 'index'. +*/ +BitView$1.prototype.set = function(index){ + var offset = index & 0x7; + this.unit8[index >> 3] |= (0x80 >> offset); +}; + +/** +* Clears the bit at position 'index'. +*/ +BitView$1.prototype.clear = function(index){ + var offset = index & 0x7; + this.unit8[index >> 3] &= ~(0x80 >> offset); +}; + +/** +* Returns the byte length of this array buffer. +*/ +BitView$1.prototype.length = function(){ + return this.unit8.byteLength; +}; + +/** +* Returns the array buffer. +*/ +BitView$1.prototype.view = function(){ + return this.unit8; +}; + +var bitview = BitView$1; + +/** +Fowler-Noll-Vo hash function. +@author: Joy Ghosh +@version: 0.0.1 +*/ +const FNV_OFFSET_BASIS = 2166136261; + +/** +FNV hash function. (32-bit version) +FNV step 1: hash = hash XOR byte_of_data. +FNV step 2: hash = hash * FNV_Prime. +*/ +function fnv_1a$1(value){ + + var hash = FNV_OFFSET_BASIS; + for(var i=0; i>> 0; +} + +//FNV step 1:hash = hash XOR byte_of_data. +function fnv_xor(hash, byte_of_data){ + return (hash ^ byte_of_data); +} + +//FNV step 2: hash = hash * FNV_Prime. +function fnv_multiply(hash){ + hash += (hash << 1) + (hash << 4) + (hash << 7) + (hash << 8) + (hash << 24); + return hash; +} + +var fnv = fnv_1a$1; + +/** +Jenkins one_at_a_time hash function. +@author: Joy Ghosh +@version: 0.0.1 +*/ + +/** +* Jenkins's one at a time hash function. 
+*/ +function one_at_a_time_hash$1(key){ + + var hash = 0; + for(var i=0;i> 6); + } + + hash += (hash << 3); + hash = hash ^ (hash >> 11); + hash += (hash << 15); + return hash; +} + +var jenkins = one_at_a_time_hash$1; + +/** +* Bloom filter. +* @author: Joy Ghosh +* @version: 0.0.1 +*/ + +var BitView = bitview; +var fnv_1a = fnv; +var one_at_a_time_hash = jenkins; + +//Constants. +const BITS_IN_BYTE = 8; +const FALSE_POSITIVE_TOLERANCE = 0.000001; + +/** +* Bloom filter object. +* n represents number of elements in this filter. +*/ +var BloomFilter$1 = function(n, false_postive_tolerance = FALSE_POSITIVE_TOLERANCE){ + //Bits in Bloom filter. + this.m = Math.ceil((-2)*n*Math.log(false_postive_tolerance)); + //Number of hash functions. + this.k = Math.ceil(0.7*(this.m/n)); + + //Normalize size. + this.size = (this.m > BITS_IN_BYTE) ? (Math.ceil(this.m/BITS_IN_BYTE)) : 1; //default size is a byte. + + //Initialize bit array for filter. + this.bitview = new BitView(new ArrayBuffer(this.size)); +}; + +//Generate hash value. +BloomFilter$1.prototype.calculateHash = function(x,m,i){ + //Double hash technique. + return ((fnv_1a(x) + (i*one_at_a_time_hash(x)))%m); +}; + +//Looks for membership. +BloomFilter$1.prototype.test = function(data){ + var hash = data; + for(var i=0; i key in obj ? 
__defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$1 = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp$1.call(b, prop)) + __defNormalProp$1(a, prop, b[prop]); + if (__getOwnPropSymbols$1) + for (var prop of __getOwnPropSymbols$1(b)) { + if (__propIsEnum$1.call(b, prop)) + __defNormalProp$1(a, prop, b[prop]); + } + return a; +}; +var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b)); +var __objRest = (source, exclude) => { + var target = {}; + for (var prop in source) + if (__hasOwnProp$1.call(source, prop) && exclude.indexOf(prop) < 0) + target[prop] = source[prop]; + if (source != null && __getOwnPropSymbols$1) + for (var prop of __getOwnPropSymbols$1(source)) { + if (exclude.indexOf(prop) < 0 && __propIsEnum$1.call(source, prop)) + target[prop] = source[prop]; + } + return target; +}; +var __async$2 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +class TreeCache { + constructor({ netId, amount, currency, userDirectory, PARTS_COUNT = 4 }) { + this.netId = netId; + this.amount = amount; + this.currency = currency; + this.userDirectory = userDirectory; + this.PARTS_COUNT = PARTS_COUNT; + } + getInstanceName() { + return `deposits_${this.netId}_${this.currency}_${this.amount}`; + } + createTree(events, tree) { + return __async$2(this, null, function* () { + const bloom = new BloomFilter(events.length); + console.log(`Creating cached tree for ${this.getInstanceName()} +`); + const eventsData = events.reduce( + (acc, _a, i) => { + var _b = _a, { leafIndex, commitment } = _b, rest = __objRest(_b, ["leafIndex", "commitment"]); + if (leafIndex !== i) { + throw new Error(`leafIndex (${leafIndex}) !== i (${i})`); + } + acc[commitment] = __spreadProps$1(__spreadValues$1({}, rest), { leafIndex }); + return acc; + }, + {} + ); + const slices = tree.getTreeSlices(this.PARTS_COUNT); + yield Promise.all( + slices.map((slice, index) => __async$2(this, null, function* () { + const metadata = slice.elements.reduce((acc, curr) => { + if (index < this.PARTS_COUNT - 1) { + bloom.add(curr); + } + acc.push(eventsData[curr]); + return acc; + }, []); + const dataString2 = JSON.stringify( + __spreadProps$1(__spreadValues$1({}, slice), { + metadata + }), + null, + 2 + ) + "\n"; + const fileName2 = `${this.getInstanceName()}_slice${index + 1}.json`; + yield saveUserFile({ + fileName: fileName2, + userDirectory: this.userDirectory, + dataString: dataString2 + }); + })) + ); + const dataString = bloom.serialize() + "\n"; + const fileName = `${this.getInstanceName()}_bloom.json`; + yield saveUserFile({ + fileName, + userDirectory: this.userDirectory, + dataString + }); + }); + } +} + var BigInteger = {exports: {}}; (function (module) { @@ -202558,7 +221436,7 @@ groth16_wasm$1.code = new 
Buffer("AGFzbQEAAAABPApgAn9/AGABfwBgAX8Bf2ACf38Bf2ADf3 /* globals WebAssembly, Blob, Worker, navigator, Promise, window */ const bigInt = BigIntegerExports; const groth16_wasm = groth16_wasm$1; -const assert = assert$6; +const assert = assert$a; const inBrowser = (typeof window !== "undefined"); let NodeWorker; @@ -203247,15 +222125,14 @@ var __async = (__this, __arguments, generator) => { step((generator = generator.apply(__this, __arguments)).next()); }); }; -const DEFAULT_GAS_LIMIT = 6e5; -const RELAYER_NETWORK = 1; -const TOKEN_PRICE_ORACLE = "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8"; +const DEFAULT_GAS_LIMIT = Number(process$2.env.DEFAULT_GAS_LIMIT) || 6e5; +const RELAYER_NETWORK = Number(process$2.env.RELAYER_NETWORK) || NetId.MAINNET; const STATIC_DIR = process$2.env.CACHE_DIR || path$3.join(__dirname, "../static"); const EVENTS_DIR = path$3.join(STATIC_DIR, "./events"); -path$3.join(STATIC_DIR, "./trees"); const MERKLE_WORKER_PATH = process$2.env.DISABLE_MERKLE_WORKER === "true" ? void 0 : path$3.join(STATIC_DIR, "./merkleTreeWorker.js"); const USER_DIR = process$2.env.USER_DIR || "."; const SAVED_DIR = path$3.join(USER_DIR, "./events"); +const SAVED_TREE_DIR = path$3.join(USER_DIR, "./trees"); const CIRCUIT_PATH = path$3.join(__dirname, "../static/tornado.json"); const KEY_PATH = path$3.join(__dirname, "../static/tornadoProvingKey.bin"); function promptConfirmation(nonInteractive) { @@ -203295,6 +222172,7 @@ function getProgramOptions(options) { graph: options.graph || (process$2.env.GRAPH_URL ? parseUrl(process$2.env.GRAPH_URL) : void 0), ethGraph: options.ethGraph || (process$2.env.ETHGRAPH_URL ? parseUrl(process$2.env.ETHGRAPH_URL) : void 0), disableGraph: Boolean(options.disableGraph) || (process$2.env.DISABLE_GRAPH === "true" ? true : void 0), + accountKey: options.accountKey || (process$2.env.ACCOUNT_KEY ? parseRecoveryKey(process$2.env.ACCOUNT_KEY) : void 0), relayer: options.relayer || (process$2.env.RELAYER ? 
parseRelayer(process$2.env.RELAYER) : void 0), walletWithdrawal: Boolean(options.walletWithdrawal) || (process$2.env.WALLET_WITHDRAWAL === "true" ? true : void 0), torPort: options.torPort || (process$2.env.TOR_PORT ? parseNumber(process$2.env.TOR_PORT) : void 0), @@ -203365,16 +222243,18 @@ function getProgramRelayer(_0) { fetchDataOptions: fetchDataOptions2, netId }) { + var _a; const { ethRpc, ethGraph, relayer, disableGraph } = options; - const netConfig = networkConfig[`netId${netId}`]; - const ethConfig = networkConfig[`netId${RELAYER_NETWORK}`]; + const netConfig = getConfig(netId); + const ethConfig = getConfig(RELAYER_NETWORK); + const subdomains = getSubdomains(); const { aggregatorContract, registryContract, registrySubgraph, constants: { REGISTRY_BLOCK } } = ethConfig; - const provider = getProgramProvider(1, ethRpc, ethConfig, __spreadValues({}, fetchDataOptions2)); + const provider = getProgramProvider(RELAYER_NETWORK, ethRpc, ethConfig, __spreadValues({}, fetchDataOptions2)); const graphApi = getProgramGraphAPI( { disableGraph, @@ -203408,7 +222288,11 @@ function getProgramRelayer(_0) { relayerClient.selectedRelayer = { netId: relayerStatus2.netId, url: relayerStatus2.url, - rewardAccount: relayerStatus2.rewardAccount, + hostname: new URL(relayerStatus2.url).hostname, + rewardAccount: getAddress(relayerStatus2.rewardAccount), + instances: getSupportedInstances(relayerStatus2.instances), + gasPrice: (_a = relayerStatus2.gasPrices) == null ? 
void 0 : _a.fast, + ethPrices: relayerStatus2.ethPrices, currentQueue: relayerStatus2.currentQueue, tornadoServiceFee: relayerStatus2.tornadoServiceFee }; @@ -203422,13 +222306,7 @@ function getProgramRelayer(_0) { const { validRelayers: validRelayers2 } = yield relayerClient.getValidRelayers([{ ensName: relayer }], subdomains, true); const relayerStatus2 = validRelayers2[0]; if (relayerStatus2) { - relayerClient.selectedRelayer = { - netId: relayerStatus2.netId, - url: relayerStatus2.url, - rewardAccount: relayerStatus2.rewardAccount, - currentQueue: relayerStatus2.currentQueue, - tornadoServiceFee: relayerStatus2.tornadoServiceFee - }; + relayerClient.selectedRelayer = relayerStatus2; } return { validRelayers: validRelayers2, @@ -203444,13 +222322,7 @@ function getProgramRelayer(_0) { ); const relayerStatus = relayerClient.pickWeightedRandomRelayer(validRelayers); if (relayerStatus) { - relayerClient.selectedRelayer = { - netId: relayerStatus.netId, - url: relayerStatus.url, - rewardAccount: relayerStatus.rewardAccount, - currentQueue: relayerStatus.currentQueue, - tornadoServiceFee: relayerStatus.tornadoServiceFee - }; + relayerClient.selectedRelayer = relayerStatus; } return { validRelayers, @@ -203509,7 +222381,7 @@ function tornadoProgram() { program.name(name).version(version).description(description); program.command("create").description("Creates Tornado Cash deposit note and deposit invoice").argument("", "Network Chain ID to connect with (see https://chainlist.org for examples)", parseNumber).argument("", "Currency to deposit on Tornado Cash").argument("", "Amount to deposit on Tornado Cash").action((netId, currency, amount) => __async(this, null, function* () { currency = currency.toLowerCase(); - const config = networkConfig[`netId${netId}`]; + const config = getConfig(netId); const { routerContract, nativeCurrency, @@ -203550,11 +222422,12 @@ function tornadoProgram() { ).argument("", "Network Chain ID to connect with (see https://chainlist.org for 
examples)", parseNumber).argument("", "Currency to deposit on Tornado Cash").argument("", "Amount to deposit on Tornado Cash").action((netId, currency, amount, cmdOptions) => __async(this, null, function* () { const { options, fetchDataOptions: fetchDataOptions2 } = yield getProgramOptions(cmdOptions); currency = currency.toLowerCase(); - const { rpc } = options; - const config = networkConfig[`netId${netId}`]; + const { rpc, accountKey } = options; + const config = getConfig(netId); const { - multicall: multicallAddress, + multicallContract, routerContract, + echoContract, nativeCurrency, tokens: { [currency]: currencyConfig } } = config; @@ -203570,13 +222443,18 @@ function tornadoProgram() { options, provider }); + const noteAccount = accountKey ? new NoteAccount({ + netId, + recoveryKey: accountKey, + Echoer: Echoer__factory.connect(echoContract, provider) + }) : void 0; if (!signer) { throw new Error( "Signer not defined, make sure you have either viewOnly address, mnemonic, or private key configured" ); } const TornadoProxy = TornadoRouter__factory.connect(routerContract, signer); - const Multicall = Multicall__factory.connect(multicallAddress, provider); + const Multicall = Multicall__factory.connect(multicallContract, provider); const Token = tokenAddress ? ERC20__factory.connect(tokenAddress, signer) : void 0; const [ethBalance, tokenBalance, tokenApprovals] = yield multicall(Multicall, [ { @@ -203584,7 +222462,6 @@ function tornadoProgram() { name: "getEthBalance", params: [signer.address] }, - /* eslint-disable prettier/prettier */ ...!isEth ? 
[ { contract: Token, @@ -203597,7 +222474,6 @@ function tornadoProgram() { params: [signer.address, routerContract] } ] : [] - /* eslint-enable prettier/prettier */ ]); if (isEth && denomination > ethBalance) { const errMsg = `Invalid ${currency.toUpperCase()} balance, wants ${amount} have ${formatUnits(ethBalance, decimals)}`; @@ -203621,17 +222497,31 @@ function tornadoProgram() { } const deposit = yield Deposit.createNote({ currency, amount, netId }); const { note, noteHex, commitmentHex } = deposit; + const encryptedNote = noteAccount ? noteAccount.encryptNote({ + address: instanceAddress, + noteHex + }) : "0x"; + const backupFile = `./backup-tornado-${currency}-${amount}-${netId}-${noteHex.slice(0, 10)}.txt`; console.log(`New deposit: ${deposit.toString()} `); - yield promises.writeFile(`./backup-tornado-${currency}-${amount}-${netId}-${noteHex.slice(0, 10)}.txt`, note, { - encoding: "utf8" - }); + console.log(`Writing note backup at ${backupFile} +`); + yield promises.writeFile(backupFile, note, { encoding: "utf8" }); + if (encryptedNote !== "0x") { + console.log(`Storing encrypted note on-chain for backup (Account key: ${accountKey}) +`); + } yield programSendTransaction({ signer, options, - populatedTransaction: yield TornadoProxy.deposit.populateTransaction(instanceAddress, commitmentHex, "0x", { - value: isEth ? denomination : BigInt(0) - }) + populatedTransaction: yield TornadoProxy.deposit.populateTransaction( + instanceAddress, + commitmentHex, + encryptedNote, + { + value: isEth ? 
denomination : BigInt(0) + } + ) }); process$2.exit(0); })); @@ -203641,9 +222531,9 @@ function tornadoProgram() { const { options, fetchDataOptions: fetchDataOptions2 } = yield getProgramOptions(cmdOptions); const { rpc } = options; const { currency, amount, netId, commitment } = new Invoice(invoiceString); - const config = networkConfig[`netId${netId}`]; + const config = getConfig(netId); const { - multicall: multicallAddress, + multicallContract, routerContract, nativeCurrency, tokens: { [currency]: currencyConfig } @@ -203666,7 +222556,7 @@ function tornadoProgram() { ); } const TornadoProxy = TornadoRouter__factory.connect(routerContract, signer); - const Multicall = Multicall__factory.connect(multicallAddress, provider); + const Multicall = Multicall__factory.connect(multicallContract, provider); const Token = tokenAddress ? ERC20__factory.connect(tokenAddress, signer) : void 0; const [ethBalance, tokenBalance, tokenApprovals] = yield multicall(Multicall, [ { @@ -203674,7 +222564,6 @@ function tornadoProgram() { name: "getEthBalance", params: [signer.address] }, - /* eslint-disable prettier/prettier */ ...!isEth ? 
[ { contract: Token, @@ -203687,7 +222576,6 @@ function tornadoProgram() { params: [signer.address, routerContract] } ] : [] - /* eslint-enable prettier/prettier */ ]); if (isEth && denomination > ethBalance) { const errMsg = `Invalid ${currency.toUpperCase()} balance, wants ${amount} have ${formatUnits(ethBalance, decimals)}`; @@ -203726,13 +222614,14 @@ function tornadoProgram() { const { rpc, walletWithdrawal } = options; const deposit = yield Deposit.parseNote(note); const { netId, currency, amount, commitmentHex, nullifierHex, nullifier, secret } = deposit; - const config = networkConfig[`netId${netId}`]; + const config = getConfig(netId); const { tornadoSubgraph, deployedBlock, nativeCurrency, + multicallContract, routerContract, - multicall: multicallAddress, + offchainOracleContract, ovmGasPriceOracleContract, tokens: { [currency]: currencyConfig } } = config; @@ -203759,14 +222648,14 @@ function tornadoProgram() { const graphApi = getProgramGraphAPI(options, config); const Tornado = Tornado__factory.connect(instanceAddress, provider); const TornadoProxy = TornadoRouter__factory.connect(routerContract, !walletWithdrawal ? provider : signer); - const Multicall = Multicall__factory.connect(multicallAddress, provider); + const Multicall = Multicall__factory.connect(multicallContract, provider); const tornadoFeeOracle = new TornadoFeeOracle( ovmGasPriceOracleContract ? OvmGasPriceOracle__factory.connect(ovmGasPriceOracleContract, provider) : void 0 ); const tokenPriceOracle = new TokenPriceOracle( provider, Multicall, - OffchainOracle__factory.connect(TOKEN_PRICE_ORACLE, provider) + offchainOracleContract ? 
OffchainOracle__factory.connect(offchainOracleContract, provider) : void 0 ); const depositsServiceConstructor = { netId, @@ -203797,9 +222686,9 @@ function tornadoProgram() { const depositEvents = (yield depositsService.updateEvents()).events; const depositTreeInitiator = yield (() => __async(this, null, function* () { if (MERKLE_WORKER_PATH) { - return () => merkleTreeService.verifyTree({ events: depositEvents }); + return () => merkleTreeService.verifyTree(depositEvents); } - return yield merkleTreeService.verifyTree({ events: depositEvents }); + return yield merkleTreeService.verifyTree(depositEvents); }))(); let depositTreePromise; if (typeof depositTreeInitiator === "function") { @@ -203850,13 +222739,11 @@ function tornadoProgram() { promises.readFile(CIRCUIT_PATH, { encoding: "utf8" }).then((s) => JSON.parse(s)), promises.readFile(KEY_PATH).then((b) => new Uint8Array(b).buffer), depositTreePromise, - /* eslint-disable prettier/prettier */ !walletWithdrawal ? getProgramRelayer({ options, fetchDataOptions: fetchDataOptions2, netId }).then(({ relayerClient: relayerClient2 }) => relayerClient2) : void 0, - /* eslint-enable prettier/prettier */ tornadoFeeOracle.fetchL1OptimismFee(), !isEth ? 
tokenPriceOracle.fetchPrices([tokenAddress]).then((p) => p[0]) : BigInt(0), provider.getFeeData() @@ -203995,7 +222882,7 @@ function tornadoProgram() { const { rpc } = options; const deposit = yield Deposit.parseNote(note); const { netId, currency, amount, commitmentHex, nullifierHex } = deposit; - const config = networkConfig[`netId${netId}`]; + const config = getConfig(netId); const { tornadoSubgraph, deployedBlock, @@ -204037,9 +222924,9 @@ function tornadoProgram() { const depositEvents = (yield depositsService.updateEvents()).events; const depositTreePromise = yield (() => __async(this, null, function* () { if (MERKLE_WORKER_PATH) { - return () => merkleTreeService.verifyTree({ events: depositEvents }); + return () => merkleTreeService.verifyTree(depositEvents); } - return yield merkleTreeService.verifyTree({ events: depositEvents }); + return yield merkleTreeService.verifyTree(depositEvents); }))(); const [withdrawalEvents] = yield Promise.all([ withdrawalsService.updateEvents().then(({ events }) => events), @@ -204084,22 +222971,25 @@ function tornadoProgram() { console.log("\n\n" + complianceTable.toString() + "\n"); process$2.exit(0); })); - program.command("syncEvents").description("Sync the local cache file of tornado cash events.\n\n").argument("[netId]", "Network Chain ID to connect with (see https://chainlist.org for examples)", parseNumber).argument("[currency]", "Currency to sync events").action( + program.command("updateEvents").description("Sync the local cache file of tornado cash events.\n\n").argument("[netId]", "Network Chain ID to connect with (see https://chainlist.org for examples)", parseNumber).argument("[currency]", "Currency to sync events").action( (netIdOpts, currencyOpts, cmdOptions) => __async(this, null, function* () { const { options, fetchDataOptions: fetchDataOptions2 } = yield getProgramOptions(cmdOptions); const { rpc } = options; const networks = netIdOpts ? 
[netIdOpts] : enabledChains; for (const netId of networks) { - const config = networkConfig[`netId${netId}`]; + const config = getConfig(netId); const { tornadoSubgraph, registrySubgraph, + governanceSubgraph, tokens, + nativeCurrency, routerContract, + echoContract, registryContract, - ["governance.contract.tornadocash.eth"]: governanceContract, + governanceContract, deployedBlock, - constants: { GOVERNANCE_BLOCK, REGISTRY_BLOCK, ENCRYPTED_NOTES_BLOCK } + constants: { GOVERNANCE_BLOCK, REGISTRY_BLOCK, NOTE_ACCOUNT_BLOCK, ENCRYPTED_NOTES_BLOCK } } = config; const provider = getProgramProvider(netId, rpc, config, __spreadValues({}, fetchDataOptions2)); const graphApi = getProgramGraphAPI(options, config); @@ -204107,9 +222997,8 @@ function tornadoProgram() { const governanceService = new NodeGovernanceService({ netId, provider, - // to-do connect governance with subgraph - graphApi: "", - subgraphName: "", + graphApi, + subgraphName: governanceSubgraph, Governance: Governance__factory.connect(governanceContract, provider), deployedBlock: GOVERNANCE_BLOCK, fetchDataOptions: fetchDataOptions2, @@ -204132,6 +223021,18 @@ function tornadoProgram() { }); yield registryService.updateEvents(); } + const echoService = new NodeEchoService({ + netId, + provider, + graphApi, + subgraphName: tornadoSubgraph, + Echoer: Echoer__factory.connect(echoContract, provider), + deployedBlock: NOTE_ACCOUNT_BLOCK, + fetchDataOptions: fetchDataOptions2, + cacheDirectory: EVENTS_DIR, + userDirectory: SAVED_DIR + }); + yield echoService.updateEvents(); const encryptedNotesService = new NodeEncryptedNotesService({ netId, provider, @@ -204177,19 +223078,26 @@ function tornadoProgram() { Tornado, merkleWorkerPath: MERKLE_WORKER_PATH }); + const treeCache = new TreeCache({ + netId, + amount, + currency, + userDirectory: SAVED_TREE_DIR + }); const depositEvents = (yield depositsService.updateEvents()).events; const depositTreePromise = yield (() => __async(this, null, function* () { if 
(MERKLE_WORKER_PATH) { - return () => merkleTreeService.verifyTree({ events: depositEvents }); + return () => merkleTreeService.verifyTree(depositEvents); } - return yield merkleTreeService.verifyTree({ - events: depositEvents - }); + return yield merkleTreeService.verifyTree(depositEvents); }))(); - yield Promise.all([ - withdrawalsService.updateEvents(), - typeof depositTreePromise === "function" ? depositTreePromise() : depositTreePromise + const [tree] = yield Promise.all([ + typeof depositTreePromise === "function" ? depositTreePromise() : depositTreePromise, + withdrawalsService.updateEvents() ]); + if (nativeCurrency === currency) { + yield treeCache.createTree(depositEvents, tree); + } } } } @@ -204205,6 +223113,7 @@ function tornadoProgram() { }); const validRelayers = allRelayers.validRelayers; const invalidRelayers = allRelayers.invalidRelayers; + console.log(validRelayers); const relayersTable = new Table(); relayersTable.push( [{ colSpan: 8, content: "Relayers", hAlign: "center" }], @@ -204245,12 +223154,145 @@ function tornadoProgram() { console.log(invalidRelayersTable.toString() + "\n"); process$2.exit(0); })); + program.command("createAccount").description( + "Creates and save on-chain account that would store encrypted notes. \n\nWould first lookup on on-chain records to see if the notes are stored. 
\n\nRequires a valid signable wallet (mnemonic or a private key) to work (Since they would encrypt or encrypted)" + ).argument("", "Network Chain ID to connect with (see https://chainlist.org for examples)", parseNumber).action((netId, cmdOptions) => __async(this, null, function* () { + const { options, fetchDataOptions: fetchDataOptions2 } = yield getProgramOptions(cmdOptions); + const { rpc } = options; + const config = getConfig(netId); + const { + echoContract, + tornadoSubgraph, + constants: { ["NOTE_ACCOUNT_BLOCK"]: deployedBlock } + } = config; + const provider = getProgramProvider(netId, rpc, config, __spreadValues({}, fetchDataOptions2)); + const signer = getProgramSigner({ + options, + provider + }); + const graphApi = getProgramGraphAPI(options, config); + if (!signer || signer instanceof VoidSigner) { + throw new Error( + "No wallet found, make your you have supplied a valid mnemonic or private key before using this command" + ); + } + const walletPublicKey = NoteAccount.getWalletPublicKey(signer); + const Echoer = Echoer__factory.connect(echoContract, provider); + const newAccount = new NoteAccount({ + netId, + Echoer + }); + const echoService = new NodeEchoService({ + netId, + provider, + graphApi, + subgraphName: tornadoSubgraph, + Echoer, + deployedBlock, + fetchDataOptions: fetchDataOptions2, + cacheDirectory: EVENTS_DIR, + userDirectory: SAVED_DIR + }); + console.log("Getting historic note accounts would take a while\n"); + const echoEvents = (yield echoService.updateEvents()).events; + const userEvents = echoEvents.filter(({ address }) => address === signer.address); + const existingAccounts = newAccount.decryptAccountsWithWallet(signer, userEvents); + const accountsTable = new Table(); + if (existingAccounts.length) { + accountsTable.push( + [{ colSpan: 2, content: `Note Accounts (${netId})`, hAlign: "center" }], + [{ colSpan: 2, content: `Backed up by: ${signer.address}`, hAlign: "center" }], + ["blockNumber", "noteAccount"].map((content) => ({ 
content: colors.red.bold(content) })), + ...existingAccounts.map(({ blockNumber, recoveryKey }) => { + return [blockNumber, recoveryKey]; + }) + ); + console.log(accountsTable.toString() + "\n"); + } else { + accountsTable.push( + [{ colSpan: 1, content: `New Note Account (${netId})`, hAlign: "center" }], + ["noteAccount"].map((content) => ({ content: colors.red.bold(content) })), + [newAccount.recoveryKey], + [{ colSpan: 1, content: `Would be backed up by: ${signer.address}`, hAlign: "center" }] + ); + const fileName = `backup-note-account-key-0x${newAccount.recoveryKey.slice(0, 8)}.txt`; + console.log("\n" + accountsTable.toString() + "\n"); + console.log(`Writing backup to ${fileName} +`); + yield promises.writeFile(fileName, newAccount.recoveryKey + "\n"); + console.log("Backup encrypted account on-chain to use on UI?\n"); + yield promptConfirmation(options.nonInteractive); + const { data } = newAccount.getEncryptedAccount(walletPublicKey); + console.log("Sending encrypted note account backup transaction through wallet\n"); + yield programSendTransaction({ + signer, + options, + populatedTransaction: yield Echoer.echo.populateTransaction(data) + }); + } + process$2.exit(0); + })); + program.command("decryptNotes").description("Fetch notes from deposit events and decrypt them. 
\n\nRequires a valid account key to work").argument("", "Network Chain ID to connect with (see https://chainlist.org for examples)", parseNumber).argument( + "[accountKey]", + "Account key generated from UI or the createAccount to store encrypted notes on-chain", + parseRecoveryKey + ).action((netId, accountKey, cmdOptions) => __async(this, null, function* () { + const { options, fetchDataOptions: fetchDataOptions2 } = yield getProgramOptions(cmdOptions); + const { rpc } = options; + if (!accountKey) { + accountKey = options.accountKey; + } + const config = getConfig(netId); + const { + routerContract, + echoContract, + tornadoSubgraph, + constants: { ENCRYPTED_NOTES_BLOCK } + } = config; + const provider = getProgramProvider(netId, rpc, config, __spreadValues({}, fetchDataOptions2)); + const graphApi = getProgramGraphAPI(options, config); + if (!accountKey) { + throw new Error( + "No account key find! Please supply correct account key from either UI or find one with createAccount command" + ); + } + const Echoer = Echoer__factory.connect(echoContract, provider); + const noteAccount = new NoteAccount({ + netId, + recoveryKey: accountKey, + Echoer + }); + const encryptedNotesService = new NodeEncryptedNotesService({ + netId, + provider, + graphApi, + subgraphName: tornadoSubgraph, + Router: TornadoRouter__factory.connect(routerContract, provider), + deployedBlock: ENCRYPTED_NOTES_BLOCK, + fetchDataOptions: fetchDataOptions2, + cacheDirectory: EVENTS_DIR, + userDirectory: SAVED_DIR + }); + const encryptedNoteEvents = (yield encryptedNotesService.updateEvents()).events; + const accountsTable = new Table(); + accountsTable.push( + [{ colSpan: 2, content: `Note Accounts (${netId})`, hAlign: "center" }], + [{ colSpan: 2, content: `Account key: ${accountKey}`, hAlign: "center" }], + ["blockNumber", "note"].map((content) => ({ content: colors.red.bold(content) })), + ...noteAccount.decryptNotes(encryptedNoteEvents).map(({ blockNumber, address, noteHex }) => { + const { 
amount, currency } = getInstanceByAddress({ netId, address }); + return [blockNumber, `tornado-${currency}-${amount}-${netId}-${noteHex}`]; + }) + ); + console.log("\n" + accountsTable.toString() + "\n"); + process$2.exit(0); + })); program.command("send").description("Send ETH or ERC20 token to address.\n\n").argument("", "Network Chain ID to connect with (see https://chainlist.org for examples)", parseNumber).argument("", "To address", parseAddress).argument("[amount]", "Sending amounts", parseNumber).argument("[token]", "ERC20 Token Contract to check Token Balance", parseAddress).action( (netId, to, amountArgs, tokenArgs, cmdOptions) => __async(this, null, function* () { const { options, fetchDataOptions: fetchDataOptions2 } = yield getProgramOptions(cmdOptions); const { rpc, token: tokenOpts } = options; - const config = networkConfig[`netId${netId}`]; - const { currencyName, multicall: multicallAddress } = config; + const config = getConfig(netId); + const { currencyName, multicallContract } = config; const provider = getProgramProvider(netId, rpc, config, __spreadValues({}, fetchDataOptions2)); const signer = getProgramSigner({ options, provider }); if (!signer) { @@ -204259,7 +223301,7 @@ function tornadoProgram() { ); } const tokenAddress = tokenArgs ? parseAddress(tokenArgs) : tokenOpts; - const Multicall = Multicall__factory.connect(multicallAddress, provider); + const Multicall = Multicall__factory.connect(multicallContract, provider); const Token = tokenAddress ? 
ERC20__factory.connect(tokenAddress, signer) : void 0; const [feeData, nonce, [{ balance: ethBalance }, tokenResults]] = yield Promise.all([ provider.getFeeData(), @@ -204306,6 +223348,10 @@ function tornadoProgram() { } else { const initCost = txGasPrice * BigInt("400000"); toSend = ethBalance - initCost; + if (ethBalance === BigInt(0) || ethBalance < initCost) { + const errMsg = `Invalid ${currencyName} balance, wants ${formatEther(initCost)} have ${formatEther(ethBalance)}`; + throw new Error(errMsg); + } const estimatedGas = yield provider.estimateGas(__spreadValues({ type: txType, from: signer.address, @@ -204336,23 +223382,18 @@ function tornadoProgram() { var _a; const { options, fetchDataOptions: fetchDataOptions2 } = yield getProgramOptions(cmdOptions); const { rpc, token: tokenOpts } = options; - const config = networkConfig[`netId${netId}`]; - const { - currencyName, - multicall: multicallAddress, - ["torn.contract.tornadocash.eth"]: tornTokenAddress, - tokens - } = config; + const config = getConfig(netId); + const { currencyName, multicallContract, tornContract, tokens } = config; const provider = getProgramProvider(netId, rpc, config, __spreadValues({}, fetchDataOptions2)); const userAddress = addressArgs ? parseAddress(addressArgs) : (_a = getProgramSigner({ options, provider })) == null ? void 0 : _a.address; const tokenAddress = tokenArgs ? 
parseAddress(tokenArgs) : tokenOpts; if (!userAddress) { throw new Error("Address is required however no user address is supplied"); } - const Multicall = Multicall__factory.connect(multicallAddress, provider); + const Multicall = Multicall__factory.connect(multicallContract, provider); const tokenAddresses = Object.values(tokens).map(({ tokenAddress: tokenAddress2 }) => tokenAddress2).filter((t) => t); - if (tornTokenAddress) { - tokenAddresses.push(tornTokenAddress); + if (tornContract) { + tokenAddresses.push(tornContract); } const tokenBalances2 = yield getTokenBalances({ provider, @@ -204377,7 +223418,7 @@ function tornadoProgram() { const { rpc } = options; const deserializedTx = Transaction.from(unsignedTx).toJSON(); const netId = Number(deserializedTx.chainId); - const config = networkConfig[`netId${netId}`]; + const config = getConfig(netId); const provider = getProgramProvider(netId, rpc, config, __spreadValues({}, fetchDataOptions2)); const signer = getProgramSigner({ options, provider }); if (!signer || signer instanceof VoidSigner) { @@ -204397,7 +223438,7 @@ function tornadoProgram() { if (!netId) { throw new Error("NetId for the transaction is invalid, this command only supports EIP-155 transactions"); } - const config = networkConfig[`netId${netId}`]; + const config = getConfig(netId); const provider = getProgramProvider(netId, rpc, config, __spreadValues({}, fetchDataOptions2)); const { hash } = yield provider.broadcastTransaction(signedTx); console.log(` @@ -204409,8 +223450,20 @@ Broadcastd tx: ${hash} cmd.option("-r, --rpc ", "The RPC that CLI should interact with", parseUrl); cmd.option("-e, --eth-rpc ", "The Ethereum Mainnet RPC that CLI should interact with", parseUrl); cmd.option("-g, --graph ", "The Subgraph API that CLI should interact with", parseUrl); - cmd.option("-G, --eth-graph ", "The Ethereum Mainnet Subgraph API that CLI should interact with", parseUrl); - cmd.option("-d, --disable-graph", "Disable Graph API - Does not enable 
Subgraph API and use only local RPC as an event source"); + cmd.option( + "-G, --eth-graph ", + "The Ethereum Mainnet Subgraph API that CLI should interact with", + parseUrl + ); + cmd.option( + "-d, --disable-graph", + "Disable Graph API - Does not enable Subgraph API and use only local RPC as an event source" + ); + cmd.option( + "-a, --account-key ", + "Account key generated from UI or the createAccount to store encrypted notes on-chain", + parseRecoveryKey + ); cmd.option("-R, --relayer ", "Withdraw via relayer (Should be either .eth name or URL)", parseRelayer); cmd.option("-w, --wallet-withdrawal", "Withdrawal via wallet (Should not be linked with deposits)"); cmd.option("-T, --tor-port ", "Optional tor port", parseNumber); @@ -204422,13 +223475,13 @@ Broadcastd tx: ${hash} ); cmd.option( "-m, --mnemonic ", - "Wallet BIP39 Mnemonic Phrase - If you didn't add it to .env file and it is needed for operation", + "Wallet BIP39 Mnemonic Phrase - If you did not add it to .env file and it is needed for operation", parseMnemonic ); cmd.option("-i, --mnemonic-index ", "Optional wallet mnemonic index", parseNumber); cmd.option( "-p, --private-key ", - "Wallet private key - If you didn't add it to .env file and it is needed for operation", + "Wallet private key - If you did not add it to .env file and it is needed for operation", parseKey ); cmd.option( diff --git a/dist/index.js b/dist/index.js index 764f8f9..db23629 100644 --- a/dist/index.js +++ b/dist/index.js @@ -5,7 +5,7 @@ var crossFetch = require('cross-fetch'); var httpProxyAgent = require('http-proxy-agent'); var httpsProxyAgent = require('https-proxy-agent'); var socksProxyAgent = require('socks-proxy-agent'); -var url = require('url'); +var crypto$1 = require('crypto'); var BN = require('bn.js'); var Table = require('cli-table3'); var moment = require('moment'); @@ -14,9 +14,11 @@ var promises = require('fs/promises'); var fflate = require('fflate'); var Ajv = require('ajv'); var circomlibjs = 
require('circomlibjs'); +var ethSigUtil = require('@metamask/eth-sig-util'); var worker_threads = require('worker_threads'); var fixedMerkleTree = require('@tornado/fixed-merkle-tree'); var commander = require('commander'); +var BloomFilter = require('bloomfilter.js'); var websnarkUtils = require('@tornado/websnark/src/utils'); var websnarkGroth = require('@tornado/websnark/src/groth16'); @@ -2580,13 +2582,14 @@ BigInt.prototype.toJSON = function() { return this.toString(); }; const isNode = !process.browser && typeof globalThis.window === "undefined"; +const crypto = isNode ? crypto$1.webcrypto : globalThis.crypto; const chunk = (arr, size) => [...Array(Math.ceil(arr.length / size))].map((_, i) => arr.slice(size * i, size + size * i)); function sleep(ms) { return new Promise((resolve) => setTimeout(resolve, ms)); } -function validateUrl(url$1, protocols) { +function validateUrl(url, protocols) { try { - const parsedUrl = new url.URL(url$1); + const parsedUrl = new URL(url); if (protocols && protocols.length) { return protocols.map((p) => p.toLowerCase()).includes(parsedUrl.protocol); } @@ -2595,28 +2598,36 @@ function validateUrl(url$1, protocols) { return false; } } +function concatBytes(...arrays) { + const totalSize = arrays.reduce((acc, e) => acc + e.length, 0); + const merged = new Uint8Array(totalSize); + arrays.forEach((array, i, arrays2) => { + const offset = arrays2.slice(0, i).reduce((acc, e) => acc + e.length, 0); + merged.set(array, offset); + }); + return merged; +} function bufferToBytes(b) { return new Uint8Array(b.buffer); } function bytesToBase64(bytes) { - let binary = ""; - const len = bytes.byteLength; - for (let i = 0; i < len; ++i) { - binary += String.fromCharCode(bytes[i]); - } - return btoa(binary); + return btoa(String.fromCharCode.apply(null, Array.from(bytes))); } function base64ToBytes(base64) { - const binaryString = atob(base64); - const bytes = new Uint8Array(binaryString.length); - for (let i = 0; i < binaryString.length; i++) { - 
bytes[i] = binaryString.charCodeAt(i); - } - return bytes; + return Uint8Array.from(atob(base64), (c) => c.charCodeAt(0)); } function bytesToHex(bytes) { return "0x" + Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join(""); } +function hexToBytes(hexString) { + if (hexString.slice(0, 2) === "0x") { + hexString = hexString.replace("0x", ""); + } + if (hexString.length % 2 !== 0) { + hexString = "0" + hexString; + } + return Uint8Array.from(hexString.match(/.{1,2}/g).map((byte) => parseInt(byte, 16))); +} function bytesToBN(bytes) { return BigInt(bytesToHex(bytes)); } @@ -2656,7 +2667,7 @@ function substring(str, length = 10) { return `${str.substring(0, length)}...${str.substring(str.length - length)}`; } -var __async$d = (__this, __arguments, generator) => { +var __async$e = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -2677,7 +2688,7 @@ var __async$d = (__this, __arguments, generator) => { }); }; function multicall(Multicall2, calls) { - return __async$d(this, null, function* () { + return __async$e(this, null, function* () { const calldata = calls.map((call) => { var _a, _b, _c; const target = ((_a = call.contract) == null ? 
void 0 : _a.target) || call.address; @@ -2700,29 +2711,29 @@ function multicall(Multicall2, calls) { }); } -var __defProp$3 = Object.defineProperty; -var __defProps$3 = Object.defineProperties; -var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols; +var __defProp$5 = Object.defineProperty; +var __defProps$4 = Object.defineProperties; +var __getOwnPropDescs$4 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$5 = Object.getOwnPropertySymbols; var __getProtoOf$1 = Object.getPrototypeOf; -var __hasOwnProp$3 = Object.prototype.hasOwnProperty; -var __propIsEnum$3 = Object.prototype.propertyIsEnumerable; +var __hasOwnProp$5 = Object.prototype.hasOwnProperty; +var __propIsEnum$5 = Object.prototype.propertyIsEnumerable; var __reflectGet$1 = Reflect.get; -var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$3 = (a, b) => { +var __defNormalProp$5 = (obj, key, value) => key in obj ? 
__defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$5 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$3.call(b, prop)) - __defNormalProp$3(a, prop, b[prop]); - if (__getOwnPropSymbols$3) - for (var prop of __getOwnPropSymbols$3(b)) { - if (__propIsEnum$3.call(b, prop)) - __defNormalProp$3(a, prop, b[prop]); + if (__hasOwnProp$5.call(b, prop)) + __defNormalProp$5(a, prop, b[prop]); + if (__getOwnPropSymbols$5) + for (var prop of __getOwnPropSymbols$5(b)) { + if (__propIsEnum$5.call(b, prop)) + __defNormalProp$5(a, prop, b[prop]); } return a; }; -var __spreadProps$3 = (a, b) => __defProps$3(a, __getOwnPropDescs$3(b)); +var __spreadProps$4 = (a, b) => __defProps$4(a, __getOwnPropDescs$4(b)); var __superGet$1 = (cls, obj, key) => __reflectGet$1(__getProtoOf$1(cls), key, obj); -var __async$c = (__this, __arguments, generator) => { +var __async$d = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -2768,7 +2779,7 @@ function getHttpAgent({ } } function fetchData(_0) { - return __async$c(this, arguments, function* (url, options = {}) { + return __async$d(this, arguments, function* (url, options = {}) { var _a, _b, _c; const MAX_RETRY = (_a = options.maxRetry) != null ? _a : 3; const RETRY_ON = (_b = options.retryOn) != null ? 
_b : 500; @@ -2860,7 +2871,7 @@ function fetchData(_0) { throw errorObject; }); } -const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$c(void 0, null, function* () { +const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$d(void 0, null, function* () { let signal; if (_signal) { const controller = new AbortController(); @@ -2869,7 +2880,7 @@ const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$c(void 0, nu controller.abort(); }); } - const init = __spreadProps$3(__spreadValues$3({}, options), { + const init = __spreadProps$4(__spreadValues$5({}, options), { method: req.method || "POST", headers: req.headers, body: req.body || void 0, @@ -2894,7 +2905,7 @@ const oracleMapper = /* @__PURE__ */ new Map(); const multicallMapper = /* @__PURE__ */ new Map(); function getGasOraclePlugin(networkKey, fetchOptions) { const gasStationApi = (fetchOptions == null ? void 0 : fetchOptions.gasStationApi) || "https://gasstation.polygon.technology/v2"; - return new ethers.FetchUrlFeeDataNetworkPlugin(gasStationApi, (fetchFeeData, provider, request) => __async$c(this, null, function* () { + return new ethers.FetchUrlFeeDataNetworkPlugin(gasStationApi, (fetchFeeData, provider, request) => __async$d(this, null, function* () { if (!oracleMapper.has(networkKey)) { oracleMapper.set(networkKey, GasPriceOracle__factory.connect(fetchOptions == null ? void 0 : fetchOptions.gasPriceOracle, provider)); } @@ -2953,7 +2964,7 @@ function getGasOraclePlugin(networkKey, fetchOptions) { })); } function getProvider(rpcUrl, fetchOptions) { - return __async$c(this, null, function* () { + return __async$d(this, null, function* () { const fetchReq = new ethers.FetchRequest(rpcUrl); fetchReq.getUrlFunc = fetchGetUrlFunc(fetchOptions); const _staticNetwork = yield new ethers.JsonRpcProvider(fetchReq).getNetwork(); @@ -3003,7 +3014,7 @@ function getProviderWithNetId(netId, rpcUrl, config, fetchOptions) { provider.pollingInterval = (fetchOptions == null ? 
void 0 : fetchOptions.pollingInterval) || pollInterval * 1e3; return provider; } -const populateTransaction = (signer, tx) => __async$c(void 0, null, function* () { +const populateTransaction = (signer, tx) => __async$d(void 0, null, function* () { const provider = signer.provider; if (!tx.from) { tx.from = signer.address; @@ -3012,7 +3023,7 @@ const populateTransaction = (signer, tx) => __async$c(void 0, null, function* () throw new Error(errMsg); } const [feeData, nonce] = yield Promise.all([ - (() => __async$c(void 0, null, function* () { + (() => __async$d(void 0, null, function* () { if (tx.maxFeePerGas && tx.maxPriorityFeePerGas) { return new ethers.FeeData(null, BigInt(tx.maxFeePerGas), BigInt(tx.maxPriorityFeePerGas)); } @@ -3034,7 +3045,7 @@ const populateTransaction = (signer, tx) => __async$c(void 0, null, function* () ); } }))(), - (() => __async$c(void 0, null, function* () { + (() => __async$d(void 0, null, function* () { if (tx.nonce) { return tx.nonce; } @@ -3064,7 +3075,7 @@ const populateTransaction = (signer, tx) => __async$c(void 0, null, function* () delete tx.maxFeePerGas; delete tx.maxPriorityFeePerGas; } - tx.gasLimit = tx.gasLimit || (yield (() => __async$c(void 0, null, function* () { + tx.gasLimit = tx.gasLimit || (yield (() => __async$d(void 0, null, function* () { try { const gasLimit = yield provider.estimateGas(tx); return gasLimit === BigInt(21e3) ? 
gasLimit : gasLimit * (BigInt(1e4) + BigInt(signer.gasLimitBump)) / BigInt(1e4); @@ -3092,7 +3103,7 @@ class TornadoWallet extends ethers.Wallet { return new TornadoWallet(privateKey, provider, options); } populateTransaction(tx) { - return __async$c(this, null, function* () { + return __async$d(this, null, function* () { const txObject = yield populateTransaction(this, tx); this.nonce = txObject.nonce; return __superGet$1(TornadoWallet.prototype, this, "populateTransaction").call(this, txObject); @@ -3108,7 +3119,7 @@ class TornadoVoidSigner extends ethers.VoidSigner { this.bumpNonce = bumpNonce != null ? bumpNonce : false; } populateTransaction(tx) { - return __async$c(this, null, function* () { + return __async$d(this, null, function* () { const txObject = yield populateTransaction(this, tx); this.nonce = txObject.nonce; return __superGet$1(TornadoVoidSigner.prototype, this, "populateTransaction").call(this, txObject); @@ -3124,7 +3135,7 @@ class TornadoRpcSigner extends ethers.JsonRpcSigner { this.bumpNonce = bumpNonce != null ? bumpNonce : false; } sendUncheckedTransaction(tx) { - return __async$c(this, null, function* () { + return __async$d(this, null, function* () { return __superGet$1(TornadoRpcSigner.prototype, this, "sendUncheckedTransaction").call(this, yield populateTransaction(this, tx)); }); } @@ -3135,7 +3146,7 @@ class TornadoBrowserProvider extends ethers.BrowserProvider { this.options = options; } getSigner(address) { - return __async$c(this, null, function* () { + return __async$d(this, null, function* () { var _a, _b, _c, _d, _e, _f, _g, _h, _i; const signerAddress = (yield __superGet$1(TornadoBrowserProvider.prototype, this, "getSigner").call(this, address)).address; if (((_a = this.options) == null ? void 0 : _a.webChainId) && ((_b = this.options) == null ? void 0 : _b.connectWallet) && Number(yield __superGet$1(TornadoBrowserProvider.prototype, this, "send").call(this, "eth_chainId", [])) !== Number((_c = this.options) == null ? 
void 0 : _c.webChainId)) { @@ -3258,6 +3269,22 @@ const GET_NOTE_ACCOUNTS = ` } } `; +const GET_ECHO_EVENTS = ` + query getNoteAccounts($first: Int, $fromBlock: Int) { + noteAccounts(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + id + blockNumber + address + encryptedAccount + } + _meta { + block { + number + } + hasIndexingErrors + } + } +`; const GET_ENCRYPTED_NOTES = ` query getEncryptedNotes($first: Int, $fromBlock: Int) { encryptedNotes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { @@ -3274,27 +3301,80 @@ const GET_ENCRYPTED_NOTES = ` } } `; +const GET_GOVERNANCE_EVENTS = ` + query getGovernanceEvents($first: Int, $fromBlock: Int) { + proposals(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + proposalId + proposer + target + startTime + endTime + description + } + votes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + proposalId + voter + support + votes + from + input + } + delegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + account + delegateTo + } + undelegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + account + delegateFrom + } + _meta { + block { + number + } + hasIndexingErrors + } + } +`; +const GET_GOVERNANCE_APY = ` + stakeDailyBurns(first: 30, orderBy: date, orderDirection: desc) { + id + date + dailyAmountBurned + } +`; -var __defProp$2 = Object.defineProperty; -var __defProps$2 = Object.defineProperties; -var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols; -var __hasOwnProp$2 = 
Object.prototype.hasOwnProperty; -var __propIsEnum$2 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$2 = (a, b) => { +var __defProp$4 = Object.defineProperty; +var __defProps$3 = Object.defineProperties; +var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols; +var __hasOwnProp$4 = Object.prototype.hasOwnProperty; +var __propIsEnum$4 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$4 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$2.call(b, prop)) - __defNormalProp$2(a, prop, b[prop]); - if (__getOwnPropSymbols$2) - for (var prop of __getOwnPropSymbols$2(b)) { - if (__propIsEnum$2.call(b, prop)) - __defNormalProp$2(a, prop, b[prop]); + if (__hasOwnProp$4.call(b, prop)) + __defNormalProp$4(a, prop, b[prop]); + if (__getOwnPropSymbols$4) + for (var prop of __getOwnPropSymbols$4(b)) { + if (__propIsEnum$4.call(b, prop)) + __defNormalProp$4(a, prop, b[prop]); } return a; }; -var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b)); -var __async$b = (__this, __arguments, generator) => { +var __spreadProps$3 = (a, b) => __defProps$3(a, __getOwnPropDescs$3(b)); +var __async$c = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -3317,7 +3397,7 @@ var __async$b = (__this, __arguments, generator) => { const isEmptyArray = (arr) => !Array.isArray(arr) || !arr.length; const first = 1e3; function queryGraph(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, query, @@ -3326,7 +3406,7 @@ 
function queryGraph(_0) { }) { var _a; const graphUrl = `${graphApi}/subgraphs/name/${subgraphName}`; - const { data, errors } = yield fetchData(graphUrl, __spreadProps$2(__spreadValues$2({}, fetchDataOptions2), { + const { data, errors } = yield fetchData(graphUrl, __spreadProps$3(__spreadValues$4({}, fetchDataOptions2), { method: "POST", headers: { "Content-Type": "application/json" @@ -3346,7 +3426,7 @@ function queryGraph(_0) { }); } function getStatistic(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -3393,7 +3473,7 @@ function getStatistic(_0) { }); } function getMeta(_0) { - return __async$b(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) { + return __async$c(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) { try { const { _meta: { @@ -3438,7 +3518,7 @@ function getRegisters({ }); } function getAllRegisters(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -3527,7 +3607,7 @@ function getDeposits({ }); } function getAllDeposits(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -3624,7 +3704,7 @@ function getWithdrawals({ }); } function getAllWithdrawals(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -3700,7 +3780,7 @@ function getAllWithdrawals(_0) { }); } function getNoteAccounts(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, address, @@ -3717,7 +3797,7 @@ function getNoteAccounts(_0) { subgraphName, query: GET_NOTE_ACCOUNTS, variables: { - address + address: address.toLowerCase() }, fetchDataOptions: fetchDataOptions2 }); @@ 
-3735,6 +3815,95 @@ function getNoteAccounts(_0) { } }); } +function getGraphEchoEvents({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2 +}) { + return queryGraph({ + graphApi, + subgraphName, + query: GET_ECHO_EVENTS, + variables: { + first, + fromBlock + }, + fetchDataOptions: fetchDataOptions2 + }); +} +function getAllGraphEchoEvents(_0) { + return __async$c(this, arguments, function* ({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2, + onProgress + }) { + try { + const events = []; + let lastSyncBlock = fromBlock; + while (true) { + let { + noteAccounts: result2, + _meta: { + // eslint-disable-next-line prefer-const + block: { number: currentBlock } + } + } = yield getGraphEchoEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions: fetchDataOptions2 }); + lastSyncBlock = currentBlock; + if (isEmptyArray(result2)) { + break; + } + const [firstEvent] = result2; + const [lastEvent2] = result2.slice(-1); + if (typeof onProgress === "function") { + onProgress({ + type: "EchoEvents", + fromBlock: Number(firstEvent.blockNumber), + toBlock: Number(lastEvent2.blockNumber), + count: result2.length + }); + } + if (result2.length < 900) { + events.push(...result2); + break; + } + result2 = result2.filter(({ blockNumber }) => blockNumber !== lastEvent2.blockNumber); + fromBlock = Number(lastEvent2.blockNumber); + events.push(...result2); + } + if (!events.length) { + return { + events: [], + lastSyncBlock + }; + } + const result = events.map((e) => { + const [transactionHash, logIndex] = e.id.split("-"); + return { + blockNumber: Number(e.blockNumber), + logIndex: Number(logIndex), + transactionHash, + address: ethers.getAddress(e.address), + encryptedAccount: e.encryptedAccount + }; + }); + const [lastEvent] = result.slice(-1); + return { + events: result, + lastSyncBlock: lastEvent && lastEvent.blockNumber >= lastSyncBlock ? 
lastEvent.blockNumber + 1 : lastSyncBlock + }; + } catch (err) { + console.log("Error from getAllGraphEchoEvents query"); + console.log(err); + return { + events: [], + lastSyncBlock: fromBlock + }; + } + }); +} function getEncryptedNotes({ graphApi, subgraphName, @@ -3753,7 +3922,7 @@ function getEncryptedNotes({ }); } function getAllEncryptedNotes(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -3820,11 +3989,160 @@ function getAllEncryptedNotes(_0) { } }); } +function getGovernanceEvents({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2 +}) { + return queryGraph({ + graphApi, + subgraphName, + query: GET_GOVERNANCE_EVENTS, + variables: { + first, + fromBlock + }, + fetchDataOptions: fetchDataOptions2 + }); +} +function getAllGovernanceEvents(_0) { + return __async$c(this, arguments, function* ({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2, + onProgress + }) { + try { + const result = []; + let lastSyncBlock = fromBlock; + while (true) { + const { + proposals, + votes, + delegates, + undelegates, + _meta: { + block: { number: currentBlock } + } + } = yield getGovernanceEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions: fetchDataOptions2 }); + lastSyncBlock = currentBlock; + const eventsLength = proposals.length + votes.length + delegates.length + undelegates.length; + if (eventsLength === 0) { + break; + } + const formattedProposals = proposals.map( + ({ blockNumber, logIndex, transactionHash, proposalId, proposer, target, startTime, endTime, description }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "ProposalCreated", + id: Number(proposalId), + proposer: ethers.getAddress(proposer), + target: ethers.getAddress(target), + startTime: Number(startTime), + endTime: Number(endTime), + description + }; + } + ); + const formattedVotes 
= votes.map( + ({ blockNumber, logIndex, transactionHash, proposalId, voter, support, votes: votes2, from, input }) => { + if (!input || input.length > 2048) { + input = ""; + } + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Voted", + proposalId: Number(proposalId), + voter: ethers.getAddress(voter), + support, + votes: votes2, + from: ethers.getAddress(from), + input + }; + } + ); + const formattedDelegates = delegates.map( + ({ blockNumber, logIndex, transactionHash, account, delegateTo }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Delegated", + account: ethers.getAddress(account), + delegateTo: ethers.getAddress(delegateTo) + }; + } + ); + const formattedUndelegates = undelegates.map( + ({ blockNumber, logIndex, transactionHash, account, delegateFrom }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Undelegated", + account: ethers.getAddress(account), + delegateFrom: ethers.getAddress(delegateFrom) + }; + } + ); + let formattedEvents = [ + ...formattedProposals, + ...formattedVotes, + ...formattedDelegates, + ...formattedUndelegates + ].sort((a, b) => { + if (a.blockNumber === b.blockNumber) { + return a.logIndex - b.logIndex; + } + return a.blockNumber - b.blockNumber; + }); + if (eventsLength < 900) { + result.push(...formattedEvents); + break; + } + const [firstEvent] = formattedEvents; + const [lastEvent2] = formattedEvents.slice(-1); + if (typeof onProgress === "function") { + onProgress({ + type: "Governance Events", + fromBlock: Number(firstEvent.blockNumber), + toBlock: Number(lastEvent2.blockNumber), + count: eventsLength + }); + } + formattedEvents = formattedEvents.filter(({ blockNumber }) => blockNumber !== lastEvent2.blockNumber); + fromBlock = Number(lastEvent2.blockNumber); + result.push(...formattedEvents); + } + const [lastEvent] = result.slice(-1); + return 
{ + events: result, + lastSyncBlock: lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock + }; + } catch (err) { + console.log("Error from getAllGovernance query"); + console.log(err); + return { + events: [], + lastSyncBlock: fromBlock + }; + } + }); +} var graph = /*#__PURE__*/Object.freeze({ __proto__: null, GET_DEPOSITS: GET_DEPOSITS, + GET_ECHO_EVENTS: GET_ECHO_EVENTS, GET_ENCRYPTED_NOTES: GET_ENCRYPTED_NOTES, + GET_GOVERNANCE_APY: GET_GOVERNANCE_APY, + GET_GOVERNANCE_EVENTS: GET_GOVERNANCE_EVENTS, GET_NOTE_ACCOUNTS: GET_NOTE_ACCOUNTS, GET_REGISTERED: GET_REGISTERED, GET_STATISTIC: GET_STATISTIC, @@ -3832,10 +4150,14 @@ var graph = /*#__PURE__*/Object.freeze({ _META: _META, getAllDeposits: getAllDeposits, getAllEncryptedNotes: getAllEncryptedNotes, + getAllGovernanceEvents: getAllGovernanceEvents, + getAllGraphEchoEvents: getAllGraphEchoEvents, getAllRegisters: getAllRegisters, getAllWithdrawals: getAllWithdrawals, getDeposits: getDeposits, getEncryptedNotes: getEncryptedNotes, + getGovernanceEvents: getGovernanceEvents, + getGraphEchoEvents: getGraphEchoEvents, getMeta: getMeta, getNoteAccounts: getNoteAccounts, getRegisters: getRegisters, @@ -3844,7 +4166,7 @@ var graph = /*#__PURE__*/Object.freeze({ queryGraph: queryGraph }); -var __async$a = (__this, __arguments, generator) => { +var __async$b = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -3883,7 +4205,7 @@ class BatchBlockService { this.retryOn = retryOn; } getBlock(blockTag) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { const blockObject = yield this.provider.getBlock(blockTag); if (!blockObject) { const errMsg = `No block for ${blockTag}`; @@ -3893,9 +4215,9 @@ class BatchBlockService { }); } createBatchRequest(batchArray) { - return batchArray.map((blocks, index) => __async$a(this, null, function* () { + return batchArray.map((blocks, 
index) => __async$b(this, null, function* () { yield sleep(20 * index); - return (() => __async$a(this, null, function* () { + return (() => __async$b(this, null, function* () { let retries = 0; let err; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -3912,7 +4234,7 @@ class BatchBlockService { })); } getBatchBlocks(blocks) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { let blockCount = 0; const results = []; for (const chunks of chunk(blocks, this.concurrencySize * this.batchSize)) { @@ -3950,7 +4272,7 @@ class BatchTransactionService { this.retryOn = retryOn; } getTransaction(txHash) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { const txObject = yield this.provider.getTransaction(txHash); if (!txObject) { const errMsg = `No transaction for ${txHash}`; @@ -3960,9 +4282,9 @@ class BatchTransactionService { }); } createBatchRequest(batchArray) { - return batchArray.map((txs, index) => __async$a(this, null, function* () { + return batchArray.map((txs, index) => __async$b(this, null, function* () { yield sleep(20 * index); - return (() => __async$a(this, null, function* () { + return (() => __async$b(this, null, function* () { let retries = 0; let err; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -3979,7 +4301,7 @@ class BatchTransactionService { })); } getBatchTransactions(txs) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { let txCount = 0; const results = []; for (const chunks of chunk(txs, this.concurrencySize * this.batchSize)) { @@ -4015,7 +4337,7 @@ class BatchEventsService { this.retryOn = retryOn; } getPastEvents(_0) { - return __async$a(this, arguments, function* ({ fromBlock, toBlock, type }) { + return __async$b(this, arguments, function* ({ fromBlock, toBlock, type }) { let err; let retries = 0; while (!this.shouldRetry 
&& retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -4035,13 +4357,13 @@ class BatchEventsService { }); } createBatchRequest(batchArray) { - return batchArray.map((event, index) => __async$a(this, null, function* () { + return batchArray.map((event, index) => __async$b(this, null, function* () { yield sleep(20 * index); return this.getPastEvents(event); })); } getBatchEvents(_0) { - return __async$a(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) { + return __async$b(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) { if (!toBlock) { toBlock = yield this.provider.getBlockNumber(); } @@ -4072,29 +4394,29 @@ class BatchEventsService { } } -var __defProp$1 = Object.defineProperty; -var __defProps$1 = Object.defineProperties; -var __getOwnPropDescs$1 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols; +var __defProp$3 = Object.defineProperty; +var __defProps$2 = Object.defineProperties; +var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols; var __getProtoOf = Object.getPrototypeOf; -var __hasOwnProp$1 = Object.prototype.hasOwnProperty; -var __propIsEnum$1 = Object.prototype.propertyIsEnumerable; +var __hasOwnProp$3 = Object.prototype.hasOwnProperty; +var __propIsEnum$3 = Object.prototype.propertyIsEnumerable; var __reflectGet = Reflect.get; -var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$1 = (a, b) => { +var __defNormalProp$3 = (obj, key, value) => key in obj ? 
__defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$3 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$1.call(b, prop)) - __defNormalProp$1(a, prop, b[prop]); - if (__getOwnPropSymbols$1) - for (var prop of __getOwnPropSymbols$1(b)) { - if (__propIsEnum$1.call(b, prop)) - __defNormalProp$1(a, prop, b[prop]); + if (__hasOwnProp$3.call(b, prop)) + __defNormalProp$3(a, prop, b[prop]); + if (__getOwnPropSymbols$3) + for (var prop of __getOwnPropSymbols$3(b)) { + if (__propIsEnum$3.call(b, prop)) + __defNormalProp$3(a, prop, b[prop]); } return a; }; -var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b)); +var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b)); var __superGet = (cls, obj, key) => __reflectGet(__getProtoOf(cls), key, obj); -var __async$9 = (__this, __arguments, generator) => { +var __async$a = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -4169,7 +4491,7 @@ class BaseEventsService { } /* eslint-enable @typescript-eslint/no-unused-vars */ formatEvents(events) { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return yield new Promise((resolve) => resolve(events)); }); } @@ -4177,7 +4499,7 @@ class BaseEventsService { * Get saved or cached events */ getEventsFromDB() { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return { events: [], lastBlock: null @@ -4185,7 +4507,7 @@ class BaseEventsService { }); } getEventsFromCache() { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return { events: [], lastBlock: null @@ -4193,7 +4515,7 @@ class BaseEventsService { }); } getSavedEvents() { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { let cachedEvents = yield this.getEventsFromDB(); if 
(!cachedEvents || !cachedEvents.events.length) { cachedEvents = yield this.getEventsFromCache(); @@ -4205,7 +4527,7 @@ class BaseEventsService { * Get latest events */ getEventsFromGraph(_0) { - return __async$9(this, arguments, function* ({ + return __async$a(this, arguments, function* ({ fromBlock, methodName = "" }) { @@ -4215,7 +4537,7 @@ class BaseEventsService { lastBlock: fromBlock }; } - const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$1({ + const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$3({ fromBlock }, this.getGraphParams())); return { @@ -4225,7 +4547,7 @@ class BaseEventsService { }); } getEventsFromRpc(_0) { - return __async$9(this, arguments, function* ({ + return __async$a(this, arguments, function* ({ fromBlock, toBlock }) { @@ -4263,7 +4585,7 @@ class BaseEventsService { }); } getLatestEvents(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { + return __async$a(this, arguments, function* ({ fromBlock }) { const allEvents = []; const graphEvents = yield this.getEventsFromGraph({ fromBlock }); const lastSyncBlock = graphEvents.lastBlock && graphEvents.lastBlock >= fromBlock ? 
graphEvents.lastBlock : fromBlock; @@ -4285,14 +4607,14 @@ class BaseEventsService { */ // eslint-disable-next-line @typescript-eslint/no-unused-vars saveEvents(_0) { - return __async$9(this, arguments, function* ({ events, lastBlock }) { + return __async$a(this, arguments, function* ({ events, lastBlock }) { }); } /** * Trigger saving and receiving latest events */ updateEvents() { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { const savedEvents = yield this.getSavedEvents(); let fromBlock = this.deployedBlock; if (savedEvents && savedEvents.lastBlock) { @@ -4366,7 +4688,7 @@ class BaseDepositsService extends BaseEventsService { }; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { const type = this.getType().toLowerCase(); if (type === DEPOSIT) { const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { @@ -4385,7 +4707,7 @@ class BaseDepositsService extends BaseEventsService { ]); return formattedEvents.map((event) => { const { from } = txs.find(({ hash }) => hash === event.transactionHash); - return __spreadProps$1(__spreadValues$1({}, event), { + return __spreadProps$2(__spreadValues$3({}, event), { from }); }); @@ -4406,7 +4728,7 @@ class BaseDepositsService extends BaseEventsService { ]); return formattedEvents.map((event) => { const { timestamp } = blocks.find(({ number }) => number === event.blockNumber); - return __spreadProps$1(__spreadValues$1({}, event), { + return __spreadProps$2(__spreadValues$3({}, event), { timestamp }); }); @@ -4423,6 +4745,57 @@ class BaseDepositsService extends BaseEventsService { } } } +class BaseEchoService extends BaseEventsService { + constructor({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions: fetchDataOptions2 + }) { + super({ netId, provider, graphApi, subgraphName, contract: Echoer, deployedBlock, fetchDataOptions: 
fetchDataOptions2 }); + } + getInstanceName() { + return `echo_${this.netId}`; + } + getType() { + return "Echo"; + } + getGraphMethod() { + return "getAllGraphEchoEvents"; + } + formatEvents(events) { + return __async$a(this, null, function* () { + return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { + const { who, data } = args; + if (who && data) { + const eventObjects = { + blockNumber, + logIndex, + transactionHash + }; + return __spreadProps$2(__spreadValues$3({}, eventObjects), { + address: who, + encryptedAccount: data + }); + } + }).filter((e) => e); + }); + } + getEventsFromGraph(_0) { + return __async$a(this, arguments, function* ({ fromBlock }) { + if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { + return { + events: [], + lastBlock: fromBlock + }; + } + return __superGet(BaseEchoService.prototype, this, "getEventsFromGraph").call(this, { fromBlock }); + }); + } +} class BaseEncryptedNotesService extends BaseEventsService { constructor({ netId, @@ -4445,7 +4818,7 @@ class BaseEncryptedNotesService extends BaseEventsService { return "getAllEncryptedNotes"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const { encryptedNote } = args; if (encryptedNote) { @@ -4454,7 +4827,7 @@ class BaseEncryptedNotesService extends BaseEventsService { logIndex, transactionHash }; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + return __spreadProps$2(__spreadValues$3({}, eventObjects), { encryptedNote }); } @@ -4485,11 +4858,15 @@ class BaseGovernanceService extends BaseEventsService { return "*"; } getGraphMethod() { - return "governanceEvents"; + return "getAllGovernanceEvents"; } formatEvents(events) { - return __async$9(this, null, function* () { - const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args, eventName: event }) 
=> { + return __async$a(this, null, function* () { + const proposalEvents = []; + const votedEvents = []; + const delegatedEvents = []; + const undelegatedEvents = []; + events.forEach(({ blockNumber, index: logIndex, transactionHash, args, eventName: event }) => { const eventObjects = { blockNumber, logIndex, @@ -4498,60 +4875,61 @@ class BaseGovernanceService extends BaseEventsService { }; if (event === "ProposalCreated") { const { id, proposer, target, startTime, endTime, description } = args; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { - id, + proposalEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), { + id: Number(id), proposer, target, - startTime, - endTime, + startTime: Number(startTime), + endTime: Number(endTime), description - }); + })); } if (event === "Voted") { const { proposalId, voter, support, votes } = args; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { - proposalId, + votedEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), { + proposalId: Number(proposalId), voter, support, - votes - }); + votes, + from: "", + input: "" + })); } if (event === "Delegated") { const { account, to: delegateTo } = args; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + delegatedEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), { account, delegateTo - }); + })); } if (event === "Undelegated") { const { account, from: delegateFrom } = args; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + undelegatedEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), { account, delegateFrom - }); + })); } - }).filter((e) => e); - const votedEvents = formattedEvents.map((event, index) => __spreadProps$1(__spreadValues$1({}, event), { index })).filter(({ event }) => event === "Voted"); + }); if (votedEvents.length) { this.updateTransactionProgress({ percentage: 0 }); const txs = yield this.batchTransactionService.getBatchTransactions([ ...new Set(votedEvents.map(({ 
transactionHash }) => transactionHash)) ]); - votedEvents.forEach((event) => { + votedEvents.forEach((event, index) => { let { data: input, from } = txs.find((t) => t.hash === event.transactionHash); if (!input || input.length > 2048) { input = ""; } - formattedEvents[event.index].from = from; - formattedEvents[event.index].input = input; + votedEvents[index].from = from; + votedEvents[index].input = input; }); } - return formattedEvents; + return [...proposalEvents, ...votedEvents, ...delegatedEvents, ...undelegatedEvents]; }); } getEventsFromGraph(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { - if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { + return __async$a(this, arguments, function* ({ fromBlock }) { + if (!this.graphApi || !this.subgraphName || this.graphApi.includes("api.thegraph.com")) { return { events: [], lastBlock: fromBlock @@ -4585,14 +4963,14 @@ class BaseRegistryService extends BaseEventsService { return "getAllRegisters"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const eventObjects = { blockNumber, logIndex, transactionHash }; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + return __spreadProps$2(__spreadValues$3({}, eventObjects), { ensName: args.ensName, relayerAddress: args.relayerAddress }); @@ -4600,13 +4978,13 @@ class BaseRegistryService extends BaseEventsService { }); } fetchRelayers() { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return (yield this.updateEvents()).events; }); } } -var __async$8 = (__this, __arguments, generator) => { +var __async$9 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -4627,7 +5005,7 @@ var __async$8 = (__this, __arguments, generator) => { }); }; function existsAsync(fileOrDir) { 
- return __async$8(this, null, function* () { + return __async$9(this, null, function* () { try { yield promises.stat(fileOrDir); return true; @@ -4658,27 +5036,26 @@ function unzipAsync(data) { }); }); } -function saveEvents(_0) { - return __async$8(this, arguments, function* ({ - name, +function saveUserFile(_0) { + return __async$9(this, arguments, function* ({ + fileName, userDirectory, - events + dataString }) { - const fileName = `${name}.json`.toLowerCase(); + fileName = fileName.toLowerCase(); const filePath = path.join(userDirectory, fileName); - const stringEvents = JSON.stringify(events, null, 2) + "\n"; const payload = yield zipAsync({ - [fileName]: new TextEncoder().encode(stringEvents) + [fileName]: new TextEncoder().encode(dataString) }); if (!(yield existsAsync(userDirectory))) { yield promises.mkdir(userDirectory, { recursive: true }); } yield promises.writeFile(filePath + ".zip", payload); - yield promises.writeFile(filePath, stringEvents); + yield promises.writeFile(filePath, dataString); }); } function loadSavedEvents(_0) { - return __async$8(this, arguments, function* ({ + return __async$9(this, arguments, function* ({ name, userDirectory, deployedBlock @@ -4707,7 +5084,7 @@ function loadSavedEvents(_0) { }); } function download(_0) { - return __async$8(this, arguments, function* ({ name, cacheDirectory }) { + return __async$9(this, arguments, function* ({ name, cacheDirectory }) { const fileName = `${name}.json`.toLowerCase(); const zipName = `${fileName}.zip`; const zipPath = path.join(cacheDirectory, zipName); @@ -4717,7 +5094,7 @@ function download(_0) { }); } function loadCachedEvents(_0) { - return __async$8(this, arguments, function* ({ + return __async$9(this, arguments, function* ({ name, cacheDirectory, deployedBlock @@ -4747,7 +5124,7 @@ function loadCachedEvents(_0) { }); } -var __async$7 = (__this, __arguments, generator) => { +var __async$8 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var 
fulfilled = (value) => { try { @@ -4832,7 +5209,7 @@ class NodeDepositsService extends BaseDepositsService { } } getEventsFromDB() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.userDirectory) { console.log( "Updating events for", @@ -4863,7 +5240,7 @@ class NodeDepositsService extends BaseDepositsService { }); } getEventsFromCache() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -4885,7 +5262,7 @@ class NodeDepositsService extends BaseDepositsService { }); } saveEvents(_0) { - return __async$7(this, arguments, function* ({ events, lastBlock }) { + return __async$8(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -4906,10 +5283,136 @@ class NodeDepositsService extends BaseDepositsService { ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" + }); + } + }); + } +} +class NodeEchoService extends BaseEchoService { + constructor({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions, + cacheDirectory, + userDirectory + }) { + super({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions + }); + this.cacheDirectory = cacheDirectory; + this.userDirectory = userDirectory; + } + updateEventProgress({ type, fromBlock, toBlock, count }) { + if (toBlock) { + console.log(`fromBlock - ${fromBlock}`); + console.log(`toBlock - ${toBlock}`); + if (count) { + console.log(`downloaded ${type} events count - ${count}`); + 
console.log("____________________________________________"); + console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock} +`); + } + } + } + updateGraphProgress({ type, fromBlock, toBlock, count }) { + if (toBlock) { + console.log(`fromBlock - ${fromBlock}`); + console.log(`toBlock - ${toBlock}`); + if (count) { + console.log(`downloaded ${type} events from graph node count - ${count}`); + console.log("____________________________________________"); + console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock} +`); + } + } + } + getEventsFromDB() { + return __async$8(this, null, function* () { + if (!this.userDirectory) { + console.log(`Updating events for ${this.netId} chain echo events +`); + console.log(`savedEvents count - ${0}`); + console.log(`savedEvents lastBlock - ${this.deployedBlock} +`); + return { + events: [], + lastBlock: this.deployedBlock + }; + } + const savedEvents = yield loadSavedEvents({ + name: this.getInstanceName(), + userDirectory: this.userDirectory, + deployedBlock: this.deployedBlock + }); + console.log(`Updating events for ${this.netId} chain echo events +`); + console.log(`savedEvents count - ${savedEvents.events.length}`); + console.log(`savedEvents lastBlock - ${savedEvents.lastBlock} +`); + return savedEvents; + }); + } + getEventsFromCache() { + return __async$8(this, null, function* () { + if (!this.cacheDirectory) { + console.log(`cachedEvents count - ${0}`); + console.log(`cachedEvents lastBlock - ${this.deployedBlock} +`); + return { + events: [], + lastBlock: this.deployedBlock + }; + } + const cachedEvents = yield loadCachedEvents({ + name: this.getInstanceName(), + cacheDirectory: this.cacheDirectory, + deployedBlock: this.deployedBlock + }); + console.log(`cachedEvents count - ${cachedEvents.events.length}`); + console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock} +`); + return cachedEvents; + }); + } + saveEvents(_0) { + return __async$8(this, arguments, function* ({ events, lastBlock 
}) { + const instanceName = this.getInstanceName(); + console.log("\ntotalEvents count - ", events.length); + console.log( + `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock} +` + ); + const eventTable = new Table(); + eventTable.push( + [{ colSpan: 2, content: "Echo Accounts", hAlign: "center" }], + ["Network", `${this.netId} chain`], + ["Events", `${events.length} events`], + [{ colSpan: 2, content: "Latest events" }], + ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => { + const eventIndex = events.length - index; + return [eventIndex, blockNumber]; + }) + ); + console.log(eventTable.toString() + "\n"); + if (this.userDirectory) { + yield saveUserFile({ + fileName: instanceName + ".json", + userDirectory: this.userDirectory, + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -4964,7 +5467,7 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { } } getEventsFromDB() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.userDirectory) { console.log(`Updating events for ${this.netId} chain encrypted events `); @@ -4990,7 +5493,7 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { }); } getEventsFromCache() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -5012,7 +5515,7 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { }); } saveEvents(_0) { - return __async$7(this, arguments, function* ({ events, lastBlock }) { + return __async$8(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -5032,10 +5535,10 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { ); 
console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -5095,7 +5598,7 @@ class NodeGovernanceService extends BaseGovernanceService { } } getEventsFromDB() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.userDirectory) { console.log(`Updating events for ${this.netId} chain governance events `); @@ -5121,7 +5624,7 @@ class NodeGovernanceService extends BaseGovernanceService { }); } getEventsFromCache() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -5143,7 +5646,7 @@ class NodeGovernanceService extends BaseGovernanceService { }); } saveEvents(_0) { - return __async$7(this, arguments, function* ({ events, lastBlock }) { + return __async$8(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -5163,10 +5666,10 @@ class NodeGovernanceService extends BaseGovernanceService { ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -5221,7 +5724,7 @@ class NodeRegistryService extends BaseRegistryService { } } getEventsFromDB() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.userDirectory) { console.log(`Updating events for ${this.netId} chain registry events `); @@ -5247,7 +5750,7 @@ class NodeRegistryService extends 
BaseRegistryService { }); } getEventsFromCache() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -5269,7 +5772,7 @@ class NodeRegistryService extends BaseRegistryService { }); } saveEvents(_0) { - return __async$7(this, arguments, function* ({ events, lastBlock }) { + return __async$8(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -5289,598 +5792,43 @@ class NodeRegistryService extends BaseRegistryService { ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); } } -const addressType = { type: "string", pattern: "^0x[a-fA-F0-9]{40}$" }; -const bnType = { type: "string", BN: true }; -const statusSchema = { - type: "object", - properties: { - rewardAccount: addressType, - gasPrices: { - type: "object", - properties: { - fast: { type: "number" }, - additionalProperties: { type: "number" } - }, - required: ["fast"] - }, - netId: { type: "integer" }, - tornadoServiceFee: { type: "number", maximum: 20, minimum: 0 }, - latestBlock: { type: "number" }, - version: { type: "string" }, - health: { - type: "object", - properties: { - status: { const: "true" }, - error: { type: "string" } - }, - required: ["status"] - }, - currentQueue: { type: "number" } - }, - required: ["rewardAccount", "instances", "netId", "tornadoServiceFee", "version", "health"] +var __defProp$2 = Object.defineProperty; +var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols; +var __hasOwnProp$2 = Object.prototype.hasOwnProperty; +var __propIsEnum$2 = 
Object.prototype.propertyIsEnumerable; +var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$2 = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp$2.call(b, prop)) + __defNormalProp$2(a, prop, b[prop]); + if (__getOwnPropSymbols$2) + for (var prop of __getOwnPropSymbols$2(b)) { + if (__propIsEnum$2.call(b, prop)) + __defNormalProp$2(a, prop, b[prop]); + } + return a; }; -function getStatusSchema(netId, config) { - const { tokens, optionalTokens = [], nativeCurrency } = config; - const schema = JSON.parse(JSON.stringify(statusSchema)); - const instances = Object.keys(tokens).reduce( - (acc, token) => { - const { instanceAddress, tokenAddress, symbol, decimals, optionalInstances = [] } = tokens[token]; - const amounts = Object.keys(instanceAddress); - const instanceProperties = { - type: "object", - properties: { - instanceAddress: { - type: "object", - properties: amounts.reduce((acc2, cur) => { - acc2[cur] = addressType; - return acc2; - }, {}), - required: amounts.filter((amount) => !optionalInstances.includes(amount)) - }, - decimals: { enum: [decimals] } - }, - required: ["instanceAddress", "decimals"].concat( - tokenAddress ? ["tokenAddress"] : [], - symbol ? 
["symbol"] : [] - ) - }; - if (tokenAddress) { - instanceProperties.properties.tokenAddress = addressType; - } - if (symbol) { - instanceProperties.properties.symbol = { enum: [symbol] }; - } - acc.properties[token] = instanceProperties; - if (!optionalTokens.includes(token)) { - acc.required.push(token); - } - return acc; - }, - { - type: "object", - properties: {}, - required: [] - } - ); - schema.properties.instances = instances; - if (Number(netId) === 1) { - const _tokens = Object.keys(tokens).filter((t) => t !== nativeCurrency); - const ethPrices = { - type: "object", - properties: _tokens.reduce((acc, token) => { - acc[token] = bnType; - return acc; - }, {}) - // required: _tokens - }; - schema.properties.ethPrices = ethPrices; - } - return schema; -} - -const jobsSchema = { - type: "object", - properties: { - error: { type: "string" }, - id: { type: "string" }, - type: { type: "string" }, - status: { type: "string" }, - contract: { type: "string" }, - proof: { type: "string" }, - args: { - type: "array", - items: { type: "string" } - }, - txHash: { type: "string" }, - confirmations: { type: "number" }, - failedReason: { type: "string" } - }, - required: ["id", "status"] -}; - -const ajv = new Ajv({ allErrors: true }); -ajv.addKeyword({ - keyword: "BN", - // eslint-disable-next-line @typescript-eslint/no-explicit-any - validate: (schema, data) => { - try { - BigInt(data); - return true; - } catch (e) { - return false; - } - }, - errors: true -}); - -var __async$6 = (__this, __arguments, generator) => { - return new Promise((resolve, reject) => { - var fulfilled = (value) => { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - }; - var rejected = (value) => { - try { - step(generator.throw(value)); - } catch (e) { - reject(e); - } - }; - var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); - step((generator = generator.apply(__this, __arguments)).next()); - }); -}; -class Pedersen { - constructor() { - this.pedersenPromise = this.initPedersen(); - } - initPedersen() { - return __async$6(this, null, function* () { - this.pedersenHash = yield circomlibjs.buildPedersenHash(); - this.babyJub = this.pedersenHash.babyJub; - }); - } - unpackPoint(buffer) { - return __async$6(this, null, function* () { - var _a, _b; - yield this.pedersenPromise; - return (_b = this.babyJub) == null ? void 0 : _b.unpackPoint((_a = this.pedersenHash) == null ? void 0 : _a.hash(buffer)); - }); - } - toStringBuffer(buffer) { - var _a; - return (_a = this.babyJub) == null ? void 0 : _a.F.toString(buffer); - } -} -const pedersen = new Pedersen(); -function buffPedersenHash(buffer) { - return __async$6(this, null, function* () { - const [hash] = yield pedersen.unpackPoint(buffer); - return pedersen.toStringBuffer(hash); - }); -} - -var __async$5 = (__this, __arguments, generator) => { - return new Promise((resolve, reject) => { - var fulfilled = (value) => { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - }; - var rejected = (value) => { - try { - step(generator.throw(value)); - } catch (e) { - reject(e); - } - }; - var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); - step((generator = generator.apply(__this, __arguments)).next()); - }); -}; -function createDeposit(_0) { - return __async$5(this, arguments, function* ({ nullifier, secret }) { - const preimage = new Uint8Array([...leInt2Buff(nullifier), ...leInt2Buff(secret)]); - const noteHex = toFixedHex(bytesToBN(preimage), 62); - const commitment = BigInt(yield buffPedersenHash(preimage)); - const commitmentHex = toFixedHex(commitment); - const nullifierHash = BigInt(yield buffPedersenHash(leInt2Buff(nullifier))); - const nullifierHex = toFixedHex(nullifierHash); - return { - preimage, - noteHex, - commitment, - commitmentHex, - nullifierHash, - nullifierHex - }; - }); -} -class Deposit { - constructor({ - currency, - amount, - netId, - nullifier, - secret, - note, - noteHex, - invoice, - commitmentHex, - nullifierHex - }) { - this.currency = currency; - this.amount = amount; - this.netId = netId; - this.nullifier = nullifier; - this.secret = secret; - this.note = note; - this.noteHex = noteHex; - this.invoice = invoice; - this.commitmentHex = commitmentHex; - this.nullifierHex = nullifierHex; - } - toString() { - return JSON.stringify( - { - currency: this.currency, - amount: this.amount, - netId: this.netId, - nullifier: this.nullifier, - secret: this.secret, - note: this.note, - noteHex: this.noteHex, - invoice: this.invoice, - commitmentHex: this.commitmentHex, - nullifierHex: this.nullifierHex - }, - null, - 2 - ); - } - static createNote(_0) { - return __async$5(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) { - if (!nullifier) { - nullifier = rBigInt(31); - } - if (!secret) { - secret = rBigInt(31); - } - const depositObject = yield createDeposit({ - nullifier, - secret - }); - const newDeposit = new Deposit({ - currency: currency.toLowerCase(), - amount, - netId: Number(netId), - note: `tornado-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.noteHex}`, - 
noteHex: depositObject.noteHex, - invoice: `tornadoInvoice-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.commitmentHex}`, - nullifier, - secret, - commitmentHex: depositObject.commitmentHex, - nullifierHex: depositObject.nullifierHex - }); - return newDeposit; - }); - } - static parseNote(noteString) { - return __async$5(this, null, function* () { - const noteRegex = new RegExp("tornado-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{124})", "g"); - const match = noteRegex.exec(noteString); - if (!match) { - throw new Error("The note has invalid format"); - } - const matchGroup = match == null ? void 0 : match.groups; - const currency = matchGroup.currency.toLowerCase(); - const amount = matchGroup.amount; - const netId = Number(matchGroup.netId); - const bytes = bnToBytes("0x" + matchGroup.note); - const nullifier = BigInt(leBuff2Int(bytes.slice(0, 31)).toString()); - const secret = BigInt(leBuff2Int(bytes.slice(31, 62)).toString()); - const depositObject = yield createDeposit({ nullifier, secret }); - const invoice = `tornadoInvoice-${currency}-${amount}-${netId}-${depositObject.commitmentHex}`; - const newDeposit = new Deposit({ - currency, - amount, - netId, - note: noteString, - noteHex: depositObject.noteHex, - invoice, - nullifier, - secret, - commitmentHex: depositObject.commitmentHex, - nullifierHex: depositObject.nullifierHex - }); - return newDeposit; - }); - } -} -class Invoice { - constructor(invoiceString) { - const invoiceRegex = new RegExp("tornadoInvoice-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{64})", "g"); - const match = invoiceRegex.exec(invoiceString); - if (!match) { - throw new Error("The note has invalid format"); - } - const matchGroup = match == null ? 
void 0 : match.groups; - const currency = matchGroup.currency.toLowerCase(); - const amount = matchGroup.amount; - const netId = Number(matchGroup.netId); - this.currency = currency; - this.amount = amount; - this.netId = netId; - this.commitment = "0x" + matchGroup.commitment; - this.invoice = invoiceString; - } - toString() { - return JSON.stringify( - { - currency: this.currency, - amount: this.amount, - netId: this.netId, - commitment: this.commitment, - invoice: this.invoice - }, - null, - 2 - ); - } -} - -const DUMMY_ADDRESS = "0x1111111111111111111111111111111111111111"; -const DUMMY_NONCE = "0x1111111111111111111111111111111111111111111111111111111111111111"; -const DUMMY_WITHDRAW_DATA = "0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"; -function convertETHToTokenAmount(amountInWei, tokenPriceInWei, tokenDecimals = 18) { - const tokenDecimalsMultiplier = BigInt(10 ** Number(tokenDecimals)); - return BigInt(amountInWei) * 
tokenDecimalsMultiplier / BigInt(tokenPriceInWei); -} -class TornadoFeeOracle { - constructor(ovmGasPriceOracle) { - if (ovmGasPriceOracle) { - this.ovmGasPriceOracle = ovmGasPriceOracle; - } - } - /** - * Calculate L1 fee for op-stack chains - * - * This is required since relayers would pay the full transaction fees for users - */ - fetchL1OptimismFee(tx) { - if (!this.ovmGasPriceOracle) { - return new Promise((resolve) => resolve(BigInt(0))); - } - if (!tx) { - tx = { - type: 0, - gasLimit: 1e6, - nonce: Number(DUMMY_NONCE), - data: DUMMY_WITHDRAW_DATA, - gasPrice: ethers.parseUnits("1", "gwei"), - from: DUMMY_ADDRESS, - to: DUMMY_ADDRESS - }; - } - return this.ovmGasPriceOracle.getL1Fee.staticCall(ethers.Transaction.from(tx).unsignedSerialized); - } - /** - * We don't need to distinguish default refunds by tokens since most users interact with other defi protocols after withdrawal - * So we default with 1M gas which is enough for two or three swaps - * Using 30 gwei for default but it is recommended to supply cached gasPrice value from the UI - */ - defaultEthRefund(gasPrice, gasLimit) { - return (gasPrice ? 
BigInt(gasPrice) : ethers.parseUnits("30", "gwei")) * BigInt(gasLimit || 1e6); - } - /** - * Calculates token amount for required ethRefund purchases required to calculate fees - */ - calculateTokenAmount(ethRefund, tokenPriceInEth, tokenDecimals) { - return convertETHToTokenAmount(ethRefund, tokenPriceInEth, tokenDecimals); - } - /** - * Warning: For tokens you need to check if the fees are above denomination - * (Usually happens for small denomination pool or if the gas price is high) - */ - calculateRelayerFee({ - gasPrice, - gasLimit = 6e5, - l1Fee = 0, - denomination, - ethRefund = BigInt(0), - tokenPriceInWei, - tokenDecimals = 18, - relayerFeePercent = 0.33, - isEth = true, - premiumPercent = 20 - }) { - const gasCosts = BigInt(gasPrice) * BigInt(gasLimit) + BigInt(l1Fee); - const relayerFee = BigInt(denomination) * BigInt(Math.floor(1e4 * relayerFeePercent)) / BigInt(1e4 * 100); - if (isEth) { - return (gasCosts + relayerFee) * BigInt(premiumPercent ? 100 + premiumPercent : 100) / BigInt(100); - } - const feeInEth = gasCosts + BigInt(ethRefund); - return (convertETHToTokenAmount(feeInEth, tokenPriceInWei, tokenDecimals) + relayerFee) * BigInt(premiumPercent ? 100 + premiumPercent : 100) / BigInt(100); - } -} - -var __async$4 = (__this, __arguments, generator) => { - return new Promise((resolve, reject) => { - var fulfilled = (value) => { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - }; - var rejected = (value) => { - try { - step(generator.throw(value)); - } catch (e) { - reject(e); - } - }; - var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); - step((generator = generator.apply(__this, __arguments)).next()); - }); -}; -class Mimc { - constructor() { - this.mimcPromise = this.initMimc(); - } - initMimc() { - return __async$4(this, null, function* () { - this.sponge = yield circomlibjs.buildMimcSponge(); - this.hash = (left, right) => { - var _a, _b; - return (_b = this.sponge) == null ? void 0 : _b.F.toString((_a = this.sponge) == null ? void 0 : _a.multiHash([BigInt(left), BigInt(right)])); - }; - }); - } - getHash() { - return __async$4(this, null, function* () { - yield this.mimcPromise; - return { - sponge: this.sponge, - hash: this.hash - }; - }); - } -} -const mimc = new Mimc(); - -var __async$3 = (__this, __arguments, generator) => { - return new Promise((resolve, reject) => { - var fulfilled = (value) => { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - }; - var rejected = (value) => { - try { - step(generator.throw(value)); - } catch (e) { - reject(e); - } - }; - var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); - step((generator = generator.apply(__this, __arguments)).next()); - }); -}; -class MerkleTreeService { - constructor({ - netId, - amount, - currency, - Tornado, - commitment, - merkleTreeHeight = 20, - emptyElement = "21663839004416932945382355908790599225266501822907911457504978515578255421292", - merkleWorkerPath - }) { - const instanceName = `${netId}_${currency}_${amount}`; - this.currency = currency; - this.amount = amount; - this.netId = Number(netId); - this.Tornado = Tornado; - this.instanceName = instanceName; - this.commitment = commitment; - this.merkleTreeHeight = merkleTreeHeight; - this.emptyElement = emptyElement; - this.merkleWorkerPath = merkleWorkerPath; - } - createTree(_0) { - return __async$3(this, arguments, function* ({ events }) { - const { hash: hashFunction } = yield mimc.getHash(); - if (this.merkleWorkerPath) { - console.log("Using merkleWorker\n"); - try { - if (isNode) { - const merkleWorkerPromise = new Promise((resolve, reject) => { - const worker = new worker_threads.Worker(this.merkleWorkerPath, { - workerData: { - merkleTreeHeight: this.merkleTreeHeight, - elements: events, - zeroElement: this.emptyElement - } - }); - worker.on("message", resolve); - worker.on("error", reject); - worker.on("exit", (code) => { - if (code !== 0) { - reject(new Error(`Worker stopped with exit code ${code}`)); - } - }); - }); - return fixedMerkleTree.MerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); - } else { - const merkleWorkerPromise = new Promise((resolve, reject) => { - const worker = new Worker(this.merkleWorkerPath); - worker.onmessage = (e) => { - resolve(e.data); - }; - worker.onerror = (e) => { - reject(e); - }; - worker.postMessage({ - merkleTreeHeight: this.merkleTreeHeight, - elements: events, - zeroElement: this.emptyElement - }); - }); - return fixedMerkleTree.MerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); - 
} - } catch (err) { - console.log("merkleWorker failed, falling back to synchronous merkle tree"); - console.log(err); - } - } - return new fixedMerkleTree.MerkleTree(this.merkleTreeHeight, events, { - zeroElement: this.emptyElement, - hashFunction - }); - }); - } - verifyTree(_0) { - return __async$3(this, arguments, function* ({ events }) { - console.log( - ` -Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCase()} would take a while -` - ); - console.time("Created tree in"); - const tree = yield this.createTree({ events: events.map(({ commitment }) => BigInt(commitment).toString()) }); - console.timeEnd("Created tree in"); - console.log(""); - const isKnownRoot = yield this.Tornado.isKnownRoot(toFixedHex(BigInt(tree.root))); - if (!isKnownRoot) { - const errMsg = `Deposit Event ${this.netId} ${this.amount} ${this.currency} is invalid`; - throw new Error(errMsg); - } - return tree; - }); - } -} - -const blockSyncInterval = 1e4; -const enabledChains = ["1", "10", "56", "100", "137", "42161", "43114", "11155111"]; +var NetId = /* @__PURE__ */ ((NetId2) => { + NetId2[NetId2["MAINNET"] = 1] = "MAINNET"; + NetId2[NetId2["BSC"] = 56] = "BSC"; + NetId2[NetId2["POLYGON"] = 137] = "POLYGON"; + NetId2[NetId2["OPTIMISM"] = 10] = "OPTIMISM"; + NetId2[NetId2["ARBITRUM"] = 42161] = "ARBITRUM"; + NetId2[NetId2["GNOSIS"] = 100] = "GNOSIS"; + NetId2[NetId2["AVALANCHE"] = 43114] = "AVALANCHE"; + NetId2[NetId2["SEPOLIA"] = 11155111] = "SEPOLIA"; + return NetId2; +})(NetId || {}); const theGraph = { name: "Hosted Graph", url: "https://api.thegraph.com" @@ -5889,8 +5837,8 @@ const tornado = { name: "Tornado Subgraphs", url: "https://tornadocash-rpc.com" }; -const networkConfig = { - netId1: { +const defaultConfig = { + [1 /* MAINNET */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 80, @@ -5900,11 +5848,7 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "ETH", - explorerUrl: { - tx: "https://etherscan.io/tx/", - address: 
"https://etherscan.io/address/", - block: "https://etherscan.io/block/" - }, + explorerUrl: "https://etherscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Ethereum Mainnet", @@ -5915,7 +5859,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, mevblockerRPC: { @@ -5943,14 +5887,19 @@ const networkConfig = { url: "https://1rpc.io/eth" } }, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", - registryContract: "0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2", echoContract: "0x9B27DD5Bb15d42DC224FCD0B7caEbBe16161Df42", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornContract: "0x77777FeDdddFfC19Ff86DB637967013e6C6A116C", + governanceContract: "0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce", + stakingRewardsContract: "0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29", + registryContract: "0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2", aggregatorContract: "0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49", reverseRecordsContract: "0x3671aE578E63FdF66ad4F3E12CC0c0d71Ac7510C", tornadoSubgraph: "tornadocash/mainnet-tornado-subgraph", registrySubgraph: "tornadocash/tornado-relayer-registry", + governanceSubgraph: "tornadocash/tornado-governance", subgraphs: { tornado, theGraph @@ -6032,16 +5981,12 @@ const networkConfig = { constants: { GOVERNANCE_BLOCK: 11474695, NOTE_ACCOUNT_BLOCK: 11842486, - ENCRYPTED_NOTES_BLOCK: 14248730, + ENCRYPTED_NOTES_BLOCK: 12143762, REGISTRY_BLOCK: 14173129, MINING_BLOCK_TIME: 15 - }, - "torn.contract.tornadocash.eth": "0x77777FeDdddFfC19Ff86DB637967013e6C6A116C", - "governance.contract.tornadocash.eth": "0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce", - 
"tornado-router.contract.tornadocash.eth": "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", - "staking-rewards.contract.tornadocash.eth": "0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29" + } }, - netId56: { + [56 /* BSC */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 5, @@ -6051,18 +5996,15 @@ const networkConfig = { }, nativeCurrency: "bnb", currencyName: "BNB", - explorerUrl: { - tx: "https://bscscan.com/tx/", - address: "https://bscscan.com/address/", - block: "https://bscscan.com/block/" - }, + explorerUrl: "https://bscscan.com", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Binance Smart Chain", deployedBlock: 8158799, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/bsc-tornado-subgraph", subgraphs: { tornado, @@ -6074,7 +6016,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/bsc" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://bsc-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, stackup: { @@ -6107,10 +6049,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 8159269, ENCRYPTED_NOTES_BLOCK: 8159269 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId137: { + [137 /* POLYGON */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 100, @@ -6120,18 +6061,15 @@ const networkConfig = { }, nativeCurrency: "matic", currencyName: "MATIC", - explorerUrl: { - tx: "https://polygonscan.com/tx/", - address: "https://polygonscan.com/address/", - block: "https://polygonscan.com/block/" - }, + explorerUrl: 
"https://polygonscan.com", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Polygon (Matic) Network", deployedBlock: 16257962, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", gasPriceOracleContract: "0xF81A8D8D3581985D3969fe53bFA67074aDFa8F3C", tornadoSubgraph: "tornadocash/matic-tornado-subgraph", subgraphs: { @@ -6169,10 +6107,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 16257996, ENCRYPTED_NOTES_BLOCK: 16257996 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId10: { + [10 /* OPTIMISM */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 1e-3, @@ -6182,18 +6119,15 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "ETH", - explorerUrl: { - tx: "https://optimistic.etherscan.io/tx/", - address: "https://optimistic.etherscan.io/address/", - block: "https://optimistic.etherscan.io/block/" - }, + explorerUrl: "https://optimistic.etherscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Optimism", deployedBlock: 2243689, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", ovmGasPriceOracleContract: "0x420000000000000000000000000000000000000F", 
tornadoSubgraph: "tornadocash/optimism-tornado-subgraph", subgraphs: { @@ -6206,7 +6140,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/op" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://optimism-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, optimism: { @@ -6239,10 +6173,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 2243694, ENCRYPTED_NOTES_BLOCK: 2243694 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId42161: { + [42161 /* ARBITRUM */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 4, @@ -6252,18 +6185,15 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "ETH", - explorerUrl: { - tx: "https://arbiscan.io/tx/", - address: "https://arbiscan.io/address/", - block: "https://arbiscan.io/block/" - }, + explorerUrl: "https://arbiscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Arbitrum One", deployedBlock: 3430648, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/arbitrum-tornado-subgraph", subgraphs: { tornado, @@ -6275,7 +6205,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/arbitrum" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://arbitrum-one.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, arbitrum: { @@ -6308,10 +6238,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 3430605, ENCRYPTED_NOTES_BLOCK: 3430605 - }, - "tornado-proxy-light.contract.tornadocash.eth": 
"0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId100: { + [100 /* GNOSIS */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 6, @@ -6321,18 +6250,15 @@ const networkConfig = { }, nativeCurrency: "xdai", currencyName: "xDAI", - explorerUrl: { - tx: "https://blockscout.com/xdai/mainnet/tx/", - address: "https://blockscout.com/xdai/mainnet/address/", - block: "https://blockscout.com/xdai/mainnet/block/" - }, + explorerUrl: "https://gnosisscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Gnosis Chain", deployedBlock: 17754561, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/xdai-tornado-subgraph", subgraphs: { tornado, @@ -6344,7 +6270,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/gnosis" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://gnosis-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, gnosis: { @@ -6377,10 +6303,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 17754564, ENCRYPTED_NOTES_BLOCK: 17754564 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId43114: { + [43114 /* AVALANCHE */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 225, @@ -6390,18 +6315,15 @@ const networkConfig = { }, nativeCurrency: "avax", currencyName: "AVAX", - explorerUrl: { - tx: "https://snowtrace.io/tx/", - address: "https://snowtrace.io/address/", - block: "https://snowtrace.io/block/" - }, + explorerUrl: "https://snowtrace.io", merkleTreeHeight: 20, emptyElement: 
"21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Avalanche Mainnet", deployedBlock: 4429818, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/avalanche-tornado-subgraph", subgraphs: { theGraph @@ -6436,10 +6358,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 4429813, ENCRYPTED_NOTES_BLOCK: 4429813 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId11155111: { + [11155111 /* SEPOLIA */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 2, @@ -6449,19 +6370,18 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "SepoliaETH", - explorerUrl: { - tx: "https://sepolia.etherscan.io/tx/", - address: "https://sepolia.etherscan.io/address/", - block: "https://sepolia.etherscan.io/block/" - }, + explorerUrl: "https://sepolia.etherscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Ethereum Sepolia", deployedBlock: 5594395, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + tornContract: "0x3AE6667167C0f44394106E197904519D808323cA", + governanceContract: "0xe5324cD7602eeb387418e594B87aCADee08aeCAD", + stakingRewardsContract: "0x6d0018890751Efd31feb8166711B16732E2b496b", registryContract: "0x1428e5d2356b13778A13108b10c440C83011dfB8", - echoContract: "0xcDD1fc3F5ac2782D83449d3AbE80D6b7B273B0e5", 
aggregatorContract: "0x4088712AC9fad39ea133cdb9130E465d235e9642", reverseRecordsContract: "0xEc29700C0283e5Be64AcdFe8077d6cC95dE23C23", tornadoSubgraph: "tornadocash/sepolia-tornado-subgraph", @@ -6514,13 +6434,806 @@ const networkConfig = { NOTE_ACCOUNT_BLOCK: 5594395, ENCRYPTED_NOTES_BLOCK: 5594395, MINING_BLOCK_TIME: 15 - }, - "torn.contract.tornadocash.eth": "0x3AE6667167C0f44394106E197904519D808323cA", - "governance.contract.tornadocash.eth": "0xe5324cD7602eeb387418e594B87aCADee08aeCAD", - "tornado-router.contract.tornadocash.eth": "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee" + } } }; -const subdomains = enabledChains.map((chain) => networkConfig[`netId${chain}`].ensSubdomainKey); +const enabledChains = Object.values(NetId); +exports.customConfig = {}; +function addNetwork(newConfig) { + enabledChains.push( + ...Object.keys(newConfig).map((netId) => Number(netId)).filter((netId) => !enabledChains.includes(netId)) + ); + exports.customConfig = __spreadValues$2(__spreadValues$2({}, exports.customConfig), newConfig); +} +function getNetworkConfig() { + const allConfig = __spreadValues$2(__spreadValues$2({}, defaultConfig), exports.customConfig); + return enabledChains.reduce((acc, curr) => { + acc[curr] = allConfig[curr]; + return acc; + }, {}); +} +function getConfig(netId) { + const allConfig = getNetworkConfig(); + const chainConfig = allConfig[netId]; + if (!chainConfig) { + const errMsg = `No config found for network ${netId}!`; + throw new Error(errMsg); + } + return chainConfig; +} +function getInstanceByAddress({ netId, address }) { + const { tokens } = getConfig(netId); + for (const [currency, { instanceAddress }] of Object.entries(tokens)) { + for (const [amount, instance] of Object.entries(instanceAddress)) { + if (instance === address) { + return { + amount, + currency + }; + } + } + } +} +function getSubdomains() { + const allConfig = getNetworkConfig(); + return enabledChains.map((chain) => allConfig[chain].ensSubdomainKey); +} + +const addressType 
= { type: "string", pattern: "^0x[a-fA-F0-9]{40}$" }; +const bnType = { type: "string", BN: true }; +const statusSchema = { + type: "object", + properties: { + rewardAccount: addressType, + gasPrices: { + type: "object", + properties: { + fast: { type: "number" }, + additionalProperties: { type: "number" } + }, + required: ["fast"] + }, + netId: { type: "integer" }, + tornadoServiceFee: { type: "number", maximum: 20, minimum: 0 }, + latestBlock: { type: "number" }, + version: { type: "string" }, + health: { + type: "object", + properties: { + status: { const: "true" }, + error: { type: "string" } + }, + required: ["status"] + }, + currentQueue: { type: "number" } + }, + required: ["rewardAccount", "instances", "netId", "tornadoServiceFee", "version", "health"] +}; +function getStatusSchema(netId, config) { + const { tokens, optionalTokens = [], nativeCurrency } = config; + const schema = JSON.parse(JSON.stringify(statusSchema)); + const instances = Object.keys(tokens).reduce( + (acc, token) => { + const { instanceAddress, tokenAddress, symbol, decimals, optionalInstances = [] } = tokens[token]; + const amounts = Object.keys(instanceAddress); + const instanceProperties = { + type: "object", + properties: { + instanceAddress: { + type: "object", + properties: amounts.reduce((acc2, cur) => { + acc2[cur] = addressType; + return acc2; + }, {}), + required: amounts.filter((amount) => !optionalInstances.includes(amount)) + }, + decimals: { enum: [decimals] } + }, + required: ["instanceAddress", "decimals"].concat( + tokenAddress ? ["tokenAddress"] : [], + symbol ? 
["symbol"] : [] + ) + }; + if (tokenAddress) { + instanceProperties.properties.tokenAddress = addressType; + } + if (symbol) { + instanceProperties.properties.symbol = { enum: [symbol] }; + } + acc.properties[token] = instanceProperties; + if (!optionalTokens.includes(token)) { + acc.required.push(token); + } + return acc; + }, + { + type: "object", + properties: {}, + required: [] + } + ); + schema.properties.instances = instances; + if (netId === NetId.MAINNET) { + const _tokens = Object.keys(tokens).filter((t) => t !== nativeCurrency); + const ethPrices = { + type: "object", + properties: _tokens.reduce((acc, token) => { + acc[token] = bnType; + return acc; + }, {}) + // required: _tokens + }; + schema.properties.ethPrices = ethPrices; + } + return schema; +} + +const jobsSchema = { + type: "object", + properties: { + error: { type: "string" }, + id: { type: "string" }, + type: { type: "string" }, + status: { type: "string" }, + contract: { type: "string" }, + proof: { type: "string" }, + args: { + type: "array", + items: { type: "string" } + }, + txHash: { type: "string" }, + confirmations: { type: "number" }, + failedReason: { type: "string" } + }, + required: ["id", "status"] +}; + +const ajv = new Ajv({ allErrors: true }); +ajv.addKeyword({ + keyword: "BN", + // eslint-disable-next-line @typescript-eslint/no-explicit-any + validate: (schema, data) => { + try { + BigInt(data); + return true; + } catch (e) { + return false; + } + }, + errors: true +}); + +var __async$7 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +class Pedersen { + constructor() { + this.pedersenPromise = this.initPedersen(); + } + initPedersen() { + return __async$7(this, null, function* () { + this.pedersenHash = yield circomlibjs.buildPedersenHash(); + this.babyJub = this.pedersenHash.babyJub; + }); + } + unpackPoint(buffer) { + return __async$7(this, null, function* () { + var _a, _b; + yield this.pedersenPromise; + return (_b = this.babyJub) == null ? void 0 : _b.unpackPoint((_a = this.pedersenHash) == null ? void 0 : _a.hash(buffer)); + }); + } + toStringBuffer(buffer) { + var _a; + return (_a = this.babyJub) == null ? void 0 : _a.F.toString(buffer); + } +} +const pedersen = new Pedersen(); +function buffPedersenHash(buffer) { + return __async$7(this, null, function* () { + const [hash] = yield pedersen.unpackPoint(buffer); + return pedersen.toStringBuffer(hash); + }); +} + +var __async$6 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +function createDeposit(_0) { + return __async$6(this, arguments, function* ({ nullifier, secret }) { + const preimage = new Uint8Array([...leInt2Buff(nullifier), ...leInt2Buff(secret)]); + const noteHex = toFixedHex(bytesToBN(preimage), 62); + const commitment = BigInt(yield buffPedersenHash(preimage)); + const commitmentHex = toFixedHex(commitment); + const nullifierHash = BigInt(yield buffPedersenHash(leInt2Buff(nullifier))); + const nullifierHex = toFixedHex(nullifierHash); + return { + preimage, + noteHex, + commitment, + commitmentHex, + nullifierHash, + nullifierHex + }; + }); +} +class Deposit { + constructor({ + currency, + amount, + netId, + nullifier, + secret, + note, + noteHex, + invoice, + commitmentHex, + nullifierHex + }) { + this.currency = currency; + this.amount = amount; + this.netId = netId; + this.nullifier = nullifier; + this.secret = secret; + this.note = note; + this.noteHex = noteHex; + this.invoice = invoice; + this.commitmentHex = commitmentHex; + this.nullifierHex = nullifierHex; + } + toString() { + return JSON.stringify( + { + currency: this.currency, + amount: this.amount, + netId: this.netId, + nullifier: this.nullifier, + secret: this.secret, + note: this.note, + noteHex: this.noteHex, + invoice: this.invoice, + commitmentHex: this.commitmentHex, + nullifierHex: this.nullifierHex + }, + null, + 2 + ); + } + static createNote(_0) { + return __async$6(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) { + if (!nullifier) { + nullifier = rBigInt(31); + } + if (!secret) { + secret = rBigInt(31); + } + const depositObject = yield createDeposit({ + nullifier, + secret + }); + const newDeposit = new Deposit({ + currency: currency.toLowerCase(), + amount, + netId, + note: `tornado-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.noteHex}`, + noteHex: 
depositObject.noteHex, + invoice: `tornadoInvoice-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.commitmentHex}`, + nullifier, + secret, + commitmentHex: depositObject.commitmentHex, + nullifierHex: depositObject.nullifierHex + }); + return newDeposit; + }); + } + static parseNote(noteString) { + return __async$6(this, null, function* () { + const noteRegex = new RegExp("tornado-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{124})", "g"); + const match = noteRegex.exec(noteString); + if (!match) { + throw new Error("The note has invalid format"); + } + const matchGroup = match == null ? void 0 : match.groups; + const currency = matchGroup.currency.toLowerCase(); + const amount = matchGroup.amount; + const netId = Number(matchGroup.netId); + const bytes = bnToBytes("0x" + matchGroup.note); + const nullifier = BigInt(leBuff2Int(bytes.slice(0, 31)).toString()); + const secret = BigInt(leBuff2Int(bytes.slice(31, 62)).toString()); + const depositObject = yield createDeposit({ nullifier, secret }); + const invoice = `tornadoInvoice-${currency}-${amount}-${netId}-${depositObject.commitmentHex}`; + const newDeposit = new Deposit({ + currency, + amount, + netId, + note: noteString, + noteHex: depositObject.noteHex, + invoice, + nullifier, + secret, + commitmentHex: depositObject.commitmentHex, + nullifierHex: depositObject.nullifierHex + }); + return newDeposit; + }); + } +} +class Invoice { + constructor(invoiceString) { + const invoiceRegex = new RegExp("tornadoInvoice-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{64})", "g"); + const match = invoiceRegex.exec(invoiceString); + if (!match) { + throw new Error("The note has invalid format"); + } + const matchGroup = match == null ? 
void 0 : match.groups; + const currency = matchGroup.currency.toLowerCase(); + const amount = matchGroup.amount; + const netId = Number(matchGroup.netId); + this.currency = currency; + this.amount = amount; + this.netId = netId; + this.commitment = "0x" + matchGroup.commitment; + this.invoice = invoiceString; + } + toString() { + return JSON.stringify( + { + currency: this.currency, + amount: this.amount, + netId: this.netId, + commitment: this.commitment, + invoice: this.invoice + }, + null, + 2 + ); + } +} + +function packEncryptedMessage({ nonce, ephemPublicKey, ciphertext }) { + const nonceBuf = toFixedHex(bytesToHex(base64ToBytes(nonce)), 24); + const ephemPublicKeyBuf = toFixedHex(bytesToHex(base64ToBytes(ephemPublicKey)), 32); + const ciphertextBuf = bytesToHex(base64ToBytes(ciphertext)); + const messageBuff = concatBytes(hexToBytes(nonceBuf), hexToBytes(ephemPublicKeyBuf), hexToBytes(ciphertextBuf)); + return bytesToHex(messageBuff); +} +function unpackEncryptedMessage(encryptedMessage) { + const messageBuff = hexToBytes(encryptedMessage); + const nonceBuf = bytesToBase64(messageBuff.slice(0, 24)); + const ephemPublicKeyBuf = bytesToBase64(messageBuff.slice(24, 56)); + const ciphertextBuf = bytesToBase64(messageBuff.slice(56)); + return { + messageBuff: bytesToHex(messageBuff), + version: "x25519-xsalsa20-poly1305", + nonce: nonceBuf, + ephemPublicKey: ephemPublicKeyBuf, + ciphertext: ciphertextBuf + }; +} +class NoteAccount { + constructor({ netId, blockNumber, recoveryKey, Echoer: Echoer2 }) { + if (!recoveryKey) { + recoveryKey = bytesToHex(crypto.getRandomValues(new Uint8Array(32))).slice(2); + } + this.netId = Math.floor(Number(netId)); + this.blockNumber = blockNumber; + this.recoveryKey = recoveryKey; + this.recoveryAddress = ethers.computeAddress("0x" + recoveryKey); + this.recoveryPublicKey = ethSigUtil.getEncryptionPublicKey(recoveryKey); + this.Echoer = Echoer2; + } + /** + * Intends to mock eth_getEncryptionPublicKey behavior from MetaMask + * 
In order to make the recoveryKey retrival from Echoer possible from the bare private key + */ + static getWalletPublicKey(wallet) { + let { privateKey } = wallet; + if (privateKey.startsWith("0x")) { + privateKey = privateKey.replace("0x", ""); + } + return ethSigUtil.getEncryptionPublicKey(privateKey); + } + // This function intends to provide an encrypted value of recoveryKey for an on-chain Echoer backup purpose + // Thus, the pubKey should be derived by a Wallet instance or from Web3 wallets + // pubKey: base64 encoded 32 bytes key from https://docs.metamask.io/wallet/reference/eth_getencryptionpublickey/ + getEncryptedAccount(walletPublicKey) { + const encryptedData = ethSigUtil.encrypt({ + publicKey: walletPublicKey, + data: this.recoveryKey, + version: "x25519-xsalsa20-poly1305" + }); + const data = packEncryptedMessage(encryptedData); + return { + // Use this later to save hexPrivateKey generated with + // Buffer.from(JSON.stringify(encryptedData)).toString('hex') + // As we don't use buffer with this library we should leave UI to do the rest + encryptedData, + // Data that could be used as an echo(data) params + data + }; + } + /** + * Decrypt Echoer backuped note encryption account with private keys + */ + decryptAccountsWithWallet(wallet, events) { + let { privateKey } = wallet; + if (privateKey.startsWith("0x")) { + privateKey = privateKey.replace("0x", ""); + } + const decryptedEvents = []; + for (const event of events) { + try { + const unpackedMessage = unpackEncryptedMessage(event.encryptedAccount); + const recoveryKey = ethSigUtil.decrypt({ + encryptedData: unpackedMessage, + privateKey + }); + decryptedEvents.push( + new NoteAccount({ + netId: this.netId, + blockNumber: event.blockNumber, + recoveryKey, + Echoer: this.Echoer + }) + ); + } catch (e) { + continue; + } + } + return decryptedEvents; + } + decryptNotes(events) { + const decryptedEvents = []; + for (const event of events) { + try { + const unpackedMessage = 
unpackEncryptedMessage(event.encryptedNote); + const [address, noteHex] = ethSigUtil.decrypt({ + encryptedData: unpackedMessage, + privateKey: this.recoveryKey + }).split("-"); + decryptedEvents.push({ + blockNumber: event.blockNumber, + address: ethers.getAddress(address), + noteHex + }); + } catch (e) { + continue; + } + } + return decryptedEvents; + } + encryptNote({ address, noteHex }) { + const encryptedData = ethSigUtil.encrypt({ + publicKey: this.recoveryPublicKey, + data: `${address}-${noteHex}`, + version: "x25519-xsalsa20-poly1305" + }); + return packEncryptedMessage(encryptedData); + } +} + +const DUMMY_ADDRESS = "0x1111111111111111111111111111111111111111"; +const DUMMY_NONCE = "0x1111111111111111111111111111111111111111111111111111111111111111"; +const DUMMY_WITHDRAW_DATA = "0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"; +function convertETHToTokenAmount(amountInWei, tokenPriceInWei, tokenDecimals = 18) { + const 
tokenDecimalsMultiplier = BigInt(10 ** Number(tokenDecimals)); + return BigInt(amountInWei) * tokenDecimalsMultiplier / BigInt(tokenPriceInWei); +} +class TornadoFeeOracle { + constructor(ovmGasPriceOracle) { + if (ovmGasPriceOracle) { + this.ovmGasPriceOracle = ovmGasPriceOracle; + } + } + /** + * Calculate L1 fee for op-stack chains + * + * This is required since relayers would pay the full transaction fees for users + */ + fetchL1OptimismFee(tx) { + if (!this.ovmGasPriceOracle) { + return new Promise((resolve) => resolve(BigInt(0))); + } + if (!tx) { + tx = { + type: 0, + gasLimit: 1e6, + nonce: Number(DUMMY_NONCE), + data: DUMMY_WITHDRAW_DATA, + gasPrice: ethers.parseUnits("1", "gwei"), + from: DUMMY_ADDRESS, + to: DUMMY_ADDRESS + }; + } + return this.ovmGasPriceOracle.getL1Fee.staticCall(ethers.Transaction.from(tx).unsignedSerialized); + } + /** + * We don't need to distinguish default refunds by tokens since most users interact with other defi protocols after withdrawal + * So we default with 1M gas which is enough for two or three swaps + * Using 30 gwei for default but it is recommended to supply cached gasPrice value from the UI + */ + defaultEthRefund(gasPrice, gasLimit) { + return (gasPrice ? 
BigInt(gasPrice) : ethers.parseUnits("30", "gwei")) * BigInt(gasLimit || 1e6); + } + /** + * Calculates token amount for required ethRefund purchases required to calculate fees + */ + calculateTokenAmount(ethRefund, tokenPriceInEth, tokenDecimals) { + return convertETHToTokenAmount(ethRefund, tokenPriceInEth, tokenDecimals); + } + /** + * Warning: For tokens you need to check if the fees are above denomination + * (Usually happens for small denomination pool or if the gas price is high) + */ + calculateRelayerFee({ + gasPrice, + gasLimit = 6e5, + l1Fee = 0, + denomination, + ethRefund = BigInt(0), + tokenPriceInWei, + tokenDecimals = 18, + relayerFeePercent = 0.33, + isEth = true, + premiumPercent = 20 + }) { + const gasCosts = BigInt(gasPrice) * BigInt(gasLimit) + BigInt(l1Fee); + const relayerFee = BigInt(denomination) * BigInt(Math.floor(1e4 * relayerFeePercent)) / BigInt(1e4 * 100); + if (isEth) { + return (gasCosts + relayerFee) * BigInt(premiumPercent ? 100 + premiumPercent : 100) / BigInt(100); + } + const feeInEth = gasCosts + BigInt(ethRefund); + return (convertETHToTokenAmount(feeInEth, tokenPriceInWei, tokenDecimals) + relayerFee) * BigInt(premiumPercent ? 100 + premiumPercent : 100) / BigInt(100); + } +} + +var __async$5 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +class Mimc { + constructor() { + this.mimcPromise = this.initMimc(); + } + initMimc() { + return __async$5(this, null, function* () { + this.sponge = yield circomlibjs.buildMimcSponge(); + this.hash = (left, right) => { + var _a, _b; + return (_b = this.sponge) == null ? void 0 : _b.F.toString((_a = this.sponge) == null ? void 0 : _a.multiHash([BigInt(left), BigInt(right)])); + }; + }); + } + getHash() { + return __async$5(this, null, function* () { + yield this.mimcPromise; + return { + sponge: this.sponge, + hash: this.hash + }; + }); + } +} +const mimc = new Mimc(); + +var __async$4 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +class MerkleTreeService { + constructor({ + netId, + amount, + currency, + Tornado, + commitmentHex, + merkleTreeHeight = 20, + emptyElement = "21663839004416932945382355908790599225266501822907911457504978515578255421292", + merkleWorkerPath + }) { + const instanceName = `${netId}_${currency}_${amount}`; + this.currency = currency; + this.amount = amount; + this.netId = Number(netId); + this.Tornado = Tornado; + this.instanceName = instanceName; + this.commitmentHex = commitmentHex; + this.merkleTreeHeight = merkleTreeHeight; + this.emptyElement = emptyElement; + this.merkleWorkerPath = merkleWorkerPath; + } + createTree(events) { + return __async$4(this, null, function* () { + const { hash: hashFunction } = yield mimc.getHash(); + if (this.merkleWorkerPath) { + console.log("Using merkleWorker\n"); + try { + if (isNode) { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new worker_threads.Worker(this.merkleWorkerPath, { + workerData: { + merkleTreeHeight: this.merkleTreeHeight, + elements: events, + zeroElement: this.emptyElement + } + }); + worker.on("message", resolve); + worker.on("error", reject); + worker.on("exit", (code) => { + if (code !== 0) { + reject(new Error(`Worker stopped with exit code ${code}`)); + } + }); + }); + return fixedMerkleTree.MerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } else { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new Worker(this.merkleWorkerPath); + worker.onmessage = (e) => { + resolve(e.data); + }; + worker.onerror = (e) => { + reject(e); + }; + worker.postMessage({ + merkleTreeHeight: this.merkleTreeHeight, + elements: events, + zeroElement: this.emptyElement + }); + }); + return fixedMerkleTree.MerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } 
+ } catch (err) { + console.log("merkleWorker failed, falling back to synchronous merkle tree"); + console.log(err); + } + } + return new fixedMerkleTree.MerkleTree(this.merkleTreeHeight, events, { + zeroElement: this.emptyElement, + hashFunction + }); + }); + } + createPartialTree(_0) { + return __async$4(this, arguments, function* ({ edge, elements }) { + const { hash: hashFunction } = yield mimc.getHash(); + if (this.merkleWorkerPath) { + console.log("Using merkleWorker\n"); + try { + if (isNode) { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new worker_threads.Worker(this.merkleWorkerPath, { + workerData: { + merkleTreeHeight: this.merkleTreeHeight, + edge, + elements, + zeroElement: this.emptyElement + } + }); + worker.on("message", resolve); + worker.on("error", reject); + worker.on("exit", (code) => { + if (code !== 0) { + reject(new Error(`Worker stopped with exit code ${code}`)); + } + }); + }); + return fixedMerkleTree.PartialMerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } else { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new Worker(this.merkleWorkerPath); + worker.onmessage = (e) => { + resolve(e.data); + }; + worker.onerror = (e) => { + reject(e); + }; + worker.postMessage({ + merkleTreeHeight: this.merkleTreeHeight, + edge, + elements, + zeroElement: this.emptyElement + }); + }); + return fixedMerkleTree.PartialMerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } + } catch (err) { + console.log("merkleWorker failed, falling back to synchronous merkle tree"); + console.log(err); + } + } + return new fixedMerkleTree.PartialMerkleTree(this.merkleTreeHeight, edge, elements, { + zeroElement: this.emptyElement, + hashFunction + }); + }); + } + verifyTree(events) { + return __async$4(this, null, function* () { + console.log( + ` +Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCase()} would take 
a while +` + ); + console.time("Created tree in"); + const tree = yield this.createTree(events.map(({ commitment }) => commitment)); + console.timeEnd("Created tree in"); + console.log(""); + const isKnownRoot = yield this.Tornado.isKnownRoot(toFixedHex(BigInt(tree.root))); + if (!isKnownRoot) { + const errMsg = `Deposit Event ${this.netId} ${this.amount} ${this.currency} is invalid`; + throw new Error(errMsg); + } + return tree; + }); + } +} function parseNumber(value) { if (!value || isNaN(Number(value))) { @@ -6575,6 +7288,17 @@ function parseKey(value) { } return value; } +function parseRecoveryKey(value) { + if (!value) { + throw new commander.InvalidArgumentError("Invalid Recovery Key"); + } + try { + ethers.computeAddress("0x" + value); + } catch (e) { + throw new commander.InvalidArgumentError("Invalid Recovery Key"); + } + return value; +} class TokenPriceOracle { constructor(provider, multicall2, oracle) { @@ -6597,26 +7321,26 @@ class TokenPriceOracle { } } -var __defProp = Object.defineProperty; -var __defProps = Object.defineProperties; -var __getOwnPropDescs = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols = Object.getOwnPropertySymbols; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __propIsEnum = Object.prototype.propertyIsEnumerable; -var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues = (a, b) => { +var __defProp$1 = Object.defineProperty; +var __defProps$1 = Object.defineProperties; +var __getOwnPropDescs$1 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols; +var __hasOwnProp$1 = Object.prototype.hasOwnProperty; +var __propIsEnum$1 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$1 = (obj, key, value) => key in obj ? 
__defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$1 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp.call(b, prop)) - __defNormalProp(a, prop, b[prop]); - if (__getOwnPropSymbols) - for (var prop of __getOwnPropSymbols(b)) { - if (__propIsEnum.call(b, prop)) - __defNormalProp(a, prop, b[prop]); + if (__hasOwnProp$1.call(b, prop)) + __defNormalProp$1(a, prop, b[prop]); + if (__getOwnPropSymbols$1) + for (var prop of __getOwnPropSymbols$1(b)) { + if (__propIsEnum$1.call(b, prop)) + __defNormalProp$1(a, prop, b[prop]); } return a; }; -var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b)); -var __async$2 = (__this, __arguments, generator) => { +var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b)); +var __async$3 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -6644,11 +7368,11 @@ function parseSemanticVersion(version) { } function isRelayerUpdated(relayerVersion, netId) { const { major, patch, prerelease } = parseSemanticVersion(relayerVersion); - const requiredMajor = netId === 1 ? "4" : "5"; + const requiredMajor = netId === NetId.MAINNET ? 
"4" : "5"; const isUpdatedMajor = major === requiredMajor; if (prerelease) return false; - return isUpdatedMajor && (Number(patch) >= 5 || Number(netId) !== 1); + return isUpdatedMajor && (Number(patch) >= 5 || netId !== NetId.MAINNET); } function calculateScore({ stakeBalance, tornadoServiceFee }, minFee = 0.33, maxFee = 0.53) { if (tornadoServiceFee < minFee) { @@ -6670,9 +7394,15 @@ function getWeightRandom(weightsScores, random) { } return Math.floor(Math.random() * weightsScores.length); } +function getSupportedInstances(instanceList) { + const rawList = Object.values(instanceList).map(({ instanceAddress }) => { + return Object.values(instanceAddress); + }).flat(); + return rawList.map((l) => ethers.getAddress(l)); +} function pickWeightedRandomRelayer(relayers, netId) { let minFee, maxFee; - if (Number(netId) !== 1) { + if (netId !== NetId.MAINNET) { minFee = 0.01; maxFee = 0.3; } @@ -6686,19 +7416,19 @@ function pickWeightedRandomRelayer(relayers, netId) { } class RelayerClient { constructor({ netId, config, Aggregator, fetchDataOptions: fetchDataOptions2 }) { - this.netId = Number(netId); + this.netId = netId; this.config = config; this.Aggregator = Aggregator; this.fetchDataOptions = fetchDataOptions2; } askRelayerStatus(_0) { - return __async$2(this, arguments, function* ({ + return __async$3(this, arguments, function* ({ hostname, relayerAddress }) { var _a, _b; const url = `https://${!hostname.endsWith("/") ? 
hostname + "/" : hostname}`; - const rawStatus = yield fetchData(`${url}status`, __spreadProps(__spreadValues({}, this.fetchDataOptions), { + const rawStatus = yield fetchData(`${url}status`, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), { headers: { "Content-Type": "application/json, application/x-www-form-urlencoded" }, @@ -6709,7 +7439,7 @@ class RelayerClient { if (!statusValidator(rawStatus)) { throw new Error("Invalid status schema"); } - const status = __spreadProps(__spreadValues({}, rawStatus), { + const status = __spreadProps$1(__spreadValues$1({}, rawStatus), { url }); if (status.currentQueue > 5) { @@ -6718,7 +7448,7 @@ class RelayerClient { if (status.netId !== this.netId) { throw new Error("This relayer serves a different network"); } - if (relayerAddress && this.netId === 1 && status.rewardAccount !== relayerAddress) { + if (relayerAddress && this.netId === NetId.MAINNET && status.rewardAccount !== relayerAddress) { throw new Error("The Relayer reward address must match registered address"); } if (!isRelayerUpdated(status.version, this.netId)) { @@ -6728,7 +7458,8 @@ class RelayerClient { }); } filterRelayer(curr, relayer, subdomains, debugRelayer = false) { - return __async$2(this, null, function* () { + return __async$3(this, null, function* () { + var _a; const { ensSubdomainKey } = this.config; const subdomainIndex = subdomains.indexOf(ensSubdomainKey); const mainnetSubdomain = curr.records[0]; @@ -6749,7 +7480,9 @@ class RelayerClient { ensName, stakeBalance, relayerAddress, - rewardAccount: status.rewardAccount, + rewardAccount: ethers.getAddress(status.rewardAccount), + instances: getSupportedInstances(status.instances), + gasPrice: (_a = status.gasPrices) == null ? 
void 0 : _a.fast, ethPrices: status.ethPrices, currentQueue: status.currentQueue, tornadoServiceFee: status.tornadoServiceFee @@ -6778,7 +7511,7 @@ class RelayerClient { }); } getValidRelayers(relayers, subdomains, debugRelayer = false) { - return __async$2(this, null, function* () { + return __async$3(this, null, function* () { const relayersSet = /* @__PURE__ */ new Set(); const uniqueRelayers = relayers.reverse().filter(({ ensName }) => { if (!relayersSet.has(ensName)) { @@ -6809,9 +7542,9 @@ class RelayerClient { return pickWeightedRandomRelayer(relayers, this.netId); } tornadoWithdraw(_0) { - return __async$2(this, arguments, function* ({ contract, proof, args }) { + return __async$3(this, arguments, function* ({ contract, proof, args }) { const { url } = this.selectedRelayer; - const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps(__spreadValues({}, this.fetchDataOptions), { + const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), { method: "POST", headers: { "Content-Type": "application/json" @@ -6831,7 +7564,7 @@ class RelayerClient { console.log(`Job submitted: ${jobUrl} `); while (!relayerStatus || !["FAILED", "CONFIRMED"].includes(relayerStatus)) { - const jobResponse = yield fetchData(jobUrl, __spreadProps(__spreadValues({}, this.fetchDataOptions), { + const jobResponse = yield fetchData(jobUrl, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), { method: "GET", headers: { "Content-Type": "application/json" @@ -6871,7 +7604,7 @@ class RelayerClient { } } -var __async$1 = (__this, __arguments, generator) => { +var __async$2 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -6892,7 +7625,7 @@ var __async$1 = (__this, __arguments, generator) => { }); }; function getTokenBalances(_0) { - return __async$1(this, arguments, function* ({ + return __async$2(this, arguments, 
function* ({ provider, Multicall: Multicall2, currencyName, @@ -6955,6 +7688,120 @@ function getTokenBalances(_0) { }); } +var __defProp = Object.defineProperty; +var __defProps = Object.defineProperties; +var __getOwnPropDescs = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols = Object.getOwnPropertySymbols; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __propIsEnum = Object.prototype.propertyIsEnumerable; +var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp.call(b, prop)) + __defNormalProp(a, prop, b[prop]); + if (__getOwnPropSymbols) + for (var prop of __getOwnPropSymbols(b)) { + if (__propIsEnum.call(b, prop)) + __defNormalProp(a, prop, b[prop]); + } + return a; +}; +var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b)); +var __objRest = (source, exclude) => { + var target = {}; + for (var prop in source) + if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0) + target[prop] = source[prop]; + if (source != null && __getOwnPropSymbols) + for (var prop of __getOwnPropSymbols(source)) { + if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop)) + target[prop] = source[prop]; + } + return target; +}; +var __async$1 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +class TreeCache { + constructor({ netId, amount, currency, userDirectory, PARTS_COUNT = 4 }) { + this.netId = netId; + this.amount = amount; + this.currency = currency; + this.userDirectory = userDirectory; + this.PARTS_COUNT = PARTS_COUNT; + } + getInstanceName() { + return `deposits_${this.netId}_${this.currency}_${this.amount}`; + } + createTree(events, tree) { + return __async$1(this, null, function* () { + const bloom = new BloomFilter(events.length); + console.log(`Creating cached tree for ${this.getInstanceName()} +`); + const eventsData = events.reduce( + (acc, _a, i) => { + var _b = _a, { leafIndex, commitment } = _b, rest = __objRest(_b, ["leafIndex", "commitment"]); + if (leafIndex !== i) { + throw new Error(`leafIndex (${leafIndex}) !== i (${i})`); + } + acc[commitment] = __spreadProps(__spreadValues({}, rest), { leafIndex }); + return acc; + }, + {} + ); + const slices = tree.getTreeSlices(this.PARTS_COUNT); + yield Promise.all( + slices.map((slice, index) => __async$1(this, null, function* () { + const metadata = slice.elements.reduce((acc, curr) => { + if (index < this.PARTS_COUNT - 1) { + bloom.add(curr); + } + acc.push(eventsData[curr]); + return acc; + }, []); + const dataString2 = JSON.stringify( + __spreadProps(__spreadValues({}, slice), { + metadata + }), + null, + 2 + ) + "\n"; + const fileName2 = `${this.getInstanceName()}_slice${index + 1}.json`; + yield saveUserFile({ + fileName: fileName2, + userDirectory: this.userDirectory, + dataString: dataString2 + }); + })) + ); + const dataString = bloom.serialize() + "\n"; + const fileName = `${this.getInstanceName()}_bloom.json`; + yield saveUserFile({ + fileName, + userDirectory: this.userDirectory, + dataString + }); + }); + } +} + var __async = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) 
=> { @@ -7014,6 +7861,7 @@ function calculateSnarkProof(input, circuit, provingKey) { } exports.BaseDepositsService = BaseDepositsService; +exports.BaseEchoService = BaseEchoService; exports.BaseEncryptedNotesService = BaseEncryptedNotesService; exports.BaseEventsService = BaseEventsService; exports.BaseGovernanceService = BaseGovernanceService; @@ -7026,7 +7874,10 @@ exports.Deposit = Deposit; exports.ENS__factory = ENS__factory; exports.ERC20__factory = ERC20__factory; exports.GET_DEPOSITS = GET_DEPOSITS; +exports.GET_ECHO_EVENTS = GET_ECHO_EVENTS; exports.GET_ENCRYPTED_NOTES = GET_ENCRYPTED_NOTES; +exports.GET_GOVERNANCE_APY = GET_GOVERNANCE_APY; +exports.GET_GOVERNANCE_EVENTS = GET_GOVERNANCE_EVENTS; exports.GET_NOTE_ACCOUNTS = GET_NOTE_ACCOUNTS; exports.GET_REGISTERED = GET_REGISTERED; exports.GET_STATISTIC = GET_STATISTIC; @@ -7037,10 +7888,13 @@ exports.MIN_STAKE_BALANCE = MIN_STAKE_BALANCE; exports.MerkleTreeService = MerkleTreeService; exports.Mimc = Mimc; exports.Multicall__factory = Multicall__factory; +exports.NetId = NetId; exports.NodeDepositsService = NodeDepositsService; +exports.NodeEchoService = NodeEchoService; exports.NodeEncryptedNotesService = NodeEncryptedNotesService; exports.NodeGovernanceService = NodeGovernanceService; exports.NodeRegistryService = NodeRegistryService; +exports.NoteAccount = NoteAccount; exports.OffchainOracle__factory = OffchainOracle__factory; exports.OvmGasPriceOracle__factory = OvmGasPriceOracle__factory; exports.Pedersen = Pedersen; @@ -7052,12 +7906,13 @@ exports.TornadoFeeOracle = TornadoFeeOracle; exports.TornadoRpcSigner = TornadoRpcSigner; exports.TornadoVoidSigner = TornadoVoidSigner; exports.TornadoWallet = TornadoWallet; +exports.TreeCache = TreeCache; exports.WITHDRAWAL = WITHDRAWAL; exports._META = _META; +exports.addNetwork = addNetwork; exports.ajv = ajv; exports.base64ToBytes = base64ToBytes; exports.bigIntReplacer = bigIntReplacer; -exports.blockSyncInterval = blockSyncInterval; exports.bnToBytes = 
bnToBytes; exports.buffPedersenHash = buffPedersenHash; exports.bufferToBytes = bufferToBytes; @@ -7067,8 +7922,11 @@ exports.bytesToHex = bytesToHex; exports.calculateScore = calculateScore; exports.calculateSnarkProof = calculateSnarkProof; exports.chunk = chunk; +exports.concatBytes = concatBytes; exports.convertETHToTokenAmount = convertETHToTokenAmount; exports.createDeposit = createDeposit; +exports.crypto = crypto; +exports.defaultConfig = defaultConfig; exports.defaultUserAgent = defaultUserAgent; exports.download = download; exports.enabledChains = enabledChains; @@ -7079,22 +7937,32 @@ exports.fetchData = fetchData; exports.fetchGetUrlFunc = fetchGetUrlFunc; exports.getAllDeposits = getAllDeposits; exports.getAllEncryptedNotes = getAllEncryptedNotes; +exports.getAllGovernanceEvents = getAllGovernanceEvents; +exports.getAllGraphEchoEvents = getAllGraphEchoEvents; exports.getAllRegisters = getAllRegisters; exports.getAllWithdrawals = getAllWithdrawals; +exports.getConfig = getConfig; exports.getDeposits = getDeposits; exports.getEncryptedNotes = getEncryptedNotes; exports.getGasOraclePlugin = getGasOraclePlugin; +exports.getGovernanceEvents = getGovernanceEvents; +exports.getGraphEchoEvents = getGraphEchoEvents; exports.getHttpAgent = getHttpAgent; +exports.getInstanceByAddress = getInstanceByAddress; exports.getMeta = getMeta; +exports.getNetworkConfig = getNetworkConfig; exports.getNoteAccounts = getNoteAccounts; exports.getProvider = getProvider; exports.getProviderWithNetId = getProviderWithNetId; exports.getRegisters = getRegisters; exports.getStatistic = getStatistic; exports.getStatusSchema = getStatusSchema; +exports.getSubdomains = getSubdomains; +exports.getSupportedInstances = getSupportedInstances; exports.getTokenBalances = getTokenBalances; exports.getWeightRandom = getWeightRandom; exports.getWithdrawals = getWithdrawals; +exports.hexToBytes = hexToBytes; exports.isNode = isNode; exports.isRelayerUpdated = isRelayerUpdated; exports.jobsSchema 
= jobsSchema; @@ -7104,11 +7972,12 @@ exports.loadCachedEvents = loadCachedEvents; exports.loadSavedEvents = loadSavedEvents; exports.mimc = mimc; exports.multicall = multicall; -exports.networkConfig = networkConfig; +exports.packEncryptedMessage = packEncryptedMessage; exports.parseAddress = parseAddress; exports.parseKey = parseKey; exports.parseMnemonic = parseMnemonic; exports.parseNumber = parseNumber; +exports.parseRecoveryKey = parseRecoveryKey; exports.parseRelayer = parseRelayer; exports.parseSemanticVersion = parseSemanticVersion; exports.parseUrl = parseUrl; @@ -7117,12 +7986,12 @@ exports.pickWeightedRandomRelayer = pickWeightedRandomRelayer; exports.populateTransaction = populateTransaction; exports.queryGraph = queryGraph; exports.rBigInt = rBigInt; -exports.saveEvents = saveEvents; +exports.saveUserFile = saveUserFile; exports.sleep = sleep; -exports.subdomains = subdomains; exports.substring = substring; exports.toFixedHex = toFixedHex; exports.toFixedLength = toFixedLength; +exports.unpackEncryptedMessage = unpackEncryptedMessage; exports.unzipAsync = unzipAsync; exports.validateUrl = validateUrl; exports.zipAsync = zipAsync; diff --git a/dist/index.mjs b/dist/index.mjs index 432eedc..ef3b4ba 100644 --- a/dist/index.mjs +++ b/dist/index.mjs @@ -1,9 +1,9 @@ -import { Interface, Contract, FetchUrlFeeDataNetworkPlugin, FetchRequest, Network, EnsPlugin, GasCostPlugin, JsonRpcProvider, Wallet, HDNodeWallet, VoidSigner, JsonRpcSigner, BrowserProvider, parseUnits, FeeData, getAddress, Transaction, Mnemonic, computeAddress, parseEther, namehash, ZeroAddress } from 'ethers'; +import { Interface, Contract, FetchUrlFeeDataNetworkPlugin, FetchRequest, Network, EnsPlugin, GasCostPlugin, JsonRpcProvider, Wallet, HDNodeWallet, VoidSigner, JsonRpcSigner, BrowserProvider, parseUnits, FeeData, getAddress, computeAddress, Transaction, Mnemonic, parseEther, namehash, ZeroAddress } from 'ethers'; import crossFetch from 'cross-fetch'; import { HttpProxyAgent } from 
'http-proxy-agent'; import { HttpsProxyAgent } from 'https-proxy-agent'; import { SocksProxyAgent } from 'socks-proxy-agent'; -import { URL } from 'url'; +import { webcrypto } from 'crypto'; import BN from 'bn.js'; import Table from 'cli-table3'; import moment from 'moment'; @@ -12,9 +12,11 @@ import { stat, mkdir, writeFile, readFile } from 'fs/promises'; import { zip, unzip } from 'fflate'; import Ajv from 'ajv'; import { buildPedersenHash, buildMimcSponge } from 'circomlibjs'; +import { getEncryptionPublicKey, encrypt, decrypt } from '@metamask/eth-sig-util'; import { Worker as Worker$1 } from 'worker_threads'; -import { MerkleTree } from '@tornado/fixed-merkle-tree'; +import { MerkleTree, PartialMerkleTree } from '@tornado/fixed-merkle-tree'; import { InvalidArgumentError } from 'commander'; +import BloomFilter from 'bloomfilter.js'; import * as websnarkUtils from '@tornado/websnark/src/utils'; import websnarkGroth from '@tornado/websnark/src/groth16'; @@ -2559,6 +2561,7 @@ BigInt.prototype.toJSON = function() { return this.toString(); }; const isNode = !process.browser && typeof globalThis.window === "undefined"; +const crypto = isNode ? 
webcrypto : globalThis.crypto; const chunk = (arr, size) => [...Array(Math.ceil(arr.length / size))].map((_, i) => arr.slice(size * i, size + size * i)); function sleep(ms) { return new Promise((resolve) => setTimeout(resolve, ms)); @@ -2574,28 +2577,36 @@ function validateUrl(url, protocols) { return false; } } +function concatBytes(...arrays) { + const totalSize = arrays.reduce((acc, e) => acc + e.length, 0); + const merged = new Uint8Array(totalSize); + arrays.forEach((array, i, arrays2) => { + const offset = arrays2.slice(0, i).reduce((acc, e) => acc + e.length, 0); + merged.set(array, offset); + }); + return merged; +} function bufferToBytes(b) { return new Uint8Array(b.buffer); } function bytesToBase64(bytes) { - let binary = ""; - const len = bytes.byteLength; - for (let i = 0; i < len; ++i) { - binary += String.fromCharCode(bytes[i]); - } - return btoa(binary); + return btoa(String.fromCharCode.apply(null, Array.from(bytes))); } function base64ToBytes(base64) { - const binaryString = atob(base64); - const bytes = new Uint8Array(binaryString.length); - for (let i = 0; i < binaryString.length; i++) { - bytes[i] = binaryString.charCodeAt(i); - } - return bytes; + return Uint8Array.from(atob(base64), (c) => c.charCodeAt(0)); } function bytesToHex(bytes) { return "0x" + Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join(""); } +function hexToBytes(hexString) { + if (hexString.slice(0, 2) === "0x") { + hexString = hexString.replace("0x", ""); + } + if (hexString.length % 2 !== 0) { + hexString = "0" + hexString; + } + return Uint8Array.from(hexString.match(/.{1,2}/g).map((byte) => parseInt(byte, 16))); +} function bytesToBN(bytes) { return BigInt(bytesToHex(bytes)); } @@ -2635,7 +2646,7 @@ function substring(str, length = 10) { return `${str.substring(0, length)}...${str.substring(str.length - length)}`; } -var __async$d = (__this, __arguments, generator) => { +var __async$e = (__this, __arguments, generator) => { return new Promise((resolve, 
reject) => { var fulfilled = (value) => { try { @@ -2656,7 +2667,7 @@ var __async$d = (__this, __arguments, generator) => { }); }; function multicall(Multicall2, calls) { - return __async$d(this, null, function* () { + return __async$e(this, null, function* () { const calldata = calls.map((call) => { var _a, _b, _c; const target = ((_a = call.contract) == null ? void 0 : _a.target) || call.address; @@ -2679,29 +2690,29 @@ function multicall(Multicall2, calls) { }); } -var __defProp$3 = Object.defineProperty; -var __defProps$3 = Object.defineProperties; -var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols; +var __defProp$5 = Object.defineProperty; +var __defProps$4 = Object.defineProperties; +var __getOwnPropDescs$4 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$5 = Object.getOwnPropertySymbols; var __getProtoOf$1 = Object.getPrototypeOf; -var __hasOwnProp$3 = Object.prototype.hasOwnProperty; -var __propIsEnum$3 = Object.prototype.propertyIsEnumerable; +var __hasOwnProp$5 = Object.prototype.hasOwnProperty; +var __propIsEnum$5 = Object.prototype.propertyIsEnumerable; var __reflectGet$1 = Reflect.get; -var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$3 = (a, b) => { +var __defNormalProp$5 = (obj, key, value) => key in obj ? 
__defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$5 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$3.call(b, prop)) - __defNormalProp$3(a, prop, b[prop]); - if (__getOwnPropSymbols$3) - for (var prop of __getOwnPropSymbols$3(b)) { - if (__propIsEnum$3.call(b, prop)) - __defNormalProp$3(a, prop, b[prop]); + if (__hasOwnProp$5.call(b, prop)) + __defNormalProp$5(a, prop, b[prop]); + if (__getOwnPropSymbols$5) + for (var prop of __getOwnPropSymbols$5(b)) { + if (__propIsEnum$5.call(b, prop)) + __defNormalProp$5(a, prop, b[prop]); } return a; }; -var __spreadProps$3 = (a, b) => __defProps$3(a, __getOwnPropDescs$3(b)); +var __spreadProps$4 = (a, b) => __defProps$4(a, __getOwnPropDescs$4(b)); var __superGet$1 = (cls, obj, key) => __reflectGet$1(__getProtoOf$1(cls), key, obj); -var __async$c = (__this, __arguments, generator) => { +var __async$d = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -2747,7 +2758,7 @@ function getHttpAgent({ } } function fetchData(_0) { - return __async$c(this, arguments, function* (url, options = {}) { + return __async$d(this, arguments, function* (url, options = {}) { var _a, _b, _c; const MAX_RETRY = (_a = options.maxRetry) != null ? _a : 3; const RETRY_ON = (_b = options.retryOn) != null ? 
_b : 500; @@ -2839,7 +2850,7 @@ function fetchData(_0) { throw errorObject; }); } -const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$c(void 0, null, function* () { +const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$d(void 0, null, function* () { let signal; if (_signal) { const controller = new AbortController(); @@ -2848,7 +2859,7 @@ const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$c(void 0, nu controller.abort(); }); } - const init = __spreadProps$3(__spreadValues$3({}, options), { + const init = __spreadProps$4(__spreadValues$5({}, options), { method: req.method || "POST", headers: req.headers, body: req.body || void 0, @@ -2873,7 +2884,7 @@ const oracleMapper = /* @__PURE__ */ new Map(); const multicallMapper = /* @__PURE__ */ new Map(); function getGasOraclePlugin(networkKey, fetchOptions) { const gasStationApi = (fetchOptions == null ? void 0 : fetchOptions.gasStationApi) || "https://gasstation.polygon.technology/v2"; - return new FetchUrlFeeDataNetworkPlugin(gasStationApi, (fetchFeeData, provider, request) => __async$c(this, null, function* () { + return new FetchUrlFeeDataNetworkPlugin(gasStationApi, (fetchFeeData, provider, request) => __async$d(this, null, function* () { if (!oracleMapper.has(networkKey)) { oracleMapper.set(networkKey, GasPriceOracle__factory.connect(fetchOptions == null ? void 0 : fetchOptions.gasPriceOracle, provider)); } @@ -2932,7 +2943,7 @@ function getGasOraclePlugin(networkKey, fetchOptions) { })); } function getProvider(rpcUrl, fetchOptions) { - return __async$c(this, null, function* () { + return __async$d(this, null, function* () { const fetchReq = new FetchRequest(rpcUrl); fetchReq.getUrlFunc = fetchGetUrlFunc(fetchOptions); const _staticNetwork = yield new JsonRpcProvider(fetchReq).getNetwork(); @@ -2982,7 +2993,7 @@ function getProviderWithNetId(netId, rpcUrl, config, fetchOptions) { provider.pollingInterval = (fetchOptions == null ? 
void 0 : fetchOptions.pollingInterval) || pollInterval * 1e3; return provider; } -const populateTransaction = (signer, tx) => __async$c(void 0, null, function* () { +const populateTransaction = (signer, tx) => __async$d(void 0, null, function* () { const provider = signer.provider; if (!tx.from) { tx.from = signer.address; @@ -2991,7 +3002,7 @@ const populateTransaction = (signer, tx) => __async$c(void 0, null, function* () throw new Error(errMsg); } const [feeData, nonce] = yield Promise.all([ - (() => __async$c(void 0, null, function* () { + (() => __async$d(void 0, null, function* () { if (tx.maxFeePerGas && tx.maxPriorityFeePerGas) { return new FeeData(null, BigInt(tx.maxFeePerGas), BigInt(tx.maxPriorityFeePerGas)); } @@ -3013,7 +3024,7 @@ const populateTransaction = (signer, tx) => __async$c(void 0, null, function* () ); } }))(), - (() => __async$c(void 0, null, function* () { + (() => __async$d(void 0, null, function* () { if (tx.nonce) { return tx.nonce; } @@ -3043,7 +3054,7 @@ const populateTransaction = (signer, tx) => __async$c(void 0, null, function* () delete tx.maxFeePerGas; delete tx.maxPriorityFeePerGas; } - tx.gasLimit = tx.gasLimit || (yield (() => __async$c(void 0, null, function* () { + tx.gasLimit = tx.gasLimit || (yield (() => __async$d(void 0, null, function* () { try { const gasLimit = yield provider.estimateGas(tx); return gasLimit === BigInt(21e3) ? gasLimit : gasLimit * (BigInt(1e4) + BigInt(signer.gasLimitBump)) / BigInt(1e4); @@ -3071,7 +3082,7 @@ class TornadoWallet extends Wallet { return new TornadoWallet(privateKey, provider, options); } populateTransaction(tx) { - return __async$c(this, null, function* () { + return __async$d(this, null, function* () { const txObject = yield populateTransaction(this, tx); this.nonce = txObject.nonce; return __superGet$1(TornadoWallet.prototype, this, "populateTransaction").call(this, txObject); @@ -3087,7 +3098,7 @@ class TornadoVoidSigner extends VoidSigner { this.bumpNonce = bumpNonce != null ? 
bumpNonce : false; } populateTransaction(tx) { - return __async$c(this, null, function* () { + return __async$d(this, null, function* () { const txObject = yield populateTransaction(this, tx); this.nonce = txObject.nonce; return __superGet$1(TornadoVoidSigner.prototype, this, "populateTransaction").call(this, txObject); @@ -3103,7 +3114,7 @@ class TornadoRpcSigner extends JsonRpcSigner { this.bumpNonce = bumpNonce != null ? bumpNonce : false; } sendUncheckedTransaction(tx) { - return __async$c(this, null, function* () { + return __async$d(this, null, function* () { return __superGet$1(TornadoRpcSigner.prototype, this, "sendUncheckedTransaction").call(this, yield populateTransaction(this, tx)); }); } @@ -3114,7 +3125,7 @@ class TornadoBrowserProvider extends BrowserProvider { this.options = options; } getSigner(address) { - return __async$c(this, null, function* () { + return __async$d(this, null, function* () { var _a, _b, _c, _d, _e, _f, _g, _h, _i; const signerAddress = (yield __superGet$1(TornadoBrowserProvider.prototype, this, "getSigner").call(this, address)).address; if (((_a = this.options) == null ? void 0 : _a.webChainId) && ((_b = this.options) == null ? void 0 : _b.connectWallet) && Number(yield __superGet$1(TornadoBrowserProvider.prototype, this, "send").call(this, "eth_chainId", [])) !== Number((_c = this.options) == null ? 
void 0 : _c.webChainId)) { @@ -3237,6 +3248,22 @@ const GET_NOTE_ACCOUNTS = ` } } `; +const GET_ECHO_EVENTS = ` + query getNoteAccounts($first: Int, $fromBlock: Int) { + noteAccounts(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + id + blockNumber + address + encryptedAccount + } + _meta { + block { + number + } + hasIndexingErrors + } + } +`; const GET_ENCRYPTED_NOTES = ` query getEncryptedNotes($first: Int, $fromBlock: Int) { encryptedNotes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { @@ -3253,27 +3280,80 @@ const GET_ENCRYPTED_NOTES = ` } } `; +const GET_GOVERNANCE_EVENTS = ` + query getGovernanceEvents($first: Int, $fromBlock: Int) { + proposals(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + proposalId + proposer + target + startTime + endTime + description + } + votes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + proposalId + voter + support + votes + from + input + } + delegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + account + delegateTo + } + undelegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + account + delegateFrom + } + _meta { + block { + number + } + hasIndexingErrors + } + } +`; +const GET_GOVERNANCE_APY = ` + stakeDailyBurns(first: 30, orderBy: date, orderDirection: desc) { + id + date + dailyAmountBurned + } +`; -var __defProp$2 = Object.defineProperty; -var __defProps$2 = Object.defineProperties; -var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols; -var __hasOwnProp$2 = 
Object.prototype.hasOwnProperty; -var __propIsEnum$2 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$2 = (a, b) => { +var __defProp$4 = Object.defineProperty; +var __defProps$3 = Object.defineProperties; +var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols; +var __hasOwnProp$4 = Object.prototype.hasOwnProperty; +var __propIsEnum$4 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$4 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$2.call(b, prop)) - __defNormalProp$2(a, prop, b[prop]); - if (__getOwnPropSymbols$2) - for (var prop of __getOwnPropSymbols$2(b)) { - if (__propIsEnum$2.call(b, prop)) - __defNormalProp$2(a, prop, b[prop]); + if (__hasOwnProp$4.call(b, prop)) + __defNormalProp$4(a, prop, b[prop]); + if (__getOwnPropSymbols$4) + for (var prop of __getOwnPropSymbols$4(b)) { + if (__propIsEnum$4.call(b, prop)) + __defNormalProp$4(a, prop, b[prop]); } return a; }; -var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b)); -var __async$b = (__this, __arguments, generator) => { +var __spreadProps$3 = (a, b) => __defProps$3(a, __getOwnPropDescs$3(b)); +var __async$c = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -3296,7 +3376,7 @@ var __async$b = (__this, __arguments, generator) => { const isEmptyArray = (arr) => !Array.isArray(arr) || !arr.length; const first = 1e3; function queryGraph(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, query, @@ -3305,7 +3385,7 @@ 
function queryGraph(_0) { }) { var _a; const graphUrl = `${graphApi}/subgraphs/name/${subgraphName}`; - const { data, errors } = yield fetchData(graphUrl, __spreadProps$2(__spreadValues$2({}, fetchDataOptions2), { + const { data, errors } = yield fetchData(graphUrl, __spreadProps$3(__spreadValues$4({}, fetchDataOptions2), { method: "POST", headers: { "Content-Type": "application/json" @@ -3325,7 +3405,7 @@ function queryGraph(_0) { }); } function getStatistic(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -3372,7 +3452,7 @@ function getStatistic(_0) { }); } function getMeta(_0) { - return __async$b(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) { + return __async$c(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) { try { const { _meta: { @@ -3417,7 +3497,7 @@ function getRegisters({ }); } function getAllRegisters(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -3506,7 +3586,7 @@ function getDeposits({ }); } function getAllDeposits(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -3603,7 +3683,7 @@ function getWithdrawals({ }); } function getAllWithdrawals(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -3679,7 +3759,7 @@ function getAllWithdrawals(_0) { }); } function getNoteAccounts(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, address, @@ -3696,7 +3776,7 @@ function getNoteAccounts(_0) { subgraphName, query: GET_NOTE_ACCOUNTS, variables: { - address + address: address.toLowerCase() }, fetchDataOptions: fetchDataOptions2 }); @@ 
-3714,6 +3794,95 @@ function getNoteAccounts(_0) { } }); } +function getGraphEchoEvents({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2 +}) { + return queryGraph({ + graphApi, + subgraphName, + query: GET_ECHO_EVENTS, + variables: { + first, + fromBlock + }, + fetchDataOptions: fetchDataOptions2 + }); +} +function getAllGraphEchoEvents(_0) { + return __async$c(this, arguments, function* ({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2, + onProgress + }) { + try { + const events = []; + let lastSyncBlock = fromBlock; + while (true) { + let { + noteAccounts: result2, + _meta: { + // eslint-disable-next-line prefer-const + block: { number: currentBlock } + } + } = yield getGraphEchoEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions: fetchDataOptions2 }); + lastSyncBlock = currentBlock; + if (isEmptyArray(result2)) { + break; + } + const [firstEvent] = result2; + const [lastEvent2] = result2.slice(-1); + if (typeof onProgress === "function") { + onProgress({ + type: "EchoEvents", + fromBlock: Number(firstEvent.blockNumber), + toBlock: Number(lastEvent2.blockNumber), + count: result2.length + }); + } + if (result2.length < 900) { + events.push(...result2); + break; + } + result2 = result2.filter(({ blockNumber }) => blockNumber !== lastEvent2.blockNumber); + fromBlock = Number(lastEvent2.blockNumber); + events.push(...result2); + } + if (!events.length) { + return { + events: [], + lastSyncBlock + }; + } + const result = events.map((e) => { + const [transactionHash, logIndex] = e.id.split("-"); + return { + blockNumber: Number(e.blockNumber), + logIndex: Number(logIndex), + transactionHash, + address: getAddress(e.address), + encryptedAccount: e.encryptedAccount + }; + }); + const [lastEvent] = result.slice(-1); + return { + events: result, + lastSyncBlock: lastEvent && lastEvent.blockNumber >= lastSyncBlock ? 
lastEvent.blockNumber + 1 : lastSyncBlock + }; + } catch (err) { + console.log("Error from getAllGraphEchoEvents query"); + console.log(err); + return { + events: [], + lastSyncBlock: fromBlock + }; + } + }); +} function getEncryptedNotes({ graphApi, subgraphName, @@ -3732,7 +3901,7 @@ function getEncryptedNotes({ }); } function getAllEncryptedNotes(_0) { - return __async$b(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -3799,11 +3968,160 @@ function getAllEncryptedNotes(_0) { } }); } +function getGovernanceEvents({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2 +}) { + return queryGraph({ + graphApi, + subgraphName, + query: GET_GOVERNANCE_EVENTS, + variables: { + first, + fromBlock + }, + fetchDataOptions: fetchDataOptions2 + }); +} +function getAllGovernanceEvents(_0) { + return __async$c(this, arguments, function* ({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2, + onProgress + }) { + try { + const result = []; + let lastSyncBlock = fromBlock; + while (true) { + const { + proposals, + votes, + delegates, + undelegates, + _meta: { + block: { number: currentBlock } + } + } = yield getGovernanceEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions: fetchDataOptions2 }); + lastSyncBlock = currentBlock; + const eventsLength = proposals.length + votes.length + delegates.length + undelegates.length; + if (eventsLength === 0) { + break; + } + const formattedProposals = proposals.map( + ({ blockNumber, logIndex, transactionHash, proposalId, proposer, target, startTime, endTime, description }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "ProposalCreated", + id: Number(proposalId), + proposer: getAddress(proposer), + target: getAddress(target), + startTime: Number(startTime), + endTime: Number(endTime), + description + }; + } + ); + const formattedVotes = votes.map( + 
({ blockNumber, logIndex, transactionHash, proposalId, voter, support, votes: votes2, from, input }) => { + if (!input || input.length > 2048) { + input = ""; + } + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Voted", + proposalId: Number(proposalId), + voter: getAddress(voter), + support, + votes: votes2, + from: getAddress(from), + input + }; + } + ); + const formattedDelegates = delegates.map( + ({ blockNumber, logIndex, transactionHash, account, delegateTo }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Delegated", + account: getAddress(account), + delegateTo: getAddress(delegateTo) + }; + } + ); + const formattedUndelegates = undelegates.map( + ({ blockNumber, logIndex, transactionHash, account, delegateFrom }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Undelegated", + account: getAddress(account), + delegateFrom: getAddress(delegateFrom) + }; + } + ); + let formattedEvents = [ + ...formattedProposals, + ...formattedVotes, + ...formattedDelegates, + ...formattedUndelegates + ].sort((a, b) => { + if (a.blockNumber === b.blockNumber) { + return a.logIndex - b.logIndex; + } + return a.blockNumber - b.blockNumber; + }); + if (eventsLength < 900) { + result.push(...formattedEvents); + break; + } + const [firstEvent] = formattedEvents; + const [lastEvent2] = formattedEvents.slice(-1); + if (typeof onProgress === "function") { + onProgress({ + type: "Governance Events", + fromBlock: Number(firstEvent.blockNumber), + toBlock: Number(lastEvent2.blockNumber), + count: eventsLength + }); + } + formattedEvents = formattedEvents.filter(({ blockNumber }) => blockNumber !== lastEvent2.blockNumber); + fromBlock = Number(lastEvent2.blockNumber); + result.push(...formattedEvents); + } + const [lastEvent] = result.slice(-1); + return { + events: result, + lastSyncBlock: lastEvent && 
lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock + }; + } catch (err) { + console.log("Error from getAllGovernance query"); + console.log(err); + return { + events: [], + lastSyncBlock: fromBlock + }; + } + }); +} var graph = /*#__PURE__*/Object.freeze({ __proto__: null, GET_DEPOSITS: GET_DEPOSITS, + GET_ECHO_EVENTS: GET_ECHO_EVENTS, GET_ENCRYPTED_NOTES: GET_ENCRYPTED_NOTES, + GET_GOVERNANCE_APY: GET_GOVERNANCE_APY, + GET_GOVERNANCE_EVENTS: GET_GOVERNANCE_EVENTS, GET_NOTE_ACCOUNTS: GET_NOTE_ACCOUNTS, GET_REGISTERED: GET_REGISTERED, GET_STATISTIC: GET_STATISTIC, @@ -3811,10 +4129,14 @@ var graph = /*#__PURE__*/Object.freeze({ _META: _META, getAllDeposits: getAllDeposits, getAllEncryptedNotes: getAllEncryptedNotes, + getAllGovernanceEvents: getAllGovernanceEvents, + getAllGraphEchoEvents: getAllGraphEchoEvents, getAllRegisters: getAllRegisters, getAllWithdrawals: getAllWithdrawals, getDeposits: getDeposits, getEncryptedNotes: getEncryptedNotes, + getGovernanceEvents: getGovernanceEvents, + getGraphEchoEvents: getGraphEchoEvents, getMeta: getMeta, getNoteAccounts: getNoteAccounts, getRegisters: getRegisters, @@ -3823,7 +4145,7 @@ var graph = /*#__PURE__*/Object.freeze({ queryGraph: queryGraph }); -var __async$a = (__this, __arguments, generator) => { +var __async$b = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -3862,7 +4184,7 @@ class BatchBlockService { this.retryOn = retryOn; } getBlock(blockTag) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { const blockObject = yield this.provider.getBlock(blockTag); if (!blockObject) { const errMsg = `No block for ${blockTag}`; @@ -3872,9 +4194,9 @@ class BatchBlockService { }); } createBatchRequest(batchArray) { - return batchArray.map((blocks, index) => __async$a(this, null, function* () { + return batchArray.map((blocks, index) => __async$b(this, null, function* () { yield 
sleep(20 * index); - return (() => __async$a(this, null, function* () { + return (() => __async$b(this, null, function* () { let retries = 0; let err; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -3891,7 +4213,7 @@ class BatchBlockService { })); } getBatchBlocks(blocks) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { let blockCount = 0; const results = []; for (const chunks of chunk(blocks, this.concurrencySize * this.batchSize)) { @@ -3929,7 +4251,7 @@ class BatchTransactionService { this.retryOn = retryOn; } getTransaction(txHash) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { const txObject = yield this.provider.getTransaction(txHash); if (!txObject) { const errMsg = `No transaction for ${txHash}`; @@ -3939,9 +4261,9 @@ class BatchTransactionService { }); } createBatchRequest(batchArray) { - return batchArray.map((txs, index) => __async$a(this, null, function* () { + return batchArray.map((txs, index) => __async$b(this, null, function* () { yield sleep(20 * index); - return (() => __async$a(this, null, function* () { + return (() => __async$b(this, null, function* () { let retries = 0; let err; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -3958,7 +4280,7 @@ class BatchTransactionService { })); } getBatchTransactions(txs) { - return __async$a(this, null, function* () { + return __async$b(this, null, function* () { let txCount = 0; const results = []; for (const chunks of chunk(txs, this.concurrencySize * this.batchSize)) { @@ -3994,7 +4316,7 @@ class BatchEventsService { this.retryOn = retryOn; } getPastEvents(_0) { - return __async$a(this, arguments, function* ({ fromBlock, toBlock, type }) { + return __async$b(this, arguments, function* ({ fromBlock, toBlock, type }) { let err; let retries = 0; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < 
this.retryMax) { @@ -4014,13 +4336,13 @@ class BatchEventsService { }); } createBatchRequest(batchArray) { - return batchArray.map((event, index) => __async$a(this, null, function* () { + return batchArray.map((event, index) => __async$b(this, null, function* () { yield sleep(20 * index); return this.getPastEvents(event); })); } getBatchEvents(_0) { - return __async$a(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) { + return __async$b(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) { if (!toBlock) { toBlock = yield this.provider.getBlockNumber(); } @@ -4051,29 +4373,29 @@ class BatchEventsService { } } -var __defProp$1 = Object.defineProperty; -var __defProps$1 = Object.defineProperties; -var __getOwnPropDescs$1 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols; +var __defProp$3 = Object.defineProperty; +var __defProps$2 = Object.defineProperties; +var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols; var __getProtoOf = Object.getPrototypeOf; -var __hasOwnProp$1 = Object.prototype.hasOwnProperty; -var __propIsEnum$1 = Object.prototype.propertyIsEnumerable; +var __hasOwnProp$3 = Object.prototype.hasOwnProperty; +var __propIsEnum$3 = Object.prototype.propertyIsEnumerable; var __reflectGet = Reflect.get; -var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$1 = (a, b) => { +var __defNormalProp$3 = (obj, key, value) => key in obj ? 
__defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$3 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$1.call(b, prop)) - __defNormalProp$1(a, prop, b[prop]); - if (__getOwnPropSymbols$1) - for (var prop of __getOwnPropSymbols$1(b)) { - if (__propIsEnum$1.call(b, prop)) - __defNormalProp$1(a, prop, b[prop]); + if (__hasOwnProp$3.call(b, prop)) + __defNormalProp$3(a, prop, b[prop]); + if (__getOwnPropSymbols$3) + for (var prop of __getOwnPropSymbols$3(b)) { + if (__propIsEnum$3.call(b, prop)) + __defNormalProp$3(a, prop, b[prop]); } return a; }; -var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b)); +var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b)); var __superGet = (cls, obj, key) => __reflectGet(__getProtoOf(cls), key, obj); -var __async$9 = (__this, __arguments, generator) => { +var __async$a = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -4148,7 +4470,7 @@ class BaseEventsService { } /* eslint-enable @typescript-eslint/no-unused-vars */ formatEvents(events) { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return yield new Promise((resolve) => resolve(events)); }); } @@ -4156,7 +4478,7 @@ class BaseEventsService { * Get saved or cached events */ getEventsFromDB() { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return { events: [], lastBlock: null @@ -4164,7 +4486,7 @@ class BaseEventsService { }); } getEventsFromCache() { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return { events: [], lastBlock: null @@ -4172,7 +4494,7 @@ class BaseEventsService { }); } getSavedEvents() { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { let cachedEvents = yield this.getEventsFromDB(); if 
(!cachedEvents || !cachedEvents.events.length) { cachedEvents = yield this.getEventsFromCache(); @@ -4184,7 +4506,7 @@ class BaseEventsService { * Get latest events */ getEventsFromGraph(_0) { - return __async$9(this, arguments, function* ({ + return __async$a(this, arguments, function* ({ fromBlock, methodName = "" }) { @@ -4194,7 +4516,7 @@ class BaseEventsService { lastBlock: fromBlock }; } - const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$1({ + const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$3({ fromBlock }, this.getGraphParams())); return { @@ -4204,7 +4526,7 @@ class BaseEventsService { }); } getEventsFromRpc(_0) { - return __async$9(this, arguments, function* ({ + return __async$a(this, arguments, function* ({ fromBlock, toBlock }) { @@ -4242,7 +4564,7 @@ class BaseEventsService { }); } getLatestEvents(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { + return __async$a(this, arguments, function* ({ fromBlock }) { const allEvents = []; const graphEvents = yield this.getEventsFromGraph({ fromBlock }); const lastSyncBlock = graphEvents.lastBlock && graphEvents.lastBlock >= fromBlock ? 
graphEvents.lastBlock : fromBlock; @@ -4264,14 +4586,14 @@ class BaseEventsService { */ // eslint-disable-next-line @typescript-eslint/no-unused-vars saveEvents(_0) { - return __async$9(this, arguments, function* ({ events, lastBlock }) { + return __async$a(this, arguments, function* ({ events, lastBlock }) { }); } /** * Trigger saving and receiving latest events */ updateEvents() { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { const savedEvents = yield this.getSavedEvents(); let fromBlock = this.deployedBlock; if (savedEvents && savedEvents.lastBlock) { @@ -4345,7 +4667,7 @@ class BaseDepositsService extends BaseEventsService { }; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { const type = this.getType().toLowerCase(); if (type === DEPOSIT) { const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { @@ -4364,7 +4686,7 @@ class BaseDepositsService extends BaseEventsService { ]); return formattedEvents.map((event) => { const { from } = txs.find(({ hash }) => hash === event.transactionHash); - return __spreadProps$1(__spreadValues$1({}, event), { + return __spreadProps$2(__spreadValues$3({}, event), { from }); }); @@ -4385,7 +4707,7 @@ class BaseDepositsService extends BaseEventsService { ]); return formattedEvents.map((event) => { const { timestamp } = blocks.find(({ number }) => number === event.blockNumber); - return __spreadProps$1(__spreadValues$1({}, event), { + return __spreadProps$2(__spreadValues$3({}, event), { timestamp }); }); @@ -4402,6 +4724,57 @@ class BaseDepositsService extends BaseEventsService { } } } +class BaseEchoService extends BaseEventsService { + constructor({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions: fetchDataOptions2 + }) { + super({ netId, provider, graphApi, subgraphName, contract: Echoer, deployedBlock, fetchDataOptions: 
fetchDataOptions2 }); + } + getInstanceName() { + return `echo_${this.netId}`; + } + getType() { + return "Echo"; + } + getGraphMethod() { + return "getAllGraphEchoEvents"; + } + formatEvents(events) { + return __async$a(this, null, function* () { + return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { + const { who, data } = args; + if (who && data) { + const eventObjects = { + blockNumber, + logIndex, + transactionHash + }; + return __spreadProps$2(__spreadValues$3({}, eventObjects), { + address: who, + encryptedAccount: data + }); + } + }).filter((e) => e); + }); + } + getEventsFromGraph(_0) { + return __async$a(this, arguments, function* ({ fromBlock }) { + if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { + return { + events: [], + lastBlock: fromBlock + }; + } + return __superGet(BaseEchoService.prototype, this, "getEventsFromGraph").call(this, { fromBlock }); + }); + } +} class BaseEncryptedNotesService extends BaseEventsService { constructor({ netId, @@ -4424,7 +4797,7 @@ class BaseEncryptedNotesService extends BaseEventsService { return "getAllEncryptedNotes"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const { encryptedNote } = args; if (encryptedNote) { @@ -4433,7 +4806,7 @@ class BaseEncryptedNotesService extends BaseEventsService { logIndex, transactionHash }; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + return __spreadProps$2(__spreadValues$3({}, eventObjects), { encryptedNote }); } @@ -4464,11 +4837,15 @@ class BaseGovernanceService extends BaseEventsService { return "*"; } getGraphMethod() { - return "governanceEvents"; + return "getAllGovernanceEvents"; } formatEvents(events) { - return __async$9(this, null, function* () { - const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args, eventName: event }) 
=> { + return __async$a(this, null, function* () { + const proposalEvents = []; + const votedEvents = []; + const delegatedEvents = []; + const undelegatedEvents = []; + events.forEach(({ blockNumber, index: logIndex, transactionHash, args, eventName: event }) => { const eventObjects = { blockNumber, logIndex, @@ -4477,60 +4854,61 @@ class BaseGovernanceService extends BaseEventsService { }; if (event === "ProposalCreated") { const { id, proposer, target, startTime, endTime, description } = args; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { - id, + proposalEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), { + id: Number(id), proposer, target, - startTime, - endTime, + startTime: Number(startTime), + endTime: Number(endTime), description - }); + })); } if (event === "Voted") { const { proposalId, voter, support, votes } = args; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { - proposalId, + votedEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), { + proposalId: Number(proposalId), voter, support, - votes - }); + votes, + from: "", + input: "" + })); } if (event === "Delegated") { const { account, to: delegateTo } = args; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + delegatedEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), { account, delegateTo - }); + })); } if (event === "Undelegated") { const { account, from: delegateFrom } = args; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + undelegatedEvents.push(__spreadProps$2(__spreadValues$3({}, eventObjects), { account, delegateFrom - }); + })); } - }).filter((e) => e); - const votedEvents = formattedEvents.map((event, index) => __spreadProps$1(__spreadValues$1({}, event), { index })).filter(({ event }) => event === "Voted"); + }); if (votedEvents.length) { this.updateTransactionProgress({ percentage: 0 }); const txs = yield this.batchTransactionService.getBatchTransactions([ ...new Set(votedEvents.map(({ 
transactionHash }) => transactionHash)) ]); - votedEvents.forEach((event) => { + votedEvents.forEach((event, index) => { let { data: input, from } = txs.find((t) => t.hash === event.transactionHash); if (!input || input.length > 2048) { input = ""; } - formattedEvents[event.index].from = from; - formattedEvents[event.index].input = input; + votedEvents[index].from = from; + votedEvents[index].input = input; }); } - return formattedEvents; + return [...proposalEvents, ...votedEvents, ...delegatedEvents, ...undelegatedEvents]; }); } getEventsFromGraph(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { - if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { + return __async$a(this, arguments, function* ({ fromBlock }) { + if (!this.graphApi || !this.subgraphName || this.graphApi.includes("api.thegraph.com")) { return { events: [], lastBlock: fromBlock @@ -4564,14 +4942,14 @@ class BaseRegistryService extends BaseEventsService { return "getAllRegisters"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const eventObjects = { blockNumber, logIndex, transactionHash }; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + return __spreadProps$2(__spreadValues$3({}, eventObjects), { ensName: args.ensName, relayerAddress: args.relayerAddress }); @@ -4579,13 +4957,13 @@ class BaseRegistryService extends BaseEventsService { }); } fetchRelayers() { - return __async$9(this, null, function* () { + return __async$a(this, null, function* () { return (yield this.updateEvents()).events; }); } } -var __async$8 = (__this, __arguments, generator) => { +var __async$9 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -4606,7 +4984,7 @@ var __async$8 = (__this, __arguments, generator) => { }); }; function existsAsync(fileOrDir) { 
- return __async$8(this, null, function* () { + return __async$9(this, null, function* () { try { yield stat(fileOrDir); return true; @@ -4637,27 +5015,26 @@ function unzipAsync(data) { }); }); } -function saveEvents(_0) { - return __async$8(this, arguments, function* ({ - name, +function saveUserFile(_0) { + return __async$9(this, arguments, function* ({ + fileName, userDirectory, - events + dataString }) { - const fileName = `${name}.json`.toLowerCase(); + fileName = fileName.toLowerCase(); const filePath = path.join(userDirectory, fileName); - const stringEvents = JSON.stringify(events, null, 2) + "\n"; const payload = yield zipAsync({ - [fileName]: new TextEncoder().encode(stringEvents) + [fileName]: new TextEncoder().encode(dataString) }); if (!(yield existsAsync(userDirectory))) { yield mkdir(userDirectory, { recursive: true }); } yield writeFile(filePath + ".zip", payload); - yield writeFile(filePath, stringEvents); + yield writeFile(filePath, dataString); }); } function loadSavedEvents(_0) { - return __async$8(this, arguments, function* ({ + return __async$9(this, arguments, function* ({ name, userDirectory, deployedBlock @@ -4686,7 +5063,7 @@ function loadSavedEvents(_0) { }); } function download(_0) { - return __async$8(this, arguments, function* ({ name, cacheDirectory }) { + return __async$9(this, arguments, function* ({ name, cacheDirectory }) { const fileName = `${name}.json`.toLowerCase(); const zipName = `${fileName}.zip`; const zipPath = path.join(cacheDirectory, zipName); @@ -4696,7 +5073,7 @@ function download(_0) { }); } function loadCachedEvents(_0) { - return __async$8(this, arguments, function* ({ + return __async$9(this, arguments, function* ({ name, cacheDirectory, deployedBlock @@ -4726,7 +5103,7 @@ function loadCachedEvents(_0) { }); } -var __async$7 = (__this, __arguments, generator) => { +var __async$8 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -4811,7 +5188,7 
@@ class NodeDepositsService extends BaseDepositsService { } } getEventsFromDB() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.userDirectory) { console.log( "Updating events for", @@ -4842,7 +5219,7 @@ class NodeDepositsService extends BaseDepositsService { }); } getEventsFromCache() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -4864,7 +5241,7 @@ class NodeDepositsService extends BaseDepositsService { }); } saveEvents(_0) { - return __async$7(this, arguments, function* ({ events, lastBlock }) { + return __async$8(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -4885,10 +5262,136 @@ class NodeDepositsService extends BaseDepositsService { ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" + }); + } + }); + } +} +class NodeEchoService extends BaseEchoService { + constructor({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions, + cacheDirectory, + userDirectory + }) { + super({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions + }); + this.cacheDirectory = cacheDirectory; + this.userDirectory = userDirectory; + } + updateEventProgress({ type, fromBlock, toBlock, count }) { + if (toBlock) { + console.log(`fromBlock - ${fromBlock}`); + console.log(`toBlock - ${toBlock}`); + if (count) { + console.log(`downloaded ${type} events count - ${count}`); + console.log("____________________________________________"); + 
console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock} +`); + } + } + } + updateGraphProgress({ type, fromBlock, toBlock, count }) { + if (toBlock) { + console.log(`fromBlock - ${fromBlock}`); + console.log(`toBlock - ${toBlock}`); + if (count) { + console.log(`downloaded ${type} events from graph node count - ${count}`); + console.log("____________________________________________"); + console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock} +`); + } + } + } + getEventsFromDB() { + return __async$8(this, null, function* () { + if (!this.userDirectory) { + console.log(`Updating events for ${this.netId} chain echo events +`); + console.log(`savedEvents count - ${0}`); + console.log(`savedEvents lastBlock - ${this.deployedBlock} +`); + return { + events: [], + lastBlock: this.deployedBlock + }; + } + const savedEvents = yield loadSavedEvents({ + name: this.getInstanceName(), + userDirectory: this.userDirectory, + deployedBlock: this.deployedBlock + }); + console.log(`Updating events for ${this.netId} chain echo events +`); + console.log(`savedEvents count - ${savedEvents.events.length}`); + console.log(`savedEvents lastBlock - ${savedEvents.lastBlock} +`); + return savedEvents; + }); + } + getEventsFromCache() { + return __async$8(this, null, function* () { + if (!this.cacheDirectory) { + console.log(`cachedEvents count - ${0}`); + console.log(`cachedEvents lastBlock - ${this.deployedBlock} +`); + return { + events: [], + lastBlock: this.deployedBlock + }; + } + const cachedEvents = yield loadCachedEvents({ + name: this.getInstanceName(), + cacheDirectory: this.cacheDirectory, + deployedBlock: this.deployedBlock + }); + console.log(`cachedEvents count - ${cachedEvents.events.length}`); + console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock} +`); + return cachedEvents; + }); + } + saveEvents(_0) { + return __async$8(this, arguments, function* ({ events, lastBlock }) { + const instanceName = this.getInstanceName(); + 
console.log("\ntotalEvents count - ", events.length); + console.log( + `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock} +` + ); + const eventTable = new Table(); + eventTable.push( + [{ colSpan: 2, content: "Echo Accounts", hAlign: "center" }], + ["Network", `${this.netId} chain`], + ["Events", `${events.length} events`], + [{ colSpan: 2, content: "Latest events" }], + ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => { + const eventIndex = events.length - index; + return [eventIndex, blockNumber]; + }) + ); + console.log(eventTable.toString() + "\n"); + if (this.userDirectory) { + yield saveUserFile({ + fileName: instanceName + ".json", + userDirectory: this.userDirectory, + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -4943,7 +5446,7 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { } } getEventsFromDB() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.userDirectory) { console.log(`Updating events for ${this.netId} chain encrypted events `); @@ -4969,7 +5472,7 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { }); } getEventsFromCache() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -4991,7 +5494,7 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { }); } saveEvents(_0) { - return __async$7(this, arguments, function* ({ events, lastBlock }) { + return __async$8(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -5011,10 +5514,10 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { ); console.log(eventTable.toString() + "\n"); if 
(this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -5074,7 +5577,7 @@ class NodeGovernanceService extends BaseGovernanceService { } } getEventsFromDB() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.userDirectory) { console.log(`Updating events for ${this.netId} chain governance events `); @@ -5100,7 +5603,7 @@ class NodeGovernanceService extends BaseGovernanceService { }); } getEventsFromCache() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -5122,7 +5625,7 @@ class NodeGovernanceService extends BaseGovernanceService { }); } saveEvents(_0) { - return __async$7(this, arguments, function* ({ events, lastBlock }) { + return __async$8(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -5142,10 +5645,10 @@ class NodeGovernanceService extends BaseGovernanceService { ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -5200,7 +5703,7 @@ class NodeRegistryService extends BaseRegistryService { } } getEventsFromDB() { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.userDirectory) { console.log(`Updating events for ${this.netId} chain registry events `); @@ -5226,7 +5729,7 @@ class NodeRegistryService extends BaseRegistryService { }); } getEventsFromCache() { - 
return __async$7(this, null, function* () { + return __async$8(this, null, function* () { if (!this.cacheDirectory) { console.log(`cachedEvents count - ${0}`); console.log(`cachedEvents lastBlock - ${this.deployedBlock} @@ -5248,7 +5751,7 @@ class NodeRegistryService extends BaseRegistryService { }); } saveEvents(_0) { - return __async$7(this, arguments, function* ({ events, lastBlock }) { + return __async$8(this, arguments, function* ({ events, lastBlock }) { const instanceName = this.getInstanceName(); console.log("\ntotalEvents count - ", events.length); console.log( @@ -5268,598 +5771,43 @@ class NodeRegistryService extends BaseRegistryService { ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); } } -const addressType = { type: "string", pattern: "^0x[a-fA-F0-9]{40}$" }; -const bnType = { type: "string", BN: true }; -const statusSchema = { - type: "object", - properties: { - rewardAccount: addressType, - gasPrices: { - type: "object", - properties: { - fast: { type: "number" }, - additionalProperties: { type: "number" } - }, - required: ["fast"] - }, - netId: { type: "integer" }, - tornadoServiceFee: { type: "number", maximum: 20, minimum: 0 }, - latestBlock: { type: "number" }, - version: { type: "string" }, - health: { - type: "object", - properties: { - status: { const: "true" }, - error: { type: "string" } - }, - required: ["status"] - }, - currentQueue: { type: "number" } - }, - required: ["rewardAccount", "instances", "netId", "tornadoServiceFee", "version", "health"] +var __defProp$2 = Object.defineProperty; +var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols; +var __hasOwnProp$2 = Object.prototype.hasOwnProperty; +var __propIsEnum$2 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$2 = (obj, key, value) => key 
in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$2 = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp$2.call(b, prop)) + __defNormalProp$2(a, prop, b[prop]); + if (__getOwnPropSymbols$2) + for (var prop of __getOwnPropSymbols$2(b)) { + if (__propIsEnum$2.call(b, prop)) + __defNormalProp$2(a, prop, b[prop]); + } + return a; }; -function getStatusSchema(netId, config) { - const { tokens, optionalTokens = [], nativeCurrency } = config; - const schema = JSON.parse(JSON.stringify(statusSchema)); - const instances = Object.keys(tokens).reduce( - (acc, token) => { - const { instanceAddress, tokenAddress, symbol, decimals, optionalInstances = [] } = tokens[token]; - const amounts = Object.keys(instanceAddress); - const instanceProperties = { - type: "object", - properties: { - instanceAddress: { - type: "object", - properties: amounts.reduce((acc2, cur) => { - acc2[cur] = addressType; - return acc2; - }, {}), - required: amounts.filter((amount) => !optionalInstances.includes(amount)) - }, - decimals: { enum: [decimals] } - }, - required: ["instanceAddress", "decimals"].concat( - tokenAddress ? ["tokenAddress"] : [], - symbol ? 
["symbol"] : [] - ) - }; - if (tokenAddress) { - instanceProperties.properties.tokenAddress = addressType; - } - if (symbol) { - instanceProperties.properties.symbol = { enum: [symbol] }; - } - acc.properties[token] = instanceProperties; - if (!optionalTokens.includes(token)) { - acc.required.push(token); - } - return acc; - }, - { - type: "object", - properties: {}, - required: [] - } - ); - schema.properties.instances = instances; - if (Number(netId) === 1) { - const _tokens = Object.keys(tokens).filter((t) => t !== nativeCurrency); - const ethPrices = { - type: "object", - properties: _tokens.reduce((acc, token) => { - acc[token] = bnType; - return acc; - }, {}) - // required: _tokens - }; - schema.properties.ethPrices = ethPrices; - } - return schema; -} - -const jobsSchema = { - type: "object", - properties: { - error: { type: "string" }, - id: { type: "string" }, - type: { type: "string" }, - status: { type: "string" }, - contract: { type: "string" }, - proof: { type: "string" }, - args: { - type: "array", - items: { type: "string" } - }, - txHash: { type: "string" }, - confirmations: { type: "number" }, - failedReason: { type: "string" } - }, - required: ["id", "status"] -}; - -const ajv = new Ajv({ allErrors: true }); -ajv.addKeyword({ - keyword: "BN", - // eslint-disable-next-line @typescript-eslint/no-explicit-any - validate: (schema, data) => { - try { - BigInt(data); - return true; - } catch (e) { - return false; - } - }, - errors: true -}); - -var __async$6 = (__this, __arguments, generator) => { - return new Promise((resolve, reject) => { - var fulfilled = (value) => { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - }; - var rejected = (value) => { - try { - step(generator.throw(value)); - } catch (e) { - reject(e); - } - }; - var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); - step((generator = generator.apply(__this, __arguments)).next()); - }); -}; -class Pedersen { - constructor() { - this.pedersenPromise = this.initPedersen(); - } - initPedersen() { - return __async$6(this, null, function* () { - this.pedersenHash = yield buildPedersenHash(); - this.babyJub = this.pedersenHash.babyJub; - }); - } - unpackPoint(buffer) { - return __async$6(this, null, function* () { - var _a, _b; - yield this.pedersenPromise; - return (_b = this.babyJub) == null ? void 0 : _b.unpackPoint((_a = this.pedersenHash) == null ? void 0 : _a.hash(buffer)); - }); - } - toStringBuffer(buffer) { - var _a; - return (_a = this.babyJub) == null ? void 0 : _a.F.toString(buffer); - } -} -const pedersen = new Pedersen(); -function buffPedersenHash(buffer) { - return __async$6(this, null, function* () { - const [hash] = yield pedersen.unpackPoint(buffer); - return pedersen.toStringBuffer(hash); - }); -} - -var __async$5 = (__this, __arguments, generator) => { - return new Promise((resolve, reject) => { - var fulfilled = (value) => { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - }; - var rejected = (value) => { - try { - step(generator.throw(value)); - } catch (e) { - reject(e); - } - }; - var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); - step((generator = generator.apply(__this, __arguments)).next()); - }); -}; -function createDeposit(_0) { - return __async$5(this, arguments, function* ({ nullifier, secret }) { - const preimage = new Uint8Array([...leInt2Buff(nullifier), ...leInt2Buff(secret)]); - const noteHex = toFixedHex(bytesToBN(preimage), 62); - const commitment = BigInt(yield buffPedersenHash(preimage)); - const commitmentHex = toFixedHex(commitment); - const nullifierHash = BigInt(yield buffPedersenHash(leInt2Buff(nullifier))); - const nullifierHex = toFixedHex(nullifierHash); - return { - preimage, - noteHex, - commitment, - commitmentHex, - nullifierHash, - nullifierHex - }; - }); -} -class Deposit { - constructor({ - currency, - amount, - netId, - nullifier, - secret, - note, - noteHex, - invoice, - commitmentHex, - nullifierHex - }) { - this.currency = currency; - this.amount = amount; - this.netId = netId; - this.nullifier = nullifier; - this.secret = secret; - this.note = note; - this.noteHex = noteHex; - this.invoice = invoice; - this.commitmentHex = commitmentHex; - this.nullifierHex = nullifierHex; - } - toString() { - return JSON.stringify( - { - currency: this.currency, - amount: this.amount, - netId: this.netId, - nullifier: this.nullifier, - secret: this.secret, - note: this.note, - noteHex: this.noteHex, - invoice: this.invoice, - commitmentHex: this.commitmentHex, - nullifierHex: this.nullifierHex - }, - null, - 2 - ); - } - static createNote(_0) { - return __async$5(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) { - if (!nullifier) { - nullifier = rBigInt(31); - } - if (!secret) { - secret = rBigInt(31); - } - const depositObject = yield createDeposit({ - nullifier, - secret - }); - const newDeposit = new Deposit({ - currency: currency.toLowerCase(), - amount, - netId: Number(netId), - note: `tornado-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.noteHex}`, - 
noteHex: depositObject.noteHex, - invoice: `tornadoInvoice-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.commitmentHex}`, - nullifier, - secret, - commitmentHex: depositObject.commitmentHex, - nullifierHex: depositObject.nullifierHex - }); - return newDeposit; - }); - } - static parseNote(noteString) { - return __async$5(this, null, function* () { - const noteRegex = new RegExp("tornado-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{124})", "g"); - const match = noteRegex.exec(noteString); - if (!match) { - throw new Error("The note has invalid format"); - } - const matchGroup = match == null ? void 0 : match.groups; - const currency = matchGroup.currency.toLowerCase(); - const amount = matchGroup.amount; - const netId = Number(matchGroup.netId); - const bytes = bnToBytes("0x" + matchGroup.note); - const nullifier = BigInt(leBuff2Int(bytes.slice(0, 31)).toString()); - const secret = BigInt(leBuff2Int(bytes.slice(31, 62)).toString()); - const depositObject = yield createDeposit({ nullifier, secret }); - const invoice = `tornadoInvoice-${currency}-${amount}-${netId}-${depositObject.commitmentHex}`; - const newDeposit = new Deposit({ - currency, - amount, - netId, - note: noteString, - noteHex: depositObject.noteHex, - invoice, - nullifier, - secret, - commitmentHex: depositObject.commitmentHex, - nullifierHex: depositObject.nullifierHex - }); - return newDeposit; - }); - } -} -class Invoice { - constructor(invoiceString) { - const invoiceRegex = new RegExp("tornadoInvoice-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{64})", "g"); - const match = invoiceRegex.exec(invoiceString); - if (!match) { - throw new Error("The note has invalid format"); - } - const matchGroup = match == null ? 
void 0 : match.groups; - const currency = matchGroup.currency.toLowerCase(); - const amount = matchGroup.amount; - const netId = Number(matchGroup.netId); - this.currency = currency; - this.amount = amount; - this.netId = netId; - this.commitment = "0x" + matchGroup.commitment; - this.invoice = invoiceString; - } - toString() { - return JSON.stringify( - { - currency: this.currency, - amount: this.amount, - netId: this.netId, - commitment: this.commitment, - invoice: this.invoice - }, - null, - 2 - ); - } -} - -const DUMMY_ADDRESS = "0x1111111111111111111111111111111111111111"; -const DUMMY_NONCE = "0x1111111111111111111111111111111111111111111111111111111111111111"; -const DUMMY_WITHDRAW_DATA = "0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"; -function convertETHToTokenAmount(amountInWei, tokenPriceInWei, tokenDecimals = 18) { - const tokenDecimalsMultiplier = BigInt(10 ** Number(tokenDecimals)); - return BigInt(amountInWei) * 
tokenDecimalsMultiplier / BigInt(tokenPriceInWei); -} -class TornadoFeeOracle { - constructor(ovmGasPriceOracle) { - if (ovmGasPriceOracle) { - this.ovmGasPriceOracle = ovmGasPriceOracle; - } - } - /** - * Calculate L1 fee for op-stack chains - * - * This is required since relayers would pay the full transaction fees for users - */ - fetchL1OptimismFee(tx) { - if (!this.ovmGasPriceOracle) { - return new Promise((resolve) => resolve(BigInt(0))); - } - if (!tx) { - tx = { - type: 0, - gasLimit: 1e6, - nonce: Number(DUMMY_NONCE), - data: DUMMY_WITHDRAW_DATA, - gasPrice: parseUnits("1", "gwei"), - from: DUMMY_ADDRESS, - to: DUMMY_ADDRESS - }; - } - return this.ovmGasPriceOracle.getL1Fee.staticCall(Transaction.from(tx).unsignedSerialized); - } - /** - * We don't need to distinguish default refunds by tokens since most users interact with other defi protocols after withdrawal - * So we default with 1M gas which is enough for two or three swaps - * Using 30 gwei for default but it is recommended to supply cached gasPrice value from the UI - */ - defaultEthRefund(gasPrice, gasLimit) { - return (gasPrice ? 
BigInt(gasPrice) : parseUnits("30", "gwei")) * BigInt(gasLimit || 1e6); - } - /** - * Calculates token amount for required ethRefund purchases required to calculate fees - */ - calculateTokenAmount(ethRefund, tokenPriceInEth, tokenDecimals) { - return convertETHToTokenAmount(ethRefund, tokenPriceInEth, tokenDecimals); - } - /** - * Warning: For tokens you need to check if the fees are above denomination - * (Usually happens for small denomination pool or if the gas price is high) - */ - calculateRelayerFee({ - gasPrice, - gasLimit = 6e5, - l1Fee = 0, - denomination, - ethRefund = BigInt(0), - tokenPriceInWei, - tokenDecimals = 18, - relayerFeePercent = 0.33, - isEth = true, - premiumPercent = 20 - }) { - const gasCosts = BigInt(gasPrice) * BigInt(gasLimit) + BigInt(l1Fee); - const relayerFee = BigInt(denomination) * BigInt(Math.floor(1e4 * relayerFeePercent)) / BigInt(1e4 * 100); - if (isEth) { - return (gasCosts + relayerFee) * BigInt(premiumPercent ? 100 + premiumPercent : 100) / BigInt(100); - } - const feeInEth = gasCosts + BigInt(ethRefund); - return (convertETHToTokenAmount(feeInEth, tokenPriceInWei, tokenDecimals) + relayerFee) * BigInt(premiumPercent ? 100 + premiumPercent : 100) / BigInt(100); - } -} - -var __async$4 = (__this, __arguments, generator) => { - return new Promise((resolve, reject) => { - var fulfilled = (value) => { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - }; - var rejected = (value) => { - try { - step(generator.throw(value)); - } catch (e) { - reject(e); - } - }; - var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); - step((generator = generator.apply(__this, __arguments)).next()); - }); -}; -class Mimc { - constructor() { - this.mimcPromise = this.initMimc(); - } - initMimc() { - return __async$4(this, null, function* () { - this.sponge = yield buildMimcSponge(); - this.hash = (left, right) => { - var _a, _b; - return (_b = this.sponge) == null ? 
void 0 : _b.F.toString((_a = this.sponge) == null ? void 0 : _a.multiHash([BigInt(left), BigInt(right)])); - }; - }); - } - getHash() { - return __async$4(this, null, function* () { - yield this.mimcPromise; - return { - sponge: this.sponge, - hash: this.hash - }; - }); - } -} -const mimc = new Mimc(); - -var __async$3 = (__this, __arguments, generator) => { - return new Promise((resolve, reject) => { - var fulfilled = (value) => { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - }; - var rejected = (value) => { - try { - step(generator.throw(value)); - } catch (e) { - reject(e); - } - }; - var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); - step((generator = generator.apply(__this, __arguments)).next()); - }); -}; -class MerkleTreeService { - constructor({ - netId, - amount, - currency, - Tornado, - commitment, - merkleTreeHeight = 20, - emptyElement = "21663839004416932945382355908790599225266501822907911457504978515578255421292", - merkleWorkerPath - }) { - const instanceName = `${netId}_${currency}_${amount}`; - this.currency = currency; - this.amount = amount; - this.netId = Number(netId); - this.Tornado = Tornado; - this.instanceName = instanceName; - this.commitment = commitment; - this.merkleTreeHeight = merkleTreeHeight; - this.emptyElement = emptyElement; - this.merkleWorkerPath = merkleWorkerPath; - } - createTree(_0) { - return __async$3(this, arguments, function* ({ events }) { - const { hash: hashFunction } = yield mimc.getHash(); - if (this.merkleWorkerPath) { - console.log("Using merkleWorker\n"); - try { - if (isNode) { - const merkleWorkerPromise = new Promise((resolve, reject) => { - const worker = new Worker$1(this.merkleWorkerPath, { - workerData: { - merkleTreeHeight: this.merkleTreeHeight, - elements: events, - zeroElement: this.emptyElement - } - }); - worker.on("message", resolve); - worker.on("error", reject); - worker.on("exit", (code) => { - if (code !== 0) { - 
reject(new Error(`Worker stopped with exit code ${code}`)); - } - }); - }); - return MerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); - } else { - const merkleWorkerPromise = new Promise((resolve, reject) => { - const worker = new Worker(this.merkleWorkerPath); - worker.onmessage = (e) => { - resolve(e.data); - }; - worker.onerror = (e) => { - reject(e); - }; - worker.postMessage({ - merkleTreeHeight: this.merkleTreeHeight, - elements: events, - zeroElement: this.emptyElement - }); - }); - return MerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); - } - } catch (err) { - console.log("merkleWorker failed, falling back to synchronous merkle tree"); - console.log(err); - } - } - return new MerkleTree(this.merkleTreeHeight, events, { - zeroElement: this.emptyElement, - hashFunction - }); - }); - } - verifyTree(_0) { - return __async$3(this, arguments, function* ({ events }) { - console.log( - ` -Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCase()} would take a while -` - ); - console.time("Created tree in"); - const tree = yield this.createTree({ events: events.map(({ commitment }) => BigInt(commitment).toString()) }); - console.timeEnd("Created tree in"); - console.log(""); - const isKnownRoot = yield this.Tornado.isKnownRoot(toFixedHex(BigInt(tree.root))); - if (!isKnownRoot) { - const errMsg = `Deposit Event ${this.netId} ${this.amount} ${this.currency} is invalid`; - throw new Error(errMsg); - } - return tree; - }); - } -} - -const blockSyncInterval = 1e4; -const enabledChains = ["1", "10", "56", "100", "137", "42161", "43114", "11155111"]; +var NetId = /* @__PURE__ */ ((NetId2) => { + NetId2[NetId2["MAINNET"] = 1] = "MAINNET"; + NetId2[NetId2["BSC"] = 56] = "BSC"; + NetId2[NetId2["POLYGON"] = 137] = "POLYGON"; + NetId2[NetId2["OPTIMISM"] = 10] = "OPTIMISM"; + NetId2[NetId2["ARBITRUM"] = 42161] = "ARBITRUM"; + NetId2[NetId2["GNOSIS"] = 100] = "GNOSIS"; + NetId2[NetId2["AVALANCHE"] 
= 43114] = "AVALANCHE"; + NetId2[NetId2["SEPOLIA"] = 11155111] = "SEPOLIA"; + return NetId2; +})(NetId || {}); const theGraph = { name: "Hosted Graph", url: "https://api.thegraph.com" @@ -5868,8 +5816,8 @@ const tornado = { name: "Tornado Subgraphs", url: "https://tornadocash-rpc.com" }; -const networkConfig = { - netId1: { +const defaultConfig = { + [1 /* MAINNET */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 80, @@ -5879,11 +5827,7 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "ETH", - explorerUrl: { - tx: "https://etherscan.io/tx/", - address: "https://etherscan.io/address/", - block: "https://etherscan.io/block/" - }, + explorerUrl: "https://etherscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Ethereum Mainnet", @@ -5894,7 +5838,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, mevblockerRPC: { @@ -5922,14 +5866,19 @@ const networkConfig = { url: "https://1rpc.io/eth" } }, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", - registryContract: "0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2", echoContract: "0x9B27DD5Bb15d42DC224FCD0B7caEbBe16161Df42", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornContract: "0x77777FeDdddFfC19Ff86DB637967013e6C6A116C", + governanceContract: "0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce", + stakingRewardsContract: "0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29", + registryContract: "0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2", aggregatorContract: "0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49", reverseRecordsContract: "0x3671aE578E63FdF66ad4F3E12CC0c0d71Ac7510C", tornadoSubgraph: 
"tornadocash/mainnet-tornado-subgraph", registrySubgraph: "tornadocash/tornado-relayer-registry", + governanceSubgraph: "tornadocash/tornado-governance", subgraphs: { tornado, theGraph @@ -6011,16 +5960,12 @@ const networkConfig = { constants: { GOVERNANCE_BLOCK: 11474695, NOTE_ACCOUNT_BLOCK: 11842486, - ENCRYPTED_NOTES_BLOCK: 14248730, + ENCRYPTED_NOTES_BLOCK: 12143762, REGISTRY_BLOCK: 14173129, MINING_BLOCK_TIME: 15 - }, - "torn.contract.tornadocash.eth": "0x77777FeDdddFfC19Ff86DB637967013e6C6A116C", - "governance.contract.tornadocash.eth": "0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce", - "tornado-router.contract.tornadocash.eth": "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", - "staking-rewards.contract.tornadocash.eth": "0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29" + } }, - netId56: { + [56 /* BSC */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 5, @@ -6030,18 +5975,15 @@ const networkConfig = { }, nativeCurrency: "bnb", currencyName: "BNB", - explorerUrl: { - tx: "https://bscscan.com/tx/", - address: "https://bscscan.com/address/", - block: "https://bscscan.com/block/" - }, + explorerUrl: "https://bscscan.com", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Binance Smart Chain", deployedBlock: 8158799, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/bsc-tornado-subgraph", subgraphs: { tornado, @@ -6053,7 +5995,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/bsc" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: 
"https://bsc-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, stackup: { @@ -6086,10 +6028,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 8159269, ENCRYPTED_NOTES_BLOCK: 8159269 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId137: { + [137 /* POLYGON */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 100, @@ -6099,18 +6040,15 @@ const networkConfig = { }, nativeCurrency: "matic", currencyName: "MATIC", - explorerUrl: { - tx: "https://polygonscan.com/tx/", - address: "https://polygonscan.com/address/", - block: "https://polygonscan.com/block/" - }, + explorerUrl: "https://polygonscan.com", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Polygon (Matic) Network", deployedBlock: 16257962, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", gasPriceOracleContract: "0xF81A8D8D3581985D3969fe53bFA67074aDFa8F3C", tornadoSubgraph: "tornadocash/matic-tornado-subgraph", subgraphs: { @@ -6148,10 +6086,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 16257996, ENCRYPTED_NOTES_BLOCK: 16257996 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId10: { + [10 /* OPTIMISM */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 1e-3, @@ -6161,18 +6098,15 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "ETH", - explorerUrl: { - tx: "https://optimistic.etherscan.io/tx/", - address: "https://optimistic.etherscan.io/address/", - block: "https://optimistic.etherscan.io/block/" - }, + 
explorerUrl: "https://optimistic.etherscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Optimism", deployedBlock: 2243689, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", ovmGasPriceOracleContract: "0x420000000000000000000000000000000000000F", tornadoSubgraph: "tornadocash/optimism-tornado-subgraph", subgraphs: { @@ -6185,7 +6119,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/op" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://optimism-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, optimism: { @@ -6218,10 +6152,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 2243694, ENCRYPTED_NOTES_BLOCK: 2243694 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId42161: { + [42161 /* ARBITRUM */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 4, @@ -6231,18 +6164,15 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "ETH", - explorerUrl: { - tx: "https://arbiscan.io/tx/", - address: "https://arbiscan.io/address/", - block: "https://arbiscan.io/block/" - }, + explorerUrl: "https://arbiscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Arbitrum One", deployedBlock: 3430648, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: 
"0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/arbitrum-tornado-subgraph", subgraphs: { tornado, @@ -6254,7 +6184,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/arbitrum" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://arbitrum-one.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, arbitrum: { @@ -6287,10 +6217,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 3430605, ENCRYPTED_NOTES_BLOCK: 3430605 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId100: { + [100 /* GNOSIS */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 6, @@ -6300,18 +6229,15 @@ const networkConfig = { }, nativeCurrency: "xdai", currencyName: "xDAI", - explorerUrl: { - tx: "https://blockscout.com/xdai/mainnet/tx/", - address: "https://blockscout.com/xdai/mainnet/address/", - block: "https://blockscout.com/xdai/mainnet/block/" - }, + explorerUrl: "https://gnosisscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Gnosis Chain", deployedBlock: 17754561, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/xdai-tornado-subgraph", subgraphs: { tornado, @@ -6323,7 +6249,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/gnosis" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: 
"https://gnosis-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, gnosis: { @@ -6356,10 +6282,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 17754564, ENCRYPTED_NOTES_BLOCK: 17754564 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId43114: { + [43114 /* AVALANCHE */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 225, @@ -6369,18 +6294,15 @@ const networkConfig = { }, nativeCurrency: "avax", currencyName: "AVAX", - explorerUrl: { - tx: "https://snowtrace.io/tx/", - address: "https://snowtrace.io/address/", - block: "https://snowtrace.io/block/" - }, + explorerUrl: "https://snowtrace.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Avalanche Mainnet", deployedBlock: 4429818, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/avalanche-tornado-subgraph", subgraphs: { theGraph @@ -6415,10 +6337,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 4429813, ENCRYPTED_NOTES_BLOCK: 4429813 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId11155111: { + [11155111 /* SEPOLIA */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 2, @@ -6428,19 +6349,18 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "SepoliaETH", - explorerUrl: { - tx: "https://sepolia.etherscan.io/tx/", - address: "https://sepolia.etherscan.io/address/", - block: "https://sepolia.etherscan.io/block/" - }, + explorerUrl: "https://sepolia.etherscan.io", merkleTreeHeight: 20, 
emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Ethereum Sepolia", deployedBlock: 5594395, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + tornContract: "0x3AE6667167C0f44394106E197904519D808323cA", + governanceContract: "0xe5324cD7602eeb387418e594B87aCADee08aeCAD", + stakingRewardsContract: "0x6d0018890751Efd31feb8166711B16732E2b496b", registryContract: "0x1428e5d2356b13778A13108b10c440C83011dfB8", - echoContract: "0xcDD1fc3F5ac2782D83449d3AbE80D6b7B273B0e5", aggregatorContract: "0x4088712AC9fad39ea133cdb9130E465d235e9642", reverseRecordsContract: "0xEc29700C0283e5Be64AcdFe8077d6cC95dE23C23", tornadoSubgraph: "tornadocash/sepolia-tornado-subgraph", @@ -6493,13 +6413,806 @@ const networkConfig = { NOTE_ACCOUNT_BLOCK: 5594395, ENCRYPTED_NOTES_BLOCK: 5594395, MINING_BLOCK_TIME: 15 - }, - "torn.contract.tornadocash.eth": "0x3AE6667167C0f44394106E197904519D808323cA", - "governance.contract.tornadocash.eth": "0xe5324cD7602eeb387418e594B87aCADee08aeCAD", - "tornado-router.contract.tornadocash.eth": "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee" + } } }; -const subdomains = enabledChains.map((chain) => networkConfig[`netId${chain}`].ensSubdomainKey); +const enabledChains = Object.values(NetId); +let customConfig = {}; +function addNetwork(newConfig) { + enabledChains.push( + ...Object.keys(newConfig).map((netId) => Number(netId)).filter((netId) => !enabledChains.includes(netId)) + ); + customConfig = __spreadValues$2(__spreadValues$2({}, customConfig), newConfig); +} +function getNetworkConfig() { + const allConfig = __spreadValues$2(__spreadValues$2({}, defaultConfig), customConfig); + return enabledChains.reduce((acc, curr) => { + acc[curr] = allConfig[curr]; + return acc; + }, {}); +} +function getConfig(netId) 
{ + const allConfig = getNetworkConfig(); + const chainConfig = allConfig[netId]; + if (!chainConfig) { + const errMsg = `No config found for network ${netId}!`; + throw new Error(errMsg); + } + return chainConfig; +} +function getInstanceByAddress({ netId, address }) { + const { tokens } = getConfig(netId); + for (const [currency, { instanceAddress }] of Object.entries(tokens)) { + for (const [amount, instance] of Object.entries(instanceAddress)) { + if (instance === address) { + return { + amount, + currency + }; + } + } + } +} +function getSubdomains() { + const allConfig = getNetworkConfig(); + return enabledChains.map((chain) => allConfig[chain].ensSubdomainKey); +} + +const addressType = { type: "string", pattern: "^0x[a-fA-F0-9]{40}$" }; +const bnType = { type: "string", BN: true }; +const statusSchema = { + type: "object", + properties: { + rewardAccount: addressType, + gasPrices: { + type: "object", + properties: { + fast: { type: "number" }, + additionalProperties: { type: "number" } + }, + required: ["fast"] + }, + netId: { type: "integer" }, + tornadoServiceFee: { type: "number", maximum: 20, minimum: 0 }, + latestBlock: { type: "number" }, + version: { type: "string" }, + health: { + type: "object", + properties: { + status: { const: "true" }, + error: { type: "string" } + }, + required: ["status"] + }, + currentQueue: { type: "number" } + }, + required: ["rewardAccount", "instances", "netId", "tornadoServiceFee", "version", "health"] +}; +function getStatusSchema(netId, config) { + const { tokens, optionalTokens = [], nativeCurrency } = config; + const schema = JSON.parse(JSON.stringify(statusSchema)); + const instances = Object.keys(tokens).reduce( + (acc, token) => { + const { instanceAddress, tokenAddress, symbol, decimals, optionalInstances = [] } = tokens[token]; + const amounts = Object.keys(instanceAddress); + const instanceProperties = { + type: "object", + properties: { + instanceAddress: { + type: "object", + properties: 
amounts.reduce((acc2, cur) => { + acc2[cur] = addressType; + return acc2; + }, {}), + required: amounts.filter((amount) => !optionalInstances.includes(amount)) + }, + decimals: { enum: [decimals] } + }, + required: ["instanceAddress", "decimals"].concat( + tokenAddress ? ["tokenAddress"] : [], + symbol ? ["symbol"] : [] + ) + }; + if (tokenAddress) { + instanceProperties.properties.tokenAddress = addressType; + } + if (symbol) { + instanceProperties.properties.symbol = { enum: [symbol] }; + } + acc.properties[token] = instanceProperties; + if (!optionalTokens.includes(token)) { + acc.required.push(token); + } + return acc; + }, + { + type: "object", + properties: {}, + required: [] + } + ); + schema.properties.instances = instances; + if (netId === NetId.MAINNET) { + const _tokens = Object.keys(tokens).filter((t) => t !== nativeCurrency); + const ethPrices = { + type: "object", + properties: _tokens.reduce((acc, token) => { + acc[token] = bnType; + return acc; + }, {}) + // required: _tokens + }; + schema.properties.ethPrices = ethPrices; + } + return schema; +} + +const jobsSchema = { + type: "object", + properties: { + error: { type: "string" }, + id: { type: "string" }, + type: { type: "string" }, + status: { type: "string" }, + contract: { type: "string" }, + proof: { type: "string" }, + args: { + type: "array", + items: { type: "string" } + }, + txHash: { type: "string" }, + confirmations: { type: "number" }, + failedReason: { type: "string" } + }, + required: ["id", "status"] +}; + +const ajv = new Ajv({ allErrors: true }); +ajv.addKeyword({ + keyword: "BN", + // eslint-disable-next-line @typescript-eslint/no-explicit-any + validate: (schema, data) => { + try { + BigInt(data); + return true; + } catch (e) { + return false; + } + }, + errors: true +}); + +var __async$7 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + 
var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +class Pedersen { + constructor() { + this.pedersenPromise = this.initPedersen(); + } + initPedersen() { + return __async$7(this, null, function* () { + this.pedersenHash = yield buildPedersenHash(); + this.babyJub = this.pedersenHash.babyJub; + }); + } + unpackPoint(buffer) { + return __async$7(this, null, function* () { + var _a, _b; + yield this.pedersenPromise; + return (_b = this.babyJub) == null ? void 0 : _b.unpackPoint((_a = this.pedersenHash) == null ? void 0 : _a.hash(buffer)); + }); + } + toStringBuffer(buffer) { + var _a; + return (_a = this.babyJub) == null ? void 0 : _a.F.toString(buffer); + } +} +const pedersen = new Pedersen(); +function buffPedersenHash(buffer) { + return __async$7(this, null, function* () { + const [hash] = yield pedersen.unpackPoint(buffer); + return pedersen.toStringBuffer(hash); + }); +} + +var __async$6 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +function createDeposit(_0) { + return __async$6(this, arguments, function* ({ nullifier, secret }) { + const preimage = new Uint8Array([...leInt2Buff(nullifier), ...leInt2Buff(secret)]); + const noteHex = toFixedHex(bytesToBN(preimage), 62); + const commitment = BigInt(yield buffPedersenHash(preimage)); + const commitmentHex = toFixedHex(commitment); + const nullifierHash = BigInt(yield buffPedersenHash(leInt2Buff(nullifier))); + const nullifierHex = toFixedHex(nullifierHash); + return { + preimage, + noteHex, + commitment, + commitmentHex, + nullifierHash, + nullifierHex + }; + }); +} +class Deposit { + constructor({ + currency, + amount, + netId, + nullifier, + secret, + note, + noteHex, + invoice, + commitmentHex, + nullifierHex + }) { + this.currency = currency; + this.amount = amount; + this.netId = netId; + this.nullifier = nullifier; + this.secret = secret; + this.note = note; + this.noteHex = noteHex; + this.invoice = invoice; + this.commitmentHex = commitmentHex; + this.nullifierHex = nullifierHex; + } + toString() { + return JSON.stringify( + { + currency: this.currency, + amount: this.amount, + netId: this.netId, + nullifier: this.nullifier, + secret: this.secret, + note: this.note, + noteHex: this.noteHex, + invoice: this.invoice, + commitmentHex: this.commitmentHex, + nullifierHex: this.nullifierHex + }, + null, + 2 + ); + } + static createNote(_0) { + return __async$6(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) { + if (!nullifier) { + nullifier = rBigInt(31); + } + if (!secret) { + secret = rBigInt(31); + } + const depositObject = yield createDeposit({ + nullifier, + secret + }); + const newDeposit = new Deposit({ + currency: currency.toLowerCase(), + amount, + netId, + note: `tornado-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.noteHex}`, + noteHex: 
depositObject.noteHex, + invoice: `tornadoInvoice-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.commitmentHex}`, + nullifier, + secret, + commitmentHex: depositObject.commitmentHex, + nullifierHex: depositObject.nullifierHex + }); + return newDeposit; + }); + } + static parseNote(noteString) { + return __async$6(this, null, function* () { + const noteRegex = new RegExp("tornado-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{124})", "g"); + const match = noteRegex.exec(noteString); + if (!match) { + throw new Error("The note has invalid format"); + } + const matchGroup = match == null ? void 0 : match.groups; + const currency = matchGroup.currency.toLowerCase(); + const amount = matchGroup.amount; + const netId = Number(matchGroup.netId); + const bytes = bnToBytes("0x" + matchGroup.note); + const nullifier = BigInt(leBuff2Int(bytes.slice(0, 31)).toString()); + const secret = BigInt(leBuff2Int(bytes.slice(31, 62)).toString()); + const depositObject = yield createDeposit({ nullifier, secret }); + const invoice = `tornadoInvoice-${currency}-${amount}-${netId}-${depositObject.commitmentHex}`; + const newDeposit = new Deposit({ + currency, + amount, + netId, + note: noteString, + noteHex: depositObject.noteHex, + invoice, + nullifier, + secret, + commitmentHex: depositObject.commitmentHex, + nullifierHex: depositObject.nullifierHex + }); + return newDeposit; + }); + } +} +class Invoice { + constructor(invoiceString) { + const invoiceRegex = new RegExp("tornadoInvoice-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{64})", "g"); + const match = invoiceRegex.exec(invoiceString); + if (!match) { + throw new Error("The note has invalid format"); + } + const matchGroup = match == null ? 
void 0 : match.groups; + const currency = matchGroup.currency.toLowerCase(); + const amount = matchGroup.amount; + const netId = Number(matchGroup.netId); + this.currency = currency; + this.amount = amount; + this.netId = netId; + this.commitment = "0x" + matchGroup.commitment; + this.invoice = invoiceString; + } + toString() { + return JSON.stringify( + { + currency: this.currency, + amount: this.amount, + netId: this.netId, + commitment: this.commitment, + invoice: this.invoice + }, + null, + 2 + ); + } +} + +function packEncryptedMessage({ nonce, ephemPublicKey, ciphertext }) { + const nonceBuf = toFixedHex(bytesToHex(base64ToBytes(nonce)), 24); + const ephemPublicKeyBuf = toFixedHex(bytesToHex(base64ToBytes(ephemPublicKey)), 32); + const ciphertextBuf = bytesToHex(base64ToBytes(ciphertext)); + const messageBuff = concatBytes(hexToBytes(nonceBuf), hexToBytes(ephemPublicKeyBuf), hexToBytes(ciphertextBuf)); + return bytesToHex(messageBuff); +} +function unpackEncryptedMessage(encryptedMessage) { + const messageBuff = hexToBytes(encryptedMessage); + const nonceBuf = bytesToBase64(messageBuff.slice(0, 24)); + const ephemPublicKeyBuf = bytesToBase64(messageBuff.slice(24, 56)); + const ciphertextBuf = bytesToBase64(messageBuff.slice(56)); + return { + messageBuff: bytesToHex(messageBuff), + version: "x25519-xsalsa20-poly1305", + nonce: nonceBuf, + ephemPublicKey: ephemPublicKeyBuf, + ciphertext: ciphertextBuf + }; +} +class NoteAccount { + constructor({ netId, blockNumber, recoveryKey, Echoer: Echoer2 }) { + if (!recoveryKey) { + recoveryKey = bytesToHex(crypto.getRandomValues(new Uint8Array(32))).slice(2); + } + this.netId = Math.floor(Number(netId)); + this.blockNumber = blockNumber; + this.recoveryKey = recoveryKey; + this.recoveryAddress = computeAddress("0x" + recoveryKey); + this.recoveryPublicKey = getEncryptionPublicKey(recoveryKey); + this.Echoer = Echoer2; + } + /** + * Intends to mock eth_getEncryptionPublicKey behavior from MetaMask + * In order to make 
the recoveryKey retrival from Echoer possible from the bare private key + */ + static getWalletPublicKey(wallet) { + let { privateKey } = wallet; + if (privateKey.startsWith("0x")) { + privateKey = privateKey.replace("0x", ""); + } + return getEncryptionPublicKey(privateKey); + } + // This function intends to provide an encrypted value of recoveryKey for an on-chain Echoer backup purpose + // Thus, the pubKey should be derived by a Wallet instance or from Web3 wallets + // pubKey: base64 encoded 32 bytes key from https://docs.metamask.io/wallet/reference/eth_getencryptionpublickey/ + getEncryptedAccount(walletPublicKey) { + const encryptedData = encrypt({ + publicKey: walletPublicKey, + data: this.recoveryKey, + version: "x25519-xsalsa20-poly1305" + }); + const data = packEncryptedMessage(encryptedData); + return { + // Use this later to save hexPrivateKey generated with + // Buffer.from(JSON.stringify(encryptedData)).toString('hex') + // As we don't use buffer with this library we should leave UI to do the rest + encryptedData, + // Data that could be used as an echo(data) params + data + }; + } + /** + * Decrypt Echoer backuped note encryption account with private keys + */ + decryptAccountsWithWallet(wallet, events) { + let { privateKey } = wallet; + if (privateKey.startsWith("0x")) { + privateKey = privateKey.replace("0x", ""); + } + const decryptedEvents = []; + for (const event of events) { + try { + const unpackedMessage = unpackEncryptedMessage(event.encryptedAccount); + const recoveryKey = decrypt({ + encryptedData: unpackedMessage, + privateKey + }); + decryptedEvents.push( + new NoteAccount({ + netId: this.netId, + blockNumber: event.blockNumber, + recoveryKey, + Echoer: this.Echoer + }) + ); + } catch (e) { + continue; + } + } + return decryptedEvents; + } + decryptNotes(events) { + const decryptedEvents = []; + for (const event of events) { + try { + const unpackedMessage = unpackEncryptedMessage(event.encryptedNote); + const [address, noteHex] = 
decrypt({ + encryptedData: unpackedMessage, + privateKey: this.recoveryKey + }).split("-"); + decryptedEvents.push({ + blockNumber: event.blockNumber, + address: getAddress(address), + noteHex + }); + } catch (e) { + continue; + } + } + return decryptedEvents; + } + encryptNote({ address, noteHex }) { + const encryptedData = encrypt({ + publicKey: this.recoveryPublicKey, + data: `${address}-${noteHex}`, + version: "x25519-xsalsa20-poly1305" + }); + return packEncryptedMessage(encryptedData); + } +} + +const DUMMY_ADDRESS = "0x1111111111111111111111111111111111111111"; +const DUMMY_NONCE = "0x1111111111111111111111111111111111111111111111111111111111111111"; +const DUMMY_WITHDRAW_DATA = "0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111"; +function convertETHToTokenAmount(amountInWei, tokenPriceInWei, tokenDecimals = 18) { + const tokenDecimalsMultiplier = BigInt(10 ** Number(tokenDecimals)); + return BigInt(amountInWei) * 
tokenDecimalsMultiplier / BigInt(tokenPriceInWei); +} +class TornadoFeeOracle { + constructor(ovmGasPriceOracle) { + if (ovmGasPriceOracle) { + this.ovmGasPriceOracle = ovmGasPriceOracle; + } + } + /** + * Calculate L1 fee for op-stack chains + * + * This is required since relayers would pay the full transaction fees for users + */ + fetchL1OptimismFee(tx) { + if (!this.ovmGasPriceOracle) { + return new Promise((resolve) => resolve(BigInt(0))); + } + if (!tx) { + tx = { + type: 0, + gasLimit: 1e6, + nonce: Number(DUMMY_NONCE), + data: DUMMY_WITHDRAW_DATA, + gasPrice: parseUnits("1", "gwei"), + from: DUMMY_ADDRESS, + to: DUMMY_ADDRESS + }; + } + return this.ovmGasPriceOracle.getL1Fee.staticCall(Transaction.from(tx).unsignedSerialized); + } + /** + * We don't need to distinguish default refunds by tokens since most users interact with other defi protocols after withdrawal + * So we default with 1M gas which is enough for two or three swaps + * Using 30 gwei for default but it is recommended to supply cached gasPrice value from the UI + */ + defaultEthRefund(gasPrice, gasLimit) { + return (gasPrice ? 
BigInt(gasPrice) : parseUnits("30", "gwei")) * BigInt(gasLimit || 1e6); + } + /** + * Calculates token amount for required ethRefund purchases required to calculate fees + */ + calculateTokenAmount(ethRefund, tokenPriceInEth, tokenDecimals) { + return convertETHToTokenAmount(ethRefund, tokenPriceInEth, tokenDecimals); + } + /** + * Warning: For tokens you need to check if the fees are above denomination + * (Usually happens for small denomination pool or if the gas price is high) + */ + calculateRelayerFee({ + gasPrice, + gasLimit = 6e5, + l1Fee = 0, + denomination, + ethRefund = BigInt(0), + tokenPriceInWei, + tokenDecimals = 18, + relayerFeePercent = 0.33, + isEth = true, + premiumPercent = 20 + }) { + const gasCosts = BigInt(gasPrice) * BigInt(gasLimit) + BigInt(l1Fee); + const relayerFee = BigInt(denomination) * BigInt(Math.floor(1e4 * relayerFeePercent)) / BigInt(1e4 * 100); + if (isEth) { + return (gasCosts + relayerFee) * BigInt(premiumPercent ? 100 + premiumPercent : 100) / BigInt(100); + } + const feeInEth = gasCosts + BigInt(ethRefund); + return (convertETHToTokenAmount(feeInEth, tokenPriceInWei, tokenDecimals) + relayerFee) * BigInt(premiumPercent ? 100 + premiumPercent : 100) / BigInt(100); + } +} + +var __async$5 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +class Mimc { + constructor() { + this.mimcPromise = this.initMimc(); + } + initMimc() { + return __async$5(this, null, function* () { + this.sponge = yield buildMimcSponge(); + this.hash = (left, right) => { + var _a, _b; + return (_b = this.sponge) == null ? 
void 0 : _b.F.toString((_a = this.sponge) == null ? void 0 : _a.multiHash([BigInt(left), BigInt(right)])); + }; + }); + } + getHash() { + return __async$5(this, null, function* () { + yield this.mimcPromise; + return { + sponge: this.sponge, + hash: this.hash + }; + }); + } +} +const mimc = new Mimc(); + +var __async$4 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +class MerkleTreeService { + constructor({ + netId, + amount, + currency, + Tornado, + commitmentHex, + merkleTreeHeight = 20, + emptyElement = "21663839004416932945382355908790599225266501822907911457504978515578255421292", + merkleWorkerPath + }) { + const instanceName = `${netId}_${currency}_${amount}`; + this.currency = currency; + this.amount = amount; + this.netId = Number(netId); + this.Tornado = Tornado; + this.instanceName = instanceName; + this.commitmentHex = commitmentHex; + this.merkleTreeHeight = merkleTreeHeight; + this.emptyElement = emptyElement; + this.merkleWorkerPath = merkleWorkerPath; + } + createTree(events) { + return __async$4(this, null, function* () { + const { hash: hashFunction } = yield mimc.getHash(); + if (this.merkleWorkerPath) { + console.log("Using merkleWorker\n"); + try { + if (isNode) { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new Worker$1(this.merkleWorkerPath, { + workerData: { + merkleTreeHeight: this.merkleTreeHeight, + elements: events, + zeroElement: this.emptyElement + } + }); + worker.on("message", resolve); + worker.on("error", reject); + worker.on("exit", (code) => { + if (code !== 0) { + 
reject(new Error(`Worker stopped with exit code ${code}`)); + } + }); + }); + return MerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } else { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new Worker(this.merkleWorkerPath); + worker.onmessage = (e) => { + resolve(e.data); + }; + worker.onerror = (e) => { + reject(e); + }; + worker.postMessage({ + merkleTreeHeight: this.merkleTreeHeight, + elements: events, + zeroElement: this.emptyElement + }); + }); + return MerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } + } catch (err) { + console.log("merkleWorker failed, falling back to synchronous merkle tree"); + console.log(err); + } + } + return new MerkleTree(this.merkleTreeHeight, events, { + zeroElement: this.emptyElement, + hashFunction + }); + }); + } + createPartialTree(_0) { + return __async$4(this, arguments, function* ({ edge, elements }) { + const { hash: hashFunction } = yield mimc.getHash(); + if (this.merkleWorkerPath) { + console.log("Using merkleWorker\n"); + try { + if (isNode) { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new Worker$1(this.merkleWorkerPath, { + workerData: { + merkleTreeHeight: this.merkleTreeHeight, + edge, + elements, + zeroElement: this.emptyElement + } + }); + worker.on("message", resolve); + worker.on("error", reject); + worker.on("exit", (code) => { + if (code !== 0) { + reject(new Error(`Worker stopped with exit code ${code}`)); + } + }); + }); + return PartialMerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } else { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new Worker(this.merkleWorkerPath); + worker.onmessage = (e) => { + resolve(e.data); + }; + worker.onerror = (e) => { + reject(e); + }; + worker.postMessage({ + merkleTreeHeight: this.merkleTreeHeight, + edge, + elements, + zeroElement: this.emptyElement + }); + }); + return 
PartialMerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } + } catch (err) { + console.log("merkleWorker failed, falling back to synchronous merkle tree"); + console.log(err); + } + } + return new PartialMerkleTree(this.merkleTreeHeight, edge, elements, { + zeroElement: this.emptyElement, + hashFunction + }); + }); + } + verifyTree(events) { + return __async$4(this, null, function* () { + console.log( + ` +Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCase()} would take a while +` + ); + console.time("Created tree in"); + const tree = yield this.createTree(events.map(({ commitment }) => commitment)); + console.timeEnd("Created tree in"); + console.log(""); + const isKnownRoot = yield this.Tornado.isKnownRoot(toFixedHex(BigInt(tree.root))); + if (!isKnownRoot) { + const errMsg = `Deposit Event ${this.netId} ${this.amount} ${this.currency} is invalid`; + throw new Error(errMsg); + } + return tree; + }); + } +} function parseNumber(value) { if (!value || isNaN(Number(value))) { @@ -6554,6 +7267,17 @@ function parseKey(value) { } return value; } +function parseRecoveryKey(value) { + if (!value) { + throw new InvalidArgumentError("Invalid Recovery Key"); + } + try { + computeAddress("0x" + value); + } catch (e) { + throw new InvalidArgumentError("Invalid Recovery Key"); + } + return value; +} class TokenPriceOracle { constructor(provider, multicall2, oracle) { @@ -6576,26 +7300,26 @@ class TokenPriceOracle { } } -var __defProp = Object.defineProperty; -var __defProps = Object.defineProperties; -var __getOwnPropDescs = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols = Object.getOwnPropertySymbols; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __propIsEnum = Object.prototype.propertyIsEnumerable; -var __defNormalProp = (obj, key, value) => key in obj ? 
__defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues = (a, b) => { +var __defProp$1 = Object.defineProperty; +var __defProps$1 = Object.defineProperties; +var __getOwnPropDescs$1 = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols; +var __hasOwnProp$1 = Object.prototype.hasOwnProperty; +var __propIsEnum$1 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$1 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp.call(b, prop)) - __defNormalProp(a, prop, b[prop]); - if (__getOwnPropSymbols) - for (var prop of __getOwnPropSymbols(b)) { - if (__propIsEnum.call(b, prop)) - __defNormalProp(a, prop, b[prop]); + if (__hasOwnProp$1.call(b, prop)) + __defNormalProp$1(a, prop, b[prop]); + if (__getOwnPropSymbols$1) + for (var prop of __getOwnPropSymbols$1(b)) { + if (__propIsEnum$1.call(b, prop)) + __defNormalProp$1(a, prop, b[prop]); } return a; }; -var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b)); -var __async$2 = (__this, __arguments, generator) => { +var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b)); +var __async$3 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -6623,11 +7347,11 @@ function parseSemanticVersion(version) { } function isRelayerUpdated(relayerVersion, netId) { const { major, patch, prerelease } = parseSemanticVersion(relayerVersion); - const requiredMajor = netId === 1 ? "4" : "5"; + const requiredMajor = netId === NetId.MAINNET ? 
"4" : "5"; const isUpdatedMajor = major === requiredMajor; if (prerelease) return false; - return isUpdatedMajor && (Number(patch) >= 5 || Number(netId) !== 1); + return isUpdatedMajor && (Number(patch) >= 5 || netId !== NetId.MAINNET); } function calculateScore({ stakeBalance, tornadoServiceFee }, minFee = 0.33, maxFee = 0.53) { if (tornadoServiceFee < minFee) { @@ -6649,9 +7373,15 @@ function getWeightRandom(weightsScores, random) { } return Math.floor(Math.random() * weightsScores.length); } +function getSupportedInstances(instanceList) { + const rawList = Object.values(instanceList).map(({ instanceAddress }) => { + return Object.values(instanceAddress); + }).flat(); + return rawList.map((l) => getAddress(l)); +} function pickWeightedRandomRelayer(relayers, netId) { let minFee, maxFee; - if (Number(netId) !== 1) { + if (netId !== NetId.MAINNET) { minFee = 0.01; maxFee = 0.3; } @@ -6665,19 +7395,19 @@ function pickWeightedRandomRelayer(relayers, netId) { } class RelayerClient { constructor({ netId, config, Aggregator, fetchDataOptions: fetchDataOptions2 }) { - this.netId = Number(netId); + this.netId = netId; this.config = config; this.Aggregator = Aggregator; this.fetchDataOptions = fetchDataOptions2; } askRelayerStatus(_0) { - return __async$2(this, arguments, function* ({ + return __async$3(this, arguments, function* ({ hostname, relayerAddress }) { var _a, _b; const url = `https://${!hostname.endsWith("/") ? 
hostname + "/" : hostname}`; - const rawStatus = yield fetchData(`${url}status`, __spreadProps(__spreadValues({}, this.fetchDataOptions), { + const rawStatus = yield fetchData(`${url}status`, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), { headers: { "Content-Type": "application/json, application/x-www-form-urlencoded" }, @@ -6688,7 +7418,7 @@ class RelayerClient { if (!statusValidator(rawStatus)) { throw new Error("Invalid status schema"); } - const status = __spreadProps(__spreadValues({}, rawStatus), { + const status = __spreadProps$1(__spreadValues$1({}, rawStatus), { url }); if (status.currentQueue > 5) { @@ -6697,7 +7427,7 @@ class RelayerClient { if (status.netId !== this.netId) { throw new Error("This relayer serves a different network"); } - if (relayerAddress && this.netId === 1 && status.rewardAccount !== relayerAddress) { + if (relayerAddress && this.netId === NetId.MAINNET && status.rewardAccount !== relayerAddress) { throw new Error("The Relayer reward address must match registered address"); } if (!isRelayerUpdated(status.version, this.netId)) { @@ -6707,7 +7437,8 @@ class RelayerClient { }); } filterRelayer(curr, relayer, subdomains, debugRelayer = false) { - return __async$2(this, null, function* () { + return __async$3(this, null, function* () { + var _a; const { ensSubdomainKey } = this.config; const subdomainIndex = subdomains.indexOf(ensSubdomainKey); const mainnetSubdomain = curr.records[0]; @@ -6728,7 +7459,9 @@ class RelayerClient { ensName, stakeBalance, relayerAddress, - rewardAccount: status.rewardAccount, + rewardAccount: getAddress(status.rewardAccount), + instances: getSupportedInstances(status.instances), + gasPrice: (_a = status.gasPrices) == null ? 
void 0 : _a.fast, ethPrices: status.ethPrices, currentQueue: status.currentQueue, tornadoServiceFee: status.tornadoServiceFee @@ -6757,7 +7490,7 @@ class RelayerClient { }); } getValidRelayers(relayers, subdomains, debugRelayer = false) { - return __async$2(this, null, function* () { + return __async$3(this, null, function* () { const relayersSet = /* @__PURE__ */ new Set(); const uniqueRelayers = relayers.reverse().filter(({ ensName }) => { if (!relayersSet.has(ensName)) { @@ -6788,9 +7521,9 @@ class RelayerClient { return pickWeightedRandomRelayer(relayers, this.netId); } tornadoWithdraw(_0) { - return __async$2(this, arguments, function* ({ contract, proof, args }) { + return __async$3(this, arguments, function* ({ contract, proof, args }) { const { url } = this.selectedRelayer; - const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps(__spreadValues({}, this.fetchDataOptions), { + const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), { method: "POST", headers: { "Content-Type": "application/json" @@ -6810,7 +7543,7 @@ class RelayerClient { console.log(`Job submitted: ${jobUrl} `); while (!relayerStatus || !["FAILED", "CONFIRMED"].includes(relayerStatus)) { - const jobResponse = yield fetchData(jobUrl, __spreadProps(__spreadValues({}, this.fetchDataOptions), { + const jobResponse = yield fetchData(jobUrl, __spreadProps$1(__spreadValues$1({}, this.fetchDataOptions), { method: "GET", headers: { "Content-Type": "application/json" @@ -6850,7 +7583,7 @@ class RelayerClient { } } -var __async$1 = (__this, __arguments, generator) => { +var __async$2 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -6871,7 +7604,7 @@ var __async$1 = (__this, __arguments, generator) => { }); }; function getTokenBalances(_0) { - return __async$1(this, arguments, function* ({ + return __async$2(this, arguments, 
function* ({ provider, Multicall: Multicall2, currencyName, @@ -6934,6 +7667,120 @@ function getTokenBalances(_0) { }); } +var __defProp = Object.defineProperty; +var __defProps = Object.defineProperties; +var __getOwnPropDescs = Object.getOwnPropertyDescriptors; +var __getOwnPropSymbols = Object.getOwnPropertySymbols; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __propIsEnum = Object.prototype.propertyIsEnumerable; +var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp.call(b, prop)) + __defNormalProp(a, prop, b[prop]); + if (__getOwnPropSymbols) + for (var prop of __getOwnPropSymbols(b)) { + if (__propIsEnum.call(b, prop)) + __defNormalProp(a, prop, b[prop]); + } + return a; +}; +var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b)); +var __objRest = (source, exclude) => { + var target = {}; + for (var prop in source) + if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0) + target[prop] = source[prop]; + if (source != null && __getOwnPropSymbols) + for (var prop of __getOwnPropSymbols(source)) { + if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop)) + target[prop] = source[prop]; + } + return target; +}; +var __async$1 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +class TreeCache { + constructor({ netId, amount, currency, userDirectory, PARTS_COUNT = 4 }) { + this.netId = netId; + this.amount = amount; + this.currency = currency; + this.userDirectory = userDirectory; + this.PARTS_COUNT = PARTS_COUNT; + } + getInstanceName() { + return `deposits_${this.netId}_${this.currency}_${this.amount}`; + } + createTree(events, tree) { + return __async$1(this, null, function* () { + const bloom = new BloomFilter(events.length); + console.log(`Creating cached tree for ${this.getInstanceName()} +`); + const eventsData = events.reduce( + (acc, _a, i) => { + var _b = _a, { leafIndex, commitment } = _b, rest = __objRest(_b, ["leafIndex", "commitment"]); + if (leafIndex !== i) { + throw new Error(`leafIndex (${leafIndex}) !== i (${i})`); + } + acc[commitment] = __spreadProps(__spreadValues({}, rest), { leafIndex }); + return acc; + }, + {} + ); + const slices = tree.getTreeSlices(this.PARTS_COUNT); + yield Promise.all( + slices.map((slice, index) => __async$1(this, null, function* () { + const metadata = slice.elements.reduce((acc, curr) => { + if (index < this.PARTS_COUNT - 1) { + bloom.add(curr); + } + acc.push(eventsData[curr]); + return acc; + }, []); + const dataString2 = JSON.stringify( + __spreadProps(__spreadValues({}, slice), { + metadata + }), + null, + 2 + ) + "\n"; + const fileName2 = `${this.getInstanceName()}_slice${index + 1}.json`; + yield saveUserFile({ + fileName: fileName2, + userDirectory: this.userDirectory, + dataString: dataString2 + }); + })) + ); + const dataString = bloom.serialize() + "\n"; + const fileName = `${this.getInstanceName()}_bloom.json`; + yield saveUserFile({ + fileName, + userDirectory: this.userDirectory, + dataString + }); + }); + } +} + var __async = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) 
=> { @@ -6992,4 +7839,4 @@ function calculateSnarkProof(input, circuit, provingKey) { }); } -export { BaseDepositsService, BaseEncryptedNotesService, BaseEventsService, BaseGovernanceService, BaseRegistryService, BatchBlockService, BatchEventsService, BatchTransactionService, DEPOSIT, Deposit, ENS__factory, ERC20__factory, GET_DEPOSITS, GET_ENCRYPTED_NOTES, GET_NOTE_ACCOUNTS, GET_REGISTERED, GET_STATISTIC, GET_WITHDRAWALS, GasPriceOracle__factory, Invoice, MIN_STAKE_BALANCE, MerkleTreeService, Mimc, Multicall__factory, NodeDepositsService, NodeEncryptedNotesService, NodeGovernanceService, NodeRegistryService, OffchainOracle__factory, OvmGasPriceOracle__factory, Pedersen, RelayerClient, ReverseRecords__factory, TokenPriceOracle, TornadoBrowserProvider, TornadoFeeOracle, TornadoRpcSigner, TornadoVoidSigner, TornadoWallet, WITHDRAWAL, _META, ajv, base64ToBytes, bigIntReplacer, blockSyncInterval, bnToBytes, buffPedersenHash, bufferToBytes, bytesToBN, bytesToBase64, bytesToHex, calculateScore, calculateSnarkProof, chunk, convertETHToTokenAmount, createDeposit, defaultUserAgent, download, enabledChains, existsAsync, index as factories, fetch, fetchData, fetchGetUrlFunc, getAllDeposits, getAllEncryptedNotes, getAllRegisters, getAllWithdrawals, getDeposits, getEncryptedNotes, getGasOraclePlugin, getHttpAgent, getMeta, getNoteAccounts, getProvider, getProviderWithNetId, getRegisters, getStatistic, getStatusSchema, getTokenBalances, getWeightRandom, getWithdrawals, isNode, isRelayerUpdated, jobsSchema, leBuff2Int, leInt2Buff, loadCachedEvents, loadSavedEvents, mimc, multicall, networkConfig, parseAddress, parseKey, parseMnemonic, parseNumber, parseRelayer, parseSemanticVersion, parseUrl, pedersen, pickWeightedRandomRelayer, populateTransaction, queryGraph, rBigInt, saveEvents, sleep, subdomains, substring, toFixedHex, toFixedLength, unzipAsync, validateUrl, zipAsync }; +export { BaseDepositsService, BaseEchoService, BaseEncryptedNotesService, BaseEventsService, 
BaseGovernanceService, BaseRegistryService, BatchBlockService, BatchEventsService, BatchTransactionService, DEPOSIT, Deposit, ENS__factory, ERC20__factory, GET_DEPOSITS, GET_ECHO_EVENTS, GET_ENCRYPTED_NOTES, GET_GOVERNANCE_APY, GET_GOVERNANCE_EVENTS, GET_NOTE_ACCOUNTS, GET_REGISTERED, GET_STATISTIC, GET_WITHDRAWALS, GasPriceOracle__factory, Invoice, MIN_STAKE_BALANCE, MerkleTreeService, Mimc, Multicall__factory, NetId, NodeDepositsService, NodeEchoService, NodeEncryptedNotesService, NodeGovernanceService, NodeRegistryService, NoteAccount, OffchainOracle__factory, OvmGasPriceOracle__factory, Pedersen, RelayerClient, ReverseRecords__factory, TokenPriceOracle, TornadoBrowserProvider, TornadoFeeOracle, TornadoRpcSigner, TornadoVoidSigner, TornadoWallet, TreeCache, WITHDRAWAL, _META, addNetwork, ajv, base64ToBytes, bigIntReplacer, bnToBytes, buffPedersenHash, bufferToBytes, bytesToBN, bytesToBase64, bytesToHex, calculateScore, calculateSnarkProof, chunk, concatBytes, convertETHToTokenAmount, createDeposit, crypto, customConfig, defaultConfig, defaultUserAgent, download, enabledChains, existsAsync, index as factories, fetch, fetchData, fetchGetUrlFunc, getAllDeposits, getAllEncryptedNotes, getAllGovernanceEvents, getAllGraphEchoEvents, getAllRegisters, getAllWithdrawals, getConfig, getDeposits, getEncryptedNotes, getGasOraclePlugin, getGovernanceEvents, getGraphEchoEvents, getHttpAgent, getInstanceByAddress, getMeta, getNetworkConfig, getNoteAccounts, getProvider, getProviderWithNetId, getRegisters, getStatistic, getStatusSchema, getSubdomains, getSupportedInstances, getTokenBalances, getWeightRandom, getWithdrawals, hexToBytes, isNode, isRelayerUpdated, jobsSchema, leBuff2Int, leInt2Buff, loadCachedEvents, loadSavedEvents, mimc, multicall, packEncryptedMessage, parseAddress, parseKey, parseMnemonic, parseNumber, parseRecoveryKey, parseRelayer, parseSemanticVersion, parseUrl, pedersen, pickWeightedRandomRelayer, populateTransaction, queryGraph, rBigInt, saveUserFile, 
sleep, substring, toFixedHex, toFixedLength, unpackEncryptedMessage, unzipAsync, validateUrl, zipAsync }; diff --git a/dist/index.umd.js b/dist/index.umd.js index 718ddf3..0811698 100644 --- a/dist/index.umd.js +++ b/dist/index.umd.js @@ -11,6 +11,11175 @@ return /******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ +/***/ 66289: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RLP = exports.utils = exports.decode = exports.encode = void 0; +/** + * RLP Encoding based on https://ethereum.org/en/developers/docs/data-structures-and-encoding/rlp/ + * This function takes in data, converts it to Uint8Array if not, + * and adds a length for recursion. + * @param input Will be converted to Uint8Array + * @returns Uint8Array of encoded data + **/ +function encode(input) { + if (Array.isArray(input)) { + const output = []; + let outputLength = 0; + for (let i = 0; i < input.length; i++) { + const encoded = encode(input[i]); + output.push(encoded); + outputLength += encoded.length; + } + return concatBytes(encodeLength(outputLength, 192), ...output); + } + const inputBuf = toBytes(input); + if (inputBuf.length === 1 && inputBuf[0] < 128) { + return inputBuf; + } + return concatBytes(encodeLength(inputBuf.length, 128), inputBuf); +} +exports.encode = encode; +/** + * Slices a Uint8Array, throws if the slice goes out-of-bounds of the Uint8Array. + * E.g. `safeSlice(hexToBytes('aa'), 1, 2)` will throw. + * @param input + * @param start + * @param end + */ +function safeSlice(input, start, end) { + if (end > input.length) { + throw new Error('invalid RLP (safeSlice): end slice of Uint8Array out-of-bounds'); + } + return input.slice(start, end); +} +/** + * Parse integers. 
Check if there is no leading zeros + * @param v The value to parse + */ +function decodeLength(v) { + if (v[0] === 0) { + throw new Error('invalid RLP: extra zeros'); + } + return parseHexByte(bytesToHex(v)); +} +function encodeLength(len, offset) { + if (len < 56) { + return Uint8Array.from([len + offset]); + } + const hexLength = numberToHex(len); + const lLength = hexLength.length / 2; + const firstByte = numberToHex(offset + 55 + lLength); + return Uint8Array.from(hexToBytes(firstByte + hexLength)); +} +function decode(input, stream = false) { + if (typeof input === 'undefined' || input === null || input.length === 0) { + return Uint8Array.from([]); + } + const inputBytes = toBytes(input); + const decoded = _decode(inputBytes); + if (stream) { + return decoded; + } + if (decoded.remainder.length !== 0) { + throw new Error('invalid RLP: remainder must be zero'); + } + return decoded.data; +} +exports.decode = decode; +/** Decode an input with RLP */ +function _decode(input) { + let length, llength, data, innerRemainder, d; + const decoded = []; + const firstByte = input[0]; + if (firstByte <= 0x7f) { + // a single byte whose value is in the [0x00, 0x7f] range, that byte is its own RLP encoding. + return { + data: input.slice(0, 1), + remainder: input.slice(1), + }; + } + else if (firstByte <= 0xb7) { + // string is 0-55 bytes long. A single byte with value 0x80 plus the length of the string followed by the string + // The range of the first byte is [0x80, 0xb7] + length = firstByte - 0x7f; + // set 0x80 null to 0 + if (firstByte === 0x80) { + data = Uint8Array.from([]); + } + else { + data = safeSlice(input, 1, length); + } + if (length === 2 && data[0] < 0x80) { + throw new Error('invalid RLP encoding: invalid prefix, single byte < 0x80 are not prefixed'); + } + return { + data, + remainder: input.slice(length), + }; + } + else if (firstByte <= 0xbf) { + // string is greater than 55 bytes long. 
A single byte with the value (0xb7 plus the length of the length), + // followed by the length, followed by the string + llength = firstByte - 0xb6; + if (input.length - 1 < llength) { + throw new Error('invalid RLP: not enough bytes for string length'); + } + length = decodeLength(safeSlice(input, 1, llength)); + if (length <= 55) { + throw new Error('invalid RLP: expected string length to be greater than 55'); + } + data = safeSlice(input, llength, length + llength); + return { + data, + remainder: input.slice(length + llength), + }; + } + else if (firstByte <= 0xf7) { + // a list between 0-55 bytes long + length = firstByte - 0xbf; + innerRemainder = safeSlice(input, 1, length); + while (innerRemainder.length) { + d = _decode(innerRemainder); + decoded.push(d.data); + innerRemainder = d.remainder; + } + return { + data: decoded, + remainder: input.slice(length), + }; + } + else { + // a list over 55 bytes long + llength = firstByte - 0xf6; + length = decodeLength(safeSlice(input, 1, llength)); + if (length < 56) { + throw new Error('invalid RLP: encoded list too short'); + } + const totalLength = llength + length; + if (totalLength > input.length) { + throw new Error('invalid RLP: total length is larger than the data'); + } + innerRemainder = safeSlice(input, llength, totalLength); + while (innerRemainder.length) { + d = _decode(innerRemainder); + decoded.push(d.data); + innerRemainder = d.remainder; + } + return { + data: decoded, + remainder: input.slice(totalLength), + }; + } +} +const cachedHexes = Array.from({ length: 256 }, (_v, i) => i.toString(16).padStart(2, '0')); +function bytesToHex(uint8a) { + // Pre-caching chars with `cachedHexes` speeds this up 6x + let hex = ''; + for (let i = 0; i < uint8a.length; i++) { + hex += cachedHexes[uint8a[i]]; + } + return hex; +} +function parseHexByte(hexByte) { + const byte = Number.parseInt(hexByte, 16); + if (Number.isNaN(byte)) + throw new Error('Invalid byte sequence'); + return byte; +} +// Caching slows it 
down 2-3x +function hexToBytes(hex) { + if (typeof hex !== 'string') { + throw new TypeError('hexToBytes: expected string, got ' + typeof hex); + } + if (hex.length % 2) + throw new Error('hexToBytes: received invalid unpadded hex'); + const array = new Uint8Array(hex.length / 2); + for (let i = 0; i < array.length; i++) { + const j = i * 2; + array[i] = parseHexByte(hex.slice(j, j + 2)); + } + return array; +} +/** Concatenates two Uint8Arrays into one. */ +function concatBytes(...arrays) { + if (arrays.length === 1) + return arrays[0]; + const length = arrays.reduce((a, arr) => a + arr.length, 0); + const result = new Uint8Array(length); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const arr = arrays[i]; + result.set(arr, pad); + pad += arr.length; + } + return result; +} +function utf8ToBytes(utf) { + return new TextEncoder().encode(utf); +} +/** Transform an integer into its hexadecimal value */ +function numberToHex(integer) { + if (integer < 0) { + throw new Error('Invalid integer as argument, must be unsigned!'); + } + const hex = integer.toString(16); + return hex.length % 2 ? `0${hex}` : hex; +} +/** Pad a string to be even */ +function padToEven(a) { + return a.length % 2 ? `0${a}` : a; +} +/** Check if a string is prefixed by 0x */ +function isHexPrefixed(str) { + return str.length >= 2 && str[0] === '0' && str[1] === 'x'; +} +/** Removes 0x from a given String */ +function stripHexPrefix(str) { + if (typeof str !== 'string') { + return str; + } + return isHexPrefixed(str) ? 
str.slice(2) : str; +} +/** Transform anything into a Uint8Array */ +function toBytes(v) { + if (v instanceof Uint8Array) { + return v; + } + if (typeof v === 'string') { + if (isHexPrefixed(v)) { + return hexToBytes(padToEven(stripHexPrefix(v))); + } + return utf8ToBytes(v); + } + if (typeof v === 'number' || typeof v === 'bigint') { + if (!v) { + return Uint8Array.from([]); + } + return hexToBytes(numberToHex(v)); + } + if (v === null || v === undefined) { + return Uint8Array.from([]); + } + throw new Error('toBytes: received unsupported type ' + typeof v); +} +exports.utils = { + bytesToHex, + concatBytes, + hexToBytes, + utf8ToBytes, +}; +exports.RLP = { encode, decode }; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 16284: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.accountBodyToRLP = exports.accountBodyToSlim = exports.accountBodyFromSlim = exports.isZeroAddress = exports.zeroAddress = exports.importPublic = exports.privateToAddress = exports.privateToPublic = exports.publicToAddress = exports.pubToAddress = exports.isValidPublic = exports.isValidPrivate = exports.generateAddress2 = exports.generateAddress = exports.isValidChecksumAddress = exports.toChecksumAddress = exports.isValidAddress = exports.Account = void 0; +const rlp_1 = __webpack_require__(66289); +const keccak_1 = __webpack_require__(32019); +const secp256k1_1 = __webpack_require__(26513); +const utils_1 = __webpack_require__(82672); +const bytes_1 = __webpack_require__(77312); +const constants_1 = __webpack_require__(89838); +const helpers_1 = __webpack_require__(35546); +const internal_1 = __webpack_require__(59498); +const _0n = BigInt(0); +class Account { + /** + * This constructor assigns and validates the values. 
+ * Use the static factory methods to assist in creating an Account from varying data types. + */ + constructor(nonce = _0n, balance = _0n, storageRoot = constants_1.KECCAK256_RLP, codeHash = constants_1.KECCAK256_NULL) { + this.nonce = nonce; + this.balance = balance; + this.storageRoot = storageRoot; + this.codeHash = codeHash; + this._validate(); + } + static fromAccountData(accountData) { + const { nonce, balance, storageRoot, codeHash } = accountData; + return new Account(nonce !== undefined ? (0, bytes_1.bufferToBigInt)((0, bytes_1.toBuffer)(nonce)) : undefined, balance !== undefined ? (0, bytes_1.bufferToBigInt)((0, bytes_1.toBuffer)(balance)) : undefined, storageRoot !== undefined ? (0, bytes_1.toBuffer)(storageRoot) : undefined, codeHash !== undefined ? (0, bytes_1.toBuffer)(codeHash) : undefined); + } + static fromRlpSerializedAccount(serialized) { + const values = (0, bytes_1.arrToBufArr)(rlp_1.RLP.decode(Uint8Array.from(serialized))); + if (!Array.isArray(values)) { + throw new Error('Invalid serialized account input. Must be array'); + } + return this.fromValuesArray(values); + } + static fromValuesArray(values) { + const [nonce, balance, storageRoot, codeHash] = values; + return new Account((0, bytes_1.bufferToBigInt)(nonce), (0, bytes_1.bufferToBigInt)(balance), storageRoot, codeHash); + } + _validate() { + if (this.nonce < _0n) { + throw new Error('nonce must be greater than zero'); + } + if (this.balance < _0n) { + throw new Error('balance must be greater than zero'); + } + if (this.storageRoot.length !== 32) { + throw new Error('storageRoot must have a length of 32'); + } + if (this.codeHash.length !== 32) { + throw new Error('codeHash must have a length of 32'); + } + } + /** + * Returns a Buffer Array of the raw Buffers for the account, in order. 
+ */ + raw() { + return [ + (0, bytes_1.bigIntToUnpaddedBuffer)(this.nonce), + (0, bytes_1.bigIntToUnpaddedBuffer)(this.balance), + this.storageRoot, + this.codeHash, + ]; + } + /** + * Returns the RLP serialization of the account as a `Buffer`. + */ + serialize() { + return Buffer.from(rlp_1.RLP.encode((0, bytes_1.bufArrToArr)(this.raw()))); + } + /** + * Returns a `Boolean` determining if the account is a contract. + */ + isContract() { + return !this.codeHash.equals(constants_1.KECCAK256_NULL); + } + /** + * Returns a `Boolean` determining if the account is empty complying to the definition of + * account emptiness in [EIP-161](https://eips.ethereum.org/EIPS/eip-161): + * "An account is considered empty when it has no code and zero nonce and zero balance." + */ + isEmpty() { + return this.balance === _0n && this.nonce === _0n && this.codeHash.equals(constants_1.KECCAK256_NULL); + } +} +exports.Account = Account; +/** + * Checks if the address is a valid. Accepts checksummed addresses too. + */ +const isValidAddress = function (hexAddress) { + try { + (0, helpers_1.assertIsString)(hexAddress); + } + catch (e) { + return false; + } + return /^0x[0-9a-fA-F]{40}$/.test(hexAddress); +}; +exports.isValidAddress = isValidAddress; +/** + * Returns a checksummed address. + * + * If an eip1191ChainId is provided, the chainId will be included in the checksum calculation. This + * has the effect of checksummed addresses for one chain having invalid checksums for others. + * For more details see [EIP-1191](https://eips.ethereum.org/EIPS/eip-1191). + * + * WARNING: Checksums with and without the chainId will differ and the EIP-1191 checksum is not + * backwards compatible to the original widely adopted checksum format standard introduced in + * [EIP-55](https://eips.ethereum.org/EIPS/eip-55), so this will break in existing applications. + * Usage of this EIP is therefore discouraged unless you have a very targeted use case. 
+ */ +const toChecksumAddress = function (hexAddress, eip1191ChainId) { + (0, helpers_1.assertIsHexString)(hexAddress); + const address = (0, internal_1.stripHexPrefix)(hexAddress).toLowerCase(); + let prefix = ''; + if (eip1191ChainId !== undefined) { + const chainId = (0, bytes_1.bufferToBigInt)((0, bytes_1.toBuffer)(eip1191ChainId)); + prefix = chainId.toString() + '0x'; + } + const buf = Buffer.from(prefix + address, 'utf8'); + const hash = (0, utils_1.bytesToHex)((0, keccak_1.keccak256)(buf)); + let ret = '0x'; + for (let i = 0; i < address.length; i++) { + if (parseInt(hash[i], 16) >= 8) { + ret += address[i].toUpperCase(); + } + else { + ret += address[i]; + } + } + return ret; +}; +exports.toChecksumAddress = toChecksumAddress; +/** + * Checks if the address is a valid checksummed address. + * + * See toChecksumAddress' documentation for details about the eip1191ChainId parameter. + */ +const isValidChecksumAddress = function (hexAddress, eip1191ChainId) { + return (0, exports.isValidAddress)(hexAddress) && (0, exports.toChecksumAddress)(hexAddress, eip1191ChainId) === hexAddress; +}; +exports.isValidChecksumAddress = isValidChecksumAddress; +/** + * Generates an address of a newly created contract. 
+ * @param from The address which is creating this new address + * @param nonce The nonce of the from account + */ +const generateAddress = function (from, nonce) { + (0, helpers_1.assertIsBuffer)(from); + (0, helpers_1.assertIsBuffer)(nonce); + if ((0, bytes_1.bufferToBigInt)(nonce) === BigInt(0)) { + // in RLP we want to encode null in the case of zero nonce + // read the RLP documentation for an answer if you dare + return Buffer.from((0, keccak_1.keccak256)(rlp_1.RLP.encode((0, bytes_1.bufArrToArr)([from, null])))).slice(-20); + } + // Only take the lower 160bits of the hash + return Buffer.from((0, keccak_1.keccak256)(rlp_1.RLP.encode((0, bytes_1.bufArrToArr)([from, nonce])))).slice(-20); +}; +exports.generateAddress = generateAddress; +/** + * Generates an address for a contract created using CREATE2. + * @param from The address which is creating this new address + * @param salt A salt + * @param initCode The init code of the contract being created + */ +const generateAddress2 = function (from, salt, initCode) { + (0, helpers_1.assertIsBuffer)(from); + (0, helpers_1.assertIsBuffer)(salt); + (0, helpers_1.assertIsBuffer)(initCode); + if (from.length !== 20) { + throw new Error('Expected from to be of length 20'); + } + if (salt.length !== 32) { + throw new Error('Expected salt to be of length 32'); + } + const address = (0, keccak_1.keccak256)(Buffer.concat([Buffer.from('ff', 'hex'), from, salt, (0, keccak_1.keccak256)(initCode)])); + return (0, bytes_1.toBuffer)(address).slice(-20); +}; +exports.generateAddress2 = generateAddress2; +/** + * Checks if the private key satisfies the rules of the curve secp256k1. + */ +const isValidPrivate = function (privateKey) { + return secp256k1_1.secp256k1.utils.isValidPrivateKey(privateKey); +}; +exports.isValidPrivate = isValidPrivate; +/** + * Checks if the public key satisfies the rules of the curve secp256k1 + * and the requirements of Ethereum. 
+ * @param publicKey The two points of an uncompressed key, unless sanitize is enabled + * @param sanitize Accept public keys in other formats + */ +const isValidPublic = function (publicKey, sanitize = false) { + (0, helpers_1.assertIsBuffer)(publicKey); + if (publicKey.length === 64) { + // Convert to SEC1 for secp256k1 + // Automatically checks whether point is on curve + try { + secp256k1_1.secp256k1.ProjectivePoint.fromHex(Buffer.concat([Buffer.from([4]), publicKey])); + return true; + } + catch (e) { + return false; + } + } + if (!sanitize) { + return false; + } + try { + secp256k1_1.secp256k1.ProjectivePoint.fromHex(publicKey); + return true; + } + catch (e) { + return false; + } +}; +exports.isValidPublic = isValidPublic; +/** + * Returns the ethereum address of a given public key. + * Accepts "Ethereum public keys" and SEC1 encoded keys. + * @param pubKey The two points of an uncompressed key, unless sanitize is enabled + * @param sanitize Accept public keys in other formats + */ +const pubToAddress = function (pubKey, sanitize = false) { + (0, helpers_1.assertIsBuffer)(pubKey); + if (sanitize && pubKey.length !== 64) { + pubKey = Buffer.from(secp256k1_1.secp256k1.ProjectivePoint.fromHex(pubKey).toRawBytes(false).slice(1)); + } + if (pubKey.length !== 64) { + throw new Error('Expected pubKey to be of length 64'); + } + // Only take the lower 160bits of the hash + return Buffer.from((0, keccak_1.keccak256)(pubKey)).slice(-20); +}; +exports.pubToAddress = pubToAddress; +exports.publicToAddress = exports.pubToAddress; +/** + * Returns the ethereum public key of a given private key. 
+ * @param privateKey A private key must be 256 bits wide + */ +const privateToPublic = function (privateKey) { + (0, helpers_1.assertIsBuffer)(privateKey); + // skip the type flag and use the X, Y points + return Buffer.from(secp256k1_1.secp256k1.ProjectivePoint.fromPrivateKey(privateKey).toRawBytes(false).slice(1)); +}; +exports.privateToPublic = privateToPublic; +/** + * Returns the ethereum address of a given private key. + * @param privateKey A private key must be 256 bits wide + */ +const privateToAddress = function (privateKey) { + return (0, exports.publicToAddress)((0, exports.privateToPublic)(privateKey)); +}; +exports.privateToAddress = privateToAddress; +/** + * Converts a public key to the Ethereum format. + */ +const importPublic = function (publicKey) { + (0, helpers_1.assertIsBuffer)(publicKey); + if (publicKey.length !== 64) { + publicKey = Buffer.from(secp256k1_1.secp256k1.ProjectivePoint.fromHex(publicKey).toRawBytes(false).slice(1)); + } + return publicKey; +}; +exports.importPublic = importPublic; +/** + * Returns the zero address. + */ +const zeroAddress = function () { + const addressLength = 20; + const addr = (0, bytes_1.zeros)(addressLength); + return (0, bytes_1.bufferToHex)(addr); +}; +exports.zeroAddress = zeroAddress; +/** + * Checks if a given address is the zero address. + */ +const isZeroAddress = function (hexAddress) { + try { + (0, helpers_1.assertIsString)(hexAddress); + } + catch (e) { + return false; + } + const zeroAddr = (0, exports.zeroAddress)(); + return zeroAddr === hexAddress; +}; +exports.isZeroAddress = isZeroAddress; +function accountBodyFromSlim(body) { + const [nonce, balance, storageRoot, codeHash] = body; + return [ + nonce, + balance, + (0, bytes_1.arrToBufArr)(storageRoot).length === 0 ? constants_1.KECCAK256_RLP : storageRoot, + (0, bytes_1.arrToBufArr)(codeHash).length === 0 ? 
constants_1.KECCAK256_NULL : codeHash, + ]; +} +exports.accountBodyFromSlim = accountBodyFromSlim; +const emptyUint8Arr = new Uint8Array(0); +function accountBodyToSlim(body) { + const [nonce, balance, storageRoot, codeHash] = body; + return [ + nonce, + balance, + (0, bytes_1.arrToBufArr)(storageRoot).equals(constants_1.KECCAK256_RLP) ? emptyUint8Arr : storageRoot, + (0, bytes_1.arrToBufArr)(codeHash).equals(constants_1.KECCAK256_NULL) ? emptyUint8Arr : codeHash, + ]; +} +exports.accountBodyToSlim = accountBodyToSlim; +/** + * Converts a slim account (per snap protocol spec) to the RLP encoded version of the account + * @param body Array of 4 Buffer-like items to represent the account + * @returns RLP encoded version of the account + */ +function accountBodyToRLP(body, couldBeSlim = true) { + const accountBody = couldBeSlim ? accountBodyFromSlim(body) : body; + return (0, bytes_1.arrToBufArr)(rlp_1.RLP.encode(accountBody)); +} +exports.accountBodyToRLP = accountBodyToRLP; +//# sourceMappingURL=account.js.map + +/***/ }), + +/***/ 86727: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Address = void 0; +const account_1 = __webpack_require__(16284); +const bytes_1 = __webpack_require__(77312); +/** + * Handling and generating Ethereum addresses + */ +class Address { + constructor(buf) { + if (buf.length !== 20) { + throw new Error('Invalid address length'); + } + this.buf = buf; + } + /** + * Returns the zero address. + */ + static zero() { + return new Address((0, bytes_1.zeros)(20)); + } + /** + * Returns an Address object from a hex-encoded string. 
+ * @param str - Hex-encoded address + */ + static fromString(str) { + if (!(0, account_1.isValidAddress)(str)) { + throw new Error('Invalid address'); + } + return new Address((0, bytes_1.toBuffer)(str)); + } + /** + * Returns an address for a given public key. + * @param pubKey The two points of an uncompressed key + */ + static fromPublicKey(pubKey) { + if (!Buffer.isBuffer(pubKey)) { + throw new Error('Public key should be Buffer'); + } + const buf = (0, account_1.pubToAddress)(pubKey); + return new Address(buf); + } + /** + * Returns an address for a given private key. + * @param privateKey A private key must be 256 bits wide + */ + static fromPrivateKey(privateKey) { + if (!Buffer.isBuffer(privateKey)) { + throw new Error('Private key should be Buffer'); + } + const buf = (0, account_1.privateToAddress)(privateKey); + return new Address(buf); + } + /** + * Generates an address for a newly created contract. + * @param from The address which is creating this new address + * @param nonce The nonce of the from account + */ + static generate(from, nonce) { + if (typeof nonce !== 'bigint') { + throw new Error('Expected nonce to be a bigint'); + } + return new Address((0, account_1.generateAddress)(from.buf, (0, bytes_1.bigIntToBuffer)(nonce))); + } + /** + * Generates an address for a contract created using CREATE2. + * @param from The address which is creating this new address + * @param salt A salt + * @param initCode The init code of the contract being created + */ + static generate2(from, salt, initCode) { + if (!Buffer.isBuffer(salt)) { + throw new Error('Expected salt to be a Buffer'); + } + if (!Buffer.isBuffer(initCode)) { + throw new Error('Expected initCode to be a Buffer'); + } + return new Address((0, account_1.generateAddress2)(from.buf, salt, initCode)); + } + /** + * Is address equal to another. + */ + equals(address) { + return this.buf.equals(address.buf); + } + /** + * Is address zero. 
+ */ + isZero() { + return this.equals(Address.zero()); + } + /** + * True if address is in the address range defined + * by EIP-1352 + */ + isPrecompileOrSystemAddress() { + const address = (0, bytes_1.bufferToBigInt)(this.buf); + const rangeMin = BigInt(0); + const rangeMax = BigInt('0xffff'); + return address >= rangeMin && address <= rangeMax; + } + /** + * Returns hex encoding of address. + */ + toString() { + return '0x' + this.buf.toString('hex'); + } + /** + * Returns Buffer representation of address. + */ + toBuffer() { + return Buffer.from(this.buf); + } +} +exports.Address = Address; +//# sourceMappingURL=address.js.map + +/***/ }), + +/***/ 98421: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +/** + * Ported to Typescript from original implementation below: + * https://github.com/ahultgren/async-eventemitter -- MIT licensed + * + * Type Definitions based on work by: patarapolw -- MIT licensed + * that was contributed to Definitely Typed below: + * https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/async-eventemitter + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AsyncEventEmitter = void 0; +const events_1 = __webpack_require__(37007); +async function runInSeries(context, tasks, data) { + let error; + for await (const task of tasks) { + try { + if (task.length < 2) { + //sync + task.call(context, data); + } + else { + await new Promise((resolve, reject) => { + task.call(context, data, (error) => { + if (error) { + reject(error); + } + else { + resolve(); + } + }); + }); + } + } + catch (e) { + error = e; + } + } + if (error) { + throw error; + } +} +class AsyncEventEmitter extends events_1.EventEmitter { + emit(event, ...args) { + let [data, callback] = args; + const self = this; + let listeners = self._events[event] ?? 
[]; + // Optional data argument + if (callback === undefined && typeof data === 'function') { + callback = data; + data = undefined; + } + // Special treatment of internal newListener and removeListener events + if (event === 'newListener' || event === 'removeListener') { + data = { + event: data, + fn: callback, + }; + callback = undefined; + } + // A single listener is just a function not an array... + listeners = Array.isArray(listeners) ? listeners : [listeners]; + runInSeries(self, listeners.slice(), data).then(callback).catch(callback); + return self.listenerCount(event) > 0; + } + once(event, listener) { + const self = this; + let g; + if (typeof listener !== 'function') { + throw new TypeError('listener must be a function'); + } + // Hack to support set arity + if (listener.length >= 2) { + g = function (e, next) { + self.removeListener(event, g); + void listener(e, next); + }; + } + else { + g = function (e) { + self.removeListener(event, g); + void listener(e, g); + }; + } + self.on(event, g); + return self; + } + first(event, listener) { + let listeners = this._events[event] ?? []; + // Contract + if (typeof listener !== 'function') { + throw new TypeError('listener must be a function'); + } + // Listeners are not always an array + if (!Array.isArray(listeners)) { + ; + this._events[event] = listeners = [listeners]; + } + listeners.unshift(listener); + return this; + } + before(event, target, listener) { + return this.beforeOrAfter(event, target, listener); + } + after(event, target, listener) { + return this.beforeOrAfter(event, target, listener, 'after'); + } + beforeOrAfter(event, target, listener, beforeOrAfter) { + let listeners = this._events[event] ?? []; + let i; + let index; + const add = beforeOrAfter === 'after' ? 
1 : 0; + // Contract + if (typeof listener !== 'function') { + throw new TypeError('listener must be a function'); + } + if (typeof target !== 'function') { + throw new TypeError('target must be a function'); + } + // Listeners are not always an array + if (!Array.isArray(listeners)) { + ; + this._events[event] = listeners = [listeners]; + } + index = listeners.length; + for (i = listeners.length; i--;) { + if (listeners[i] === target) { + index = i + add; + break; + } + } + listeners.splice(index, 0, listener); + return this; + } + on(event, listener) { + return super.on(event, listener); + } + addListener(event, listener) { + return super.addListener(event, listener); + } + prependListener(event, listener) { + return super.prependListener(event, listener); + } + prependOnceListener(event, listener) { + return super.prependOnceListener(event, listener); + } + removeAllListeners(event) { + return super.removeAllListeners(event); + } + removeListener(event, listener) { + return super.removeListener(event, listener); + } + eventNames() { + return super.eventNames(); + } + listeners(event) { + return super.listeners(event); + } + listenerCount(event) { + return super.listenerCount(event); + } + getMaxListeners() { + return super.getMaxListeners(); + } + setMaxListeners(maxListeners) { + return super.setMaxListeners(maxListeners); + } +} +exports.AsyncEventEmitter = AsyncEventEmitter; +//# sourceMappingURL=asyncEventEmitter.js.map + +/***/ }), + +/***/ 77312: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.intToUnpaddedBuffer = exports.bigIntToUnpaddedBuffer = exports.bigIntToHex = exports.bufArrToArr = exports.arrToBufArr = exports.validateNoLeadingZeroes = exports.baToJSON = exports.toUtf8 = exports.short = exports.addHexPrefix = exports.toUnsigned = exports.fromSigned = 
exports.bufferToInt = exports.bigIntToBuffer = exports.bufferToBigInt = exports.bufferToHex = exports.toBuffer = exports.unpadHexString = exports.unpadArray = exports.unpadBuffer = exports.setLengthRight = exports.setLengthLeft = exports.zeros = exports.intToBuffer = exports.intToHex = void 0; +const helpers_1 = __webpack_require__(35546); +const internal_1 = __webpack_require__(59498); +/** + * Converts a `Number` into a hex `String` + * @param {Number} i + * @return {String} + */ +const intToHex = function (i) { + if (!Number.isSafeInteger(i) || i < 0) { + throw new Error(`Received an invalid integer type: ${i}`); + } + return `0x${i.toString(16)}`; +}; +exports.intToHex = intToHex; +/** + * Converts an `Number` to a `Buffer` + * @param {Number} i + * @return {Buffer} + */ +const intToBuffer = function (i) { + const hex = (0, exports.intToHex)(i); + return Buffer.from((0, internal_1.padToEven)(hex.slice(2)), 'hex'); +}; +exports.intToBuffer = intToBuffer; +/** + * Returns a buffer filled with 0s. + * @param bytes the number of bytes the buffer should be + */ +const zeros = function (bytes) { + return Buffer.allocUnsafe(bytes).fill(0); +}; +exports.zeros = zeros; +/** + * Pads a `Buffer` with zeros till it has `length` bytes. + * Truncates the beginning or end of input if its length exceeds `length`. + * @param msg the value to pad (Buffer) + * @param length the number of bytes the output should be + * @param right whether to start padding form the left or right + * @return (Buffer) + */ +const setLength = function (msg, length, right) { + const buf = (0, exports.zeros)(length); + if (right) { + if (msg.length < length) { + msg.copy(buf); + return buf; + } + return msg.slice(0, length); + } + else { + if (msg.length < length) { + msg.copy(buf, length - msg.length); + return buf; + } + return msg.slice(-length); + } +}; +/** + * Left Pads a `Buffer` with leading zeros till it has `length` bytes. + * Or it truncates the beginning if it exceeds. 
+ * @param msg the value to pad (Buffer) + * @param length the number of bytes the output should be + * @return (Buffer) + */ +const setLengthLeft = function (msg, length) { + (0, helpers_1.assertIsBuffer)(msg); + return setLength(msg, length, false); +}; +exports.setLengthLeft = setLengthLeft; +/** + * Right Pads a `Buffer` with trailing zeros till it has `length` bytes. + * it truncates the end if it exceeds. + * @param msg the value to pad (Buffer) + * @param length the number of bytes the output should be + * @return (Buffer) + */ +const setLengthRight = function (msg, length) { + (0, helpers_1.assertIsBuffer)(msg); + return setLength(msg, length, true); +}; +exports.setLengthRight = setLengthRight; +/** + * Trims leading zeros from a `Buffer`, `String` or `Number[]`. + * @param a (Buffer|Array|String) + * @return (Buffer|Array|String) + */ +const stripZeros = function (a) { + let first = a[0]; + while (a.length > 0 && first.toString() === '0') { + a = a.slice(1); + first = a[0]; + } + return a; +}; +/** + * Trims leading zeros from a `Buffer`. + * @param a (Buffer) + * @return (Buffer) + */ +const unpadBuffer = function (a) { + (0, helpers_1.assertIsBuffer)(a); + return stripZeros(a); +}; +exports.unpadBuffer = unpadBuffer; +/** + * Trims leading zeros from an `Array` (of numbers). + * @param a (number[]) + * @return (number[]) + */ +const unpadArray = function (a) { + (0, helpers_1.assertIsArray)(a); + return stripZeros(a); +}; +exports.unpadArray = unpadArray; +/** + * Trims leading zeros from a hex-prefixed `String`. + * @param a (String) + * @return (String) + */ +const unpadHexString = function (a) { + (0, helpers_1.assertIsHexString)(a); + a = (0, internal_1.stripHexPrefix)(a); + return ('0x' + stripZeros(a)); +}; +exports.unpadHexString = unpadHexString; +/** + * Attempts to turn a value into a `Buffer`. 
+ * Inputs supported: `Buffer`, `String` (hex-prefixed), `Number`, null/undefined, `BigInt` and other objects + * with a `toArray()` or `toBuffer()` method. + * @param v the value + */ +const toBuffer = function (v) { + if (v === null || v === undefined) { + return Buffer.allocUnsafe(0); + } + if (Buffer.isBuffer(v)) { + return Buffer.from(v); + } + if (Array.isArray(v) || v instanceof Uint8Array) { + return Buffer.from(v); + } + if (typeof v === 'string') { + if (!(0, internal_1.isHexString)(v)) { + throw new Error(`Cannot convert string to buffer. toBuffer only supports 0x-prefixed hex strings and this string was given: ${v}`); + } + return Buffer.from((0, internal_1.padToEven)((0, internal_1.stripHexPrefix)(v)), 'hex'); + } + if (typeof v === 'number') { + return (0, exports.intToBuffer)(v); + } + if (typeof v === 'bigint') { + if (v < BigInt(0)) { + throw new Error(`Cannot convert negative bigint to buffer. Given: ${v}`); + } + let n = v.toString(16); + if (n.length % 2) + n = '0' + n; + return Buffer.from(n, 'hex'); + } + if (v.toArray) { + // converts a BN to a Buffer + return Buffer.from(v.toArray()); + } + if (v.toBuffer) { + return Buffer.from(v.toBuffer()); + } + throw new Error('invalid type'); +}; +exports.toBuffer = toBuffer; +/** + * Converts a `Buffer` into a `0x`-prefixed hex `String`. 
+ * @param buf `Buffer` object to convert + */ +const bufferToHex = function (buf) { + buf = (0, exports.toBuffer)(buf); + return '0x' + buf.toString('hex'); +}; +exports.bufferToHex = bufferToHex; +/** + * Converts a {@link Buffer} to a {@link bigint} + */ +function bufferToBigInt(buf) { + const hex = (0, exports.bufferToHex)(buf); + if (hex === '0x') { + return BigInt(0); + } + return BigInt(hex); +} +exports.bufferToBigInt = bufferToBigInt; +/** + * Converts a {@link bigint} to a {@link Buffer} + */ +function bigIntToBuffer(num) { + return (0, exports.toBuffer)('0x' + num.toString(16)); +} +exports.bigIntToBuffer = bigIntToBuffer; +/** + * Converts a `Buffer` to a `Number`. + * @param buf `Buffer` object to convert + * @throws If the input number exceeds 53 bits. + */ +const bufferToInt = function (buf) { + const res = Number(bufferToBigInt(buf)); + if (!Number.isSafeInteger(res)) + throw new Error('Number exceeds 53 bits'); + return res; +}; +exports.bufferToInt = bufferToInt; +/** + * Interprets a `Buffer` as a signed integer and returns a `BigInt`. Assumes 256-bit numbers. + * @param num Signed integer value + */ +const fromSigned = function (num) { + return BigInt.asIntN(256, bufferToBigInt(num)); +}; +exports.fromSigned = fromSigned; +/** + * Converts a `BigInt` to an unsigned integer and returns it as a `Buffer`. Assumes 256-bit numbers. + * @param num + */ +const toUnsigned = function (num) { + return bigIntToBuffer(BigInt.asUintN(256, num)); +}; +exports.toUnsigned = toUnsigned; +/** + * Adds "0x" to a given `String` if it does not already start with "0x". + */ +const addHexPrefix = function (str) { + if (typeof str !== 'string') { + return str; + } + return (0, internal_1.isHexPrefixed)(str) ? str : '0x' + str; +}; +exports.addHexPrefix = addHexPrefix; +/** + * Shortens a string or buffer's hex string representation to maxLength (default 50). 
+ * + * Examples: + * + * Input: '657468657265756d000000000000000000000000000000000000000000000000' + * Output: '657468657265756d0000000000000000000000000000000000…' + */ +function short(buffer, maxLength = 50) { + const bufferStr = Buffer.isBuffer(buffer) ? buffer.toString('hex') : buffer; + if (bufferStr.length <= maxLength) { + return bufferStr; + } + return bufferStr.slice(0, maxLength) + '…'; +} +exports.short = short; +/** + * Returns the utf8 string representation from a hex string. + * + * Examples: + * + * Input 1: '657468657265756d000000000000000000000000000000000000000000000000' + * Input 2: '657468657265756d' + * Input 3: '000000000000000000000000000000000000000000000000657468657265756d' + * + * Output (all 3 input variants): 'ethereum' + * + * Note that this method is not intended to be used with hex strings + * representing quantities in both big endian or little endian notation. + * + * @param string Hex string, should be `0x` prefixed + * @return Utf8 string + */ +const toUtf8 = function (hex) { + const zerosRegexp = /^(00)+|(00)+$/g; + hex = (0, internal_1.stripHexPrefix)(hex); + if (hex.length % 2 !== 0) { + throw new Error('Invalid non-even hex string input for toUtf8() provided'); + } + const bufferVal = Buffer.from(hex.replace(zerosRegexp, ''), 'hex'); + return bufferVal.toString('utf8'); +}; +exports.toUtf8 = toUtf8; +/** + * Converts a `Buffer` or `Array` to JSON. + * @param ba (Buffer|Array) + * @return (Array|String|null) + */ +const baToJSON = function (ba) { + if (Buffer.isBuffer(ba)) { + return `0x${ba.toString('hex')}`; + } + else if (ba instanceof Array) { + const array = []; + for (let i = 0; i < ba.length; i++) { + array.push((0, exports.baToJSON)(ba[i])); + } + return array; + } +}; +exports.baToJSON = baToJSON; +/** + * Checks provided Buffers for leading zeroes and throws if found. 
+ * + * Examples: + * + * Valid values: 0x1, 0x, 0x01, 0x1234 + * Invalid values: 0x0, 0x00, 0x001, 0x0001 + * + * Note: This method is useful for validating that RLP encoded integers comply with the rule that all + * integer values encoded to RLP must be in the most compact form and contain no leading zero bytes + * @param values An object containing string keys and Buffer values + * @throws if any provided value is found to have leading zero bytes + */ +const validateNoLeadingZeroes = function (values) { + for (const [k, v] of Object.entries(values)) { + if (v !== undefined && v.length > 0 && v[0] === 0) { + throw new Error(`${k} cannot have leading zeroes, received: ${v.toString('hex')}`); + } + } +}; +exports.validateNoLeadingZeroes = validateNoLeadingZeroes; +function arrToBufArr(arr) { + if (!Array.isArray(arr)) { + return Buffer.from(arr); + } + return arr.map((a) => arrToBufArr(a)); +} +exports.arrToBufArr = arrToBufArr; +function bufArrToArr(arr) { + if (!Array.isArray(arr)) { + return Uint8Array.from(arr ?? 
[]); + } + return arr.map((a) => bufArrToArr(a)); +} +exports.bufArrToArr = bufArrToArr; +/** + * Converts a {@link bigint} to a `0x` prefixed hex string + */ +const bigIntToHex = (num) => { + return '0x' + num.toString(16); +}; +exports.bigIntToHex = bigIntToHex; +/** + * Convert value from bigint to an unpadded Buffer + * (useful for RLP transport) + * @param value value to convert + */ +function bigIntToUnpaddedBuffer(value) { + return (0, exports.unpadBuffer)(bigIntToBuffer(value)); +} +exports.bigIntToUnpaddedBuffer = bigIntToUnpaddedBuffer; +function intToUnpaddedBuffer(value) { + return (0, exports.unpadBuffer)((0, exports.intToBuffer)(value)); +} +exports.intToUnpaddedBuffer = intToUnpaddedBuffer; +//# sourceMappingURL=bytes.js.map + +/***/ }), + +/***/ 89838: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MAX_WITHDRAWALS_PER_PAYLOAD = exports.RLP_EMPTY_STRING = exports.KECCAK256_RLP = exports.KECCAK256_RLP_S = exports.KECCAK256_RLP_ARRAY = exports.KECCAK256_RLP_ARRAY_S = exports.KECCAK256_NULL = exports.KECCAK256_NULL_S = exports.TWO_POW256 = exports.SECP256K1_ORDER_DIV_2 = exports.SECP256K1_ORDER = exports.MAX_INTEGER_BIGINT = exports.MAX_INTEGER = exports.MAX_UINT64 = void 0; +const buffer_1 = __webpack_require__(48287); +const secp256k1_1 = __webpack_require__(26513); +/** + * 2^64-1 + */ +exports.MAX_UINT64 = BigInt('0xffffffffffffffff'); +/** + * The max integer that the evm can handle (2^256-1) + */ +exports.MAX_INTEGER = BigInt('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'); +/** + * The max integer that the evm can handle (2^256-1) as a bigint + * 2^256-1 equals to 340282366920938463463374607431768211455 + * We use literal value instead of calculated value for compatibility issue. 
+ */ +exports.MAX_INTEGER_BIGINT = BigInt('115792089237316195423570985008687907853269984665640564039457584007913129639935'); +exports.SECP256K1_ORDER = secp256k1_1.secp256k1.CURVE.n; +exports.SECP256K1_ORDER_DIV_2 = secp256k1_1.secp256k1.CURVE.n / BigInt(2); +/** + * 2^256 + */ +exports.TWO_POW256 = BigInt('0x10000000000000000000000000000000000000000000000000000000000000000'); +/** + * Keccak-256 hash of null + */ +exports.KECCAK256_NULL_S = 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470'; +/** + * Keccak-256 hash of null + */ +exports.KECCAK256_NULL = buffer_1.Buffer.from(exports.KECCAK256_NULL_S, 'hex'); +/** + * Keccak-256 of an RLP of an empty array + */ +exports.KECCAK256_RLP_ARRAY_S = '1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347'; +/** + * Keccak-256 of an RLP of an empty array + */ +exports.KECCAK256_RLP_ARRAY = buffer_1.Buffer.from(exports.KECCAK256_RLP_ARRAY_S, 'hex'); +/** + * Keccak-256 hash of the RLP of null + */ +exports.KECCAK256_RLP_S = '56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421'; +/** + * Keccak-256 hash of the RLP of null + */ +exports.KECCAK256_RLP = buffer_1.Buffer.from(exports.KECCAK256_RLP_S, 'hex'); +/** + * RLP encoded empty string + */ +exports.RLP_EMPTY_STRING = buffer_1.Buffer.from([0x80]); +exports.MAX_WITHDRAWALS_PER_PAYLOAD = 16; +//# sourceMappingURL=constants.js.map + +/***/ }), + +/***/ 45062: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.compactBytesToNibbles = exports.bytesToNibbles = exports.nibblesToCompactBytes = exports.nibblesToBytes = exports.hasTerminator = void 0; +// Reference: https://ethereum.org/en/developers/docs/data-structures-and-encoding/patricia-merkle-trie/ +/** + * + * @param s byte sequence + * @returns boolean indicating if input hex nibble sequence has terminator indicating leaf-node + * terminator is represented with 16 because a nibble ranges from 0 
- 15(f) + */ +const hasTerminator = (nibbles) => { + return nibbles.length > 0 && nibbles[nibbles.length - 1] === 16; +}; +exports.hasTerminator = hasTerminator; +const nibblesToBytes = (nibbles, bytes) => { + for (let bi = 0, ni = 0; ni < nibbles.length; bi += 1, ni += 2) { + bytes[bi] = (nibbles[ni] << 4) | nibbles[ni + 1]; + } +}; +exports.nibblesToBytes = nibblesToBytes; +const nibblesToCompactBytes = (nibbles) => { + let terminator = 0; + if ((0, exports.hasTerminator)(nibbles)) { + terminator = 1; + // Remove the terminator from the sequence + nibbles = nibbles.subarray(0, nibbles.length - 1); + } + const buf = new Uint8Array(nibbles.length / 2 + 1); + // Shift the terminator info into the first nibble of buf[0] + buf[0] = terminator << 5; + // If odd length, then add that flag into the first nibble and put the odd nibble to + // second part of buf[0] which otherwise will be left padded with a 0 + if ((nibbles.length & 1) === 1) { + buf[0] |= 1 << 4; + buf[0] |= nibbles[0]; + nibbles = nibbles.subarray(1); + } + // create bytes out of the rest even nibbles + (0, exports.nibblesToBytes)(nibbles, buf.subarray(1)); + return buf; +}; +exports.nibblesToCompactBytes = nibblesToCompactBytes; +const bytesToNibbles = (str) => { + const l = str.length * 2 + 1; + const nibbles = new Uint8Array(l); + for (let i = 0; i < str.length; i++) { + const b = str[i]; + nibbles[i * 2] = b / 16; + nibbles[i * 2 + 1] = b % 16; + } + // This will get removed from calling function if the first nibble + // indicates that terminator is not present + nibbles[l - 1] = 16; + return nibbles; +}; +exports.bytesToNibbles = bytesToNibbles; +const compactBytesToNibbles = (compact) => { + if (compact.length === 0) { + return compact; + } + let base = (0, exports.bytesToNibbles)(compact); + // delete terminator flag if terminator flag was not in first nibble + if (base[0] < 2) { + base = base.subarray(0, base.length - 1); + } + // chop the terminator nibble and the even padding (if there is one) 
+ // i.e. chop 2 left nibbles when even else 1 when odd + const chop = 2 - (base[0] & 1); + return base.subarray(chop); +}; +exports.compactBytesToNibbles = compactBytesToNibbles; +/** + * A test helper to generates compact path for a subset of key bytes + * + * TODO: Commenting the code for now as this seems to be helper function + * (from geth codebase ) + * + */ +// +// +// export const getPathTo = (tillBytes: number, key: Buffer) => { +// const hexNibbles = bytesToNibbles(key).subarray(0, tillBytes) +// // Remove the terminator if its there, although it would be there only if tillBytes >= key.length +// // This seems to be a test helper to generate paths so correctness of this isn't necessary +// hexNibbles[hexNibbles.length - 1] = 0 +// const compactBytes = nibblesToCompactBytes(hexNibbles) +// return [Buffer.from(compactBytes)] +// } +//# sourceMappingURL=encoding.js.map + +/***/ }), + +/***/ 35546: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.assertIsString = exports.assertIsArray = exports.assertIsBuffer = exports.assertIsHexString = void 0; +const internal_1 = __webpack_require__(59498); +/** + * Throws if a string is not hex prefixed + * @param {string} input string to check hex prefix of + */ +const assertIsHexString = function (input) { + if (!(0, internal_1.isHexString)(input)) { + const msg = `This method only supports 0x-prefixed hex strings but input was: ${input}`; + throw new Error(msg); + } +}; +exports.assertIsHexString = assertIsHexString; +/** + * Throws if input is not a buffer + * @param {Buffer} input value to check + */ +const assertIsBuffer = function (input) { + if (!Buffer.isBuffer(input)) { + const msg = `This method only supports Buffer but input was: ${input}`; + throw new Error(msg); + } +}; +exports.assertIsBuffer = assertIsBuffer; +/** + 
* Throws if input is not an array + * @param {number[]} input value to check + */ +const assertIsArray = function (input) { + if (!Array.isArray(input)) { + const msg = `This method only supports number arrays but input was: ${input}`; + throw new Error(msg); + } +}; +exports.assertIsArray = assertIsArray; +/** + * Throws if input is not a string + * @param {string} input value to check + */ +const assertIsString = function (input) { + if (typeof input !== 'string') { + const msg = `This method only supports strings but input was: ${input}`; + throw new Error(msg); + } +}; +exports.assertIsString = assertIsString; +//# sourceMappingURL=helpers.js.map + +/***/ }), + +/***/ 68683: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toAscii = exports.stripHexPrefix = exports.padToEven = exports.isHexString = exports.isHexPrefixed = exports.getKeys = exports.getBinarySize = exports.fromUtf8 = exports.fromAscii = exports.arrayContainsArray = void 0; +/** + * Constants + */ +__exportStar(__webpack_require__(89838), exports); +/** + * Units helpers + */ +__exportStar(__webpack_require__(52652), exports); +/** + * Account class and helper functions + */ +__exportStar(__webpack_require__(16284), exports); 
+/** + * Address type + */ +__exportStar(__webpack_require__(86727), exports); +/** + * Withdrawal type + */ +__exportStar(__webpack_require__(37380), exports); +/** + * ECDSA signature + */ +__exportStar(__webpack_require__(92133), exports); +/** + * Utilities for manipulating Buffers, byte arrays, etc. + */ +__exportStar(__webpack_require__(77312), exports); +/** + * Helpful TypeScript types + */ +__exportStar(__webpack_require__(42666), exports); +/** + * Helper function for working with compact encoding + */ +__exportStar(__webpack_require__(45062), exports); +/** + * Export ethjs-util methods + */ +__exportStar(__webpack_require__(98421), exports); +var internal_1 = __webpack_require__(59498); +Object.defineProperty(exports, "arrayContainsArray", ({ enumerable: true, get: function () { return internal_1.arrayContainsArray; } })); +Object.defineProperty(exports, "fromAscii", ({ enumerable: true, get: function () { return internal_1.fromAscii; } })); +Object.defineProperty(exports, "fromUtf8", ({ enumerable: true, get: function () { return internal_1.fromUtf8; } })); +Object.defineProperty(exports, "getBinarySize", ({ enumerable: true, get: function () { return internal_1.getBinarySize; } })); +Object.defineProperty(exports, "getKeys", ({ enumerable: true, get: function () { return internal_1.getKeys; } })); +Object.defineProperty(exports, "isHexPrefixed", ({ enumerable: true, get: function () { return internal_1.isHexPrefixed; } })); +Object.defineProperty(exports, "isHexString", ({ enumerable: true, get: function () { return internal_1.isHexString; } })); +Object.defineProperty(exports, "padToEven", ({ enumerable: true, get: function () { return internal_1.padToEven; } })); +Object.defineProperty(exports, "stripHexPrefix", ({ enumerable: true, get: function () { return internal_1.stripHexPrefix; } })); +Object.defineProperty(exports, "toAscii", ({ enumerable: true, get: function () { return internal_1.toAscii; } })); +__exportStar(__webpack_require__(31708), 
exports); +__exportStar(__webpack_require__(81862), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 59498: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +/* +The MIT License + +Copyright (c) 2016 Nick Dodson. nickdodson.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isHexString = exports.getKeys = exports.fromAscii = exports.fromUtf8 = exports.toAscii = exports.arrayContainsArray = exports.getBinarySize = exports.padToEven = exports.stripHexPrefix = exports.isHexPrefixed = void 0; +/** + * Returns a `Boolean` on whether or not the a `String` starts with '0x' + * @param str the string input value + * @return a boolean if it is or is not hex prefixed + * @throws if the str input is not a string + */ +function isHexPrefixed(str) { + if (typeof str !== 'string') { + throw new Error(`[isHexPrefixed] input must be type 'string', received type ${typeof str}`); + } + return str[0] === '0' && str[1] === 'x'; +} +exports.isHexPrefixed = isHexPrefixed; +/** + * Removes '0x' from a given `String` if present + * @param str the string value + * @returns the string without 0x prefix + */ +const stripHexPrefix = (str) => { + if (typeof str !== 'string') + throw new Error(`[stripHexPrefix] input must be type 'string', received ${typeof str}`); + return isHexPrefixed(str) ? 
str.slice(2) : str; +}; +exports.stripHexPrefix = stripHexPrefix; +/** + * Pads a `String` to have an even length + * @param value + * @return output + */ +function padToEven(value) { + let a = value; + if (typeof a !== 'string') { + throw new Error(`[padToEven] value must be type 'string', received ${typeof a}`); + } + if (a.length % 2) + a = `0${a}`; + return a; +} +exports.padToEven = padToEven; +/** + * Get the binary size of a string + * @param str + * @returns the number of bytes contained within the string + */ +function getBinarySize(str) { + if (typeof str !== 'string') { + throw new Error(`[getBinarySize] method requires input type 'string', received ${typeof str}`); + } + return Buffer.byteLength(str, 'utf8'); +} +exports.getBinarySize = getBinarySize; +/** + * Returns TRUE if the first specified array contains all elements + * from the second one. FALSE otherwise. + * + * @param superset + * @param subset + * + */ +function arrayContainsArray(superset, subset, some) { + if (Array.isArray(superset) !== true) { + throw new Error(`[arrayContainsArray] method requires input 'superset' to be an array, got type '${typeof superset}'`); + } + if (Array.isArray(subset) !== true) { + throw new Error(`[arrayContainsArray] method requires input 'subset' to be an array, got type '${typeof subset}'`); + } + return subset[some === true ? 
'some' : 'every']((value) => superset.indexOf(value) >= 0); +} +exports.arrayContainsArray = arrayContainsArray; +/** + * Should be called to get ascii from its hex representation + * + * @param string in hex + * @returns ascii string representation of hex value + */ +function toAscii(hex) { + let str = ''; + let i = 0; + const l = hex.length; + if (hex.substring(0, 2) === '0x') + i = 2; + for (; i < l; i += 2) { + const code = parseInt(hex.substr(i, 2), 16); + str += String.fromCharCode(code); + } + return str; +} +exports.toAscii = toAscii; +/** + * Should be called to get hex representation (prefixed by 0x) of utf8 string + * + * @param string + * @param optional padding + * @returns hex representation of input string + */ +function fromUtf8(stringValue) { + const str = Buffer.from(stringValue, 'utf8'); + return `0x${padToEven(str.toString('hex')).replace(/^0+|0+$/g, '')}`; +} +exports.fromUtf8 = fromUtf8; +/** + * Should be called to get hex representation (prefixed by 0x) of ascii string + * + * @param string + * @param optional padding + * @returns hex representation of input string + */ +function fromAscii(stringValue) { + let hex = ''; + for (let i = 0; i < stringValue.length; i++) { + const code = stringValue.charCodeAt(i); + const n = code.toString(16); + hex += n.length < 2 ? `0${n}` : n; + } + return `0x${hex}`; +} +exports.fromAscii = fromAscii; +/** + * Returns the keys from an array of objects. 
+ * @example + * ```js + * getKeys([{a: '1', b: '2'}, {a: '3', b: '4'}], 'a') => ['1', '3'] + *```` + * @param params + * @param key + * @param allowEmpty + * @returns output just a simple array of output keys + */ +function getKeys(params, key, allowEmpty) { + if (!Array.isArray(params)) { + throw new Error(`[getKeys] method expects input 'params' to be an array, got ${typeof params}`); + } + if (typeof key !== 'string') { + throw new Error(`[getKeys] method expects input 'key' to be type 'string', got ${typeof params}`); + } + const result = []; + for (let i = 0; i < params.length; i++) { + let value = params[i][key]; + if (allowEmpty === true && !value) { + value = ''; + } + else if (typeof value !== 'string') { + throw new Error(`invalid abi - expected type 'string', received ${typeof value}`); + } + result.push(value); + } + return result; +} +exports.getKeys = getKeys; +/** + * Is the string a hex string. + * + * @param value + * @param length + * @returns output the string is a hex string + */ +function isHexString(value, length) { + if (typeof value !== 'string' || !value.match(/^0x[0-9A-Fa-f]*$/)) + return false; + if (typeof length !== 'undefined' && length > 0 && value.length !== 2 + 2 * length) + return false; + return true; +} +exports.isHexString = isHexString; +//# sourceMappingURL=internal.js.map + +/***/ }), + +/***/ 31708: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var console = __webpack_require__(96763); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Lock = void 0; +// Based on https://github.com/jsoendermann/semaphore-async-await/blob/master/src/Semaphore.ts +class Lock { + constructor() { + this.permits = 1; + this.promiseResolverQueue = []; + } + /** + * Returns a promise used to wait for a permit to become available. This method should be awaited on. + * @returns A promise that gets resolved when execution is allowed to proceed. 
+ */ + async acquire() { + if (this.permits > 0) { + this.permits -= 1; + return Promise.resolve(true); + } + // If there is no permit available, we return a promise that resolves once the semaphore gets + // signaled enough times that permits is equal to one. + return new Promise((resolver) => this.promiseResolverQueue.push(resolver)); + } + /** + * Increases the number of permits by one. If there are other functions waiting, one of them will + * continue to execute in a future iteration of the event loop. + */ + release() { + this.permits += 1; + if (this.permits > 1 && this.promiseResolverQueue.length > 0) { + // eslint-disable-next-line no-console + console.warn('Lock.permits should never be > 0 when there is someone waiting.'); + } + else if (this.permits === 1 && this.promiseResolverQueue.length > 0) { + // If there is someone else waiting, immediately consume the permit that was released + // at the beginning of this function and let the waiting function resume. + this.permits -= 1; + const nextResolver = this.promiseResolverQueue.shift(); + if (nextResolver) { + nextResolver(true); + } + } + } +} +exports.Lock = Lock; +//# sourceMappingURL=lock.js.map + +/***/ }), + +/***/ 81862: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getProvider = exports.fetchFromProvider = void 0; +const micro_ftch_1 = __webpack_require__(6215); +const fetchFromProvider = async (url, params) => { + const res = await (0, micro_ftch_1.default)(url, { + headers: { + 'content-type': 'application/json', + }, + type: 'json', + data: { + method: params.method, + params: params.params, + jsonrpc: '2.0', + id: 1, + }, + }); + return res.result; +}; +exports.fetchFromProvider = fetchFromProvider; +const getProvider = (provider) => { + if (typeof provider === 'string') { + return provider; + } + else if (provider?.connection?.url !== undefined) { + return 
provider.connection.url; + } + else { + throw new Error('Must provide valid provider URL or Web3Provider'); + } +}; +exports.getProvider = getProvider; +//# sourceMappingURL=provider.js.map + +/***/ }), + +/***/ 92133: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hashPersonalMessage = exports.isValidSignature = exports.fromRpcSig = exports.toCompactSig = exports.toRpcSig = exports.ecrecover = exports.ecsign = void 0; +const keccak_1 = __webpack_require__(32019); +const secp256k1_1 = __webpack_require__(26513); +const bytes_1 = __webpack_require__(77312); +const constants_1 = __webpack_require__(89838); +const helpers_1 = __webpack_require__(35546); +/** + * Returns the ECDSA signature of a message hash. + * + * If `chainId` is provided assume an EIP-155-style signature and calculate the `v` value + * accordingly, otherwise return a "static" `v` just derived from the `recovery` bit + */ +function ecsign(msgHash, privateKey, chainId) { + const sig = secp256k1_1.secp256k1.sign(msgHash, privateKey); + const buf = sig.toCompactRawBytes(); + const r = Buffer.from(buf.slice(0, 32)); + const s = Buffer.from(buf.slice(32, 64)); + const v = chainId === undefined + ? BigInt(sig.recovery + 27) + : BigInt(sig.recovery + 35) + BigInt(chainId) * BigInt(2); + return { r, s, v }; +} +exports.ecsign = ecsign; +function calculateSigRecovery(v, chainId) { + if (v === BigInt(0) || v === BigInt(1)) + return v; + if (chainId === undefined) { + return v - BigInt(27); + } + return v - (chainId * BigInt(2) + BigInt(35)); +} +function isValidSigRecovery(recovery) { + return recovery === BigInt(0) || recovery === BigInt(1); +} +/** + * ECDSA public key recovery from signature. 
+ * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @returns Recovered public key + */ +const ecrecover = function (msgHash, v, r, s, chainId) { + const signature = Buffer.concat([(0, bytes_1.setLengthLeft)(r, 32), (0, bytes_1.setLengthLeft)(s, 32)], 64); + const recovery = calculateSigRecovery(v, chainId); + if (!isValidSigRecovery(recovery)) { + throw new Error('Invalid signature v value'); + } + const sig = secp256k1_1.secp256k1.Signature.fromCompact(signature).addRecoveryBit(Number(recovery)); + const senderPubKey = sig.recoverPublicKey(msgHash); + return Buffer.from(senderPubKey.toRawBytes(false).slice(1)); +}; +exports.ecrecover = ecrecover; +/** + * Convert signature parameters into the format of `eth_sign` RPC method. + * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @returns Signature + */ +const toRpcSig = function (v, r, s, chainId) { + const recovery = calculateSigRecovery(v, chainId); + if (!isValidSigRecovery(recovery)) { + throw new Error('Invalid signature v value'); + } + // geth (and the RPC eth_sign method) uses the 65 byte format used by Bitcoin + return (0, bytes_1.bufferToHex)(Buffer.concat([(0, bytes_1.setLengthLeft)(r, 32), (0, bytes_1.setLengthLeft)(s, 32), (0, bytes_1.toBuffer)(v)])); +}; +exports.toRpcSig = toRpcSig; +/** + * Convert signature parameters into the format of Compact Signature Representation (EIP-2098). 
+ * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @returns Signature + */ +const toCompactSig = function (v, r, s, chainId) { + const recovery = calculateSigRecovery(v, chainId); + if (!isValidSigRecovery(recovery)) { + throw new Error('Invalid signature v value'); + } + let ss = s; + if ((v > BigInt(28) && v % BigInt(2) === BigInt(1)) || v === BigInt(1) || v === BigInt(28)) { + ss = Buffer.from(s); + ss[0] |= 0x80; + } + return (0, bytes_1.bufferToHex)(Buffer.concat([(0, bytes_1.setLengthLeft)(r, 32), (0, bytes_1.setLengthLeft)(ss, 32)])); +}; +exports.toCompactSig = toCompactSig; +/** + * Convert signature format of the `eth_sign` RPC method to signature parameters + * + * NOTE: For an extracted `v` value < 27 (see Geth bug https://github.com/ethereum/go-ethereum/issues/2053) + * `v + 27` is returned for the `v` value + * NOTE: After EIP1559, `v` could be `0` or `1` but this function assumes + * it's a signed message (EIP-191 or EIP-712) adding `27` at the end. Remove if needed. + */ +const fromRpcSig = function (sig) { + const buf = (0, bytes_1.toBuffer)(sig); + let r; + let s; + let v; + if (buf.length >= 65) { + r = buf.slice(0, 32); + s = buf.slice(32, 64); + v = (0, bytes_1.bufferToBigInt)(buf.slice(64)); + } + else if (buf.length === 64) { + // Compact Signature Representation (https://eips.ethereum.org/EIPS/eip-2098) + r = buf.slice(0, 32); + s = buf.slice(32, 64); + v = BigInt((0, bytes_1.bufferToInt)(buf.slice(32, 33)) >> 7); + s[0] &= 0x7f; + } + else { + throw new Error('Invalid signature length'); + } + // support both versions of `eth_sign` responses + if (v < 27) { + v = v + BigInt(27); + } + return { + v, + r, + s, + }; +}; +exports.fromRpcSig = fromRpcSig; +/** + * Validate a ECDSA signature. 
+ * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @param homesteadOrLater Indicates whether this is being used on either the homestead hardfork or a later one + */ +const isValidSignature = function (v, r, s, homesteadOrLater = true, chainId) { + if (r.length !== 32 || s.length !== 32) { + return false; + } + if (!isValidSigRecovery(calculateSigRecovery(v, chainId))) { + return false; + } + const rBigInt = (0, bytes_1.bufferToBigInt)(r); + const sBigInt = (0, bytes_1.bufferToBigInt)(s); + if (rBigInt === BigInt(0) || + rBigInt >= constants_1.SECP256K1_ORDER || + sBigInt === BigInt(0) || + sBigInt >= constants_1.SECP256K1_ORDER) { + return false; + } + if (homesteadOrLater && sBigInt >= constants_1.SECP256K1_ORDER_DIV_2) { + return false; + } + return true; +}; +exports.isValidSignature = isValidSignature; +/** + * Returns the keccak-256 hash of `message`, prefixed with the header used by the `eth_sign` RPC call. + * The output of this function can be fed into `ecsign` to produce the same signature as the `eth_sign` + * call for a given `message`, or fed to `ecrecover` along with a signature to recover the public key + * used to produce the signature. 
+ */ +const hashPersonalMessage = function (message) { + (0, helpers_1.assertIsBuffer)(message); + const prefix = Buffer.from(`\u0019Ethereum Signed Message:\n${message.length}`, 'utf-8'); + return Buffer.from((0, keccak_1.keccak256)(Buffer.concat([prefix, message]))); +}; +exports.hashPersonalMessage = hashPersonalMessage; +//# sourceMappingURL=signature.js.map + +/***/ }), + +/***/ 42666: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toType = exports.TypeOutput = void 0; +const bytes_1 = __webpack_require__(77312); +const internal_1 = __webpack_require__(59498); +/** + * Type output options + */ +var TypeOutput; +(function (TypeOutput) { + TypeOutput[TypeOutput["Number"] = 0] = "Number"; + TypeOutput[TypeOutput["BigInt"] = 1] = "BigInt"; + TypeOutput[TypeOutput["Buffer"] = 2] = "Buffer"; + TypeOutput[TypeOutput["PrefixedHexString"] = 3] = "PrefixedHexString"; +})(TypeOutput = exports.TypeOutput || (exports.TypeOutput = {})); +function toType(input, outputType) { + if (input === null) { + return null; + } + if (input === undefined) { + return undefined; + } + if (typeof input === 'string' && !(0, internal_1.isHexString)(input)) { + throw new Error(`A string must be provided with a 0x-prefix, given: ${input}`); + } + else if (typeof input === 'number' && !Number.isSafeInteger(input)) { + throw new Error('The provided number is greater than MAX_SAFE_INTEGER (please use an alternative input type)'); + } + const output = (0, bytes_1.toBuffer)(input); + switch (outputType) { + case TypeOutput.Buffer: + return output; + case TypeOutput.BigInt: + return (0, bytes_1.bufferToBigInt)(output); + case TypeOutput.Number: { + const bigInt = (0, bytes_1.bufferToBigInt)(output); + if (bigInt > BigInt(Number.MAX_SAFE_INTEGER)) { + throw new Error('The provided number is greater than MAX_SAFE_INTEGER (please use an alternative output type)'); + } + return 
Number(bigInt); + } + case TypeOutput.PrefixedHexString: + return (0, bytes_1.bufferToHex)(output); + default: + throw new Error('unknown outputType'); + } +} +exports.toType = toType; +//# sourceMappingURL=types.js.map + +/***/ }), + +/***/ 52652: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GWEI_TO_WEI = void 0; +/** Easy conversion from Gwei to wei */ +exports.GWEI_TO_WEI = BigInt(1000000000); +//# sourceMappingURL=units.js.map + +/***/ }), + +/***/ 37380: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Withdrawal = void 0; +const address_1 = __webpack_require__(86727); +const bytes_1 = __webpack_require__(77312); +const types_1 = __webpack_require__(42666); +/** + * Representation of EIP-4895 withdrawal data + */ +class Withdrawal { + /** + * This constructor assigns and validates the values. + * Use the static factory methods to assist in creating a Withdrawal object from varying data types. 
+ * Its amount is in Gwei to match CL representation and for eventual ssz withdrawalsRoot + */ + constructor(index, validatorIndex, address, + /** + * withdrawal amount in Gwei to match the CL repesentation and eventually ssz withdrawalsRoot + */ + amount) { + this.index = index; + this.validatorIndex = validatorIndex; + this.address = address; + this.amount = amount; + } + static fromWithdrawalData(withdrawalData) { + const { index: indexData, validatorIndex: validatorIndexData, address: addressData, amount: amountData, } = withdrawalData; + const index = (0, types_1.toType)(indexData, types_1.TypeOutput.BigInt); + const validatorIndex = (0, types_1.toType)(validatorIndexData, types_1.TypeOutput.BigInt); + const address = new address_1.Address((0, types_1.toType)(addressData, types_1.TypeOutput.Buffer)); + const amount = (0, types_1.toType)(amountData, types_1.TypeOutput.BigInt); + return new Withdrawal(index, validatorIndex, address, amount); + } + static fromValuesArray(withdrawalArray) { + if (withdrawalArray.length !== 4) { + throw Error(`Invalid withdrawalArray length expected=4 actual=${withdrawalArray.length}`); + } + const [index, validatorIndex, address, amount] = withdrawalArray; + return Withdrawal.fromWithdrawalData({ index, validatorIndex, address, amount }); + } + /** + * Convert a withdrawal to a buffer array + * @param withdrawal the withdrawal to convert + * @returns buffer array of the withdrawal + */ + static toBufferArray(withdrawal) { + const { index, validatorIndex, address, amount } = withdrawal; + const indexBuffer = (0, types_1.toType)(index, types_1.TypeOutput.BigInt) === BigInt(0) + ? Buffer.alloc(0) + : (0, types_1.toType)(index, types_1.TypeOutput.Buffer); + const validatorIndexBuffer = (0, types_1.toType)(validatorIndex, types_1.TypeOutput.BigInt) === BigInt(0) + ? 
Buffer.alloc(0) + : (0, types_1.toType)(validatorIndex, types_1.TypeOutput.Buffer); + let addressBuffer; + if (address instanceof address_1.Address) { + addressBuffer = address.buf; + } + else { + addressBuffer = (0, types_1.toType)(address, types_1.TypeOutput.Buffer); + } + const amountBuffer = (0, types_1.toType)(amount, types_1.TypeOutput.BigInt) === BigInt(0) + ? Buffer.alloc(0) + : (0, types_1.toType)(amount, types_1.TypeOutput.Buffer); + return [indexBuffer, validatorIndexBuffer, addressBuffer, amountBuffer]; + } + raw() { + return Withdrawal.toBufferArray(this); + } + toValue() { + return { + index: this.index, + validatorIndex: this.validatorIndex, + address: this.address.buf, + amount: this.amount, + }; + } + toJSON() { + return { + index: (0, bytes_1.bigIntToHex)(this.index), + validatorIndex: (0, bytes_1.bigIntToHex)(this.validatorIndex), + address: '0x' + this.address.buf.toString('hex'), + amount: (0, bytes_1.bigIntToHex)(this.amount), + }; + } +} +exports.Withdrawal = Withdrawal; +//# sourceMappingURL=withdrawal.js.map + +/***/ }), + +/***/ 56498: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +// ESLint gets confused by the nested list and tables in the docs, so we disable +// the rule for this file. +/* eslint-disable jsdoc/check-indentation, jsdoc/match-description */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decodeSingle = exports.decode = exports.encodePacked = exports.encodeSingle = exports.encode = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const packer_1 = __webpack_require__(37700); +/** + * Encode the data with the provided types. The types must be valid Solidity + * ABI types. + * + * This will attempt to parse the values into the correct types. For example, + * if you pass in a hex string for a `uint256`, it will be parsed into a + * `bigint`. Regular strings are interpreted as UTF-8 strings. 
If you want to + * pass in a hex string, you must pass it in as a `Uint8Array`, or use the + * "0x"-prefix. + * + * It will also attempt to infer the types of the values. For example, if you + * pass in a string for a `uint256`, it will result in a TypeScript compile-time + * error. This does not work for all types, however. For example, if you use + * nested arrays or tuples, the type will be inferred as `unknown`. + * + * The following types are supported: + * + * - `address`: A 20-byte Ethereum address. + * - As a 40-character-long hexadecimal string, starting with "0x". + * - As a 20-byte-long byte array, i.e., `Uint8Array`. + * - `bool`: A boolean value. + * - As a boolean literal, i.e., `true` or `false`. + * - As the strings "true" or "false". + * - `bytes(n)`: A dynamic byte array. + * - As a hexadecimal string, starting with "0x". + * - As a byte array, i.e., `Uint8Array`. + * - As a regular string, which will be interpreted as UTF-8. + * - `function`: A Solidity function. + * - As a 48-character-long hexadecimal string, starting with "0x". + * - As a 24-byte-long byte array, i.e., `Uint8Array`. + * - As a {@link SolidityFunction} object. + * - `int(n)`: A signed integer. + * - As a number. + * - As a `bigint`. + * - As a hexadecimal string, starting with "0x". + * - `string`: A dynamic UTF-8 string. + * - As a regular string. + * - As a hexadecimal string, starting with "0x". + * - As a byte array, i.e., `Uint8Array`. + * - `tuple`: A tuple of values. + * - As an array of values. + * - `uint(n)`: An unsigned integer. + * - As a number. + * - As a `bigint`. + * - As a hexadecimal string, starting with "0x". 
+ * + * @example + * ```typescript + * import { encode, decode } from '@metamask/abi-utils'; + * + * const types = ['uint256', 'string']; + * const encoded = encode(types, [42, 'Hello, world!']); + * const decoded = decode(types, encoded); + * + * console.log(decoded); // [42n, 'Hello, world!'] + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html + * @param types - The types to encode. + * @param values - The values to encode. This array must have the same length as + * the types array. + * @param packed - Whether to use the non-standard packed mode. Defaults to + * `false`. + * @param tight - Whether to pack the values tightly. When enabled, the values + * will be packed without any padding. This matches the behaviour of + * `ethereumjs-abi`. Defaults to `false`. + * @returns The ABI encoded bytes. + */ +const encode = (types, values, packed, tight) => { + try { + return (0, packer_1.pack)({ types, values, packed, tight }); + } + catch (error) { + if (error instanceof errors_1.ParserError) { + throw new errors_1.ParserError(`Unable to encode value: ${error.message}`, error); + } + throw new errors_1.ParserError(`An unexpected error occurred: ${(0, errors_1.getErrorMessage)(error)}`, error); + } +}; +exports.encode = encode; +/** + * Encode the data with the provided type. The type must be a valid Solidity + * ABI type. + * + * See {@link encode} for more information on how values are parsed. + * + * @example + * ```typescript + * import { encodeSingle, decodeSingle } from '@metamask/abi-utils'; + * + * const encoded = encodeSingle('uint256', 42); + * const decoded = decodeSingle('uint256', encoded); + * + * console.log(decoded); // 42n + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @param type - The type to encode. + * @param value - The value to encode. + * @returns The ABI encoded bytes. 
+ */ +const encodeSingle = (type, value) => { + return (0, exports.encode)([type], [value]); +}; +exports.encodeSingle = encodeSingle; +/** + * Encode the data with the provided types. The types must be valid Solidity + * ABI types. This is similar to {@link encode}, but the values are encoded in + * the non-standard packed mode. This differs from the standard encoding in the + * following ways: + * + * - Most values are packed tightly, without alignment padding. + * - The exception is array values, which are padded to 32 bytes. + * - Values are still padded to their full size, i.e., `uint16` values are still + * padded to 2 bytes, regardless of the length of the value. + * - The encoding of dynamic types (`bytes`, `string`) is different. The length + * of the dynamic type is not included in the encoding, and the dynamic type is + * not padded to a multiple of 32 bytes. + * - All values are encoded in-place, without any offsets. + * + * The encoding of this is ambiguous as soon as there is more than one dynamic + * type. That means that these values cannot be decoded with {@link decode} or + * Solidity's `abi.decode` function. + * + * See {@link encode} for more information on how values are parsed. + * + * @example + * ```typescript + * import { encodePacked } from '@metamask/abi-utils'; + * + * const encoded = encodePacked(['uint8'], [42]); + * + * console.log(encoded); // `Uint8Array [ 42 ]` + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#non-standard-packed-mode + * @param types - The types to encode. + * @param values - The values to encode. + * @param tight - Whether to pack the values tightly. When enabled, `bytesN` + * values in arrays will be packed without any padding. This matches the + * behaviour of `ethereumjs-abi`. Defaults to `false`. + * @returns The ABI encoded bytes. 
+ */ +const encodePacked = (types, values, tight) => { + return (0, exports.encode)(types, values, true, tight); +}; +exports.encodePacked = encodePacked; +/** + * Decode an ABI encoded buffer with the specified types. The types must be + * valid Solidity ABI types. + * + * This will attempt to infer the output types from the input types. For + * example, if you use `uint256` as an input type, the output type will be + * `bigint`. This does not work for all types, however. For example, if you use + * nested array types or tuple types, the output type will be `unknown`. + * + * The resulting types of the values will be as follows: + * + * | Contract ABI Type | Resulting JavaScript Type | + * | ----------------- | ------------------------- | + * | `address` | `string` | + * | `bool` | `boolean` | + * | `bytes(n)` | `Uint8Array` | + * | `function` | {@link SolidityFunction} | + * | `int(n)` | `bigint` | + * | `string` | `string` | + * | `tuple` | `Array` | + * | `array` | `Array` | + * | `uint(n)` | `bigint` | + * + * @example + * ```typescript + * import { encode, decode } from '@metamask/abi-utils'; + * + * const types = ['uint256', 'string']; + * const encoded = encode(types, [42, 'Hello, world!']); + * const decoded = decode(types, encoded); + * + * console.log(decoded); // [42n, 'Hello, world!'] + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @param types - The types to decode the bytes with. + * @param value - The bytes-like value to decode. + * @returns The decoded values as array. 
+ */ +const decode = (types, value) => { + const bytes = (0, utils_1.createBytes)(value); + try { + return (0, packer_1.unpack)(types, bytes); + } + catch (error) { + if (error instanceof errors_1.ParserError) { + throw new errors_1.ParserError(`Unable to decode value: ${error.message}`, error); + } + throw new errors_1.ParserError(`An unexpected error occurred: ${(0, errors_1.getErrorMessage)(error)}`, error); + } +}; +exports.decode = decode; +/** + * Decode the data with the provided type. The type must be a valid Solidity + * ABI type. + * + * See {@link decode} for more information on how values are parsed. + * + * @example + * ```typescript + * import { encodeSingle, decodeSingle } from '@metamask/abi-utils'; + * + * const encoded = encodeSingle('uint256', 42); + * const decoded = decodeSingle('uint256', encoded); + * + * console.log(decoded); // 42n + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @param type - The type to decode. + * @param value - The bytes-like value to decode. + * @returns The decoded value. + */ +const decodeSingle = (type, value) => { + const result = (0, exports.decode)([type], value); + (0, utils_1.assert)(result.length === 1, new errors_1.ParserError('Decoded value array has unexpected length.')); + return result[0]; +}; +exports.decodeSingle = decodeSingle; +//# sourceMappingURL=abi.js.map + +/***/ }), + +/***/ 5961: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ParserError = exports.getErrorStack = exports.getErrorMessage = void 0; +const utils_1 = __webpack_require__(22049); +/** + * Attempt to get an error message from a value. + * + * - If the value is an error, the error's message is returned. + * - If the value is an object with a `message` property, the value of that + * property is returned. + * - If the value is a string, the value is returned. 
+ * - Otherwise, "Unknown error." is returned. + * + * @param error - The value to get an error message from. + * @returns The error message. + * @internal + */ +const getErrorMessage = (error) => { + if (typeof error === 'string') { + return error; + } + if (error instanceof Error) { + return error.message; + } + if ((0, utils_1.isObject)(error) && + (0, utils_1.hasProperty)(error, 'message') && + typeof error.message === 'string') { + return error.message; + } + return 'Unknown error.'; +}; +exports.getErrorMessage = getErrorMessage; +/** + * Get the error stack from a value. If the value is an error, the error's stack + * is returned. Otherwise, it returns `undefined`. + * + * @param error - The value to get an error stack from. + * @returns The error stack, or `undefined` if the value is not an error. + * @internal + */ +const getErrorStack = (error) => { + if (error instanceof Error) { + return error.stack; + } + return undefined; +}; +exports.getErrorStack = getErrorStack; +/** + * An error that is thrown when the ABI encoder or decoder encounters an + * issue. + */ +class ParserError extends Error { + constructor(message, originalError) { + super(message); + this.name = 'ParserError'; + const originalStack = (0, exports.getErrorStack)(originalError); + if (originalStack) { + this.stack = originalStack; + } + } +} +exports.ParserError = ParserError; +//# sourceMappingURL=errors.js.map + +/***/ }), + +/***/ 93256: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__webpack_require__(56498), exports); +__exportStar(__webpack_require__(5961), exports); +__exportStar(__webpack_require__(11126), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 57924: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.iterate = void 0; +const utils_1 = __webpack_require__(22049); +/** + * Iterate over a buffer with the specified size. This will yield a part of the + * buffer starting at an increment of the specified size, until the end of the + * buffer is reached. + * + * Calling the `skip` function will make it skip the specified number of bytes. + * + * @param buffer - The buffer to iterate over. + * @param size - The number of bytes to iterate with. + * @returns An iterator that yields the parts of the byte array. + * @yields The parts of the byte array. 
+ */ +const iterate = function* (buffer, size = 32) { + for (let pointer = 0; pointer < buffer.length; pointer += size) { + const skip = (length) => { + (0, utils_1.assert)(length >= 0, 'Cannot skip a negative number of bytes.'); + (0, utils_1.assert)(length % size === 0, 'Length must be a multiple of the size.'); + pointer += length; + }; + const value = buffer.subarray(pointer); + yield { skip, value }; + } + return { + skip: () => undefined, + value: new Uint8Array(), + }; +}; +exports.iterate = iterate; +//# sourceMappingURL=iterator.js.map + +/***/ }), + +/***/ 37700: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.unpack = exports.pack = exports.isDynamicParser = exports.getParser = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const iterator_1 = __webpack_require__(57924); +const parsers_1 = __webpack_require__(46207); +const utils_2 = __webpack_require__(26365); +/** + * Get the parser for the specified type. + * + * @param type - The type to get a parser for. + * @returns The parser. + * @throws If there is no parser for the specified type. + */ +const getParser = (type) => { + const parsers = { + address: parsers_1.address, + array: parsers_1.array, + bool: parsers_1.bool, + bytes: parsers_1.bytes, + fixedBytes: parsers_1.fixedBytes, + function: parsers_1.fn, + number: parsers_1.number, + string: parsers_1.string, + tuple: parsers_1.tuple, + }; + const staticParser = parsers[type]; + if (staticParser) { + return staticParser; + } + const parser = Object.values(parsers).find((value) => value.isType(type)); + if (parser) { + return parser; + } + throw new errors_1.ParserError(`The type "${type}" is not supported.`); +}; +exports.getParser = getParser; +/** + * Check if the specified parser is dynamic, for the provided types. 
This is + * primarily used for parsing tuples, where a tuple can be dynamic based on the + * types. For other parsers, it will simply use the set `isDynamic` value. + * + * @param parser - The parser to check. + * @param type - The type to check the parser with. + * @returns Whether the parser is dynamic. + */ +const isDynamicParser = (parser, type) => { + const { isDynamic } = parser; + if (typeof isDynamic === 'function') { + return isDynamic(type); + } + return isDynamic; +}; +exports.isDynamicParser = isDynamicParser; +/** + * Pack the provided values in a buffer, encoded with the specified types. If a + * buffer is specified, the resulting value will be concatenated with the + * buffer. + * + * @param args - The arguments object. + * @param args.types - The types of the values to pack. + * @param args.values - The values to pack. + * @param args.packed - Whether to use the non-standard packed mode. Defaults to + * `false`. + * @param args.arrayPacked - Whether to use the non-standard packed mode for + * arrays. Defaults to `false`. + * @param args.byteArray - The byte array to encode the values into. Defaults to + * an empty array. + * @param args.tight - Whether to use tight packing mode. Only applicable when + * `packed` is true. When true, the packed mode will not add any padding bytes. + * This matches the packing behaviour of `ethereumjs-abi`, but is not standard. + * @returns The resulting encoded buffer. 
+ */ +const pack = ({ types, values, packed = false, tight = false, arrayPacked = false, byteArray = new Uint8Array(), }) => { + (0, utils_1.assert)(types.length === values.length, new errors_1.ParserError(`The number of types (${types.length}) does not match the number of values (${values.length}).`)); + const { staticBuffer, dynamicBuffer, pointers } = types.reduce( + // eslint-disable-next-line @typescript-eslint/no-shadow + ({ staticBuffer, dynamicBuffer, pointers }, type, index) => { + const parser = (0, exports.getParser)(type); + const value = values[index]; + // If packed mode is enabled, we can skip the dynamic check, as all + // values are encoded in the static buffer. + if (packed || arrayPacked || !(0, exports.isDynamicParser)(parser, type)) { + return { + staticBuffer: parser.encode({ + buffer: staticBuffer, + value, + type, + packed, + tight, + }), + dynamicBuffer, + pointers, + }; + } + const newStaticBuffer = (0, utils_1.concatBytes)([staticBuffer, new Uint8Array(32)]); + const newDynamicBuffer = parser.encode({ + buffer: dynamicBuffer, + value, + type, + packed, + tight, + }); + return { + staticBuffer: newStaticBuffer, + dynamicBuffer: newDynamicBuffer, + pointers: [ + ...pointers, + { position: staticBuffer.length, pointer: dynamicBuffer.length }, + ], + }; + }, { + staticBuffer: new Uint8Array(), + dynamicBuffer: new Uint8Array(), + pointers: [], + }); + // If packed mode is enabled, there shouldn't be any dynamic values. 
+ (0, utils_1.assert)((!packed && !arrayPacked) || dynamicBuffer.length === 0, new errors_1.ParserError('Invalid pack state.')); + const dynamicStart = staticBuffer.length; + const updatedBuffer = pointers.reduce((target, { pointer, position }) => { + const offset = (0, utils_2.padStart)((0, utils_1.numberToBytes)(dynamicStart + pointer)); + return (0, utils_2.set)(target, offset, position); + }, staticBuffer); + return (0, utils_1.concatBytes)([byteArray, updatedBuffer, dynamicBuffer]); +}; +exports.pack = pack; +const unpack = (types, buffer) => { + const iterator = (0, iterator_1.iterate)(buffer); + return types.map((type) => { + const { value: { value, skip }, done, } = iterator.next(); + (0, utils_1.assert)(!done, new errors_1.ParserError(`The encoded value is invalid for the provided types. Reached end of buffer while attempting to parse "${type}".`)); + const parser = (0, exports.getParser)(type); + const isDynamic = (0, exports.isDynamicParser)(parser, type); + if (isDynamic) { + const pointer = (0, utils_1.bytesToNumber)(value.subarray(0, 32)); + const target = buffer.subarray(pointer); + return parser.decode({ type, value: target, skip }); + } + return parser.decode({ type, value, skip }); + }); +}; +exports.unpack = unpack; +//# sourceMappingURL=packer.js.map + +/***/ }), + +/***/ 91563: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.address = exports.getAddress = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const utils_2 = __webpack_require__(26365); +/** + * Normalize an address value. This accepts the address as: + * + * - A hex string starting with the `0x` prefix. + * - A byte array (`Uint8Array` or `Buffer`). + * + * It checks that the address is 20 bytes long. + * + * @param value - The value to normalize. + * @returns The normalized address as `Uint8Array`. 
+ */ +const getAddress = (value) => { + const bytesValue = (0, utils_1.createBytes)(value); + (0, utils_1.assert)(bytesValue.length <= 20, new errors_1.ParserError(`Invalid address value. Expected address to be 20 bytes long, but received ${bytesValue.length} bytes.`)); + return (0, utils_2.padStart)(bytesValue, 20); +}; +exports.getAddress = getAddress; +exports.address = { + isDynamic: false, + /** + * Get if the given value is a valid address type. Since `address` is a simple + * type, this is just a check that the value is "address". + * + * @param type - The type to check. + * @returns Whether the type is a valid address type. + */ + isType: (type) => type === 'address', + /** + * Get the byte length of an encoded address. Since `address` is a simple + * type, this always returns 32. + * + * Note that actual addresses are only 20 bytes long, but the encoding of + * the `address` type is always 32 bytes long. + * + * @returns The byte length of an encoded address. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given address to a 32-byte-long byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The address to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded address added to it. + */ + encode({ buffer, value, packed }) { + const addressValue = (0, exports.getAddress)(value); + // If we're using packed encoding, we can just add the address bytes to the + // byte array, without adding any padding. + if (packed) { + return (0, utils_1.concatBytes)([buffer, addressValue]); + } + const addressBuffer = (0, utils_2.padStart)(addressValue); + return (0, utils_1.concatBytes)([buffer, addressBuffer]); + }, + /** + * Decode the given byte array to an address. + * + * @param args - The decoding arguments. + * @param args.value - The byte array to decode. 
+ * @returns The decoded address as a hexadecimal string, starting with the + * "0x"-prefix. + */ + decode({ value }) { + return (0, utils_1.add0x)((0, utils_1.bytesToHex)(value.slice(12, 32))); + }, +}; +//# sourceMappingURL=address.js.map + +/***/ }), + +/***/ 186: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.array = exports.getTupleType = exports.getArrayType = exports.isArrayType = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const packer_1 = __webpack_require__(37700); +const utils_2 = __webpack_require__(26365); +const fixed_bytes_1 = __webpack_require__(83415); +const tuple_1 = __webpack_require__(30717); +const ARRAY_REGEX = /^(?.*)\[(?\d*?)\]$/u; +const isArrayType = (type) => ARRAY_REGEX.test(type); +exports.isArrayType = isArrayType; +/** + * Get the type of the array. + * + * @param type - The type to get the array type for. + * @returns The array type. + */ +const getArrayType = (type) => { + const match = type.match(ARRAY_REGEX); + (0, utils_1.assert)(match?.groups?.type, new errors_1.ParserError(`Invalid array type. Expected an array type, but received "${type}".`)); + return [ + match.groups.type, + match.groups.length ? parseInt(match.groups.length, 10) : undefined, + ]; +}; +exports.getArrayType = getArrayType; +/** + * Get the type of the array as a tuple type. This is used for encoding fixed + * length arrays, which are encoded as tuples. + * + * @param innerType - The type of the array. + * @param length - The length of the array. + * @returns The tuple type. + */ +const getTupleType = (innerType, length) => { + return `(${new Array(length).fill(innerType).join(',')})`; +}; +exports.getTupleType = getTupleType; +exports.array = { + /** + * Check if the array is dynamic. Arrays are dynamic if the array does not + * have a fixed length, or if the array type is dynamic. 
+ * + * @param type - The type to check. + * @returns Whether the array is dynamic. + */ + isDynamic(type) { + const [innerType, length] = (0, exports.getArrayType)(type); + return ( + // `T[]` is dynamic for any `T`. `T[k]` is dynamic for any dynamic `T` and + // any `k >= 0`. + length === undefined || (0, packer_1.isDynamicParser)((0, packer_1.getParser)(innerType), innerType)); + }, + /** + * Check if a type is an array type. + * + * @param type - The type to check. + * @returns Whether the type is an array type. + */ + isType(type) { + return (0, exports.isArrayType)(type); + }, + /** + * Get the byte length of an encoded array. If the array is dynamic, this + * returns 32, i.e., the length of the pointer to the array. If the array is + * static, this returns the byte length of the resulting tuple type. + * + * @param type - The type to get the byte length for. + * @returns The byte length of an encoded array. + */ + getByteLength(type) { + (0, utils_1.assert)((0, exports.isArrayType)(type), new errors_1.ParserError(`Expected an array type, but received "${type}".`)); + const [innerType, length] = (0, exports.getArrayType)(type); + if (!(0, packer_1.isDynamicParser)(this, type) && length !== undefined) { + return tuple_1.tuple.getByteLength((0, exports.getTupleType)(innerType, length)); + } + return 32; + }, + /** + * Encode the given array to a byte array. If the array is static, this uses + * the tuple encoder. + * + * @param args - The encoding arguments. + * @param args.type - The type of the array. + * @param args.buffer - The byte array to add to. + * @param args.value - The array to encode. + * @param args.packed - Whether to use non-standard packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded array added to it. 
+ */ + encode({ type, buffer, value, packed, tight }) { + const [arrayType, fixedLength] = (0, exports.getArrayType)(type); + // Packed encoding does not support nested arrays. + (0, utils_1.assert)(!packed || !(0, exports.isArrayType)(arrayType), new errors_1.ParserError(`Cannot pack nested arrays.`)); + // Tightly pack `T[]` where `T` is a dynamic type. This is not supported in + // Solidity, but is commonly used in the Ethereum ecosystem. + if (packed && (0, packer_1.isDynamicParser)((0, packer_1.getParser)(arrayType), arrayType)) { + return (0, packer_1.pack)({ + types: new Array(value.length).fill(arrayType), + values: value, + byteArray: buffer, + packed, + arrayPacked: true, + tight, + }); + } + if (fixedLength) { + (0, utils_1.assert)(fixedLength === value.length, new errors_1.ParserError(`Array length does not match type length. Expected a length of ${fixedLength}, but received ${value.length}.`)); + // `T[k]` for any `T` and `k` is encoded as `(T[0], ..., T[k - 1])`. + return tuple_1.tuple.encode({ + type: (0, exports.getTupleType)(arrayType, fixedLength), + buffer, + value, + // In "tight" mode, we don't pad the values to 32 bytes if the value is + // of type `bytesN`. This is an edge case in `ethereumjs-abi` that we + // support to provide compatibility with it. + packed: fixed_bytes_1.fixedBytes.isType(arrayType) && tight, + tight, + }); + } + // For packed encoding, we don't need to encode the length of the array, + // so we can just encode the values. + if (packed) { + return (0, packer_1.pack)({ + types: new Array(value.length).fill(arrayType), + values: value, + byteArray: buffer, + // In "tight" mode, we don't pad the values to 32 bytes if the value is + // of type `bytesN`. This is an edge case in `ethereumjs-abi` that we + // support to provide compatibility with it. + packed: fixed_bytes_1.fixedBytes.isType(arrayType) && tight, + arrayPacked: true, + tight, + }); + } + // `T[]` with `k` elements is encoded as `k (T[0], ..., T[k - 1])`. 
That + // means that we just need to encode the length of the array, and then the + // array itself. The pointer is encoded by the {@link pack} function. + const arrayLength = (0, utils_2.padStart)((0, utils_1.numberToBytes)(value.length)); + return (0, packer_1.pack)({ + types: new Array(value.length).fill(arrayType), + values: value, + byteArray: (0, utils_1.concatBytes)([buffer, arrayLength]), + packed, + tight, + }); + }, + /** + * Decode an array from the given byte array. + * + * @param args - The decoding arguments. + * @param args.type - The type of the array. + * @param args.value - The byte array to decode. + * @returns The decoded array. + */ + decode({ type, value, ...rest }) { + const [arrayType, fixedLength] = (0, exports.getArrayType)(type); + if (fixedLength) { + const result = tuple_1.tuple.decode({ + type: (0, exports.getTupleType)(arrayType, fixedLength), + value, + ...rest, + }); + (0, utils_1.assert)(result.length === fixedLength, new errors_1.ParserError(`Array length does not match type length. Expected a length of ${fixedLength}, but received ${result.length}.`)); + return result; + } + const arrayLength = (0, utils_1.bytesToNumber)(value.subarray(0, 32)); + return (0, packer_1.unpack)(new Array(arrayLength).fill(arrayType), value.subarray(32)); + }, +}; +//# sourceMappingURL=array.js.map + +/***/ }), + +/***/ 47435: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.bool = exports.getBooleanValue = void 0; +const utils_1 = __webpack_require__(22049); +const superstruct_1 = __webpack_require__(2150); +const errors_1 = __webpack_require__(5961); +const number_1 = __webpack_require__(6150); +const BooleanCoercer = (0, superstruct_1.coerce)((0, superstruct_1.boolean)(), (0, superstruct_1.union)([(0, superstruct_1.literal)('true'), (0, superstruct_1.literal)('false')]), (value) => value === 'true'); +/** + * Normalize a boolean value. 
This accepts the boolean as: + * + * - A boolean literal. + * - The string "true" or "false". + * + * @param value - The value to get a boolean for. + * @returns The parsed boolean value. This is `BigInt(1)` for truthy values, or + * `BigInt(0)` for falsy values. + */ +const getBooleanValue = (value) => { + try { + const booleanValue = (0, superstruct_1.create)(value, BooleanCoercer); + if (booleanValue) { + return BigInt(1); + } + return BigInt(0); + } + catch { + throw new errors_1.ParserError(`Invalid boolean value. Expected a boolean literal, or the string "true" or "false", but received "${value}".`); + } +}; +exports.getBooleanValue = getBooleanValue; +exports.bool = { + isDynamic: false, + /** + * Get if the given value is a valid boolean type. Since `bool` is a simple + * type, this is just a check that the value is "bool". + * + * @param type - The type to check. + * @returns Whether the type is a valid boolean type. + */ + isType: (type) => type === 'bool', + /** + * Get the byte length of an encoded boolean. Since `bool` is a simple + * type, this always returns 32. + * + * Note that actual booleans are only 1 byte long, but the encoding of + * the `bool` type is always 32 bytes long. + * + * @returns The byte length of an encoded boolean. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given boolean to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The boolean to encode. + * @param args.packed - Whether the value is packed. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded boolean added to it. + */ + encode({ buffer, value, packed, tight }) { + const booleanValue = (0, exports.getBooleanValue)(value); + // For packed encoding, we add a single byte (`0x00` or `0x01`) to the byte + // array. 
+ if (packed) { + return (0, utils_1.concatBytes)([buffer, (0, utils_1.bigIntToBytes)(booleanValue)]); + } + // Booleans are encoded as 32-byte integers, so we use the number parser + // to encode the boolean value. + return number_1.number.encode({ + type: 'uint256', + buffer, + value: booleanValue, + packed, + tight, + }); + }, + /** + * Decode the given byte array to a boolean. + * + * @param args - The decoding arguments. + * @returns The decoded boolean. + */ + decode(args) { + // Booleans are encoded as 32-byte integers, so we use the number parser + // to decode the boolean value. + return number_1.number.decode({ ...args, type: 'uint256' }) === BigInt(1); + }, +}; +//# sourceMappingURL=bool.js.map + +/***/ }), + +/***/ 99356: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.bytes = void 0; +const utils_1 = __webpack_require__(22049); +const utils_2 = __webpack_require__(26365); +exports.bytes = { + isDynamic: true, + /** + * Check if a type is a bytes type. Since `bytes` is a simple type, this is + * just a check that the type is "bytes". + * + * @param type - The type to check. + * @returns Whether the type is a bytes type. + */ + isType: (type) => type === 'bytes', + /** + * Get the byte length of an encoded bytes value. Since `bytes` is a simple + * type, this always returns 32. + * + * Note that actual length of a bytes value is variable, but the encoded + * static value (pointer) is always 32 bytes long. + * + * @returns The byte length of an encoded bytes value. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given bytes value to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The bytes value to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded bytes value added to it. 
+ */ + encode({ buffer, value, packed }) { + const bufferValue = (0, utils_1.createBytes)(value); + // For packed encoding, we can just add the bytes value to the byte array, + // without adding any padding or alignment. There is also no need to + // encode the length of the bytes. + if (packed) { + return (0, utils_1.concatBytes)([buffer, bufferValue]); + } + const paddedSize = Math.ceil(bufferValue.byteLength / 32) * 32; + // Bytes of length `k` are encoded as `k pad_right(bytes)`. + return (0, utils_1.concatBytes)([ + buffer, + (0, utils_2.padStart)((0, utils_1.numberToBytes)(bufferValue.byteLength)), + (0, utils_2.padEnd)(bufferValue, paddedSize), + ]); + }, + /** + * Decode the given byte array to a bytes value. + * + * @param args - The decoding arguments. + * @param args.value - The byte array to decode. + * @returns The decoded bytes value as a `Uint8Array`. + */ + decode({ value }) { + const bytesValue = value.subarray(0, 32); + const length = (0, utils_1.bytesToNumber)(bytesValue); + // Since we're returning a `Uint8Array`, we use `slice` to copy the bytes + // into a new array. + return value.slice(32, 32 + length); + }, +}; +//# sourceMappingURL=bytes.js.map + +/***/ }), + +/***/ 83415: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fixedBytes = exports.getByteLength = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const utils_2 = __webpack_require__(26365); +const BYTES_REGEX = /^bytes([0-9]{1,2})$/u; +/** + * Get the length of the specified type. If a length is not specified, or if the + * length is out of range (0 < n <= 32), this will throw an error. + * + * @param type - The type to get the length for. + * @returns The byte length of the type. + */ +const getByteLength = (type) => { + const bytes = type.match(BYTES_REGEX)?.[1]; + (0, utils_1.assert)(bytes, `Invalid byte length. 
Expected a number between 1 and 32, but received "${type}".`); + const length = Number(bytes); + (0, utils_1.assert)(length > 0 && length <= 32, new errors_1.ParserError(`Invalid byte length. Expected a number between 1 and 32, but received "${type}".`)); + return length; +}; +exports.getByteLength = getByteLength; +exports.fixedBytes = { + isDynamic: false, + /** + * Check if a type is a fixed bytes type. + * + * @param type - The type to check. + * @returns Whether the type is a fixed bytes type. + */ + isType(type) { + return BYTES_REGEX.test(type); + }, + /** + * Get the byte length of an encoded fixed bytes type. + * + * @returns The byte length of the type. + */ + getByteLength() { + return 32; + }, + /** + * Encode a fixed bytes value. + * + * @param args - The arguments to encode. + * @param args.type - The type of the value. + * @param args.buffer - The byte array to add to. + * @param args.value - The value to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded value added to it. + */ + encode({ type, buffer, value, packed }) { + const length = (0, exports.getByteLength)(type); + const bufferValue = (0, utils_1.createBytes)(value); + (0, utils_1.assert)(bufferValue.length <= length, new errors_1.ParserError(`Expected a value of length ${length}, but received a value of length ${bufferValue.length}.`)); + // For packed encoding, the value is padded to the length of the type, and + // then added to the byte array. + if (packed) { + return (0, utils_1.concatBytes)([buffer, (0, utils_2.padEnd)(bufferValue, length)]); + } + return (0, utils_1.concatBytes)([buffer, (0, utils_2.padEnd)(bufferValue)]); + }, + /** + * Decode a fixed bytes value. + * + * @param args - The arguments to decode. + * @param args.type - The type of the value. + * @param args.value - The value to decode. + * @returns The decoded value as a `Uint8Array`. 
+ */ + decode({ type, value }) { + const length = (0, exports.getByteLength)(type); + // Since we're returning a `Uint8Array`, we use `slice` to copy the bytes + // into a new array. + return value.slice(0, length); + }, +}; +//# sourceMappingURL=fixed-bytes.js.map + +/***/ }), + +/***/ 27827: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fn = exports.getFunction = void 0; +const utils_1 = __webpack_require__(22049); +const superstruct_1 = __webpack_require__(2150); +const errors_1 = __webpack_require__(5961); +const fixed_bytes_1 = __webpack_require__(83415); +/** + * A struct that represents a Solidity function. The value must be a hex string + * or a byte array. The created value will always be an object with an `address` + * and `selector` property. + */ +const FunctionStruct = (0, superstruct_1.coerce)((0, superstruct_1.object)({ + address: utils_1.StrictHexStruct, + selector: utils_1.StrictHexStruct, +}), (0, superstruct_1.union)([utils_1.StrictHexStruct, (0, superstruct_1.instance)(Uint8Array)]), (value) => { + const bytes = (0, utils_1.createBytes)(value); + (0, utils_1.assert)(bytes.length === 24, new errors_1.ParserError(`Invalid Solidity function. Expected function to be 24 bytes long, but received ${bytes.length} bytes.`)); + return { + address: (0, utils_1.bytesToHex)(bytes.subarray(0, 20)), + selector: (0, utils_1.bytesToHex)(bytes.subarray(20, 24)), + }; +}); +/** + * Normalize a function. This accepts the function as: + * + * - A {@link SolidityFunction} object. + * - A hexadecimal string. + * - A byte array. + * + * @param input - The function-like input. + * @returns The function as buffer. 
+ */ +const getFunction = (input) => { + const value = (0, superstruct_1.create)(input, FunctionStruct); + return (0, utils_1.concatBytes)([(0, utils_1.hexToBytes)(value.address), (0, utils_1.hexToBytes)(value.selector)]); +}; +exports.getFunction = getFunction; +exports.fn = { + isDynamic: false, + /** + * Check if a type is a function type. Since `function` is a simple type, this + * is just a check that the type is "function". + * + * @param type - The type to check. + * @returns Whether the type is a function type. + */ + isType: (type) => type === 'function', + /** + * Get the byte length of an encoded function. Since `function` is a simple + * type, this always returns 32. + * + * Note that actual functions are only 24 bytes long, but the encoding of + * the `function` type is always 32 bytes long. + * + * @returns The byte length of an encoded function. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given function to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The function to encode. + * @param args.packed - Whether to use packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded function added to it. + */ + encode({ buffer, value, packed, tight }) { + const fnValue = (0, exports.getFunction)(value); + // Functions are encoded as `bytes24`, so we use the fixedBytes parser to + // encode the function. + return fixed_bytes_1.fixedBytes.encode({ + type: 'bytes24', + buffer, + value: fnValue, + packed, + tight, + }); + }, + /** + * Decode the given byte array to a function. + * + * @param args - The decoding arguments. + * @param args.value - The byte array to decode. + * @returns The decoded function as a {@link SolidityFunction} object. 
+ */ + decode({ value }) { + return { + address: (0, utils_1.bytesToHex)(value.slice(0, 20)), + selector: (0, utils_1.bytesToHex)(value.slice(20, 24)), + }; + }, +}; +//# sourceMappingURL=function.js.map + +/***/ }), + +/***/ 46207: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__webpack_require__(91563), exports); +__exportStar(__webpack_require__(186), exports); +__exportStar(__webpack_require__(47435), exports); +__exportStar(__webpack_require__(99356), exports); +__exportStar(__webpack_require__(83415), exports); +__exportStar(__webpack_require__(27827), exports); +__exportStar(__webpack_require__(6150), exports); +__exportStar(__webpack_require__(28160), exports); +__exportStar(__webpack_require__(8446), exports); +__exportStar(__webpack_require__(30717), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 6150: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.number = exports.getBigInt = exports.assertNumberLength = exports.getLength = exports.isSigned = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); 
+const utils_2 = __webpack_require__(26365); +const NUMBER_REGEX = /^u?int(?[0-9]*)?$/u; +/** + * Check if a number type is signed. + * + * @param type - The type to check. + * @returns Whether the type is signed. + */ +const isSigned = (type) => { + return !type.startsWith('u'); +}; +exports.isSigned = isSigned; +/** + * Get the length of the specified type. If a length is not specified, if the + * length is out of range (8 <= n <= 256), or if the length is not a multiple of + * 8, this will throw an error. + * + * @param type - The type to get the length for. + * @returns The bit length of the type. + */ +const getLength = (type) => { + if (type === 'int' || type === 'uint') { + return 256; + } + const match = type.match(NUMBER_REGEX); + (0, utils_1.assert)(match?.groups?.length, new errors_1.ParserError(`Invalid number type. Expected a number type, but received "${type}".`)); + const length = parseInt(match.groups.length, 10); + (0, utils_1.assert)(length >= 8 && length <= 256, new errors_1.ParserError(`Invalid number length. Expected a number between 8 and 256, but received "${type}".`)); + (0, utils_1.assert)(length % 8 === 0, new errors_1.ParserError(`Invalid number length. Expected a multiple of 8, but received "${type}".`)); + return length; +}; +exports.getLength = getLength; +/** + * Assert that the byte length of the given value is in range for the given + * number type. + * + * @param value - The value to check. + * @param type - The type of the value. + * @throws If the value is out of range for the type. + */ +const assertNumberLength = (value, type) => { + const length = (0, exports.getLength)(type); + const maxValue = BigInt(2) ** BigInt(length - ((0, exports.isSigned)(type) ? 1 : 0)) - BigInt(1); + if ((0, exports.isSigned)(type)) { + // Signed types must be in the range of `-(2^(length - 1))` to + // `2^(length - 1) - 1`. 
+ (0, utils_1.assert)(value >= -(maxValue + BigInt(1)) && value <= maxValue, new errors_1.ParserError(`Number "${value}" is out of range for type "${type}".`)); + return; + } + // Unsigned types must be in the range of `0` to `2^length - 1`. + (0, utils_1.assert)(value <= maxValue, new errors_1.ParserError(`Number "${value}" is out of range for type "${type}".`)); +}; +exports.assertNumberLength = assertNumberLength; +/** + * Normalize a `bigint` value. This accepts the value as: + * + * - A `bigint`. + * - A `number`. + * - A decimal string, i.e., a string that does not start with "0x". + * - A hexadecimal string, i.e., a string that starts with "0x". + * + * @param value - The number-like value to parse. + * @returns The value parsed as bigint. + */ +const getBigInt = (value) => { + try { + return (0, utils_1.createBigInt)(value); + } + catch { + throw new errors_1.ParserError(`Invalid number. Expected a valid number value, but received "${value}".`); + } +}; +exports.getBigInt = getBigInt; +exports.number = { + isDynamic: false, + /** + * Check if a type is a number type. + * + * @param type - The type to check. + * @returns Whether the type is a number type. + */ + isType(type) { + return NUMBER_REGEX.test(type); + }, + /** + * Get the byte length of an encoded number type. Since `int` and `uint` are + * simple types, this will always return 32. + * + * @returns The byte length of the type. + */ + getByteLength() { + return 32; + }, + /** + * Encode a number value. + * + * @param args - The arguments to encode. + * @param args.type - The type of the value. + * @param args.buffer - The byte array to add to. + * @param args.value - The value to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded value added to it. 
+ */ + encode({ type, buffer, value, packed }) { + const bigIntValue = (0, exports.getBigInt)(value); + (0, exports.assertNumberLength)(bigIntValue, type); + if ((0, exports.isSigned)(type)) { + // For packed encoding, the value is padded to the length of the type, and + // then added to the byte array. + if (packed) { + const length = (0, exports.getLength)(type) / 8; + return (0, utils_1.concatBytes)([buffer, (0, utils_1.signedBigIntToBytes)(bigIntValue, length)]); + } + return (0, utils_1.concatBytes)([ + buffer, + (0, utils_2.padStart)((0, utils_1.signedBigIntToBytes)(bigIntValue, 32)), + ]); + } + // For packed encoding, the value is padded to the length of the type, and + // then added to the byte array. + if (packed) { + const length = (0, exports.getLength)(type) / 8; + return (0, utils_1.concatBytes)([ + buffer, + (0, utils_2.padStart)((0, utils_1.bigIntToBytes)(bigIntValue), length), + ]); + } + return (0, utils_1.concatBytes)([buffer, (0, utils_2.padStart)((0, utils_1.bigIntToBytes)(bigIntValue))]); + }, + /** + * Decode a number value. + * + * @param args - The decoding arguments. + * @param args.type - The type of the value. + * @param args.value - The value to decode. + * @returns The decoded value. 
+ */ + decode({ type, value }) { + const buffer = value.subarray(0, 32); + if ((0, exports.isSigned)(type)) { + const numberValue = (0, utils_1.bytesToSignedBigInt)(buffer); + (0, exports.assertNumberLength)(numberValue, type); + return numberValue; + } + const numberValue = (0, utils_1.bytesToBigInt)(buffer); + (0, exports.assertNumberLength)(numberValue, type); + return numberValue; + }, +}; +//# sourceMappingURL=number.js.map + +/***/ }), + +/***/ 28160: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +//# sourceMappingURL=parser.js.map + +/***/ }), + +/***/ 8446: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.string = void 0; +const utils_1 = __webpack_require__(22049); +const bytes_1 = __webpack_require__(99356); +exports.string = { + isDynamic: true, + /** + * Check if a type is a string type. Since `string` is a simple type, this + * is just a check if the type is "string". + * + * @param type - The type to check. + * @returns Whether the type is a string type. + */ + isType: (type) => type === 'string', + /** + * Get the byte length of an encoded string type. Since `string` is a simple + * type, this will always return 32. + * + * Note that actual strings are variable in length, but the encoded static + * value (pointer) is always 32 bytes long. + * + * @returns The byte length of an encoded string. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given string value to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The string value to encode. + * @param args.packed - Whether to use packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded string value added to it. 
+ */ + encode({ buffer, value, packed, tight }) { + // Strings are encoded as UTF-8 bytes, so we use the bytes parser to encode + // the string as bytes. + return bytes_1.bytes.encode({ + type: 'bytes', + buffer, + value: (0, utils_1.stringToBytes)(value), + packed, + tight, + }); + }, + /** + * Decode the given byte array to a string value. + * + * @param args - The decoding arguments. + * @returns The decoded string value. + */ + decode(args) { + // Strings are encoded as UTF-8 bytes, so we use the bytes parser to decode + // the bytes, and convert them to a string. + return (0, utils_1.bytesToString)(bytes_1.bytes.decode(args)); + }, +}; +//# sourceMappingURL=string.js.map + +/***/ }), + +/***/ 30717: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.tuple = exports.getTupleElements = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const packer_1 = __webpack_require__(37700); +const TUPLE_REGEX = /^\((.+)\)$/u; +const isTupleType = (type) => TUPLE_REGEX.test(type); +/** + * Get elements from a tuple type. + * + * @param type - The tuple type to get the types for. + * @returns The elements of the tuple as string array. + */ +const getTupleElements = (type) => { + (0, utils_1.assert)(type.startsWith('(') && type.endsWith(')'), new errors_1.ParserError(`Invalid tuple type. 
Expected tuple type, but received "${type}".`)); + const elements = []; + let current = ''; + let depth = 0; + for (let i = 1; i < type.length - 1; i++) { + const char = type[i]; + if (char === ',' && depth === 0) { + elements.push(current.trim()); + current = ''; + } + else { + current += char; + if (char === '(') { + depth += 1; + } + else if (char === ')') { + depth -= 1; + } + } + } + if (current.trim()) { + elements.push(current.trim()); + } + return elements; +}; +exports.getTupleElements = getTupleElements; +exports.tuple = { + /** + * Check if the tuple is dynamic. Tuples are dynamic if one or more elements + * of the tuple are dynamic. + * + * @param type - The type to check. + * @returns Whether the tuple is dynamic. + */ + isDynamic(type) { + const elements = (0, exports.getTupleElements)(type); + return elements.some((element) => { + const parser = (0, packer_1.getParser)(element); + return (0, packer_1.isDynamicParser)(parser, element); + }); + }, + /** + * Check if a type is a tuple type. + * + * @param type - The type to check. + * @returns Whether the type is a tuple type. + */ + isType(type) { + return isTupleType(type); + }, + /** + * Get the byte length of a tuple type. If the tuple is dynamic, this will + * always return 32. If the tuple is static, this will return the sum of the + * byte lengths of the tuple elements. + * + * @param type - The type to get the byte length for. + * @returns The byte length of the tuple type. + */ + getByteLength(type) { + if ((0, packer_1.isDynamicParser)(this, type)) { + return 32; + } + const elements = (0, exports.getTupleElements)(type); + return elements.reduce((total, element) => { + return total + (0, packer_1.getParser)(element).getByteLength(element); + }, 0); + }, + /** + * Encode a tuple value. + * + * @param args - The encoding arguments. + * @param args.type - The type of the value. + * @param args.buffer - The byte array to add to. + * @param args.value - The value to encode. 
+ * @param args.packed - Whether to use non-standard packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded value added to it. + */ + encode({ type, buffer, value, packed, tight }) { + const elements = (0, exports.getTupleElements)(type); + return (0, packer_1.pack)({ + types: elements, + values: value, + byteArray: buffer, + packed, + tight, + }); + }, + /** + * Decode a tuple value. + * + * @param args - The decoding arguments. + * @param args.type - The type of the value. + * @param args.value - The value to decode. + * @param args.skip - A function to skip a number of bytes. + * @returns The decoded value. + */ + decode({ type, value, skip }) { + const elements = (0, exports.getTupleElements)(type); + const length = this.getByteLength(type) - 32; + skip(length); + return (0, packer_1.unpack)(elements, value); + }, +}; +//# sourceMappingURL=tuple.js.map + +/***/ }), + +/***/ 15744: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +//# sourceMappingURL=abi.js.map + +/***/ }), + +/***/ 11126: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__webpack_require__(15744), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 59194: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.padEnd = exports.padStart = exports.set = void 0; +const utils_1 = __webpack_require__(22049); +const BUFFER_WIDTH = 32; +/** + * Set `buffer` in `target` at the specified position. + * + * @param target - The buffer to set to. + * @param buffer - The buffer to set in the target. + * @param position - The position at which to set the target. + * @returns The combined buffer. + */ +const set = (target, buffer, position) => { + return (0, utils_1.concatBytes)([ + target.subarray(0, position), + buffer, + target.subarray(position + buffer.length), + ]); +}; +exports.set = set; +/** + * Add padding to a buffer. If the buffer is larger than `length`, this function won't do anything. If it's smaller, the + * buffer will be padded to the specified length, with extra zeroes at the start. + * + * @param buffer - The buffer to add padding to. + * @param length - The number of bytes to pad the buffer to. + * @returns The padded buffer. + */ +const padStart = (buffer, length = BUFFER_WIDTH) => { + const padding = new Uint8Array(Math.max(length - buffer.length, 0)).fill(0x00); + return (0, utils_1.concatBytes)([padding, buffer]); +}; +exports.padStart = padStart; +/** + * Add padding to a buffer. 
If the buffer is larger than `length`, this function won't do anything. If it's smaller, the + * buffer will be padded to the specified length, with extra zeroes at the end. + * + * @param buffer - The buffer to add padding to. + * @param length - The number of bytes to pad the buffer to. + * @returns The padded buffer. + */ +const padEnd = (buffer, length = BUFFER_WIDTH) => { + const padding = new Uint8Array(Math.max(length - buffer.length, 0)).fill(0x00); + return (0, utils_1.concatBytes)([buffer, padding]); +}; +exports.padEnd = padEnd; +//# sourceMappingURL=buffer.js.map + +/***/ }), + +/***/ 26365: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__webpack_require__(59194), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 98537: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEncryptionPublicKey = exports.decryptSafely = exports.decrypt = exports.encryptSafely = exports.encrypt = void 0; +const nacl = __importStar(__webpack_require__(88947)); +const naclUtil = __importStar(__webpack_require__(76386)); +const utils_1 = __webpack_require__(54907); +/** + * Encrypt a message. + * + * @param options - The encryption options. + * @param options.publicKey - The public key of the message recipient. + * @param options.data - The message data. + * @param options.version - The type of encryption to use. + * @returns The encrypted data. 
+ */ +function encrypt({ publicKey, data, version, }) { + if ((0, utils_1.isNullish)(publicKey)) { + throw new Error('Missing publicKey parameter'); + } + else if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(version)) { + throw new Error('Missing version parameter'); + } + switch (version) { + case 'x25519-xsalsa20-poly1305': { + if (typeof data !== 'string') { + throw new Error('Message data must be given as a string'); + } + // generate ephemeral keypair + const ephemeralKeyPair = nacl.box.keyPair(); + // assemble encryption parameters - from string to UInt8 + let pubKeyUInt8Array; + try { + pubKeyUInt8Array = naclUtil.decodeBase64(publicKey); + } + catch (err) { + throw new Error('Bad public key'); + } + const msgParamsUInt8Array = naclUtil.decodeUTF8(data); + const nonce = nacl.randomBytes(nacl.box.nonceLength); + // encrypt + const encryptedMessage = nacl.box(msgParamsUInt8Array, nonce, pubKeyUInt8Array, ephemeralKeyPair.secretKey); + // handle encrypted data + const output = { + version: 'x25519-xsalsa20-poly1305', + nonce: naclUtil.encodeBase64(nonce), + ephemPublicKey: naclUtil.encodeBase64(ephemeralKeyPair.publicKey), + ciphertext: naclUtil.encodeBase64(encryptedMessage), + }; + // return encrypted msg data + return output; + } + default: + throw new Error('Encryption type/version not supported'); + } +} +exports.encrypt = encrypt; +/** + * Encrypt a message in a way that obscures the message length. + * + * The message is padded to a multiple of 2048 before being encrypted so that the length of the + * resulting encrypted message can't be used to guess the exact length of the original message. + * + * @param options - The encryption options. + * @param options.publicKey - The public key of the message recipient. + * @param options.data - The message data. + * @param options.version - The type of encryption to use. + * @returns The encrypted data. 
+ */ +function encryptSafely({ publicKey, data, version, }) { + if ((0, utils_1.isNullish)(publicKey)) { + throw new Error('Missing publicKey parameter'); + } + else if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(version)) { + throw new Error('Missing version parameter'); + } + const DEFAULT_PADDING_LENGTH = 2 ** 11; + const NACL_EXTRA_BYTES = 16; + if (typeof data === 'object' && data && 'toJSON' in data) { + // remove toJSON attack vector + // TODO, check all possible children + throw new Error('Cannot encrypt with toJSON property. Please remove toJSON property'); + } + // add padding + const dataWithPadding = { + data, + padding: '', + }; + // calculate padding + const dataLength = Buffer.byteLength(JSON.stringify(dataWithPadding), 'utf-8'); + const modVal = dataLength % DEFAULT_PADDING_LENGTH; + let padLength = 0; + // Only pad if necessary + if (modVal > 0) { + padLength = DEFAULT_PADDING_LENGTH - modVal - NACL_EXTRA_BYTES; // nacl extra bytes + } + dataWithPadding.padding = '0'.repeat(padLength); + const paddedMessage = JSON.stringify(dataWithPadding); + return encrypt({ publicKey, data: paddedMessage, version }); +} +exports.encryptSafely = encryptSafely; +/** + * Decrypt a message. + * + * @param options - The decryption options. + * @param options.encryptedData - The encrypted data. + * @param options.privateKey - The private key to decrypt with. + * @returns The decrypted message. 
+ */ +function decrypt({ encryptedData, privateKey, }) { + if ((0, utils_1.isNullish)(encryptedData)) { + throw new Error('Missing encryptedData parameter'); + } + else if ((0, utils_1.isNullish)(privateKey)) { + throw new Error('Missing privateKey parameter'); + } + switch (encryptedData.version) { + case 'x25519-xsalsa20-poly1305': { + // string to buffer to UInt8Array + const receiverPrivateKeyUint8Array = naclDecodeHex(privateKey); + const receiverEncryptionPrivateKey = nacl.box.keyPair.fromSecretKey(receiverPrivateKeyUint8Array).secretKey; + // assemble decryption parameters + const nonce = naclUtil.decodeBase64(encryptedData.nonce); + const ciphertext = naclUtil.decodeBase64(encryptedData.ciphertext); + const ephemPublicKey = naclUtil.decodeBase64(encryptedData.ephemPublicKey); + // decrypt + const decryptedMessage = nacl.box.open(ciphertext, nonce, ephemPublicKey, receiverEncryptionPrivateKey); + // return decrypted msg data + try { + if (!decryptedMessage) { + throw new Error(); + } + const output = naclUtil.encodeUTF8(decryptedMessage); + // TODO: This is probably extraneous but was kept to minimize changes during refactor + if (!output) { + throw new Error(); + } + return output; + } + catch (err) { + if (err && typeof err.message === 'string' && err.message.length) { + throw new Error(`Decryption failed: ${err.message}`); + } + throw new Error(`Decryption failed.`); + } + } + default: + throw new Error('Encryption type/version not supported.'); + } +} +exports.decrypt = decrypt; +/** + * Decrypt a message that has been encrypted using `encryptSafely`. + * + * @param options - The decryption options. + * @param options.encryptedData - The encrypted data. + * @param options.privateKey - The private key to decrypt with. + * @returns The decrypted message. 
+ */ +function decryptSafely({ encryptedData, privateKey, }) { + if ((0, utils_1.isNullish)(encryptedData)) { + throw new Error('Missing encryptedData parameter'); + } + else if ((0, utils_1.isNullish)(privateKey)) { + throw new Error('Missing privateKey parameter'); + } + const dataWithPadding = JSON.parse(decrypt({ encryptedData, privateKey })); + return dataWithPadding.data; +} +exports.decryptSafely = decryptSafely; +/** + * Get the encryption public key for the given key. + * + * @param privateKey - The private key to generate the encryption public key with. + * @returns The encryption public key. + */ +function getEncryptionPublicKey(privateKey) { + const privateKeyUint8Array = naclDecodeHex(privateKey); + const encryptionPublicKey = nacl.box.keyPair.fromSecretKey(privateKeyUint8Array).publicKey; + return naclUtil.encodeBase64(encryptionPublicKey); +} +exports.getEncryptionPublicKey = getEncryptionPublicKey; +/** + * Convert a hex string to the UInt8Array format used by nacl. + * + * @param msgHex - The string to convert. + * @returns The converted string. + */ +function naclDecodeHex(msgHex) { + const msgBase64 = Buffer.from(msgHex, 'hex').toString('base64'); + return naclUtil.decodeBase64(msgBase64); +} +//# sourceMappingURL=encryption.js.map + +/***/ }), + +/***/ 51594: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.normalize = exports.concatSig = void 0; +__exportStar(__webpack_require__(20252), exports); +__exportStar(__webpack_require__(10169), exports); +__exportStar(__webpack_require__(98537), exports); +var utils_1 = __webpack_require__(54907); +Object.defineProperty(exports, "concatSig", ({ enumerable: true, get: function () { return utils_1.concatSig; } })); +Object.defineProperty(exports, "normalize", ({ enumerable: true, get: function () { return utils_1.normalize; } })); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 20252: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.extractPublicKey = exports.recoverPersonalSignature = exports.personalSign = void 0; +const util_1 = __webpack_require__(68683); +const utils_1 = __webpack_require__(54907); +/** + * Create an Ethereum-specific signature for a message. + * + * This function is equivalent to the `eth_sign` Ethereum JSON-RPC method as specified in EIP-1417, + * as well as the MetaMask's `personal_sign` method. + * + * @param options - The personal sign options. + * @param options.privateKey - The key to sign with. + * @param options.data - The hex data to sign. + * @returns The '0x'-prefixed hex encoded signature. 
+ */ +function personalSign({ privateKey, data, }) { + if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(privateKey)) { + throw new Error('Missing privateKey parameter'); + } + const message = (0, utils_1.legacyToBuffer)(data); + const msgHash = (0, util_1.hashPersonalMessage)(message); + const sig = (0, util_1.ecsign)(msgHash, privateKey); + const serialized = (0, utils_1.concatSig)((0, util_1.toBuffer)(sig.v), sig.r, sig.s); + return serialized; +} +exports.personalSign = personalSign; +/** + * Recover the address of the account used to create the given Ethereum signature. The message + * must have been signed using the `personalSign` function, or an equivalent function. + * + * @param options - The signature recovery options. + * @param options.data - The hex data that was signed. + * @param options.signature - The '0x'-prefixed hex encoded message signature. + * @returns The '0x'-prefixed hex encoded address of the message signer. + */ +function recoverPersonalSignature({ data, signature, }) { + if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(signature)) { + throw new Error('Missing signature parameter'); + } + const publicKey = getPublicKeyFor(data, signature); + const sender = (0, util_1.publicToAddress)(publicKey); + const senderHex = (0, util_1.bufferToHex)(sender); + return senderHex; +} +exports.recoverPersonalSignature = recoverPersonalSignature; +/** + * Recover the public key of the account used to create the given Ethereum signature. The message + * must have been signed using the `personalSign` function, or an equivalent function. + * + * @param options - The public key recovery options. + * @param options.data - The hex data that was signed. + * @param options.signature - The '0x'-prefixed hex encoded message signature. + * @returns The '0x'-prefixed hex encoded public key of the message signer. 
+ */ +function extractPublicKey({ data, signature, }) { + if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(signature)) { + throw new Error('Missing signature parameter'); + } + const publicKey = getPublicKeyFor(data, signature); + return `0x${publicKey.toString('hex')}`; +} +exports.extractPublicKey = extractPublicKey; +/** + * Get the public key for the given signature and message. + * + * @param message - The message that was signed. + * @param signature - The '0x'-prefixed hex encoded message signature. + * @returns The public key of the signer. + */ +function getPublicKeyFor(message, signature) { + const messageHash = (0, util_1.hashPersonalMessage)((0, utils_1.legacyToBuffer)(message)); + return (0, utils_1.recoverPublicKey)(messageHash, signature); +} +//# sourceMappingURL=personal-sign.js.map + +/***/ }), + +/***/ 10169: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.recoverTypedSignature = exports.signTypedData = exports.typedSignatureHash = exports.TypedDataUtils = exports.TYPED_MESSAGE_SCHEMA = exports.SignTypedDataVersion = void 0; +const util_1 = __webpack_require__(68683); +const abi_utils_1 = __webpack_require__(93256); +const parsers_1 = __webpack_require__(46207); +const utils_1 = __webpack_require__(26365); +const utils_2 = __webpack_require__(22049); +const keccak_1 = __webpack_require__(32019); +const utils_3 = __webpack_require__(54907); +/** + * Represents the version of `signTypedData` being used. + * + * V1 is based upon [an early version of + * EIP-712](https://github.com/ethereum/EIPs/pull/712/commits/21abe254fe0452d8583d5b132b1d7be87c0439ca) + * that lacked some later security improvements, and should generally be neglected in favor of + * later versions. + * + * V3 is based on EIP-712, except that arrays and recursive data structures are not supported. 
+ * + * V4 is based on EIP-712, and includes full support of arrays and recursive data structures. + */ +var SignTypedDataVersion; +(function (SignTypedDataVersion) { + SignTypedDataVersion["V1"] = "V1"; + SignTypedDataVersion["V3"] = "V3"; + SignTypedDataVersion["V4"] = "V4"; +})(SignTypedDataVersion = exports.SignTypedDataVersion || (exports.SignTypedDataVersion = {})); +exports.TYPED_MESSAGE_SCHEMA = { + type: 'object', + properties: { + types: { + type: 'object', + additionalProperties: { + type: 'array', + items: { + type: 'object', + properties: { + name: { type: 'string' }, + type: { type: 'string' }, + }, + required: ['name', 'type'], + }, + }, + }, + primaryType: { type: 'string' }, + domain: { type: 'object' }, + message: { type: 'object' }, + }, + required: ['types', 'primaryType', 'domain', 'message'], +}; +/** + * Validate that the given value is a valid version string. + * + * @param version - The version value to validate. + * @param allowedVersions - A list of allowed versions. If omitted, all versions are assumed to be + * allowed. + */ +function validateVersion(version, allowedVersions) { + if (!Object.keys(SignTypedDataVersion).includes(version)) { + throw new Error(`Invalid version: '${version}'`); + } + else if (allowedVersions && !allowedVersions.includes(version)) { + throw new Error(`SignTypedDataVersion not allowed: '${version}'. Allowed versions are: ${allowedVersions.join(', ')}`); + } +} +/** + * Parse a string, number, or bigint value into a `Uint8Array`. + * + * @param type - The type of the value. + * @param value - The value to parse. + * @returns The parsed value. + */ +function parseNumber(type, value) { + (0, utils_2.assert)(value !== null, `Unable to encode value: Invalid number. 
Expected a valid number value, but received "${value}".`); + const bigIntValue = BigInt(value); + const length = (0, parsers_1.getLength)(type); + const maxValue = BigInt(2) ** BigInt(length) - BigInt(1); + // Note that this is not accurate, since the actual maximum value for unsigned + // integers is `2 ^ (length - 1) - 1`, but this is required for backwards + // compatibility with the old implementation. + (0, utils_2.assert)(bigIntValue >= -maxValue && bigIntValue <= maxValue, `Unable to encode value: Number "${value}" is out of range for type "${type}".`); + return bigIntValue; +} +/** + * Parse an address string to a `Uint8Array`. The behaviour of this is quite + * strange, in that it does not parse the address as hexadecimal string, nor as + * UTF-8. It does some weird stuff with the string and char codes, and then + * returns the result as a `Uint8Array`. + * + * This is based on the old `ethereumjs-abi` implementation, which essentially + * calls `new BN(address, 10)` on the address string, the equivalent of calling + * `parseInt(address, 10)` in JavaScript. This is not a valid way to parse an + * address and would result in `NaN` in plain JavaScript, but it is the + * behaviour of the old implementation, and so we must preserve it for backwards + * compatibility. + * + * @param address - The address to parse. + * @returns The parsed address. + */ +function reallyStrangeAddressToBytes(address) { + let addressValue = BigInt(0); + for (let i = 0; i < address.length; i++) { + const character = BigInt(address.charCodeAt(i) - 48); + addressValue *= BigInt(10); + // 'a' + if (character >= 49) { + addressValue += character - BigInt(49) + BigInt(0xa); + // 'A' + } + else if (character >= 17) { + addressValue += character - BigInt(17) + BigInt(0xa); + // '0' - '9' + } + else { + addressValue += character; + } + } + return (0, utils_1.padStart)((0, utils_2.bigIntToBytes)(addressValue), 20); +} +/** + * Encode a single field. 
+ * + * @param types - All type definitions. + * @param name - The name of the field to encode. + * @param type - The type of the field being encoded. + * @param value - The value to encode. + * @param version - The EIP-712 version the encoding should comply with. + * @returns Encoded representation of the field. + */ +function encodeField(types, name, type, +// TODO: constrain type on `value` +value, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + if (types[type] !== undefined) { + return [ + 'bytes32', + // TODO: return Buffer, remove string from return type + version === SignTypedDataVersion.V4 && value == null // eslint-disable-line no-eq-null + ? '0x0000000000000000000000000000000000000000000000000000000000000000' + : (0, util_1.arrToBufArr)((0, keccak_1.keccak256)(encodeData(type, value, types, version))), + ]; + } + // `function` is supported in `@metamask/abi-utils`, but not allowed by + // EIP-712, so we throw an error here. + if (type === 'function') { + throw new Error('Unsupported or invalid type: "function"'); + } + if (value === undefined) { + throw new Error(`missing value for field ${name} of type ${type}`); + } + if (type === 'address') { + if (typeof value === 'number') { + return ['address', (0, utils_1.padStart)((0, utils_2.numberToBytes)(value), 20)]; + } + else if ((0, utils_2.isStrictHexString)(value)) { + return ['address', (0, utils_2.add0x)(value)]; + } + else if (typeof value === 'string') { + return ['address', reallyStrangeAddressToBytes(value).subarray(0, 20)]; + } + } + if (type === 'bool') { + return ['bool', Boolean(value)]; + } + if (type === 'bytes') { + if (typeof value === 'number') { + value = (0, utils_2.numberToBytes)(value); + } + else if ((0, utils_2.isStrictHexString)(value) || value === '0x') { + value = (0, utils_2.hexToBytes)(value); + } + else if (typeof value === 'string') { + value = (0, utils_2.stringToBytes)(value); + } + return ['bytes32', (0, util_1.arrToBufArr)((0, 
keccak_1.keccak256)(value))]; + } + if (type.startsWith('bytes') && type !== 'bytes' && !type.includes('[')) { + if (typeof value === 'number') { + if (value < 0) { + return ['bytes32', new Uint8Array(32)]; + } + return ['bytes32', (0, utils_2.bigIntToBytes)(BigInt(value))]; + } + else if ((0, utils_2.isStrictHexString)(value)) { + return ['bytes32', (0, utils_2.hexToBytes)(value)]; + } + return ['bytes32', value]; + } + if (type.startsWith('int') && !type.includes('[')) { + const bigIntValue = parseNumber(type, value); + if (bigIntValue >= BigInt(0)) { + return ['uint256', bigIntValue]; + } + return ['int256', bigIntValue]; + } + if (type === 'string') { + if (typeof value === 'number') { + value = (0, utils_2.numberToBytes)(value); + } + else { + value = (0, utils_2.stringToBytes)(value !== null && value !== void 0 ? value : ''); + } + return ['bytes32', (0, util_1.arrToBufArr)((0, keccak_1.keccak256)(value))]; + } + if (type.endsWith(']')) { + if (version === SignTypedDataVersion.V3) { + throw new Error('Arrays are unimplemented in encodeData; use V4 extension'); + } + const parsedType = type.slice(0, type.lastIndexOf('[')); + const typeValuePairs = value.map((item) => encodeField(types, name, parsedType, item, version)); + return [ + 'bytes32', + (0, util_1.arrToBufArr)((0, keccak_1.keccak256)((0, abi_utils_1.encode)(typeValuePairs.map(([t]) => t), typeValuePairs.map(([, v]) => v)))), + ]; + } + return [type, value]; +} +/** + * Encodes an object by encoding and concatenating each of its members. + * + * @param primaryType - The root type. + * @param data - The object to encode. + * @param types - Type definitions for all types included in the message. + * @param version - The EIP-712 version the encoding should comply with. + * @returns An encoded representation of an object. 
+ */ +function encodeData(primaryType, data, types, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const encodedTypes = ['bytes32']; + const encodedValues = [ + hashType(primaryType, types), + ]; + for (const field of types[primaryType]) { + if (version === SignTypedDataVersion.V3 && data[field.name] === undefined) { + continue; + } + const [type, value] = encodeField(types, field.name, field.type, data[field.name], version); + encodedTypes.push(type); + encodedValues.push(value); + } + return (0, util_1.arrToBufArr)((0, abi_utils_1.encode)(encodedTypes, encodedValues)); +} +/** + * Encodes the type of an object by encoding a comma delimited list of its members. + * + * @param primaryType - The root type to encode. + * @param types - Type definitions for all types included in the message. + * @returns An encoded representation of the primary type. + */ +function encodeType(primaryType, types) { + let result = ''; + const unsortedDeps = findTypeDependencies(primaryType, types); + unsortedDeps.delete(primaryType); + const deps = [primaryType, ...Array.from(unsortedDeps).sort()]; + for (const type of deps) { + const children = types[type]; + if (!children) { + throw new Error(`No type definition specified: ${type}`); + } + result += `${type}(${types[type] + .map(({ name, type: t }) => `${t} ${name}`) + .join(',')})`; + } + return result; +} +/** + * Finds all types within a type definition object. + * + * @param primaryType - The root type. + * @param types - Type definitions for all types included in the message. + * @param results - The current set of accumulated types. + * @returns The set of all types found in the type definition. 
+ */ +function findTypeDependencies(primaryType, types, results = new Set()) { + if (typeof primaryType !== 'string') { + throw new Error(`Invalid findTypeDependencies input ${JSON.stringify(primaryType)}`); + } + const match = primaryType.match(/^\w*/u); + [primaryType] = match; + if (results.has(primaryType) || types[primaryType] === undefined) { + return results; + } + results.add(primaryType); + for (const field of types[primaryType]) { + findTypeDependencies(field.type, types, results); + } + return results; +} +/** + * Hashes an object. + * + * @param primaryType - The root type. + * @param data - The object to hash. + * @param types - Type definitions for all types included in the message. + * @param version - The EIP-712 version the encoding should comply with. + * @returns The hash of the object. + */ +function hashStruct(primaryType, data, types, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const encoded = encodeData(primaryType, data, types, version); + const hashed = (0, keccak_1.keccak256)(encoded); + const buf = (0, util_1.arrToBufArr)(hashed); + return buf; +} +/** + * Hashes the type of an object. + * + * @param primaryType - The root type to hash. + * @param types - Type definitions for all types included in the message. + * @returns The hash of the object type. + */ +function hashType(primaryType, types) { + const encodedHashType = (0, utils_2.stringToBytes)(encodeType(primaryType, types)); + return (0, util_1.arrToBufArr)((0, keccak_1.keccak256)(encodedHashType)); +} +/** + * Removes properties from a message object that are not defined per EIP-712. + * + * @param data - The typed message object. + * @returns The typed message object with only allowed fields. 
+ */ +function sanitizeData(data) { + const sanitizedData = {}; + for (const key in exports.TYPED_MESSAGE_SCHEMA.properties) { + if (data[key]) { + sanitizedData[key] = data[key]; + } + } + if ('types' in sanitizedData) { + // TODO: Fix types + sanitizedData.types = Object.assign({ EIP712Domain: [] }, sanitizedData.types); + } + return sanitizedData; +} +/** + * Create a EIP-712 Domain Hash. + * This hash is used at the top of the EIP-712 encoding. + * + * @param typedData - The typed message to hash. + * @param version - The EIP-712 version the encoding should comply with. + * @returns The hash of the domain object. + */ +function eip712DomainHash(typedData, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const sanitizedData = sanitizeData(typedData); + const { domain } = sanitizedData; + const domainType = { EIP712Domain: sanitizedData.types.EIP712Domain }; + return hashStruct('EIP712Domain', domain, domainType, version); +} +/** + * Hash a typed message according to EIP-712. The returned message starts with the EIP-712 prefix, + * which is "1901", followed by the hash of the domain separator, then the data (if any). + * The result is hashed again and returned. + * + * This function does not sign the message. The resulting hash must still be signed to create an + * EIP-712 signature. + * + * @param typedData - The typed message to hash. + * @param version - The EIP-712 version the encoding should comply with. + * @returns The hash of the typed message. + */ +function eip712Hash(typedData, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const sanitizedData = sanitizeData(typedData); + const parts = [(0, utils_2.hexToBytes)('1901')]; + parts.push(eip712DomainHash(typedData, version)); + if (sanitizedData.primaryType !== 'EIP712Domain') { + parts.push(hashStruct( + // TODO: Validate that this is a string, so this type cast can be removed. 
+ sanitizedData.primaryType, sanitizedData.message, sanitizedData.types, version)); + } + return (0, util_1.arrToBufArr)((0, keccak_1.keccak256)((0, utils_2.concatBytes)(parts))); +} +/** + * A collection of utility functions used for signing typed data. + */ +exports.TypedDataUtils = { + encodeData, + encodeType, + findTypeDependencies, + hashStruct, + hashType, + sanitizeData, + eip712Hash, + eip712DomainHash, +}; +/** + * Generate the "V1" hash for the provided typed message. + * + * The hash will be generated in accordance with an earlier version of the EIP-712 + * specification. This hash is used in `signTypedData_v1`. + * + * @param typedData - The typed message. + * @returns The '0x'-prefixed hex encoded hash representing the type of the provided message. + */ +function typedSignatureHash(typedData) { + const hashBuffer = _typedSignatureHash(typedData); + return (0, utils_2.bytesToHex)(hashBuffer); +} +exports.typedSignatureHash = typedSignatureHash; +/** + * Normalize a value, so that `@metamask/abi-utils` can handle it. This + * matches the behaviour of the `ethereumjs-abi` library. + * + * @param type - The type of the value to normalize. + * @param value - The value to normalize. + * @returns The normalized value. 
+ */ +function normalizeValue(type, value) { + if ((0, parsers_1.isArrayType)(type) && Array.isArray(value)) { + const [innerType] = (0, parsers_1.getArrayType)(type); + return value.map((item) => normalizeValue(innerType, item)); + } + if (type === 'address') { + if (typeof value === 'number') { + return (0, utils_1.padStart)((0, utils_2.numberToBytes)(value), 20); + } + if ((0, utils_2.isStrictHexString)(value)) { + return (0, utils_1.padStart)((0, utils_2.hexToBytes)(value).subarray(0, 20), 20); + } + if (value instanceof Uint8Array) { + return (0, utils_1.padStart)(value.subarray(0, 20), 20); + } + } + if (type === 'bool') { + return Boolean(value); + } + if (type.startsWith('bytes') && type !== 'bytes') { + const length = (0, parsers_1.getByteLength)(type); + if (typeof value === 'number') { + if (value < 0) { + // `solidityPack(['bytesN'], [-1])` returns `0x00..00`. + return new Uint8Array(); + } + return (0, utils_2.numberToBytes)(value).subarray(0, length); + } + if ((0, utils_2.isStrictHexString)(value)) { + return (0, utils_2.hexToBytes)(value).subarray(0, length); + } + if (value instanceof Uint8Array) { + return value.subarray(0, length); + } + } + if (type.startsWith('uint')) { + if (typeof value === 'number') { + return Math.abs(value); + } + } + if (type.startsWith('int')) { + if (typeof value === 'number') { + const length = (0, parsers_1.getLength)(type); + return BigInt.asIntN(length, BigInt(value)); + } + } + return value; +} +/** + * For some reason `ethereumjs-abi` treats `address` and `address[]` differently + * so we need to normalize `address[]` differently. + * + * @param values - The values to normalize. + * @returns The normalized values. 
+ */ +function normalizeAddresses(values) { + return values.map((value) => { + if (typeof value === 'number') { + return (0, utils_1.padStart)((0, utils_2.numberToBytes)(value), 32); + } + if ((0, utils_2.isStrictHexString)(value)) { + return (0, utils_1.padStart)((0, utils_2.hexToBytes)(value).subarray(0, 32), 32); + } + if (value instanceof Uint8Array) { + return (0, utils_1.padStart)(value.subarray(0, 32), 32); + } + return value; + }); +} +/** + * For some reason `ethereumjs-abi` treats `intN` and `intN[]` differently + * so we need to normalize `intN[]` differently. + * + * @param type - The type of the value to normalize. + * @param values - The values to normalize. + * @returns The normalized values. + */ +function normalizeIntegers(type, values) { + return values.map((value) => { + if (typeof value === 'string' || + typeof value === 'number' || + typeof value === 'bigint') { + const bigIntValue = parseNumber(type, value); + if (bigIntValue >= BigInt(0)) { + return (0, utils_1.padStart)((0, utils_2.bigIntToBytes)(bigIntValue), 32); + } + const length = (0, parsers_1.getLength)(type); + const asIntN = BigInt.asIntN(length, bigIntValue); + return (0, utils_2.signedBigIntToBytes)(asIntN, 32); + } + return value; + }); +} +/** + * Generate the "V1" hash for the provided typed message. + * + * The hash will be generated in accordance with an earlier version of the EIP-712 + * specification. This hash is used in `signTypedData_v1`. + * + * @param typedData - The typed message. + * @returns The hash representing the type of the provided message. + */ +function _typedSignatureHash(typedData) { + const error = new Error('Expect argument to be non-empty array'); + if (typeof typedData !== 'object' || + !('length' in typedData) || + !typedData.length) { + throw error; + } + const normalizedData = typedData.map(({ name, type, value }) => { + // Handle an edge case with `address[]` types. 
+ if (type === 'address[]') { + return { + name, + type: 'bytes32[]', + value: normalizeAddresses(value), + }; + } + // Handle an edge case with `intN[]` types. + if (type.startsWith('int') && (0, parsers_1.isArrayType)(type)) { + const [innerType, length] = (0, parsers_1.getArrayType)(type); + return { + name, + type: `bytes32[${length !== null && length !== void 0 ? length : ''}]`, + value: normalizeIntegers(innerType, value), + }; + } + return { + name, + type, + value: normalizeValue(type, value), + }; + }); + const data = normalizedData.map((e) => { + if (e.type !== 'bytes') { + return e.value; + } + return (0, utils_3.legacyToBuffer)(e.value); + }); + const types = normalizedData.map((e) => { + if (e.type === 'function') { + throw new Error('Unsupported or invalid type: "function"'); + } + return e.type; + }); + const schema = typedData.map((e) => { + if (!e.name) { + throw error; + } + return `${e.type} ${e.name}`; + }); + return (0, util_1.arrToBufArr)((0, keccak_1.keccak256)((0, abi_utils_1.encodePacked)(['bytes32', 'bytes32'], [ + (0, keccak_1.keccak256)((0, abi_utils_1.encodePacked)(['string[]'], [schema], true)), + (0, keccak_1.keccak256)((0, abi_utils_1.encodePacked)(types, data, true)), + ]))); +} +/** + * Sign typed data according to EIP-712. The signing differs based upon the `version`. + * + * V1 is based upon [an early version of + * EIP-712](https://github.com/ethereum/EIPs/pull/712/commits/21abe254fe0452d8583d5b132b1d7be87c0439ca) + * that lacked some later security improvements, and should generally be neglected in favor of + * later versions. + * + * V3 is based on [EIP-712](https://eips.ethereum.org/EIPS/eip-712), except that arrays and + * recursive data structures are not supported. + * + * V4 is based on [EIP-712](https://eips.ethereum.org/EIPS/eip-712), and includes full support of + * arrays and recursive data structures. + * + * @param options - The signing options. + * @param options.privateKey - The private key to sign with. 
+ * @param options.data - The typed data to sign. + * @param options.version - The signing version to use. + * @returns The '0x'-prefixed hex encoded signature. + */ +function signTypedData({ privateKey, data, version, }) { + validateVersion(version); + if ((0, utils_3.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_3.isNullish)(privateKey)) { + throw new Error('Missing private key parameter'); + } + const messageHash = version === SignTypedDataVersion.V1 + ? _typedSignatureHash(data) + : exports.TypedDataUtils.eip712Hash(data, version); + const sig = (0, util_1.ecsign)(messageHash, privateKey); + return (0, utils_3.concatSig)((0, util_1.arrToBufArr)((0, utils_2.bigIntToBytes)(sig.v)), sig.r, sig.s); +} +exports.signTypedData = signTypedData; +/** + * Recover the address of the account that created the given EIP-712 + * signature. The version provided must match the version used to + * create the signature. + * + * @param options - The signature recovery options. + * @param options.data - The typed data that was signed. + * @param options.signature - The '0x-prefixed hex encoded message signature. + * @param options.version - The signing version to use. + * @returns The '0x'-prefixed hex address of the signer. + */ +function recoverTypedSignature({ data, signature, version, }) { + validateVersion(version); + if ((0, utils_3.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_3.isNullish)(signature)) { + throw new Error('Missing signature parameter'); + } + const messageHash = version === SignTypedDataVersion.V1 + ? 
_typedSignatureHash(data) + : exports.TypedDataUtils.eip712Hash(data, version); + const publicKey = (0, utils_3.recoverPublicKey)(messageHash, signature); + const sender = (0, util_1.publicToAddress)(publicKey); + return (0, utils_2.bytesToHex)(sender); +} +exports.recoverTypedSignature = recoverTypedSignature; +//# sourceMappingURL=sign-typed-data.js.map + +/***/ }), + +/***/ 54907: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.normalize = exports.recoverPublicKey = exports.concatSig = exports.legacyToBuffer = exports.isNullish = exports.padWithZeroes = void 0; +const util_1 = __webpack_require__(68683); +const utils_1 = __webpack_require__(22049); +/** + * Pads the front of the given hex string with zeroes until it reaches the + * target length. If the input string is already longer than or equal to the + * target length, it is returned unmodified. + * + * If the input string is "0x"-prefixed or not a hex string, an error will be + * thrown. + * + * @param hexString - The hexadecimal string to pad with zeroes. + * @param targetLength - The target length of the hexadecimal string. + * @returns The input string front-padded with zeroes, or the original string + * if it was already greater than or equal to to the target length. + */ +function padWithZeroes(hexString, targetLength) { + if (hexString !== '' && !/^[a-f0-9]+$/iu.test(hexString)) { + throw new Error(`Expected an unprefixed hex string. Received: ${hexString}`); + } + if (targetLength < 0) { + throw new Error(`Expected a non-negative integer target length. Received: ${targetLength}`); + } + return String.prototype.padStart.call(hexString, targetLength, '0'); +} +exports.padWithZeroes = padWithZeroes; +/** + * Returns `true` if the given value is nullish. + * + * @param value - The value being checked. 
+ * @returns Whether the value is nullish. + */ +function isNullish(value) { + return value === null || value === undefined; +} +exports.isNullish = isNullish; +/** + * Convert a value to a Buffer. This function should be equivalent to the `toBuffer` function in + * `ethereumjs-util@5.2.1`. + * + * @param value - The value to convert to a Buffer. + * @returns The given value as a Buffer. + */ +function legacyToBuffer(value) { + return typeof value === 'string' && !(0, util_1.isHexString)(value) + ? Buffer.from(value) + : (0, util_1.toBuffer)(value); +} +exports.legacyToBuffer = legacyToBuffer; +/** + * Concatenate an extended ECDSA signature into a single '0x'-prefixed hex string. + * + * @param v - The 'v' portion of the signature. + * @param r - The 'r' portion of the signature. + * @param s - The 's' portion of the signature. + * @returns The concatenated ECDSA signature as a '0x'-prefixed string. + */ +function concatSig(v, r, s) { + const rSig = (0, util_1.fromSigned)(r); + const sSig = (0, util_1.fromSigned)(s); + const vSig = (0, util_1.bufferToInt)(v); + const rStr = padWithZeroes((0, util_1.toUnsigned)(rSig).toString('hex'), 64); + const sStr = padWithZeroes((0, util_1.toUnsigned)(sSig).toString('hex'), 64); + const vStr = (0, utils_1.remove0x)((0, utils_1.numberToHex)(vSig)); + return (0, utils_1.add0x)(rStr.concat(sStr, vStr)); +} +exports.concatSig = concatSig; +/** + * Recover the public key from the given signature and message hash. + * + * @param messageHash - The hash of the signed message. + * @param signature - The signature. + * @returns The public key of the signer. + */ +function recoverPublicKey(messageHash, signature) { + const sigParams = (0, util_1.fromRpcSig)(signature); + return (0, util_1.ecrecover)(messageHash, sigParams.v, sigParams.r, sigParams.s); +} +exports.recoverPublicKey = recoverPublicKey; +/** + * Normalize the input to a lower-cased '0x'-prefixed hex string. + * + * @param input - The value to normalize. 
+ * @returns The normalized value. + */ +function normalize(input) { + if (isNullish(input)) { + return undefined; + } + if (typeof input === 'number') { + if (input < 0) { + return '0x'; + } + const buffer = (0, utils_1.numberToBytes)(input); + input = (0, utils_1.bytesToHex)(buffer); + } + if (typeof input !== 'string') { + let msg = 'eth-sig-util.normalize() requires hex string or integer input.'; + msg += ` received ${typeof input}: ${input}`; + throw new Error(msg); + } + return (0, utils_1.add0x)(input.toLowerCase()); +} +exports.normalize = normalize; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 61275: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }// src/logging.ts +var _debug = __webpack_require__(17833); var _debug2 = _interopRequireDefault(_debug); +var globalLogger = _debug2.default.call(void 0, "metamask"); +function createProjectLogger(projectName) { + return globalLogger.extend(projectName); +} +function createModuleLogger(projectLogger, moduleName) { + return projectLogger.extend(moduleName); +} + + + + +exports.createProjectLogger = createProjectLogger; exports.createModuleLogger = createModuleLogger; +//# sourceMappingURL=chunk-2LBGT4GH.js.map + +/***/ }), + +/***/ 85244: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true}));var __accessCheck = (obj, member, msg) => { + if (!member.has(obj)) + throw TypeError("Cannot " + msg); +}; +var __privateGet = (obj, member, getter) => { + __accessCheck(obj, member, "read from private field"); + return getter ? getter.call(obj) : member.get(obj); +}; +var __privateAdd = (obj, member, value) => { + if (member.has(obj)) + throw TypeError("Cannot add the same private member more than once"); + member instanceof WeakSet ? 
member.add(obj) : member.set(obj, value); +}; +var __privateSet = (obj, member, value, setter) => { + __accessCheck(obj, member, "write to private field"); + setter ? setter.call(obj, value) : member.set(obj, value); + return value; +}; + + + + + +exports.__privateGet = __privateGet; exports.__privateAdd = __privateAdd; exports.__privateSet = __privateSet; +//# sourceMappingURL=chunk-3W5G4CYI.js.map + +/***/ }), + +/***/ 73631: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + +// src/versions.ts + + + + + + +var _semver = __webpack_require__(99589); +var _superstruct = __webpack_require__(2150); +var VersionStruct = _superstruct.refine.call(void 0, + _superstruct.string.call(void 0, ), + "Version", + (value) => { + if (_semver.valid.call(void 0, value) === null) { + return `Expected SemVer version, got "${value}"`; + } + return true; + } +); +var VersionRangeStruct = _superstruct.refine.call(void 0, + _superstruct.string.call(void 0, ), + "Version range", + (value) => { + if (_semver.validRange.call(void 0, value) === null) { + return `Expected SemVer range, got "${value}"`; + } + return true; + } +); +function isValidSemVerVersion(version) { + return _superstruct.is.call(void 0, version, VersionStruct); +} +function isValidSemVerRange(versionRange) { + return _superstruct.is.call(void 0, versionRange, VersionRangeStruct); +} +function assertIsSemVerVersion(version) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, version, VersionStruct); +} +function assertIsSemVerRange(range) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, range, VersionRangeStruct); +} +function gtVersion(version1, version2) { + return _semver.gt.call(void 0, version1, version2); +} +function gtRange(version, range) { + return _semver.gtr.call(void 0, version, range); +} +function satisfiesVersionRange(version, versionRange) { + return 
_semver.satisfies.call(void 0, version, versionRange, { + includePrerelease: true + }); +} + + + + + + + + + + + +exports.VersionStruct = VersionStruct; exports.VersionRangeStruct = VersionRangeStruct; exports.isValidSemVerVersion = isValidSemVerVersion; exports.isValidSemVerRange = isValidSemVerRange; exports.assertIsSemVerVersion = assertIsSemVerVersion; exports.assertIsSemVerRange = assertIsSemVerRange; exports.gtVersion = gtVersion; exports.gtRange = gtRange; exports.satisfiesVersionRange = satisfiesVersionRange; +//# sourceMappingURL=chunk-4D6XQBHA.js.map + +/***/ }), + +/***/ 69116: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true}));// src/time.ts +var Duration = /* @__PURE__ */ ((Duration2) => { + Duration2[Duration2["Millisecond"] = 1] = "Millisecond"; + Duration2[Duration2["Second"] = 1e3] = "Second"; + Duration2[Duration2["Minute"] = 6e4] = "Minute"; + Duration2[Duration2["Hour"] = 36e5] = "Hour"; + Duration2[Duration2["Day"] = 864e5] = "Day"; + Duration2[Duration2["Week"] = 6048e5] = "Week"; + Duration2[Duration2["Year"] = 31536e6] = "Year"; + return Duration2; +})(Duration || {}); +var isNonNegativeInteger = (number) => Number.isInteger(number) && number >= 0; +var assertIsNonNegativeInteger = (number, name) => { + if (!isNonNegativeInteger(number)) { + throw new Error( + `"${name}" must be a non-negative integer. 
Received: "${number}".` + ); + } +}; +function inMilliseconds(count, duration) { + assertIsNonNegativeInteger(count, "count"); + return count * duration; +} +function timeSince(timestamp) { + assertIsNonNegativeInteger(timestamp, "timestamp"); + return Date.now() - timestamp; +} + + + + + +exports.Duration = Duration; exports.inMilliseconds = inMilliseconds; exports.timeSince = timeSince; +//# sourceMappingURL=chunk-4RMX5YWE.js.map + +/***/ }), + +/***/ 87982: +/***/ (() => { + +"use strict"; +//# sourceMappingURL=chunk-5AVWINSB.js.map + +/***/ }), + +/***/ 21848: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + +// src/base64.ts +var _superstruct = __webpack_require__(2150); +var base64 = (struct, options = {}) => { + const paddingRequired = _nullishCoalesce(options.paddingRequired, () => ( false)); + const characterSet = _nullishCoalesce(options.characterSet, () => ( "base64")); + let letters; + if (characterSet === "base64") { + letters = String.raw`[A-Za-z0-9+\/]`; + } else { + _chunk6ZDHSOUVjs.assert.call(void 0, characterSet === "base64url"); + letters = String.raw`[-_A-Za-z0-9]`; + } + let re; + if (paddingRequired) { + re = new RegExp( + `^(?:${letters}{4})*(?:${letters}{3}=|${letters}{2}==)?$`, + "u" + ); + } else { + re = new RegExp( + `^(?:${letters}{4})*(?:${letters}{2,3}|${letters}{3}=|${letters}{2}==)?$`, + "u" + ); + } + return _superstruct.pattern.call(void 0, struct, re); +}; + + + +exports.base64 = base64; +//# sourceMappingURL=chunk-6NZW4WK4.js.map + +/***/ }), + +/***/ 40932: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); function _optionalChain(ops) { let lastAccessLHS = undefined; let value 
= ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } + +var _chunkIZC266HSjs = __webpack_require__(1486); + +// src/assert.ts +var _superstruct = __webpack_require__(2150); +function isConstructable(fn) { + return Boolean(typeof _optionalChain([fn, 'optionalAccess', _ => _.prototype, 'optionalAccess', _2 => _2.constructor, 'optionalAccess', _3 => _3.name]) === "string"); +} +function getErrorMessageWithoutTrailingPeriod(error) { + return _chunkIZC266HSjs.getErrorMessage.call(void 0, error).replace(/\.$/u, ""); +} +function getError(ErrorWrapper, message) { + if (isConstructable(ErrorWrapper)) { + return new ErrorWrapper({ + message + }); + } + return ErrorWrapper({ + message + }); +} +var AssertionError = class extends Error { + constructor(options) { + super(options.message); + this.code = "ERR_ASSERTION"; + } +}; +function assert(value, message = "Assertion failed.", ErrorWrapper = AssertionError) { + if (!value) { + if (message instanceof Error) { + throw message; + } + throw getError(ErrorWrapper, message); + } +} +function assertStruct(value, struct, errorPrefix = "Assertion failed", ErrorWrapper = AssertionError) { + try { + _superstruct.assert.call(void 0, value, struct); + } catch (error) { + throw getError( + ErrorWrapper, + `${errorPrefix}: ${getErrorMessageWithoutTrailingPeriod(error)}.` + ); + } +} +function assertExhaustive(_object) { + throw new Error( + "Invalid branch reached. Should be detected during compilation." 
+ ); +} + + + + + + +exports.AssertionError = AssertionError; exports.assert = assert; exports.assertStruct = assertStruct; exports.assertExhaustive = assertExhaustive; +//# sourceMappingURL=chunk-6ZDHSOUV.js.map + +/***/ }), + +/***/ 39705: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true}));// src/promise.ts +function createDeferredPromise({ + suppressUnhandledRejection = false +} = {}) { + let resolve; + let reject; + const promise = new Promise( + (innerResolve, innerReject) => { + resolve = innerResolve; + reject = innerReject; + } + ); + if (suppressUnhandledRejection) { + promise.catch((_error) => { + }); + } + return { promise, resolve, reject }; +} + + + +exports.createDeferredPromise = createDeferredPromise; +//# sourceMappingURL=chunk-C6HGFEYL.js.map + +/***/ }), + +/***/ 1203: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + + + +var _chunkQEPVHEP7js = __webpack_require__(75363); + + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + +// src/coercers.ts + + + + + + + + + +var _superstruct = __webpack_require__(2150); +var NumberLikeStruct = _superstruct.union.call(void 0, [_superstruct.number.call(void 0, ), _superstruct.bigint.call(void 0, ), _superstruct.string.call(void 0, ), _chunkQEPVHEP7js.StrictHexStruct]); +var NumberCoercer = _superstruct.coerce.call(void 0, _superstruct.number.call(void 0, ), NumberLikeStruct, Number); +var BigIntCoercer = _superstruct.coerce.call(void 0, _superstruct.bigint.call(void 0, ), NumberLikeStruct, BigInt); +var BytesLikeStruct = _superstruct.union.call(void 0, [_chunkQEPVHEP7js.StrictHexStruct, _superstruct.instance.call(void 0, Uint8Array)]); +var BytesCoercer = _superstruct.coerce.call(void 0, + _superstruct.instance.call(void 0, Uint8Array), + _superstruct.union.call(void 0, [_chunkQEPVHEP7js.StrictHexStruct]), + 
_chunkQEPVHEP7js.hexToBytes +); +var HexCoercer = _superstruct.coerce.call(void 0, _chunkQEPVHEP7js.StrictHexStruct, _superstruct.instance.call(void 0, Uint8Array), _chunkQEPVHEP7js.bytesToHex); +function createNumber(value) { + try { + const result = _superstruct.create.call(void 0, value, NumberCoercer); + _chunk6ZDHSOUVjs.assert.call(void 0, + Number.isFinite(result), + `Expected a number-like value, got "${value}".` + ); + return result; + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error(`Expected a number-like value, got "${value}".`); + } + throw error; + } +} +function createBigInt(value) { + try { + return _superstruct.create.call(void 0, value, BigIntCoercer); + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error( + `Expected a number-like value, got "${String(error.value)}".` + ); + } + throw error; + } +} +function createBytes(value) { + if (typeof value === "string" && value.toLowerCase() === "0x") { + return new Uint8Array(); + } + try { + return _superstruct.create.call(void 0, value, BytesCoercer); + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error( + `Expected a bytes-like value, got "${String(error.value)}".` + ); + } + throw error; + } +} +function createHex(value) { + if (value instanceof Uint8Array && value.length === 0 || typeof value === "string" && value.toLowerCase() === "0x") { + return "0x"; + } + try { + return _superstruct.create.call(void 0, value, HexCoercer); + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error( + `Expected a bytes-like value, got "${String(error.value)}".` + ); + } + throw error; + } +} + + + + + + +exports.createNumber = createNumber; exports.createBigInt = createBigInt; exports.createBytes = createBytes; exports.createHex = createHex; +//# sourceMappingURL=chunk-DHVKFDHQ.js.map + +/***/ }), + +/***/ 1508: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => 
{ + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + +var _chunk6NZW4WK4js = __webpack_require__(21848); + +// src/checksum.ts +var _superstruct = __webpack_require__(2150); +var ChecksumStruct = _superstruct.size.call(void 0, + _chunk6NZW4WK4js.base64.call(void 0, _superstruct.string.call(void 0, ), { paddingRequired: true }), + 44, + 44 +); + + + +exports.ChecksumStruct = ChecksumStruct; +//# sourceMappingURL=chunk-E4C7EW4R.js.map + +/***/ }), + +/***/ 51423: +/***/ (() => { + +"use strict"; +//# sourceMappingURL=chunk-EQMZL4XU.js.map + +/***/ }), + +/***/ 1486: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + + +var _chunkQVEKZRZ2js = __webpack_require__(96526); + +// src/errors.ts +var _ponycause = __webpack_require__(71843); +function isError(error) { + return error instanceof Error || _chunkQVEKZRZ2js.isObject.call(void 0, error) && error.constructor.name === "Error"; +} +function isErrorWithCode(error) { + return typeof error === "object" && error !== null && "code" in error; +} +function isErrorWithMessage(error) { + return typeof error === "object" && error !== null && "message" in error; +} +function isErrorWithStack(error) { + return typeof error === "object" && error !== null && "stack" in error; +} +function getErrorMessage(error) { + if (isErrorWithMessage(error) && typeof error.message === "string") { + return error.message; + } + if (_chunkQVEKZRZ2js.isNullOrUndefined.call(void 0, error)) { + return ""; + } + return String(error); +} +function wrapError(originalError, message) { + if (isError(originalError)) { + let error; + if (Error.length === 2) { + error = new Error(message, { cause: originalError }); + } else { + error = new (0, _ponycause.ErrorWithCause)(message, { cause: originalError }); + } + if (isErrorWithCode(originalError)) { + error.code = originalError.code; + } + return error; + } + if (message.length 
> 0) { + return new Error(`${String(originalError)}: ${message}`); + } + return new Error(String(originalError)); +} + + + + + + + +exports.isErrorWithCode = isErrorWithCode; exports.isErrorWithMessage = isErrorWithMessage; exports.isErrorWithStack = isErrorWithStack; exports.getErrorMessage = getErrorMessage; exports.wrapError = wrapError; +//# sourceMappingURL=chunk-IZC266HS.js.map + +/***/ }), + +/***/ 58383: +/***/ (() => { + +"use strict"; +//# sourceMappingURL=chunk-LC2CRSWD.js.map + +/***/ }), + +/***/ 87427: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + + +var _chunkQVEKZRZ2js = __webpack_require__(96526); + +// src/json.ts + + + + + + + + + + + + + + + + + + + + +var _superstruct = __webpack_require__(2150); +var object = (schema) => ( + // The type is slightly different from a regular object struct, because we + // want to make properties with `undefined` in their type optional, but not + // `undefined` itself. This means that we need a type cast. 
+ _superstruct.object.call(void 0, schema) +); +function hasOptional({ path, branch }) { + const field = path[path.length - 1]; + return _chunkQVEKZRZ2js.hasProperty.call(void 0, branch[branch.length - 2], field); +} +function exactOptional(struct) { + return new (0, _superstruct.Struct)({ + ...struct, + type: `optional ${struct.type}`, + validator: (value, context) => !hasOptional(context) || struct.validator(value, context), + refiner: (value, context) => !hasOptional(context) || struct.refiner(value, context) + }); +} +var finiteNumber = () => _superstruct.define.call(void 0, "finite number", (value) => { + return _superstruct.is.call(void 0, value, _superstruct.number.call(void 0, )) && Number.isFinite(value); +}); +var UnsafeJsonStruct = _superstruct.union.call(void 0, [ + _superstruct.literal.call(void 0, null), + _superstruct.boolean.call(void 0, ), + finiteNumber(), + _superstruct.string.call(void 0, ), + _superstruct.array.call(void 0, _superstruct.lazy.call(void 0, () => UnsafeJsonStruct)), + _superstruct.record.call(void 0, + _superstruct.string.call(void 0, ), + _superstruct.lazy.call(void 0, () => UnsafeJsonStruct) + ) +]); +var JsonStruct = _superstruct.coerce.call(void 0, UnsafeJsonStruct, _superstruct.any.call(void 0, ), (value) => { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, value, UnsafeJsonStruct); + return JSON.parse( + JSON.stringify(value, (propKey, propValue) => { + if (propKey === "__proto__" || propKey === "constructor") { + return void 0; + } + return propValue; + }) + ); +}); +function isValidJson(value) { + try { + getSafeJson(value); + return true; + } catch (e) { + return false; + } +} +function getSafeJson(value) { + return _superstruct.create.call(void 0, value, JsonStruct); +} +function getJsonSize(value) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, value, JsonStruct, "Invalid JSON value"); + const json = JSON.stringify(value); + return new TextEncoder().encode(json).byteLength; +} +var jsonrpc2 = "2.0"; +var 
JsonRpcVersionStruct = _superstruct.literal.call(void 0, jsonrpc2); +var JsonRpcIdStruct = _superstruct.nullable.call(void 0, _superstruct.union.call(void 0, [_superstruct.number.call(void 0, ), _superstruct.string.call(void 0, )])); +var JsonRpcErrorStruct = object({ + code: _superstruct.integer.call(void 0, ), + message: _superstruct.string.call(void 0, ), + data: exactOptional(JsonStruct), + stack: exactOptional(_superstruct.string.call(void 0, )) +}); +var JsonRpcParamsStruct = _superstruct.union.call(void 0, [_superstruct.record.call(void 0, _superstruct.string.call(void 0, ), JsonStruct), _superstruct.array.call(void 0, JsonStruct)]); +var JsonRpcRequestStruct = object({ + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + method: _superstruct.string.call(void 0, ), + params: exactOptional(JsonRpcParamsStruct) +}); +var JsonRpcNotificationStruct = object({ + jsonrpc: JsonRpcVersionStruct, + method: _superstruct.string.call(void 0, ), + params: exactOptional(JsonRpcParamsStruct) +}); +function isJsonRpcNotification(value) { + return _superstruct.is.call(void 0, value, JsonRpcNotificationStruct); +} +function assertIsJsonRpcNotification(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcNotificationStruct, + "Invalid JSON-RPC notification", + ErrorWrapper + ); +} +function isJsonRpcRequest(value) { + return _superstruct.is.call(void 0, value, JsonRpcRequestStruct); +} +function assertIsJsonRpcRequest(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcRequestStruct, + "Invalid JSON-RPC request", + ErrorWrapper + ); +} +var PendingJsonRpcResponseStruct = _superstruct.object.call(void 0, { + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + result: _superstruct.optional.call(void 0, _superstruct.unknown.call(void 0, )), + error: _superstruct.optional.call(void 0, JsonRpcErrorStruct) +}); +var JsonRpcSuccessStruct = object({ + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + 
result: JsonStruct +}); +var JsonRpcFailureStruct = object({ + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + error: JsonRpcErrorStruct +}); +var JsonRpcResponseStruct = _superstruct.union.call(void 0, [ + JsonRpcSuccessStruct, + JsonRpcFailureStruct +]); +function isPendingJsonRpcResponse(response) { + return _superstruct.is.call(void 0, response, PendingJsonRpcResponseStruct); +} +function assertIsPendingJsonRpcResponse(response, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + response, + PendingJsonRpcResponseStruct, + "Invalid pending JSON-RPC response", + ErrorWrapper + ); +} +function isJsonRpcResponse(response) { + return _superstruct.is.call(void 0, response, JsonRpcResponseStruct); +} +function assertIsJsonRpcResponse(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcResponseStruct, + "Invalid JSON-RPC response", + ErrorWrapper + ); +} +function isJsonRpcSuccess(value) { + return _superstruct.is.call(void 0, value, JsonRpcSuccessStruct); +} +function assertIsJsonRpcSuccess(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcSuccessStruct, + "Invalid JSON-RPC success response", + ErrorWrapper + ); +} +function isJsonRpcFailure(value) { + return _superstruct.is.call(void 0, value, JsonRpcFailureStruct); +} +function assertIsJsonRpcFailure(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcFailureStruct, + "Invalid JSON-RPC failure response", + ErrorWrapper + ); +} +function isJsonRpcError(value) { + return _superstruct.is.call(void 0, value, JsonRpcErrorStruct); +} +function assertIsJsonRpcError(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcErrorStruct, + "Invalid JSON-RPC error", + ErrorWrapper + ); +} +function getJsonRpcIdValidator(options) { + const { permitEmptyString, permitFractions, permitNull } = { + permitEmptyString: true, + permitFractions: false, + permitNull: true, + ...options 
+ }; + const isValidJsonRpcId = (id) => { + return Boolean( + typeof id === "number" && (permitFractions || Number.isInteger(id)) || typeof id === "string" && (permitEmptyString || id.length > 0) || permitNull && id === null + ); + }; + return isValidJsonRpcId; +} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +exports.object = object; exports.exactOptional = exactOptional; exports.UnsafeJsonStruct = UnsafeJsonStruct; exports.JsonStruct = JsonStruct; exports.isValidJson = isValidJson; exports.getSafeJson = getSafeJson; exports.getJsonSize = getJsonSize; exports.jsonrpc2 = jsonrpc2; exports.JsonRpcVersionStruct = JsonRpcVersionStruct; exports.JsonRpcIdStruct = JsonRpcIdStruct; exports.JsonRpcErrorStruct = JsonRpcErrorStruct; exports.JsonRpcParamsStruct = JsonRpcParamsStruct; exports.JsonRpcRequestStruct = JsonRpcRequestStruct; exports.JsonRpcNotificationStruct = JsonRpcNotificationStruct; exports.isJsonRpcNotification = isJsonRpcNotification; exports.assertIsJsonRpcNotification = assertIsJsonRpcNotification; exports.isJsonRpcRequest = isJsonRpcRequest; exports.assertIsJsonRpcRequest = assertIsJsonRpcRequest; exports.PendingJsonRpcResponseStruct = PendingJsonRpcResponseStruct; exports.JsonRpcSuccessStruct = JsonRpcSuccessStruct; exports.JsonRpcFailureStruct = JsonRpcFailureStruct; exports.JsonRpcResponseStruct = JsonRpcResponseStruct; exports.isPendingJsonRpcResponse = isPendingJsonRpcResponse; exports.assertIsPendingJsonRpcResponse = assertIsPendingJsonRpcResponse; exports.isJsonRpcResponse = isJsonRpcResponse; exports.assertIsJsonRpcResponse = assertIsJsonRpcResponse; exports.isJsonRpcSuccess = isJsonRpcSuccess; exports.assertIsJsonRpcSuccess = assertIsJsonRpcSuccess; exports.isJsonRpcFailure = isJsonRpcFailure; exports.assertIsJsonRpcFailure = assertIsJsonRpcFailure; exports.isJsonRpcError = isJsonRpcError; exports.assertIsJsonRpcError = assertIsJsonRpcError; exports.getJsonRpcIdValidator = getJsonRpcIdValidator; +//# 
sourceMappingURL=chunk-OLLG4H35.js.map + +/***/ }), + +/***/ 75363: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +Object.defineProperty(exports, "__esModule", ({value: true})); function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + +// src/hex.ts +var _sha3 = __webpack_require__(2214); +var _superstruct = __webpack_require__(2150); + +// src/bytes.ts +var _base = __webpack_require__(63203); +var HEX_MINIMUM_NUMBER_CHARACTER = 48; +var HEX_MAXIMUM_NUMBER_CHARACTER = 58; +var HEX_CHARACTER_OFFSET = 87; +function getPrecomputedHexValuesBuilder() { + const lookupTable = []; + return () => { + if (lookupTable.length === 0) { + for (let i = 0; i < 256; i++) { + lookupTable.push(i.toString(16).padStart(2, "0")); + } + } + return lookupTable; + }; +} +var getPrecomputedHexValues = getPrecomputedHexValuesBuilder(); +function isBytes(value) { + return value instanceof Uint8Array; +} +function assertIsBytes(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, isBytes(value), "Value must be a Uint8Array."); +} +function bytesToHex(bytes) { + assertIsBytes(bytes); + if (bytes.length === 0) { + return "0x"; + } + const lookupTable = getPrecomputedHexValues(); + const hexadecimal = new Array(bytes.length); + for (let i = 0; i < bytes.length; i++) { + hexadecimal[i] = lookupTable[bytes[i]]; + } + return add0x(hexadecimal.join("")); +} +function 
bytesToBigInt(bytes) { + assertIsBytes(bytes); + const hexadecimal = bytesToHex(bytes); + return BigInt(hexadecimal); +} +function bytesToSignedBigInt(bytes) { + assertIsBytes(bytes); + let value = BigInt(0); + for (const byte of bytes) { + value = (value << BigInt(8)) + BigInt(byte); + } + return BigInt.asIntN(bytes.length * 8, value); +} +function bytesToNumber(bytes) { + assertIsBytes(bytes); + const bigint = bytesToBigInt(bytes); + _chunk6ZDHSOUVjs.assert.call(void 0, + bigint <= BigInt(Number.MAX_SAFE_INTEGER), + "Number is not a safe integer. Use `bytesToBigInt` instead." + ); + return Number(bigint); +} +function bytesToString(bytes) { + assertIsBytes(bytes); + return new TextDecoder().decode(bytes); +} +function bytesToBase64(bytes) { + assertIsBytes(bytes); + return _base.base64.encode(bytes); +} +function hexToBytes(value) { + if (_optionalChain([value, 'optionalAccess', _ => _.toLowerCase, 'optionalCall', _2 => _2()]) === "0x") { + return new Uint8Array(); + } + assertIsHexString(value); + const strippedValue = remove0x(value).toLowerCase(); + const normalizedValue = strippedValue.length % 2 === 0 ? strippedValue : `0${strippedValue}`; + const bytes = new Uint8Array(normalizedValue.length / 2); + for (let i = 0; i < bytes.length; i++) { + const c1 = normalizedValue.charCodeAt(i * 2); + const c2 = normalizedValue.charCodeAt(i * 2 + 1); + const n1 = c1 - (c1 < HEX_MAXIMUM_NUMBER_CHARACTER ? HEX_MINIMUM_NUMBER_CHARACTER : HEX_CHARACTER_OFFSET); + const n2 = c2 - (c2 < HEX_MAXIMUM_NUMBER_CHARACTER ? 
HEX_MINIMUM_NUMBER_CHARACTER : HEX_CHARACTER_OFFSET); + bytes[i] = n1 * 16 + n2; + } + return bytes; +} +function bigIntToBytes(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "bigint", "Value must be a bigint."); + _chunk6ZDHSOUVjs.assert.call(void 0, value >= BigInt(0), "Value must be a non-negative bigint."); + const hexadecimal = value.toString(16); + return hexToBytes(hexadecimal); +} +function bigIntFits(value, bytes) { + _chunk6ZDHSOUVjs.assert.call(void 0, bytes > 0); + const mask = value >> BigInt(31); + return !((~value & mask) + (value & ~mask) >> BigInt(bytes * 8 + ~0)); +} +function signedBigIntToBytes(value, byteLength) { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "bigint", "Value must be a bigint."); + _chunk6ZDHSOUVjs.assert.call(void 0, typeof byteLength === "number", "Byte length must be a number."); + _chunk6ZDHSOUVjs.assert.call(void 0, byteLength > 0, "Byte length must be greater than 0."); + _chunk6ZDHSOUVjs.assert.call(void 0, + bigIntFits(value, byteLength), + "Byte length is too small to represent the given value." + ); + let numberValue = value; + const bytes = new Uint8Array(byteLength); + for (let i = 0; i < bytes.length; i++) { + bytes[i] = Number(BigInt.asUintN(8, numberValue)); + numberValue >>= BigInt(8); + } + return bytes.reverse(); +} +function numberToBytes(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "number", "Value must be a number."); + _chunk6ZDHSOUVjs.assert.call(void 0, value >= 0, "Value must be a non-negative number."); + _chunk6ZDHSOUVjs.assert.call(void 0, + Number.isSafeInteger(value), + "Value is not a safe integer. Use `bigIntToBytes` instead." 
+ ); + const hexadecimal = value.toString(16); + return hexToBytes(hexadecimal); +} +function stringToBytes(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "string", "Value must be a string."); + return new TextEncoder().encode(value); +} +function base64ToBytes(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "string", "Value must be a string."); + return _base.base64.decode(value); +} +function valueToBytes(value) { + if (typeof value === "bigint") { + return bigIntToBytes(value); + } + if (typeof value === "number") { + return numberToBytes(value); + } + if (typeof value === "string") { + if (value.startsWith("0x")) { + return hexToBytes(value); + } + return stringToBytes(value); + } + if (isBytes(value)) { + return value; + } + throw new TypeError(`Unsupported value type: "${typeof value}".`); +} +function concatBytes(values) { + const normalizedValues = new Array(values.length); + let byteLength = 0; + for (let i = 0; i < values.length; i++) { + const value = valueToBytes(values[i]); + normalizedValues[i] = value; + byteLength += value.length; + } + const bytes = new Uint8Array(byteLength); + for (let i = 0, offset = 0; i < normalizedValues.length; i++) { + bytes.set(normalizedValues[i], offset); + offset += normalizedValues[i].length; + } + return bytes; +} +function createDataView(bytes) { + if (typeof Buffer !== "undefined" && bytes instanceof Buffer) { + const buffer = bytes.buffer.slice( + bytes.byteOffset, + bytes.byteOffset + bytes.byteLength + ); + return new DataView(buffer); + } + return new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength); +} + +// src/hex.ts +var HexStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), /^(?:0x)?[0-9a-f]+$/iu); +var StrictHexStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), /^0x[0-9a-f]+$/iu); +var HexAddressStruct = _superstruct.pattern.call(void 0, + _superstruct.string.call(void 0, ), + /^0x[0-9a-f]{40}$/u +); +var 
HexChecksumAddressStruct = _superstruct.pattern.call(void 0, + _superstruct.string.call(void 0, ), + /^0x[0-9a-fA-F]{40}$/u +); +function isHexString(value) { + return _superstruct.is.call(void 0, value, HexStruct); +} +function isStrictHexString(value) { + return _superstruct.is.call(void 0, value, StrictHexStruct); +} +function assertIsHexString(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, isHexString(value), "Value must be a hexadecimal string."); +} +function assertIsStrictHexString(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, + isStrictHexString(value), + 'Value must be a hexadecimal string, starting with "0x".' + ); +} +function isValidHexAddress(possibleAddress) { + return _superstruct.is.call(void 0, possibleAddress, HexAddressStruct) || isValidChecksumAddress(possibleAddress); +} +function getChecksumAddress(address) { + _chunk6ZDHSOUVjs.assert.call(void 0, _superstruct.is.call(void 0, address, HexChecksumAddressStruct), "Invalid hex address."); + const unPrefixed = remove0x(address.toLowerCase()); + const unPrefixedHash = remove0x(bytesToHex(_sha3.keccak_256.call(void 0, unPrefixed))); + return `0x${unPrefixed.split("").map((character, nibbleIndex) => { + const hashCharacter = unPrefixedHash[nibbleIndex]; + _chunk6ZDHSOUVjs.assert.call(void 0, _superstruct.is.call(void 0, hashCharacter, _superstruct.string.call(void 0, )), "Hash shorter than address."); + return parseInt(hashCharacter, 16) > 7 ? 
character.toUpperCase() : character; + }).join("")}`; +} +function isValidChecksumAddress(possibleChecksum) { + if (!_superstruct.is.call(void 0, possibleChecksum, HexChecksumAddressStruct)) { + return false; + } + return getChecksumAddress(possibleChecksum) === possibleChecksum; +} +function add0x(hexadecimal) { + if (hexadecimal.startsWith("0x")) { + return hexadecimal; + } + if (hexadecimal.startsWith("0X")) { + return `0x${hexadecimal.substring(2)}`; + } + return `0x${hexadecimal}`; +} +function remove0x(hexadecimal) { + if (hexadecimal.startsWith("0x") || hexadecimal.startsWith("0X")) { + return hexadecimal.substring(2); + } + return hexadecimal; +} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +exports.HexStruct = HexStruct; exports.StrictHexStruct = StrictHexStruct; exports.HexAddressStruct = HexAddressStruct; exports.HexChecksumAddressStruct = HexChecksumAddressStruct; exports.isHexString = isHexString; exports.isStrictHexString = isStrictHexString; exports.assertIsHexString = assertIsHexString; exports.assertIsStrictHexString = assertIsStrictHexString; exports.isValidHexAddress = isValidHexAddress; exports.getChecksumAddress = getChecksumAddress; exports.isValidChecksumAddress = isValidChecksumAddress; exports.add0x = add0x; exports.remove0x = remove0x; exports.isBytes = isBytes; exports.assertIsBytes = assertIsBytes; exports.bytesToHex = bytesToHex; exports.bytesToBigInt = bytesToBigInt; exports.bytesToSignedBigInt = bytesToSignedBigInt; exports.bytesToNumber = bytesToNumber; exports.bytesToString = bytesToString; exports.bytesToBase64 = bytesToBase64; exports.hexToBytes = hexToBytes; exports.bigIntToBytes = bigIntToBytes; exports.signedBigIntToBytes = signedBigIntToBytes; exports.numberToBytes = numberToBytes; exports.stringToBytes = stringToBytes; exports.base64ToBytes = base64ToBytes; exports.valueToBytes = valueToBytes; exports.concatBytes = concatBytes; exports.createDataView = createDataView; +//# 
sourceMappingURL=chunk-QEPVHEP7.js.map + +/***/ }), + +/***/ 96526: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }// src/misc.ts +function isNonEmptyArray(value) { + return Array.isArray(value) && value.length > 0; +} +function isNullOrUndefined(value) { + return value === null || value === void 0; +} +function isObject(value) { + return Boolean(value) && typeof value === "object" && !Array.isArray(value); +} +var hasProperty = (objectToCheck, name) => Object.hasOwnProperty.call(objectToCheck, name); +function getKnownPropertyNames(object) { + return Object.getOwnPropertyNames(object); +} +var JsonSize = /* @__PURE__ */ ((JsonSize2) => { + JsonSize2[JsonSize2["Null"] = 4] = "Null"; + JsonSize2[JsonSize2["Comma"] = 1] = "Comma"; + JsonSize2[JsonSize2["Wrapper"] = 1] = "Wrapper"; + JsonSize2[JsonSize2["True"] = 4] = "True"; + JsonSize2[JsonSize2["False"] = 5] = "False"; + JsonSize2[JsonSize2["Quote"] = 1] = "Quote"; + JsonSize2[JsonSize2["Colon"] = 1] = "Colon"; + JsonSize2[JsonSize2["Date"] = 24] = "Date"; + return JsonSize2; +})(JsonSize || {}); +var ESCAPE_CHARACTERS_REGEXP = /"|\\|\n|\r|\t/gu; +function isPlainObject(value) { + if (typeof value !== "object" || value === null) { + return false; + } + try { + let proto = value; + while (Object.getPrototypeOf(proto) !== null) { + proto = Object.getPrototypeOf(proto); + } + return Object.getPrototypeOf(value) === proto; + } catch (_) { + return false; + } +} +function isASCII(character) { + return character.charCodeAt(0) <= 127; +} +function calculateStringSize(value) { + const size = value.split("").reduce((total, character) => { + if (isASCII(character)) { + return total + 1; + } + return total + 2; + }, 0); + return size + (_nullishCoalesce(value.match(ESCAPE_CHARACTERS_REGEXP), () => ( []))).length; +} +function 
calculateNumberSize(value) { + return value.toString().length; +} + + + + + + + + + + + + + +exports.isNonEmptyArray = isNonEmptyArray; exports.isNullOrUndefined = isNullOrUndefined; exports.isObject = isObject; exports.hasProperty = hasProperty; exports.getKnownPropertyNames = getKnownPropertyNames; exports.JsonSize = JsonSize; exports.ESCAPE_CHARACTERS_REGEXP = ESCAPE_CHARACTERS_REGEXP; exports.isPlainObject = isPlainObject; exports.isASCII = isASCII; exports.calculateStringSize = calculateStringSize; exports.calculateNumberSize = calculateNumberSize; +//# sourceMappingURL=chunk-QVEKZRZ2.js.map + +/***/ }), + +/***/ 61305: +/***/ (() => { + +"use strict"; +//# sourceMappingURL=chunk-RKRGAFXY.js.map + +/***/ }), + +/***/ 43207: +/***/ (() => { + +"use strict"; +//# sourceMappingURL=chunk-UOTVU7OQ.js.map + +/***/ }), + +/***/ 41535: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + + +var _chunkQEPVHEP7js = __webpack_require__(75363); + + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + +// src/number.ts +var numberToHex = (value) => { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "number", "Value must be a number."); + _chunk6ZDHSOUVjs.assert.call(void 0, value >= 0, "Value must be a non-negative number."); + _chunk6ZDHSOUVjs.assert.call(void 0, + Number.isSafeInteger(value), + "Value is not a safe integer. Use `bigIntToHex` instead." 
+ ); + return _chunkQEPVHEP7js.add0x.call(void 0, value.toString(16)); +}; +var bigIntToHex = (value) => { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "bigint", "Value must be a bigint."); + _chunk6ZDHSOUVjs.assert.call(void 0, value >= 0, "Value must be a non-negative bigint."); + return _chunkQEPVHEP7js.add0x.call(void 0, value.toString(16)); +}; +var hexToNumber = (value) => { + _chunkQEPVHEP7js.assertIsHexString.call(void 0, value); + const numberValue = parseInt(value, 16); + _chunk6ZDHSOUVjs.assert.call(void 0, + Number.isSafeInteger(numberValue), + "Value is not a safe integer. Use `hexToBigInt` instead." + ); + return numberValue; +}; +var hexToBigInt = (value) => { + _chunkQEPVHEP7js.assertIsHexString.call(void 0, value); + return BigInt(_chunkQEPVHEP7js.add0x.call(void 0, value)); +}; + + + + + + +exports.numberToHex = numberToHex; exports.bigIntToHex = bigIntToHex; exports.hexToNumber = hexToNumber; exports.hexToBigInt = hexToBigInt; +//# sourceMappingURL=chunk-VFXTVNXN.js.map + +/***/ }), + +/***/ 2489: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }// src/caip-types.ts +var _superstruct = __webpack_require__(2150); +var CAIP_CHAIN_ID_REGEX = /^(?[-a-z0-9]{3,8}):(?[-_a-zA-Z0-9]{1,32})$/u; +var CAIP_NAMESPACE_REGEX = /^[-a-z0-9]{3,8}$/u; +var CAIP_REFERENCE_REGEX = /^[-_a-zA-Z0-9]{1,32}$/u; +var CAIP_ACCOUNT_ID_REGEX = 
/^(?(?[-a-z0-9]{3,8}):(?[-_a-zA-Z0-9]{1,32})):(?[-.%a-zA-Z0-9]{1,128})$/u; +var CAIP_ACCOUNT_ADDRESS_REGEX = /^[-.%a-zA-Z0-9]{1,128}$/u; +var CaipChainIdStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), CAIP_CHAIN_ID_REGEX); +var CaipNamespaceStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), CAIP_NAMESPACE_REGEX); +var CaipReferenceStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), CAIP_REFERENCE_REGEX); +var CaipAccountIdStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), CAIP_ACCOUNT_ID_REGEX); +var CaipAccountAddressStruct = _superstruct.pattern.call(void 0, + _superstruct.string.call(void 0, ), + CAIP_ACCOUNT_ADDRESS_REGEX +); +var KnownCaipNamespace = /* @__PURE__ */ ((KnownCaipNamespace2) => { + KnownCaipNamespace2["Eip155"] = "eip155"; + return KnownCaipNamespace2; +})(KnownCaipNamespace || {}); +function isCaipChainId(value) { + return _superstruct.is.call(void 0, value, CaipChainIdStruct); +} +function isCaipNamespace(value) { + return _superstruct.is.call(void 0, value, CaipNamespaceStruct); +} +function isCaipReference(value) { + return _superstruct.is.call(void 0, value, CaipReferenceStruct); +} +function isCaipAccountId(value) { + return _superstruct.is.call(void 0, value, CaipAccountIdStruct); +} +function isCaipAccountAddress(value) { + return _superstruct.is.call(void 0, value, CaipAccountAddressStruct); +} +function parseCaipChainId(caipChainId) { + const match = CAIP_CHAIN_ID_REGEX.exec(caipChainId); + if (!_optionalChain([match, 'optionalAccess', _ => _.groups])) { + throw new Error("Invalid CAIP chain ID."); + } + return { + namespace: match.groups.namespace, + reference: match.groups.reference + }; +} +function parseCaipAccountId(caipAccountId) { + const match = CAIP_ACCOUNT_ID_REGEX.exec(caipAccountId); + if (!_optionalChain([match, 'optionalAccess', _2 => _2.groups])) { + throw new Error("Invalid CAIP account ID."); + } + return { 
+ address: match.groups.accountAddress, + chainId: match.groups.chainId, + chain: { + namespace: match.groups.namespace, + reference: match.groups.reference + } + }; +} +function toCaipChainId(namespace, reference) { + if (!isCaipNamespace(namespace)) { + throw new Error( + `Invalid "namespace", must match: ${CAIP_NAMESPACE_REGEX.toString()}` + ); + } + if (!isCaipReference(reference)) { + throw new Error( + `Invalid "reference", must match: ${CAIP_REFERENCE_REGEX.toString()}` + ); + } + return `${namespace}:${reference}`; +} + + + + + + + + + + + + + + + + + + + + + +exports.CAIP_CHAIN_ID_REGEX = CAIP_CHAIN_ID_REGEX; exports.CAIP_NAMESPACE_REGEX = CAIP_NAMESPACE_REGEX; exports.CAIP_REFERENCE_REGEX = CAIP_REFERENCE_REGEX; exports.CAIP_ACCOUNT_ID_REGEX = CAIP_ACCOUNT_ID_REGEX; exports.CAIP_ACCOUNT_ADDRESS_REGEX = CAIP_ACCOUNT_ADDRESS_REGEX; exports.CaipChainIdStruct = CaipChainIdStruct; exports.CaipNamespaceStruct = CaipNamespaceStruct; exports.CaipReferenceStruct = CaipReferenceStruct; exports.CaipAccountIdStruct = CaipAccountIdStruct; exports.CaipAccountAddressStruct = CaipAccountAddressStruct; exports.KnownCaipNamespace = KnownCaipNamespace; exports.isCaipChainId = isCaipChainId; exports.isCaipNamespace = isCaipNamespace; exports.isCaipReference = isCaipReference; exports.isCaipAccountId = isCaipAccountId; exports.isCaipAccountAddress = isCaipAccountAddress; exports.parseCaipChainId = parseCaipChainId; exports.parseCaipAccountId = parseCaipAccountId; exports.toCaipChainId = toCaipChainId; +//# sourceMappingURL=chunk-YWAID473.js.map + +/***/ }), + +/***/ 51584: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + + + +var _chunk3W5G4CYIjs = __webpack_require__(85244); + +// src/collections.ts +var _map; +var FrozenMap = class { + constructor(entries) { + _chunk3W5G4CYIjs.__privateAdd.call(void 0, this, _map, void 0); + _chunk3W5G4CYIjs.__privateSet.call(void 0, this, 
_map, new Map(entries)); + Object.freeze(this); + } + get size() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).size; + } + [Symbol.iterator]() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map)[Symbol.iterator](); + } + entries() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).entries(); + } + forEach(callbackfn, thisArg) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).forEach( + (value, key, _map2) => callbackfn.call(thisArg, value, key, this) + ); + } + get(key) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).get(key); + } + has(key) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).has(key); + } + keys() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).keys(); + } + values() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).values(); + } + toString() { + return `FrozenMap(${this.size}) {${this.size > 0 ? ` ${[...this.entries()].map(([key, value]) => `${String(key)} => ${String(value)}`).join(", ")} ` : ""}}`; + } +}; +_map = new WeakMap(); +var _set; +var FrozenSet = class { + constructor(values) { + _chunk3W5G4CYIjs.__privateAdd.call(void 0, this, _set, void 0); + _chunk3W5G4CYIjs.__privateSet.call(void 0, this, _set, new Set(values)); + Object.freeze(this); + } + get size() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).size; + } + [Symbol.iterator]() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set)[Symbol.iterator](); + } + entries() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).entries(); + } + forEach(callbackfn, thisArg) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).forEach( + (value, value2, _set2) => callbackfn.call(thisArg, value, value2, this) + ); + } + has(value) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).has(value); + } + keys() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).keys(); + } + values() 
{ + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).values(); + } + toString() { + return `FrozenSet(${this.size}) {${this.size > 0 ? ` ${[...this.values()].map((member) => String(member)).join(", ")} ` : ""}}`; + } +}; +_set = new WeakMap(); +Object.freeze(FrozenMap); +Object.freeze(FrozenMap.prototype); +Object.freeze(FrozenSet); +Object.freeze(FrozenSet.prototype); + + + + +exports.FrozenMap = FrozenMap; exports.FrozenSet = FrozenSet; +//# sourceMappingURL=chunk-Z2RGWDD7.js.map + +/***/ }), + +/***/ 22049: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true}));__webpack_require__(87982); + + + + + +var _chunkVFXTVNXNjs = __webpack_require__(41535); +__webpack_require__(58383); + + +var _chunkC6HGFEYLjs = __webpack_require__(39705); + + + + +var _chunk4RMX5YWEjs = __webpack_require__(69116); +__webpack_require__(43207); + + + + + + + + + + +var _chunk4D6XQBHAjs = __webpack_require__(73631); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +var _chunkOLLG4H35js = __webpack_require__(87427); +__webpack_require__(61305); + + + +var _chunk2LBGT4GHjs = __webpack_require__(61275); + + + + + + + + + + + + + + + + + + + + +var _chunkYWAID473js = __webpack_require__(2489); + + +var _chunkE4C7EW4Rjs = __webpack_require__(1508); + + +var _chunk6NZW4WK4js = __webpack_require__(21848); + + + + + +var _chunkDHVKFDHQjs = __webpack_require__(1203); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +var _chunkQEPVHEP7js = __webpack_require__(75363); + + + + + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + + + + + + +var _chunkIZC266HSjs = __webpack_require__(1486); + + + + + + + + + + + + +var _chunkQVEKZRZ2js = __webpack_require__(96526); + + + +var _chunkZ2RGWDD7js = __webpack_require__(51584); +__webpack_require__(85244); +__webpack_require__(51423); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +exports.AssertionError = _chunk6ZDHSOUVjs.AssertionError; exports.CAIP_ACCOUNT_ADDRESS_REGEX = _chunkYWAID473js.CAIP_ACCOUNT_ADDRESS_REGEX; exports.CAIP_ACCOUNT_ID_REGEX = _chunkYWAID473js.CAIP_ACCOUNT_ID_REGEX; exports.CAIP_CHAIN_ID_REGEX = _chunkYWAID473js.CAIP_CHAIN_ID_REGEX; exports.CAIP_NAMESPACE_REGEX = _chunkYWAID473js.CAIP_NAMESPACE_REGEX; exports.CAIP_REFERENCE_REGEX = _chunkYWAID473js.CAIP_REFERENCE_REGEX; exports.CaipAccountAddressStruct = _chunkYWAID473js.CaipAccountAddressStruct; exports.CaipAccountIdStruct = _chunkYWAID473js.CaipAccountIdStruct; exports.CaipChainIdStruct = _chunkYWAID473js.CaipChainIdStruct; exports.CaipNamespaceStruct = _chunkYWAID473js.CaipNamespaceStruct; exports.CaipReferenceStruct = _chunkYWAID473js.CaipReferenceStruct; exports.ChecksumStruct = _chunkE4C7EW4Rjs.ChecksumStruct; exports.Duration = _chunk4RMX5YWEjs.Duration; exports.ESCAPE_CHARACTERS_REGEXP = _chunkQVEKZRZ2js.ESCAPE_CHARACTERS_REGEXP; exports.FrozenMap = _chunkZ2RGWDD7js.FrozenMap; exports.FrozenSet = _chunkZ2RGWDD7js.FrozenSet; exports.HexAddressStruct = _chunkQEPVHEP7js.HexAddressStruct; exports.HexChecksumAddressStruct = _chunkQEPVHEP7js.HexChecksumAddressStruct; exports.HexStruct = _chunkQEPVHEP7js.HexStruct; exports.JsonRpcErrorStruct = _chunkOLLG4H35js.JsonRpcErrorStruct; exports.JsonRpcFailureStruct = _chunkOLLG4H35js.JsonRpcFailureStruct; exports.JsonRpcIdStruct = _chunkOLLG4H35js.JsonRpcIdStruct; exports.JsonRpcNotificationStruct = _chunkOLLG4H35js.JsonRpcNotificationStruct; exports.JsonRpcParamsStruct = _chunkOLLG4H35js.JsonRpcParamsStruct; exports.JsonRpcRequestStruct = _chunkOLLG4H35js.JsonRpcRequestStruct; exports.JsonRpcResponseStruct = _chunkOLLG4H35js.JsonRpcResponseStruct; exports.JsonRpcSuccessStruct = _chunkOLLG4H35js.JsonRpcSuccessStruct; exports.JsonRpcVersionStruct = 
_chunkOLLG4H35js.JsonRpcVersionStruct; exports.JsonSize = _chunkQVEKZRZ2js.JsonSize; exports.JsonStruct = _chunkOLLG4H35js.JsonStruct; exports.KnownCaipNamespace = _chunkYWAID473js.KnownCaipNamespace; exports.PendingJsonRpcResponseStruct = _chunkOLLG4H35js.PendingJsonRpcResponseStruct; exports.StrictHexStruct = _chunkQEPVHEP7js.StrictHexStruct; exports.UnsafeJsonStruct = _chunkOLLG4H35js.UnsafeJsonStruct; exports.VersionRangeStruct = _chunk4D6XQBHAjs.VersionRangeStruct; exports.VersionStruct = _chunk4D6XQBHAjs.VersionStruct; exports.add0x = _chunkQEPVHEP7js.add0x; exports.assert = _chunk6ZDHSOUVjs.assert; exports.assertExhaustive = _chunk6ZDHSOUVjs.assertExhaustive; exports.assertIsBytes = _chunkQEPVHEP7js.assertIsBytes; exports.assertIsHexString = _chunkQEPVHEP7js.assertIsHexString; exports.assertIsJsonRpcError = _chunkOLLG4H35js.assertIsJsonRpcError; exports.assertIsJsonRpcFailure = _chunkOLLG4H35js.assertIsJsonRpcFailure; exports.assertIsJsonRpcNotification = _chunkOLLG4H35js.assertIsJsonRpcNotification; exports.assertIsJsonRpcRequest = _chunkOLLG4H35js.assertIsJsonRpcRequest; exports.assertIsJsonRpcResponse = _chunkOLLG4H35js.assertIsJsonRpcResponse; exports.assertIsJsonRpcSuccess = _chunkOLLG4H35js.assertIsJsonRpcSuccess; exports.assertIsPendingJsonRpcResponse = _chunkOLLG4H35js.assertIsPendingJsonRpcResponse; exports.assertIsSemVerRange = _chunk4D6XQBHAjs.assertIsSemVerRange; exports.assertIsSemVerVersion = _chunk4D6XQBHAjs.assertIsSemVerVersion; exports.assertIsStrictHexString = _chunkQEPVHEP7js.assertIsStrictHexString; exports.assertStruct = _chunk6ZDHSOUVjs.assertStruct; exports.base64 = _chunk6NZW4WK4js.base64; exports.base64ToBytes = _chunkQEPVHEP7js.base64ToBytes; exports.bigIntToBytes = _chunkQEPVHEP7js.bigIntToBytes; exports.bigIntToHex = _chunkVFXTVNXNjs.bigIntToHex; exports.bytesToBase64 = _chunkQEPVHEP7js.bytesToBase64; exports.bytesToBigInt = _chunkQEPVHEP7js.bytesToBigInt; exports.bytesToHex = _chunkQEPVHEP7js.bytesToHex; exports.bytesToNumber = 
_chunkQEPVHEP7js.bytesToNumber; exports.bytesToSignedBigInt = _chunkQEPVHEP7js.bytesToSignedBigInt; exports.bytesToString = _chunkQEPVHEP7js.bytesToString; exports.calculateNumberSize = _chunkQVEKZRZ2js.calculateNumberSize; exports.calculateStringSize = _chunkQVEKZRZ2js.calculateStringSize; exports.concatBytes = _chunkQEPVHEP7js.concatBytes; exports.createBigInt = _chunkDHVKFDHQjs.createBigInt; exports.createBytes = _chunkDHVKFDHQjs.createBytes; exports.createDataView = _chunkQEPVHEP7js.createDataView; exports.createDeferredPromise = _chunkC6HGFEYLjs.createDeferredPromise; exports.createHex = _chunkDHVKFDHQjs.createHex; exports.createModuleLogger = _chunk2LBGT4GHjs.createModuleLogger; exports.createNumber = _chunkDHVKFDHQjs.createNumber; exports.createProjectLogger = _chunk2LBGT4GHjs.createProjectLogger; exports.exactOptional = _chunkOLLG4H35js.exactOptional; exports.getChecksumAddress = _chunkQEPVHEP7js.getChecksumAddress; exports.getErrorMessage = _chunkIZC266HSjs.getErrorMessage; exports.getJsonRpcIdValidator = _chunkOLLG4H35js.getJsonRpcIdValidator; exports.getJsonSize = _chunkOLLG4H35js.getJsonSize; exports.getKnownPropertyNames = _chunkQVEKZRZ2js.getKnownPropertyNames; exports.getSafeJson = _chunkOLLG4H35js.getSafeJson; exports.gtRange = _chunk4D6XQBHAjs.gtRange; exports.gtVersion = _chunk4D6XQBHAjs.gtVersion; exports.hasProperty = _chunkQVEKZRZ2js.hasProperty; exports.hexToBigInt = _chunkVFXTVNXNjs.hexToBigInt; exports.hexToBytes = _chunkQEPVHEP7js.hexToBytes; exports.hexToNumber = _chunkVFXTVNXNjs.hexToNumber; exports.inMilliseconds = _chunk4RMX5YWEjs.inMilliseconds; exports.isASCII = _chunkQVEKZRZ2js.isASCII; exports.isBytes = _chunkQEPVHEP7js.isBytes; exports.isCaipAccountAddress = _chunkYWAID473js.isCaipAccountAddress; exports.isCaipAccountId = _chunkYWAID473js.isCaipAccountId; exports.isCaipChainId = _chunkYWAID473js.isCaipChainId; exports.isCaipNamespace = _chunkYWAID473js.isCaipNamespace; exports.isCaipReference = _chunkYWAID473js.isCaipReference; 
exports.isErrorWithCode = _chunkIZC266HSjs.isErrorWithCode; exports.isErrorWithMessage = _chunkIZC266HSjs.isErrorWithMessage; exports.isErrorWithStack = _chunkIZC266HSjs.isErrorWithStack; exports.isHexString = _chunkQEPVHEP7js.isHexString; exports.isJsonRpcError = _chunkOLLG4H35js.isJsonRpcError; exports.isJsonRpcFailure = _chunkOLLG4H35js.isJsonRpcFailure; exports.isJsonRpcNotification = _chunkOLLG4H35js.isJsonRpcNotification; exports.isJsonRpcRequest = _chunkOLLG4H35js.isJsonRpcRequest; exports.isJsonRpcResponse = _chunkOLLG4H35js.isJsonRpcResponse; exports.isJsonRpcSuccess = _chunkOLLG4H35js.isJsonRpcSuccess; exports.isNonEmptyArray = _chunkQVEKZRZ2js.isNonEmptyArray; exports.isNullOrUndefined = _chunkQVEKZRZ2js.isNullOrUndefined; exports.isObject = _chunkQVEKZRZ2js.isObject; exports.isPendingJsonRpcResponse = _chunkOLLG4H35js.isPendingJsonRpcResponse; exports.isPlainObject = _chunkQVEKZRZ2js.isPlainObject; exports.isStrictHexString = _chunkQEPVHEP7js.isStrictHexString; exports.isValidChecksumAddress = _chunkQEPVHEP7js.isValidChecksumAddress; exports.isValidHexAddress = _chunkQEPVHEP7js.isValidHexAddress; exports.isValidJson = _chunkOLLG4H35js.isValidJson; exports.isValidSemVerRange = _chunk4D6XQBHAjs.isValidSemVerRange; exports.isValidSemVerVersion = _chunk4D6XQBHAjs.isValidSemVerVersion; exports.jsonrpc2 = _chunkOLLG4H35js.jsonrpc2; exports.numberToBytes = _chunkQEPVHEP7js.numberToBytes; exports.numberToHex = _chunkVFXTVNXNjs.numberToHex; exports.object = _chunkOLLG4H35js.object; exports.parseCaipAccountId = _chunkYWAID473js.parseCaipAccountId; exports.parseCaipChainId = _chunkYWAID473js.parseCaipChainId; exports.remove0x = _chunkQEPVHEP7js.remove0x; exports.satisfiesVersionRange = _chunk4D6XQBHAjs.satisfiesVersionRange; exports.signedBigIntToBytes = _chunkQEPVHEP7js.signedBigIntToBytes; exports.stringToBytes = _chunkQEPVHEP7js.stringToBytes; exports.timeSince = _chunk4RMX5YWEjs.timeSince; exports.toCaipChainId = _chunkYWAID473js.toCaipChainId; 
exports.valueToBytes = _chunkQEPVHEP7js.valueToBytes; exports.wrapError = _chunkIZC266HSjs.wrapError; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 82102: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.output = exports.exists = exports.hash = exports.bytes = exports.bool = exports.number = exports.isBytes = void 0; +function number(n) { + if (!Number.isSafeInteger(n) || n < 0) + throw new Error(`positive integer expected, not ${n}`); +} +exports.number = number; +function bool(b) { + if (typeof b !== 'boolean') + throw new Error(`boolean expected, not ${b}`); +} +exports.bool = bool; +// copied from utils +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +exports.isBytes = isBytes; +function bytes(b, ...lengths) { + if (!isBytes(b)) + throw new Error('Uint8Array expected'); + if (lengths.length > 0 && !lengths.includes(b.length)) + throw new Error(`Uint8Array expected of length ${lengths}, not of length=${b.length}`); +} +exports.bytes = bytes; +function hash(h) { + if (typeof h !== 'function' || typeof h.create !== 'function') + throw new Error('Hash should be wrapped by utils.wrapConstructor'); + number(h.outputLen); + number(h.blockLen); +} +exports.hash = hash; +function exists(instance, checkFinished = true) { + if (instance.destroyed) + throw new Error('Hash instance has been destroyed'); + if (checkFinished && instance.finished) + throw new Error('Hash#digest() has already been called'); +} +exports.exists = exists; +function output(out, instance) { + bytes(out); + const min = instance.outputLen; + if (out.length < min) { + throw new Error(`digestInto() expects output buffer of length at least ${min}`); + } +} +exports.output = output; +const assert = { number, bool, bytes, hash, exists, output }; +exports["default"] = assert; +//# sourceMappingURL=_assert.js.map 
+ +/***/ }), + +/***/ 17335: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.add5L = exports.add5H = exports.add4H = exports.add4L = exports.add3H = exports.add3L = exports.add = exports.rotlBL = exports.rotlBH = exports.rotlSL = exports.rotlSH = exports.rotr32L = exports.rotr32H = exports.rotrBL = exports.rotrBH = exports.rotrSL = exports.rotrSH = exports.shrSL = exports.shrSH = exports.toBig = exports.split = exports.fromBig = void 0; +const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); +const _32n = /* @__PURE__ */ BigInt(32); +// We are not using BigUint64Array, because they are extremely slow as per 2022 +function fromBig(n, le = false) { + if (le) + return { h: Number(n & U32_MASK64), l: Number((n >> _32n) & U32_MASK64) }; + return { h: Number((n >> _32n) & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 }; +} +exports.fromBig = fromBig; +function split(lst, le = false) { + let Ah = new Uint32Array(lst.length); + let Al = new Uint32Array(lst.length); + for (let i = 0; i < lst.length; i++) { + const { h, l } = fromBig(lst[i], le); + [Ah[i], Al[i]] = [h, l]; + } + return [Ah, Al]; +} +exports.split = split; +const toBig = (h, l) => (BigInt(h >>> 0) << _32n) | BigInt(l >>> 0); +exports.toBig = toBig; +// for Shift in [0, 32) +const shrSH = (h, _l, s) => h >>> s; +exports.shrSH = shrSH; +const shrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +exports.shrSL = shrSL; +// Right rotate for Shift in [1, 32) +const rotrSH = (h, l, s) => (h >>> s) | (l << (32 - s)); +exports.rotrSH = rotrSH; +const rotrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +exports.rotrSL = rotrSL; +// Right rotate for Shift in (32, 64), NOTE: 32 is special case. 
+const rotrBH = (h, l, s) => (h << (64 - s)) | (l >>> (s - 32)); +exports.rotrBH = rotrBH; +const rotrBL = (h, l, s) => (h >>> (s - 32)) | (l << (64 - s)); +exports.rotrBL = rotrBL; +// Right rotate for shift===32 (just swaps l&h) +const rotr32H = (_h, l) => l; +exports.rotr32H = rotr32H; +const rotr32L = (h, _l) => h; +exports.rotr32L = rotr32L; +// Left rotate for Shift in [1, 32) +const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s)); +exports.rotlSH = rotlSH; +const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s)); +exports.rotlSL = rotlSL; +// Left rotate for Shift in (32, 64), NOTE: 32 is special case. +const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); +exports.rotlBH = rotlBH; +const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); +exports.rotlBL = rotlBL; +// JS uses 32-bit signed integers for bitwise operations which means we cannot +// simple take carry out of low bit sum by shift, we need to use division. +function add(Ah, Al, Bh, Bl) { + const l = (Al >>> 0) + (Bl >>> 0); + return { h: (Ah + Bh + ((l / 2 ** 32) | 0)) | 0, l: l | 0 }; +} +exports.add = add; +// Addition with more than 2 elements +const add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0); +exports.add3L = add3L; +const add3H = (low, Ah, Bh, Ch) => (Ah + Bh + Ch + ((low / 2 ** 32) | 0)) | 0; +exports.add3H = add3H; +const add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0); +exports.add4L = add4L; +const add4H = (low, Ah, Bh, Ch, Dh) => (Ah + Bh + Ch + Dh + ((low / 2 ** 32) | 0)) | 0; +exports.add4H = add4H; +const add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0); +exports.add5L = add5L; +const add5H = (low, Ah, Bh, Ch, Dh, Eh) => (Ah + Bh + Ch + Dh + Eh + ((low / 2 ** 32) | 0)) | 0; +exports.add5H = add5H; +// prettier-ignore +const u64 = { + fromBig, split, toBig, + shrSH, shrSL, + rotrSH, rotrSL, rotrBH, rotrBL, + rotr32H, rotr32L, + rotlSH, rotlSL, rotlBH, rotlBL, + add, add3L, 
add3H, add4L, add4H, add5H, add5L, +}; +exports["default"] = u64; +//# sourceMappingURL=_u64.js.map + +/***/ }), + +/***/ 6256: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.crypto = void 0; +exports.crypto = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined; +//# sourceMappingURL=crypto.js.map + +/***/ }), + +/***/ 2214: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.shake256 = exports.shake128 = exports.keccak_512 = exports.keccak_384 = exports.keccak_256 = exports.keccak_224 = exports.sha3_512 = exports.sha3_384 = exports.sha3_256 = exports.sha3_224 = exports.Keccak = exports.keccakP = void 0; +const _assert_js_1 = __webpack_require__(82102); +const _u64_js_1 = __webpack_require__(17335); +const utils_js_1 = __webpack_require__(79520); +// SHA3 (keccak) is based on a new design: basically, the internal state is bigger than output size. +// It's called a sponge function. 
+// Various per round constants calculations +const SHA3_PI = []; +const SHA3_ROTL = []; +const _SHA3_IOTA = []; +const _0n = /* @__PURE__ */ BigInt(0); +const _1n = /* @__PURE__ */ BigInt(1); +const _2n = /* @__PURE__ */ BigInt(2); +const _7n = /* @__PURE__ */ BigInt(7); +const _256n = /* @__PURE__ */ BigInt(256); +const _0x71n = /* @__PURE__ */ BigInt(0x71); +for (let round = 0, R = _1n, x = 1, y = 0; round < 24; round++) { + // Pi + [x, y] = [y, (2 * x + 3 * y) % 5]; + SHA3_PI.push(2 * (5 * y + x)); + // Rotational + SHA3_ROTL.push((((round + 1) * (round + 2)) / 2) % 64); + // Iota + let t = _0n; + for (let j = 0; j < 7; j++) { + R = ((R << _1n) ^ ((R >> _7n) * _0x71n)) % _256n; + if (R & _2n) + t ^= _1n << ((_1n << /* @__PURE__ */ BigInt(j)) - _1n); + } + _SHA3_IOTA.push(t); +} +const [SHA3_IOTA_H, SHA3_IOTA_L] = /* @__PURE__ */ (0, _u64_js_1.split)(_SHA3_IOTA, true); +// Left rotation (without 0, 32, 64) +const rotlH = (h, l, s) => (s > 32 ? (0, _u64_js_1.rotlBH)(h, l, s) : (0, _u64_js_1.rotlSH)(h, l, s)); +const rotlL = (h, l, s) => (s > 32 ? 
(0, _u64_js_1.rotlBL)(h, l, s) : (0, _u64_js_1.rotlSL)(h, l, s)); +// Same as keccakf1600, but allows to skip some rounds +function keccakP(s, rounds = 24) { + const B = new Uint32Array(5 * 2); + // NOTE: all indices are x2 since we store state as u32 instead of u64 (bigints to slow in js) + for (let round = 24 - rounds; round < 24; round++) { + // Theta θ + for (let x = 0; x < 10; x++) + B[x] = s[x] ^ s[x + 10] ^ s[x + 20] ^ s[x + 30] ^ s[x + 40]; + for (let x = 0; x < 10; x += 2) { + const idx1 = (x + 8) % 10; + const idx0 = (x + 2) % 10; + const B0 = B[idx0]; + const B1 = B[idx0 + 1]; + const Th = rotlH(B0, B1, 1) ^ B[idx1]; + const Tl = rotlL(B0, B1, 1) ^ B[idx1 + 1]; + for (let y = 0; y < 50; y += 10) { + s[x + y] ^= Th; + s[x + y + 1] ^= Tl; + } + } + // Rho (ρ) and Pi (π) + let curH = s[2]; + let curL = s[3]; + for (let t = 0; t < 24; t++) { + const shift = SHA3_ROTL[t]; + const Th = rotlH(curH, curL, shift); + const Tl = rotlL(curH, curL, shift); + const PI = SHA3_PI[t]; + curH = s[PI]; + curL = s[PI + 1]; + s[PI] = Th; + s[PI + 1] = Tl; + } + // Chi (χ) + for (let y = 0; y < 50; y += 10) { + for (let x = 0; x < 10; x++) + B[x] = s[y + x]; + for (let x = 0; x < 10; x++) + s[y + x] ^= ~B[(x + 2) % 10] & B[(x + 4) % 10]; + } + // Iota (ι) + s[0] ^= SHA3_IOTA_H[round]; + s[1] ^= SHA3_IOTA_L[round]; + } + B.fill(0); +} +exports.keccakP = keccakP; +class Keccak extends utils_js_1.Hash { + // NOTE: we accept arguments in bytes instead of bits here. + constructor(blockLen, suffix, outputLen, enableXOF = false, rounds = 24) { + super(); + this.blockLen = blockLen; + this.suffix = suffix; + this.outputLen = outputLen; + this.enableXOF = enableXOF; + this.rounds = rounds; + this.pos = 0; + this.posOut = 0; + this.finished = false; + this.destroyed = false; + // Can be passed from user as dkLen + (0, _assert_js_1.number)(outputLen); + // 1600 = 5x5 matrix of 64bit. 
1600 bits === 200 bytes + if (0 >= this.blockLen || this.blockLen >= 200) + throw new Error('Sha3 supports only keccak-f1600 function'); + this.state = new Uint8Array(200); + this.state32 = (0, utils_js_1.u32)(this.state); + } + keccak() { + if (!utils_js_1.isLE) + (0, utils_js_1.byteSwap32)(this.state32); + keccakP(this.state32, this.rounds); + if (!utils_js_1.isLE) + (0, utils_js_1.byteSwap32)(this.state32); + this.posOut = 0; + this.pos = 0; + } + update(data) { + (0, _assert_js_1.exists)(this); + const { blockLen, state } = this; + data = (0, utils_js_1.toBytes)(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + for (let i = 0; i < take; i++) + state[this.pos++] ^= data[pos++]; + if (this.pos === blockLen) + this.keccak(); + } + return this; + } + finish() { + if (this.finished) + return; + this.finished = true; + const { state, suffix, pos, blockLen } = this; + // Do the padding + state[pos] ^= suffix; + if ((suffix & 0x80) !== 0 && pos === blockLen - 1) + this.keccak(); + state[blockLen - 1] ^= 0x80; + this.keccak(); + } + writeInto(out) { + (0, _assert_js_1.exists)(this, false); + (0, _assert_js_1.bytes)(out); + this.finish(); + const bufferOut = this.state; + const { blockLen } = this; + for (let pos = 0, len = out.length; pos < len;) { + if (this.posOut >= blockLen) + this.keccak(); + const take = Math.min(blockLen - this.posOut, len - pos); + out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos); + this.posOut += take; + pos += take; + } + return out; + } + xofInto(out) { + // Sha3/Keccak usage with XOF is probably mistake, only SHAKE instances can do XOF + if (!this.enableXOF) + throw new Error('XOF is not possible for this instance'); + return this.writeInto(out); + } + xof(bytes) { + (0, _assert_js_1.number)(bytes); + return this.xofInto(new Uint8Array(bytes)); + } + digestInto(out) { + (0, _assert_js_1.output)(out, this); + if (this.finished) + throw new 
Error('digest() was already called'); + this.writeInto(out); + this.destroy(); + return out; + } + digest() { + return this.digestInto(new Uint8Array(this.outputLen)); + } + destroy() { + this.destroyed = true; + this.state.fill(0); + } + _cloneInto(to) { + const { blockLen, suffix, outputLen, rounds, enableXOF } = this; + to || (to = new Keccak(blockLen, suffix, outputLen, enableXOF, rounds)); + to.state32.set(this.state32); + to.pos = this.pos; + to.posOut = this.posOut; + to.finished = this.finished; + to.rounds = rounds; + // Suffix can change in cSHAKE + to.suffix = suffix; + to.outputLen = outputLen; + to.enableXOF = enableXOF; + to.destroyed = this.destroyed; + return to; + } +} +exports.Keccak = Keccak; +const gen = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapConstructor)(() => new Keccak(blockLen, suffix, outputLen)); +exports.sha3_224 = gen(0x06, 144, 224 / 8); +/** + * SHA3-256 hash function + * @param message - that would be hashed + */ +exports.sha3_256 = gen(0x06, 136, 256 / 8); +exports.sha3_384 = gen(0x06, 104, 384 / 8); +exports.sha3_512 = gen(0x06, 72, 512 / 8); +exports.keccak_224 = gen(0x01, 144, 224 / 8); +/** + * keccak-256 hash function. Different from SHA3-256. + * @param message - that would be hashed + */ +exports.keccak_256 = gen(0x01, 136, 256 / 8); +exports.keccak_384 = gen(0x01, 104, 384 / 8); +exports.keccak_512 = gen(0x01, 72, 512 / 8); +const genShake = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapXOFConstructorWithOpts)((opts = {}) => new Keccak(blockLen, suffix, opts.dkLen === undefined ? outputLen : opts.dkLen, true)); +exports.shake128 = genShake(0x1f, 168, 128 / 8); +exports.shake256 = genShake(0x1f, 136, 256 / 8); +//# sourceMappingURL=sha3.js.map + +/***/ }), + +/***/ 79520: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +/*! 
noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.randomBytes = exports.wrapXOFConstructorWithOpts = exports.wrapConstructorWithOpts = exports.wrapConstructor = exports.checkOpts = exports.Hash = exports.concatBytes = exports.toBytes = exports.utf8ToBytes = exports.asyncLoop = exports.nextTick = exports.hexToBytes = exports.bytesToHex = exports.byteSwap32 = exports.byteSwapIfBE = exports.byteSwap = exports.isLE = exports.rotl = exports.rotr = exports.createView = exports.u32 = exports.u8 = exports.isBytes = void 0; +// We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. +// node.js versions earlier than v19 don't declare it in global scope. +// For node.js, package.json#exports field mapping rewrites import +// from `crypto` to `cryptoNode`, which imports native module. +// Makes the utils un-importable in browsers without a bundler. +// Once node.js 18 is deprecated (2025-04-30), we can just drop the import. 
+const crypto_1 = __webpack_require__(6256); +const _assert_js_1 = __webpack_require__(82102); +// export { isBytes } from './_assert.js'; +// We can't reuse isBytes from _assert, because somehow this causes huge perf issues +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +exports.isBytes = isBytes; +// Cast array to different type +const u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); +exports.u8 = u8; +const u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); +exports.u32 = u32; +// Cast array to view +const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); +exports.createView = createView; +// The rotate right (circular right shift) operation for uint32 +const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift); +exports.rotr = rotr; +// The rotate left (circular left shift) operation for uint32 +const rotl = (word, shift) => (word << shift) | ((word >>> (32 - shift)) >>> 0); +exports.rotl = rotl; +exports.isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44; +// The byte swap operation for uint32 +const byteSwap = (word) => ((word << 24) & 0xff000000) | + ((word << 8) & 0xff0000) | + ((word >>> 8) & 0xff00) | + ((word >>> 24) & 0xff); +exports.byteSwap = byteSwap; +// Conditionally byte swap if on a big-endian platform +exports.byteSwapIfBE = exports.isLE ? 
(n) => n : (n) => (0, exports.byteSwap)(n); +// In place byte swap for Uint32Array +function byteSwap32(arr) { + for (let i = 0; i < arr.length; i++) { + arr[i] = (0, exports.byteSwap)(arr[i]); + } +} +exports.byteSwap32 = byteSwap32; +// Array where index 0xf0 (240) is mapped to string 'f0' +const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); +/** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ +function bytesToHex(bytes) { + (0, _assert_js_1.bytes)(bytes); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; +} +exports.bytesToHex = bytesToHex; +// We use optimized technique to convert hex string to byte array +const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 }; +function asciiToBase16(char) { + if (char >= asciis._0 && char <= asciis._9) + return char - asciis._0; + if (char >= asciis._A && char <= asciis._F) + return char - (asciis._A - 10); + if (char >= asciis._a && char <= asciis._f) + return char - (asciis._a - 10); + return; +} +/** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ +function hexToBytes(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const hl = hex.length; + const al = hl / 2; + if (hl % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + hl); + const array = new Uint8Array(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = asciiToBase16(hex.charCodeAt(hi)); + const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); + if (n1 === undefined || n2 === undefined) { + const char = hex[hi] + hex[hi + 1]; + throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); + } + array[ai] = n1 * 16 + n2; + } + return array; +} +exports.hexToBytes = hexToBytes; +// There is no setImmediate in browser and 
setTimeout is slow. +// call of async fn will return Promise, which will be fullfiled only on +// next scheduler queue processing step and this is exactly what we need. +const nextTick = async () => { }; +exports.nextTick = nextTick; +// Returns control to thread each 'tick' ms to avoid blocking +async function asyncLoop(iters, tick, cb) { + let ts = Date.now(); + for (let i = 0; i < iters; i++) { + cb(i); + // Date.now() is not monotonic, so in case if clock goes backwards we return return control too + const diff = Date.now() - ts; + if (diff >= 0 && diff < tick) + continue; + await (0, exports.nextTick)(); + ts += diff; + } +} +exports.asyncLoop = asyncLoop; +/** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ +function utf8ToBytes(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 +} +exports.utf8ToBytes = utf8ToBytes; +/** + * Normalizes (non-hex) string or Uint8Array to Uint8Array. + * Warning: when Uint8Array is passed, it would NOT get copied. + * Keep in mind for future mutable operations. + */ +function toBytes(data) { + if (typeof data === 'string') + data = utf8ToBytes(data); + (0, _assert_js_1.bytes)(data); + return data; +} +exports.toBytes = toBytes; +/** + * Copies several Uint8Arrays into one. 
+ */ +function concatBytes(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + (0, _assert_js_1.bytes)(a); + sum += a.length; + } + const res = new Uint8Array(sum); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; +} +exports.concatBytes = concatBytes; +// For runtime check if class implements interface +class Hash { + // Safe version that clones internal state + clone() { + return this._cloneInto(); + } +} +exports.Hash = Hash; +const toStr = {}.toString; +function checkOpts(defaults, opts) { + if (opts !== undefined && toStr.call(opts) !== '[object Object]') + throw new Error('Options should be object or undefined'); + const merged = Object.assign(defaults, opts); + return merged; +} +exports.checkOpts = checkOpts; +function wrapConstructor(hashCons) { + const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); + const tmp = hashCons(); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = () => hashCons(); + return hashC; +} +exports.wrapConstructor = wrapConstructor; +function wrapConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; +} +exports.wrapConstructorWithOpts = wrapConstructorWithOpts; +function wrapXOFConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; +} +exports.wrapXOFConstructorWithOpts = wrapXOFConstructorWithOpts; +/** + * Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS. 
+ */ +function randomBytes(bytesLength = 32) { + if (crypto_1.crypto && typeof crypto_1.crypto.getRandomValues === 'function') { + return crypto_1.crypto.getRandomValues(new Uint8Array(bytesLength)); + } + throw new Error('crypto.getRandomValues must be defined'); +} +exports.randomBytes = randomBytes; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 73562: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createCurve = exports.getHash = void 0; +/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +const hmac_1 = __webpack_require__(39615); +const utils_1 = __webpack_require__(99175); +const weierstrass_js_1 = __webpack_require__(91705); +// connects noble-curves to noble-hashes +function getHash(hash) { + return { + hash, + hmac: (key, ...msgs) => (0, hmac_1.hmac)(hash, key, (0, utils_1.concatBytes)(...msgs)), + randomBytes: utils_1.randomBytes, + }; +} +exports.getHash = getHash; +function createCurve(curveDef, defHash) { + const create = (hash) => (0, weierstrass_js_1.weierstrass)({ ...curveDef, ...getHash(hash) }); + return Object.freeze({ ...create(defHash), create }); +} +exports.createCurve = createCurve; +//# sourceMappingURL=_shortw_utils.js.map + +/***/ }), + +/***/ 62422: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateBasic = exports.wNAF = void 0; +/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// Abelian group utilities +const modular_js_1 = __webpack_require__(24967); +const utils_js_1 = __webpack_require__(91484); +const _0n = BigInt(0); +const _1n = BigInt(1); +// Elliptic curve multiplication of Point by scalar. Fragile. +// Scalars should always be less than curve order: this should be checked inside of a curve itself. 
+// Creates precomputation tables for fast multiplication: +// - private scalar is split by fixed size windows of W bits +// - every window point is collected from window's table & added to accumulator +// - since windows are different, same point inside tables won't be accessed more than once per calc +// - each multiplication is 'Math.ceil(CURVE_ORDER / 𝑊) + 1' point additions (fixed for any scalar) +// - +1 window is neccessary for wNAF +// - wNAF reduces table size: 2x less memory + 2x faster generation, but 10% slower multiplication +// TODO: Research returning 2d JS array of windows, instead of a single window. This would allow +// windows to be in different memory locations +function wNAF(c, bits) { + const constTimeNegate = (condition, item) => { + const neg = item.negate(); + return condition ? neg : item; + }; + const opts = (W) => { + const windows = Math.ceil(bits / W) + 1; // +1, because + const windowSize = 2 ** (W - 1); // -1 because we skip zero + return { windows, windowSize }; + }; + return { + constTimeNegate, + // non-const time multiplication ladder + unsafeLadder(elm, n) { + let p = c.ZERO; + let d = elm; + while (n > _0n) { + if (n & _1n) + p = p.add(d); + d = d.double(); + n >>= _1n; + } + return p; + }, + /** + * Creates a wNAF precomputation window. Used for caching. + * Default window size is set by `utils.precompute()` and is equal to 8. + * Number of precomputed points depends on the curve size: + * 2^(𝑊−1) * (Math.ceil(𝑛 / 𝑊) + 1), where: + * - 𝑊 is the window size + * - 𝑛 is the bitlength of the curve order. + * For a 256-bit curve and window size 8, the number of precomputed points is 128 * 33 = 4224. 
+ * @returns precomputed point tables flattened to a single array + */ + precomputeWindow(elm, W) { + const { windows, windowSize } = opts(W); + const points = []; + let p = elm; + let base = p; + for (let window = 0; window < windows; window++) { + base = p; + points.push(base); + // =1, because we skip zero + for (let i = 1; i < windowSize; i++) { + base = base.add(p); + points.push(base); + } + p = base.double(); + } + return points; + }, + /** + * Implements ec multiplication using precomputed tables and w-ary non-adjacent form. + * @param W window size + * @param precomputes precomputed tables + * @param n scalar (we don't check here, but should be less than curve order) + * @returns real and fake (for const-time) points + */ + wNAF(W, precomputes, n) { + // TODO: maybe check that scalar is less than group order? wNAF behavious is undefined otherwise + // But need to carefully remove other checks before wNAF. ORDER == bits here + const { windows, windowSize } = opts(W); + let p = c.ZERO; + let f = c.BASE; + const mask = BigInt(2 ** W - 1); // Create mask with W ones: 0b1111 for W=4 etc. + const maxNumber = 2 ** W; + const shiftBy = BigInt(W); + for (let window = 0; window < windows; window++) { + const offset = window * windowSize; + // Extract W bits. + let wbits = Number(n & mask); + // Shift number by W bits. + n >>= shiftBy; + // If the bits are bigger than max size, we'll split those. + // +224 => 256 - 32 + if (wbits > windowSize) { + wbits -= maxNumber; + n += _1n; + } + // This code was first written with assumption that 'f' and 'p' will never be infinity point: + // since each addition is multiplied by 2 ** W, it cannot cancel each other. However, + // there is negate now: it is possible that negated element from low value + // would be the same as high element, which will create carry into next window. + // It's not obvious how this can fail, but still worth investigating later. + // Check if we're onto Zero point. 
+ // Add random point inside current window to f. + const offset1 = offset; + const offset2 = offset + Math.abs(wbits) - 1; // -1 because we skip zero + const cond1 = window % 2 !== 0; + const cond2 = wbits < 0; + if (wbits === 0) { + // The most important part for const-time getPublicKey + f = f.add(constTimeNegate(cond1, precomputes[offset1])); + } + else { + p = p.add(constTimeNegate(cond2, precomputes[offset2])); + } + } + // JIT-compiler should not eliminate f here, since it will later be used in normalizeZ() + // Even if the variable is still unused, there are some checks which will + // throw an exception, so compiler needs to prove they won't happen, which is hard. + // At this point there is a way to F be infinity-point even if p is not, + // which makes it less const-time: around 1 bigint multiply. + return { p, f }; + }, + wNAFCached(P, precomputesMap, n, transform) { + // @ts-ignore + const W = P._WINDOW_SIZE || 1; + // Calculate precomputes on a first run, reuse them after + let comp = precomputesMap.get(P); + if (!comp) { + comp = this.precomputeWindow(P, W); + if (W !== 1) { + precomputesMap.set(P, transform(comp)); + } + } + return this.wNAF(W, comp, n); + }, + }; +} +exports.wNAF = wNAF; +function validateBasic(curve) { + (0, modular_js_1.validateField)(curve.Fp); + (0, utils_js_1.validateObject)(curve, { + n: 'bigint', + h: 'bigint', + Gx: 'field', + Gy: 'field', + }, { + nBitLength: 'isSafeInteger', + nByteLength: 'isSafeInteger', + }); + // Set defaults + return Object.freeze({ + ...(0, modular_js_1.nLength)(curve.n, curve.nBitLength), + ...curve, + ...{ p: curve.Fp.ORDER }, + }); +} +exports.validateBasic = validateBasic; +//# sourceMappingURL=curve.js.map + +/***/ }), + +/***/ 71761: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createHasher = exports.isogenyMap = exports.hash_to_field = exports.expand_message_xof = 
exports.expand_message_xmd = void 0; +const modular_js_1 = __webpack_require__(24967); +const utils_js_1 = __webpack_require__(91484); +function validateDST(dst) { + if ((0, utils_js_1.isBytes)(dst)) + return dst; + if (typeof dst === 'string') + return (0, utils_js_1.utf8ToBytes)(dst); + throw new Error('DST must be Uint8Array or string'); +} +// Octet Stream to Integer. "spec" implementation of os2ip is 2.5x slower vs bytesToNumberBE. +const os2ip = utils_js_1.bytesToNumberBE; +// Integer to Octet Stream (numberToBytesBE) +function i2osp(value, length) { + if (value < 0 || value >= 1 << (8 * length)) { + throw new Error(`bad I2OSP call: value=${value} length=${length}`); + } + const res = Array.from({ length }).fill(0); + for (let i = length - 1; i >= 0; i--) { + res[i] = value & 0xff; + value >>>= 8; + } + return new Uint8Array(res); +} +function strxor(a, b) { + const arr = new Uint8Array(a.length); + for (let i = 0; i < a.length; i++) { + arr[i] = a[i] ^ b[i]; + } + return arr; +} +function abytes(item) { + if (!(0, utils_js_1.isBytes)(item)) + throw new Error('Uint8Array expected'); +} +function isNum(item) { + if (!Number.isSafeInteger(item)) + throw new Error('number expected'); +} +// Produces a uniformly random byte string using a cryptographic hash function H that outputs b bits +// https://www.rfc-editor.org/rfc/rfc9380#section-5.3.1 +function expand_message_xmd(msg, DST, lenInBytes, H) { + abytes(msg); + abytes(DST); + isNum(lenInBytes); + // https://www.rfc-editor.org/rfc/rfc9380#section-5.3.3 + if (DST.length > 255) + DST = H((0, utils_js_1.concatBytes)((0, utils_js_1.utf8ToBytes)('H2C-OVERSIZE-DST-'), DST)); + const { outputLen: b_in_bytes, blockLen: r_in_bytes } = H; + const ell = Math.ceil(lenInBytes / b_in_bytes); + if (ell > 255) + throw new Error('Invalid xmd length'); + const DST_prime = (0, utils_js_1.concatBytes)(DST, i2osp(DST.length, 1)); + const Z_pad = i2osp(0, r_in_bytes); + const l_i_b_str = i2osp(lenInBytes, 2); // len_in_bytes_str + 
const b = new Array(ell); + const b_0 = H((0, utils_js_1.concatBytes)(Z_pad, msg, l_i_b_str, i2osp(0, 1), DST_prime)); + b[0] = H((0, utils_js_1.concatBytes)(b_0, i2osp(1, 1), DST_prime)); + for (let i = 1; i <= ell; i++) { + const args = [strxor(b_0, b[i - 1]), i2osp(i + 1, 1), DST_prime]; + b[i] = H((0, utils_js_1.concatBytes)(...args)); + } + const pseudo_random_bytes = (0, utils_js_1.concatBytes)(...b); + return pseudo_random_bytes.slice(0, lenInBytes); +} +exports.expand_message_xmd = expand_message_xmd; +// Produces a uniformly random byte string using an extendable-output function (XOF) H. +// 1. The collision resistance of H MUST be at least k bits. +// 2. H MUST be an XOF that has been proved indifferentiable from +// a random oracle under a reasonable cryptographic assumption. +// https://www.rfc-editor.org/rfc/rfc9380#section-5.3.2 +function expand_message_xof(msg, DST, lenInBytes, k, H) { + abytes(msg); + abytes(DST); + isNum(lenInBytes); + // https://www.rfc-editor.org/rfc/rfc9380#section-5.3.3 + // DST = H('H2C-OVERSIZE-DST-' || a_very_long_DST, Math.ceil((lenInBytes * k) / 8)); + if (DST.length > 255) { + const dkLen = Math.ceil((2 * k) / 8); + DST = H.create({ dkLen }).update((0, utils_js_1.utf8ToBytes)('H2C-OVERSIZE-DST-')).update(DST).digest(); + } + if (lenInBytes > 65535 || DST.length > 255) + throw new Error('expand_message_xof: invalid lenInBytes'); + return (H.create({ dkLen: lenInBytes }) + .update(msg) + .update(i2osp(lenInBytes, 2)) + // 2. 
DST_prime = DST || I2OSP(len(DST), 1) + .update(DST) + .update(i2osp(DST.length, 1)) + .digest()); +} +exports.expand_message_xof = expand_message_xof; +/** + * Hashes arbitrary-length byte strings to a list of one or more elements of a finite field F + * https://www.rfc-editor.org/rfc/rfc9380#section-5.2 + * @param msg a byte string containing the message to hash + * @param count the number of elements of F to output + * @param options `{DST: string, p: bigint, m: number, k: number, expand: 'xmd' | 'xof', hash: H}`, see above + * @returns [u_0, ..., u_(count - 1)], a list of field elements. + */ +function hash_to_field(msg, count, options) { + (0, utils_js_1.validateObject)(options, { + DST: 'stringOrUint8Array', + p: 'bigint', + m: 'isSafeInteger', + k: 'isSafeInteger', + hash: 'hash', + }); + const { p, k, m, hash, expand, DST: _DST } = options; + abytes(msg); + isNum(count); + const DST = validateDST(_DST); + const log2p = p.toString(2).length; + const L = Math.ceil((log2p + k) / 8); // section 5.1 of ietf draft link above + const len_in_bytes = count * m * L; + let prb; // pseudo_random_bytes + if (expand === 'xmd') { + prb = expand_message_xmd(msg, DST, len_in_bytes, hash); + } + else if (expand === 'xof') { + prb = expand_message_xof(msg, DST, len_in_bytes, k, hash); + } + else if (expand === '_internal_pass') { + // for internal tests only + prb = msg; + } + else { + throw new Error('expand must be "xmd" or "xof"'); + } + const u = new Array(count); + for (let i = 0; i < count; i++) { + const e = new Array(m); + for (let j = 0; j < m; j++) { + const elm_offset = L * (j + i * m); + const tv = prb.subarray(elm_offset, elm_offset + L); + e[j] = (0, modular_js_1.mod)(os2ip(tv), p); + } + u[i] = e; + } + return u; +} +exports.hash_to_field = hash_to_field; +function isogenyMap(field, map) { + // Make same order as in spec + const COEFF = map.map((i) => Array.from(i).reverse()); + return (x, y) => { + const [xNum, xDen, yNum, yDen] = COEFF.map((val) => 
val.reduce((acc, i) => field.add(field.mul(acc, x), i))); + x = field.div(xNum, xDen); // xNum / xDen + y = field.mul(y, field.div(yNum, yDen)); // y * (yNum / yDev) + return { x, y }; + }; +} +exports.isogenyMap = isogenyMap; +function createHasher(Point, mapToCurve, def) { + if (typeof mapToCurve !== 'function') + throw new Error('mapToCurve() must be defined'); + return { + // Encodes byte string to elliptic curve. + // hash_to_curve from https://www.rfc-editor.org/rfc/rfc9380#section-3 + hashToCurve(msg, options) { + const u = hash_to_field(msg, 2, { ...def, DST: def.DST, ...options }); + const u0 = Point.fromAffine(mapToCurve(u[0])); + const u1 = Point.fromAffine(mapToCurve(u[1])); + const P = u0.add(u1).clearCofactor(); + P.assertValidity(); + return P; + }, + // Encodes byte string to elliptic curve. + // encode_to_curve from https://www.rfc-editor.org/rfc/rfc9380#section-3 + encodeToCurve(msg, options) { + const u = hash_to_field(msg, 1, { ...def, DST: def.encodeDST, ...options }); + const P = Point.fromAffine(mapToCurve(u[0])).clearCofactor(); + P.assertValidity(); + return P; + }, + }; +} +exports.createHasher = createHasher; +//# sourceMappingURL=hash-to-curve.js.map + +/***/ }), + +/***/ 24967: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.mapHashToField = exports.getMinHashLength = exports.getFieldBytesLength = exports.hashToPrivateScalar = exports.FpSqrtEven = exports.FpSqrtOdd = exports.Field = exports.nLength = exports.FpIsSquare = exports.FpDiv = exports.FpInvertBatch = exports.FpPow = exports.validateField = exports.isNegativeLE = exports.FpSqrt = exports.tonelliShanks = exports.invert = exports.pow2 = exports.pow = exports.mod = void 0; +/*! 
noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// Utilities for modular arithmetics and finite fields +const utils_js_1 = __webpack_require__(91484); +// prettier-ignore +const _0n = BigInt(0), _1n = BigInt(1), _2n = BigInt(2), _3n = BigInt(3); +// prettier-ignore +const _4n = BigInt(4), _5n = BigInt(5), _8n = BigInt(8); +// prettier-ignore +const _9n = BigInt(9), _16n = BigInt(16); +// Calculates a modulo b +function mod(a, b) { + const result = a % b; + return result >= _0n ? result : b + result; +} +exports.mod = mod; +/** + * Efficiently raise num to power and do modular division. + * Unsafe in some contexts: uses ladder, so can expose bigint bits. + * @example + * pow(2n, 6n, 11n) // 64n % 11n == 9n + */ +// TODO: use field version && remove +function pow(num, power, modulo) { + if (modulo <= _0n || power < _0n) + throw new Error('Expected power/modulo > 0'); + if (modulo === _1n) + return _0n; + let res = _1n; + while (power > _0n) { + if (power & _1n) + res = (res * num) % modulo; + num = (num * num) % modulo; + power >>= _1n; + } + return res; +} +exports.pow = pow; +// Does x ^ (2 ^ power) mod p. pow2(30, 4) == 30 ^ (2 ^ 4) +function pow2(x, power, modulo) { + let res = x; + while (power-- > _0n) { + res *= res; + res %= modulo; + } + return res; +} +exports.pow2 = pow2; +// Inverses number over modulo +function invert(number, modulo) { + if (number === _0n || modulo <= _0n) { + throw new Error(`invert: expected positive integers, got n=${number} mod=${modulo}`); + } + // Euclidean GCD https://brilliant.org/wiki/extended-euclidean-algorithm/ + // Fermat's little theorem "CT-like" version inv(n) = n^(m-2) mod m is 30x slower. 
+ let a = mod(number, modulo); + let b = modulo; + // prettier-ignore + let x = _0n, y = _1n, u = _1n, v = _0n; + while (a !== _0n) { + // JIT applies optimization if those two lines follow each other + const q = b / a; + const r = b % a; + const m = x - u * q; + const n = y - v * q; + // prettier-ignore + b = a, a = r, x = u, y = v, u = m, v = n; + } + const gcd = b; + if (gcd !== _1n) + throw new Error('invert: does not exist'); + return mod(x, modulo); +} +exports.invert = invert; +/** + * Tonelli-Shanks square root search algorithm. + * 1. https://eprint.iacr.org/2012/685.pdf (page 12) + * 2. Square Roots from 1; 24, 51, 10 to Dan Shanks + * Will start an infinite loop if field order P is not prime. + * @param P field order + * @returns function that takes field Fp (created from P) and number n + */ +function tonelliShanks(P) { + // Legendre constant: used to calculate Legendre symbol (a | p), + // which denotes the value of a^((p-1)/2) (mod p). + // (a | p) ≡ 1 if a is a square (mod p) + // (a | p) ≡ -1 if a is not a square (mod p) + // (a | p) ≡ 0 if a ≡ 0 (mod p) + const legendreC = (P - _1n) / _2n; + let Q, S, Z; + // Step 1: By factoring out powers of 2 from p - 1, + // find q and s such that p - 1 = q*(2^s) with q odd + for (Q = P - _1n, S = 0; Q % _2n === _0n; Q /= _2n, S++) + ; + // Step 2: Select a non-square z such that (z | p) ≡ -1 and set c ≡ zq + for (Z = _2n; Z < P && pow(Z, legendreC, P) !== P - _1n; Z++) + ; + // Fast-path + if (S === 1) { + const p1div4 = (P + _1n) / _4n; + return function tonelliFast(Fp, n) { + const root = Fp.pow(n, p1div4); + if (!Fp.eql(Fp.sqr(root), n)) + throw new Error('Cannot find square root'); + return root; + }; + } + // Slow-path + const Q1div2 = (Q + _1n) / _2n; + return function tonelliSlow(Fp, n) { + // Step 0: Check that n is indeed a square: (n | p) should not be ≡ -1 + if (Fp.pow(n, legendreC) === Fp.neg(Fp.ONE)) + throw new Error('Cannot find square root'); + let r = S; + // TODO: will fail at Fp2/etc + let g 
= Fp.pow(Fp.mul(Fp.ONE, Z), Q); // will update both x and b + let x = Fp.pow(n, Q1div2); // first guess at the square root + let b = Fp.pow(n, Q); // first guess at the fudge factor + while (!Fp.eql(b, Fp.ONE)) { + if (Fp.eql(b, Fp.ZERO)) + return Fp.ZERO; // https://en.wikipedia.org/wiki/Tonelli%E2%80%93Shanks_algorithm (4. If t = 0, return r = 0) + // Find m such b^(2^m)==1 + let m = 1; + for (let t2 = Fp.sqr(b); m < r; m++) { + if (Fp.eql(t2, Fp.ONE)) + break; + t2 = Fp.sqr(t2); // t2 *= t2 + } + // NOTE: r-m-1 can be bigger than 32, need to convert to bigint before shift, otherwise there will be overflow + const ge = Fp.pow(g, _1n << BigInt(r - m - 1)); // ge = 2^(r-m-1) + g = Fp.sqr(ge); // g = ge * ge + x = Fp.mul(x, ge); // x *= ge + b = Fp.mul(b, g); // b *= g + r = m; + } + return x; + }; +} +exports.tonelliShanks = tonelliShanks; +function FpSqrt(P) { + // NOTE: different algorithms can give different roots, it is up to user to decide which one they want. + // For example there is FpSqrtOdd/FpSqrtEven to choice root based on oddness (used for hash-to-curve). + // P ≡ 3 (mod 4) + // √n = n^((P+1)/4) + if (P % _4n === _3n) { + // Not all roots possible! 
+ // const ORDER = + // 0x1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaabn; + // const NUM = 72057594037927816n; + const p1div4 = (P + _1n) / _4n; + return function sqrt3mod4(Fp, n) { + const root = Fp.pow(n, p1div4); + // Throw if root**2 != n + if (!Fp.eql(Fp.sqr(root), n)) + throw new Error('Cannot find square root'); + return root; + }; + } + // Atkin algorithm for q ≡ 5 (mod 8), https://eprint.iacr.org/2012/685.pdf (page 10) + if (P % _8n === _5n) { + const c1 = (P - _5n) / _8n; + return function sqrt5mod8(Fp, n) { + const n2 = Fp.mul(n, _2n); + const v = Fp.pow(n2, c1); + const nv = Fp.mul(n, v); + const i = Fp.mul(Fp.mul(nv, _2n), v); + const root = Fp.mul(nv, Fp.sub(i, Fp.ONE)); + if (!Fp.eql(Fp.sqr(root), n)) + throw new Error('Cannot find square root'); + return root; + }; + } + // P ≡ 9 (mod 16) + if (P % _16n === _9n) { + // NOTE: tonelli is too slow for bls-Fp2 calculations even on start + // Means we cannot use sqrt for constants at all! + // + // const c1 = Fp.sqrt(Fp.negate(Fp.ONE)); // 1. c1 = sqrt(-1) in F, i.e., (c1^2) == -1 in F + // const c2 = Fp.sqrt(c1); // 2. c2 = sqrt(c1) in F, i.e., (c2^2) == c1 in F + // const c3 = Fp.sqrt(Fp.negate(c1)); // 3. c3 = sqrt(-c1) in F, i.e., (c3^2) == -c1 in F + // const c4 = (P + _7n) / _16n; // 4. c4 = (q + 7) / 16 # Integer arithmetic + // sqrt = (x) => { + // let tv1 = Fp.pow(x, c4); // 1. tv1 = x^c4 + // let tv2 = Fp.mul(c1, tv1); // 2. tv2 = c1 * tv1 + // const tv3 = Fp.mul(c2, tv1); // 3. tv3 = c2 * tv1 + // let tv4 = Fp.mul(c3, tv1); // 4. tv4 = c3 * tv1 + // const e1 = Fp.equals(Fp.square(tv2), x); // 5. e1 = (tv2^2) == x + // const e2 = Fp.equals(Fp.square(tv3), x); // 6. e2 = (tv3^2) == x + // tv1 = Fp.cmov(tv1, tv2, e1); // 7. tv1 = CMOV(tv1, tv2, e1) # Select tv2 if (tv2^2) == x + // tv2 = Fp.cmov(tv4, tv3, e2); // 8. tv2 = CMOV(tv4, tv3, e2) # Select tv3 if (tv3^2) == x + // const e3 = Fp.equals(Fp.square(tv2), x); // 9. 
e3 = (tv2^2) == x + // return Fp.cmov(tv1, tv2, e3); // 10. z = CMOV(tv1, tv2, e3) # Select the sqrt from tv1 and tv2 + // } + } + // Other cases: Tonelli-Shanks algorithm + return tonelliShanks(P); +} +exports.FpSqrt = FpSqrt; +// Little-endian check for first LE bit (last BE bit); +const isNegativeLE = (num, modulo) => (mod(num, modulo) & _1n) === _1n; +exports.isNegativeLE = isNegativeLE; +// prettier-ignore +const FIELD_FIELDS = [ + 'create', 'isValid', 'is0', 'neg', 'inv', 'sqrt', 'sqr', + 'eql', 'add', 'sub', 'mul', 'pow', 'div', + 'addN', 'subN', 'mulN', 'sqrN' +]; +function validateField(field) { + const initial = { + ORDER: 'bigint', + MASK: 'bigint', + BYTES: 'isSafeInteger', + BITS: 'isSafeInteger', + }; + const opts = FIELD_FIELDS.reduce((map, val) => { + map[val] = 'function'; + return map; + }, initial); + return (0, utils_js_1.validateObject)(field, opts); +} +exports.validateField = validateField; +// Generic field functions +/** + * Same as `pow` but for Fp: non-constant-time. + * Unsafe in some contexts: uses ladder, so can expose bigint bits. + */ +function FpPow(f, num, power) { + // Should have same speed as pow for bigints + // TODO: benchmark! + if (power < _0n) + throw new Error('Expected power > 0'); + if (power === _0n) + return f.ONE; + if (power === _1n) + return num; + let p = f.ONE; + let d = num; + while (power > _0n) { + if (power & _1n) + p = f.mul(p, d); + d = f.sqr(d); + power >>= _1n; + } + return p; +} +exports.FpPow = FpPow; +/** + * Efficiently invert an array of Field elements. + * `inv(0)` will return `undefined` here: make sure to throw an error. 
+ */ +function FpInvertBatch(f, nums) { + const tmp = new Array(nums.length); + // Walk from first to last, multiply them by each other MOD p + const lastMultiplied = nums.reduce((acc, num, i) => { + if (f.is0(num)) + return acc; + tmp[i] = acc; + return f.mul(acc, num); + }, f.ONE); + // Invert last element + const inverted = f.inv(lastMultiplied); + // Walk from last to first, multiply them by inverted each other MOD p + nums.reduceRight((acc, num, i) => { + if (f.is0(num)) + return acc; + tmp[i] = f.mul(acc, tmp[i]); + return f.mul(acc, num); + }, inverted); + return tmp; +} +exports.FpInvertBatch = FpInvertBatch; +function FpDiv(f, lhs, rhs) { + return f.mul(lhs, typeof rhs === 'bigint' ? invert(rhs, f.ORDER) : f.inv(rhs)); +} +exports.FpDiv = FpDiv; +// This function returns True whenever the value x is a square in the field F. +function FpIsSquare(f) { + const legendreConst = (f.ORDER - _1n) / _2n; // Integer arithmetic + return (x) => { + const p = f.pow(x, legendreConst); + return f.eql(p, f.ZERO) || f.eql(p, f.ONE); + }; +} +exports.FpIsSquare = FpIsSquare; +// CURVE.n lengths +function nLength(n, nBitLength) { + // Bit size, byte size of CURVE.n + const _nBitLength = nBitLength !== undefined ? nBitLength : n.toString(2).length; + const nByteLength = Math.ceil(_nBitLength / 8); + return { nBitLength: _nBitLength, nByteLength }; +} +exports.nLength = nLength; +/** + * Initializes a finite field over prime. **Non-primes are not supported.** + * Do not init in loop: slow. Very fragile: always run a benchmark on a change. 
+ * Major performance optimizations: + * * a) denormalized operations like mulN instead of mul + * * b) same object shape: never add or remove keys + * * c) Object.freeze + * @param ORDER prime positive bigint + * @param bitLen how many bits the field consumes + * @param isLE (def: false) if encoding / decoding should be in little-endian + * @param redef optional faster redefinitions of sqrt and other methods + */ +function Field(ORDER, bitLen, isLE = false, redef = {}) { + if (ORDER <= _0n) + throw new Error(`Expected Field ORDER > 0, got ${ORDER}`); + const { nBitLength: BITS, nByteLength: BYTES } = nLength(ORDER, bitLen); + if (BYTES > 2048) + throw new Error('Field lengths over 2048 bytes are not supported'); + const sqrtP = FpSqrt(ORDER); + const f = Object.freeze({ + ORDER, + BITS, + BYTES, + MASK: (0, utils_js_1.bitMask)(BITS), + ZERO: _0n, + ONE: _1n, + create: (num) => mod(num, ORDER), + isValid: (num) => { + if (typeof num !== 'bigint') + throw new Error(`Invalid field element: expected bigint, got ${typeof num}`); + return _0n <= num && num < ORDER; // 0 is valid element, but it's not invertible + }, + is0: (num) => num === _0n, + isOdd: (num) => (num & _1n) === _1n, + neg: (num) => mod(-num, ORDER), + eql: (lhs, rhs) => lhs === rhs, + sqr: (num) => mod(num * num, ORDER), + add: (lhs, rhs) => mod(lhs + rhs, ORDER), + sub: (lhs, rhs) => mod(lhs - rhs, ORDER), + mul: (lhs, rhs) => mod(lhs * rhs, ORDER), + pow: (num, power) => FpPow(f, num, power), + div: (lhs, rhs) => mod(lhs * invert(rhs, ORDER), ORDER), + // Same as above, but doesn't normalize + sqrN: (num) => num * num, + addN: (lhs, rhs) => lhs + rhs, + subN: (lhs, rhs) => lhs - rhs, + mulN: (lhs, rhs) => lhs * rhs, + inv: (num) => invert(num, ORDER), + sqrt: redef.sqrt || ((n) => sqrtP(f, n)), + invertBatch: (lst) => FpInvertBatch(f, lst), + // TODO: do we really need constant cmov? + // We don't have const-time bigints anyway, so probably will be not very useful + cmov: (a, b, c) => (c ? 
b : a), + toBytes: (num) => (isLE ? (0, utils_js_1.numberToBytesLE)(num, BYTES) : (0, utils_js_1.numberToBytesBE)(num, BYTES)), + fromBytes: (bytes) => { + if (bytes.length !== BYTES) + throw new Error(`Fp.fromBytes: expected ${BYTES}, got ${bytes.length}`); + return isLE ? (0, utils_js_1.bytesToNumberLE)(bytes) : (0, utils_js_1.bytesToNumberBE)(bytes); + }, + }); + return Object.freeze(f); +} +exports.Field = Field; +function FpSqrtOdd(Fp, elm) { + if (!Fp.isOdd) + throw new Error(`Field doesn't have isOdd`); + const root = Fp.sqrt(elm); + return Fp.isOdd(root) ? root : Fp.neg(root); +} +exports.FpSqrtOdd = FpSqrtOdd; +function FpSqrtEven(Fp, elm) { + if (!Fp.isOdd) + throw new Error(`Field doesn't have isOdd`); + const root = Fp.sqrt(elm); + return Fp.isOdd(root) ? Fp.neg(root) : root; +} +exports.FpSqrtEven = FpSqrtEven; +/** + * "Constant-time" private key generation utility. + * Same as mapKeyToField, but accepts less bytes (40 instead of 48 for 32-byte field). + * Which makes it slightly more biased, less secure. + * @deprecated use mapKeyToField instead + */ +function hashToPrivateScalar(hash, groupOrder, isLE = false) { + hash = (0, utils_js_1.ensureBytes)('privateHash', hash); + const hashLen = hash.length; + const minLen = nLength(groupOrder).nByteLength + 8; + if (minLen < 24 || hashLen < minLen || hashLen > 1024) + throw new Error(`hashToPrivateScalar: expected ${minLen}-1024 bytes of input, got ${hashLen}`); + const num = isLE ? (0, utils_js_1.bytesToNumberLE)(hash) : (0, utils_js_1.bytesToNumberBE)(hash); + return mod(num, groupOrder - _1n) + _1n; +} +exports.hashToPrivateScalar = hashToPrivateScalar; +/** + * Returns total number of bytes consumed by the field element. + * For example, 32 bytes for usual 256-bit weierstrass curve. 
+ * @param fieldOrder number of field elements, usually CURVE.n + * @returns byte length of field + */ +function getFieldBytesLength(fieldOrder) { + if (typeof fieldOrder !== 'bigint') + throw new Error('field order must be bigint'); + const bitLength = fieldOrder.toString(2).length; + return Math.ceil(bitLength / 8); +} +exports.getFieldBytesLength = getFieldBytesLength; +/** + * Returns minimal amount of bytes that can be safely reduced + * by field order. + * Should be 2^-128 for 128-bit curve such as P256. + * @param fieldOrder number of field elements, usually CURVE.n + * @returns byte length of target hash + */ +function getMinHashLength(fieldOrder) { + const length = getFieldBytesLength(fieldOrder); + return length + Math.ceil(length / 2); +} +exports.getMinHashLength = getMinHashLength; +/** + * "Constant-time" private key generation utility. + * Can take (n + n/2) or more bytes of uniform input e.g. from CSPRNG or KDF + * and convert them into private scalar, with the modulo bias being negligible. + * Needs at least 48 bytes of input for 32-byte private key. + * https://research.kudelskisecurity.com/2020/07/28/the-definitive-guide-to-modulo-bias-and-how-to-avoid-it/ + * FIPS 186-5, A.2 https://csrc.nist.gov/publications/detail/fips/186/5/final + * RFC 9380, https://www.rfc-editor.org/rfc/rfc9380#section-5 + * @param hash hash output from SHA3 or a similar function + * @param groupOrder size of subgroup - (e.g. secp256k1.CURVE.n) + * @param isLE interpret hash bytes as LE num + * @returns valid private scalar + */ +function mapHashToField(key, fieldOrder, isLE = false) { + const len = key.length; + const fieldLen = getFieldBytesLength(fieldOrder); + const minLen = getMinHashLength(fieldOrder); + // No small numbers: need to understand bias story. No huge numbers: easier to detect JS timings. + if (len < 16 || len < minLen || len > 1024) + throw new Error(`expected ${minLen}-1024 bytes of input, got ${len}`); + const num = isLE ? 
(0, utils_js_1.bytesToNumberBE)(key) : (0, utils_js_1.bytesToNumberLE)(key); + // `mod(x, 11)` can sometimes produce 0. `mod(x, 10) + 1` is the same, but no 0 + const reduced = mod(num, fieldOrder - _1n) + _1n; + return isLE ? (0, utils_js_1.numberToBytesLE)(reduced, fieldLen) : (0, utils_js_1.numberToBytesBE)(reduced, fieldLen); +} +exports.mapHashToField = mapHashToField; +//# sourceMappingURL=modular.js.map + +/***/ }), + +/***/ 91484: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateObject = exports.createHmacDrbg = exports.bitMask = exports.bitSet = exports.bitGet = exports.bitLen = exports.utf8ToBytes = exports.equalBytes = exports.concatBytes = exports.ensureBytes = exports.numberToVarBytesBE = exports.numberToBytesLE = exports.numberToBytesBE = exports.bytesToNumberLE = exports.bytesToNumberBE = exports.hexToBytes = exports.hexToNumber = exports.numberToHexUnpadded = exports.bytesToHex = exports.isBytes = void 0; +/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// 100 lines of code in the file are duplicated from noble-hashes (utils). +// This is OK: `abstract` directory does not use noble-hashes. +// User may opt-in into using different hashing library. This way, noble-hashes +// won't be included into their bundle. 
+const _0n = BigInt(0); +const _1n = BigInt(1); +const _2n = BigInt(2); +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +exports.isBytes = isBytes; +// Array where index 0xf0 (240) is mapped to string 'f0' +const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); +/** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ +function bytesToHex(bytes) { + if (!isBytes(bytes)) + throw new Error('Uint8Array expected'); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; +} +exports.bytesToHex = bytesToHex; +function numberToHexUnpadded(num) { + const hex = num.toString(16); + return hex.length & 1 ? `0${hex}` : hex; +} +exports.numberToHexUnpadded = numberToHexUnpadded; +function hexToNumber(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + // Big Endian + return BigInt(hex === '' ? 
'0' : `0x${hex}`); +} +exports.hexToNumber = hexToNumber; +// We use optimized technique to convert hex string to byte array +const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 }; +function asciiToBase16(char) { + if (char >= asciis._0 && char <= asciis._9) + return char - asciis._0; + if (char >= asciis._A && char <= asciis._F) + return char - (asciis._A - 10); + if (char >= asciis._a && char <= asciis._f) + return char - (asciis._a - 10); + return; +} +/** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ +function hexToBytes(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const hl = hex.length; + const al = hl / 2; + if (hl % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + hl); + const array = new Uint8Array(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = asciiToBase16(hex.charCodeAt(hi)); + const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); + if (n1 === undefined || n2 === undefined) { + const char = hex[hi] + hex[hi + 1]; + throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); + } + array[ai] = n1 * 16 + n2; + } + return array; +} +exports.hexToBytes = hexToBytes; +// BE: Big Endian, LE: Little Endian +function bytesToNumberBE(bytes) { + return hexToNumber(bytesToHex(bytes)); +} +exports.bytesToNumberBE = bytesToNumberBE; +function bytesToNumberLE(bytes) { + if (!isBytes(bytes)) + throw new Error('Uint8Array expected'); + return hexToNumber(bytesToHex(Uint8Array.from(bytes).reverse())); +} +exports.bytesToNumberLE = bytesToNumberLE; +function numberToBytesBE(n, len) { + return hexToBytes(n.toString(16).padStart(len * 2, '0')); +} +exports.numberToBytesBE = numberToBytesBE; +function numberToBytesLE(n, len) { + return numberToBytesBE(n, len).reverse(); +} +exports.numberToBytesLE = numberToBytesLE; +// Unpadded, rarely used +function numberToVarBytesBE(n) { + return 
hexToBytes(numberToHexUnpadded(n)); +} +exports.numberToVarBytesBE = numberToVarBytesBE; +/** + * Takes hex string or Uint8Array, converts to Uint8Array. + * Validates output length. + * Will throw error for other types. + * @param title descriptive title for an error e.g. 'private key' + * @param hex hex string or Uint8Array + * @param expectedLength optional, will compare to result array's length + * @returns + */ +function ensureBytes(title, hex, expectedLength) { + let res; + if (typeof hex === 'string') { + try { + res = hexToBytes(hex); + } + catch (e) { + throw new Error(`${title} must be valid hex string, got "${hex}". Cause: ${e}`); + } + } + else if (isBytes(hex)) { + // Uint8Array.from() instead of hash.slice() because node.js Buffer + // is instance of Uint8Array, and its slice() creates **mutable** copy + res = Uint8Array.from(hex); + } + else { + throw new Error(`${title} must be hex string or Uint8Array`); + } + const len = res.length; + if (typeof expectedLength === 'number' && len !== expectedLength) + throw new Error(`${title} expected ${expectedLength} bytes, got ${len}`); + return res; +} +exports.ensureBytes = ensureBytes; +/** + * Copies several Uint8Arrays into one. 
+ */ +function concatBytes(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + if (!isBytes(a)) + throw new Error('Uint8Array expected'); + sum += a.length; + } + let res = new Uint8Array(sum); + let pad = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; +} +exports.concatBytes = concatBytes; +// Compares 2 u8a-s in kinda constant time +function equalBytes(a, b) { + if (a.length !== b.length) + return false; + let diff = 0; + for (let i = 0; i < a.length; i++) + diff |= a[i] ^ b[i]; + return diff === 0; +} +exports.equalBytes = equalBytes; +/** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ +function utf8ToBytes(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 +} +exports.utf8ToBytes = utf8ToBytes; +// Bit operations +/** + * Calculates amount of bits in a bigint. + * Same as `n.toString(2).length` + */ +function bitLen(n) { + let len; + for (len = 0; n > _0n; n >>= _1n, len += 1) + ; + return len; +} +exports.bitLen = bitLen; +/** + * Gets single bit at position. + * NOTE: first bit position is 0 (same as arrays) + * Same as `!!+Array.from(n.toString(2)).reverse()[pos]` + */ +function bitGet(n, pos) { + return (n >> BigInt(pos)) & _1n; +} +exports.bitGet = bitGet; +/** + * Sets single bit at position. + */ +const bitSet = (n, pos, value) => { + return n | ((value ? _1n : _0n) << BigInt(pos)); +}; +exports.bitSet = bitSet; +/** + * Calculate mask for N bits. Not using ** operator with bigints because of old engines. 
+ * Same as BigInt(`0b${Array(i).fill('1').join('')}`) + */ +const bitMask = (n) => (_2n << BigInt(n - 1)) - _1n; +exports.bitMask = bitMask; +// DRBG +const u8n = (data) => new Uint8Array(data); // creates Uint8Array +const u8fr = (arr) => Uint8Array.from(arr); // another shortcut +/** + * Minimal HMAC-DRBG from NIST 800-90 for RFC6979 sigs. + * @returns function that will call DRBG until 2nd arg returns something meaningful + * @example + * const drbg = createHmacDRBG(32, 32, hmac); + * drbg(seed, bytesToKey); // bytesToKey must return Key or undefined + */ +function createHmacDrbg(hashLen, qByteLen, hmacFn) { + if (typeof hashLen !== 'number' || hashLen < 2) + throw new Error('hashLen must be a number'); + if (typeof qByteLen !== 'number' || qByteLen < 2) + throw new Error('qByteLen must be a number'); + if (typeof hmacFn !== 'function') + throw new Error('hmacFn must be a function'); + // Step B, Step C: set hashLen to 8*ceil(hlen/8) + let v = u8n(hashLen); // Minimal non-full-spec HMAC-DRBG from NIST 800-90 for RFC6979 sigs. + let k = u8n(hashLen); // Steps B and C of RFC6979 3.2: set hashLen, in our case always same + let i = 0; // Iterations counter, will throw when over 1000 + const reset = () => { + v.fill(1); + k.fill(0); + i = 0; + }; + const h = (...b) => hmacFn(k, v, ...b); // hmac(k)(v, ...values) + const reseed = (seed = u8n()) => { + // HMAC-DRBG reseed() function. 
Steps D-G + k = h(u8fr([0x00]), seed); // k = hmac(k || v || 0x00 || seed) + v = h(); // v = hmac(k || v) + if (seed.length === 0) + return; + k = h(u8fr([0x01]), seed); // k = hmac(k || v || 0x01 || seed) + v = h(); // v = hmac(k || v) + }; + const gen = () => { + // HMAC-DRBG generate() function + if (i++ >= 1000) + throw new Error('drbg: tried 1000 values'); + let len = 0; + const out = []; + while (len < qByteLen) { + v = h(); + const sl = v.slice(); + out.push(sl); + len += v.length; + } + return concatBytes(...out); + }; + const genUntil = (seed, pred) => { + reset(); + reseed(seed); // Steps D-G + let res = undefined; // Step H: grind until k is in [1..n-1] + while (!(res = pred(gen()))) + reseed(); + reset(); + return res; + }; + return genUntil; +} +exports.createHmacDrbg = createHmacDrbg; +// Validating curves and fields +const validatorFns = { + bigint: (val) => typeof val === 'bigint', + function: (val) => typeof val === 'function', + boolean: (val) => typeof val === 'boolean', + string: (val) => typeof val === 'string', + stringOrUint8Array: (val) => typeof val === 'string' || isBytes(val), + isSafeInteger: (val) => Number.isSafeInteger(val), + array: (val) => Array.isArray(val), + field: (val, object) => object.Fp.isValid(val), + hash: (val) => typeof val === 'function' && Number.isSafeInteger(val.outputLen), +}; +// type Record = { [P in K]: T; } +function validateObject(object, validators, optValidators = {}) { + const checkField = (fieldName, type, isOptional) => { + const checkVal = validatorFns[type]; + if (typeof checkVal !== 'function') + throw new Error(`Invalid validator "${type}", expected function`); + const val = object[fieldName]; + if (isOptional && val === undefined) + return; + if (!checkVal(val, object)) { + throw new Error(`Invalid param ${String(fieldName)}=${val} (${typeof val}), expected ${type}`); + } + }; + for (const [fieldName, type] of Object.entries(validators)) + checkField(fieldName, type, false); + for (const [fieldName, 
type] of Object.entries(optValidators)) + checkField(fieldName, type, true); + return object; +} +exports.validateObject = validateObject; +// validate type tests +// const o: { a: number; b: number; c: number } = { a: 1, b: 5, c: 6 }; +// const z0 = validateObject(o, { a: 'isSafeInteger' }, { c: 'bigint' }); // Ok! +// // Should fail type-check +// const z1 = validateObject(o, { a: 'tmp' }, { c: 'zz' }); +// const z2 = validateObject(o, { a: 'isSafeInteger' }, { c: 'zz' }); +// const z3 = validateObject(o, { test: 'boolean', z: 'bug' }); +// const z4 = validateObject(o, { a: 'boolean', z: 'bug' }); +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 91705: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.mapToCurveSimpleSWU = exports.SWUFpSqrtRatio = exports.weierstrass = exports.weierstrassPoints = exports.DER = void 0; +/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// Short Weierstrass curve. 
The formula is: y² = x³ + ax + b +const mod = __webpack_require__(24967); +const ut = __webpack_require__(91484); +const utils_js_1 = __webpack_require__(91484); +const curve_js_1 = __webpack_require__(62422); +function validatePointOpts(curve) { + const opts = (0, curve_js_1.validateBasic)(curve); + ut.validateObject(opts, { + a: 'field', + b: 'field', + }, { + allowedPrivateKeyLengths: 'array', + wrapPrivateKey: 'boolean', + isTorsionFree: 'function', + clearCofactor: 'function', + allowInfinityPoint: 'boolean', + fromBytes: 'function', + toBytes: 'function', + }); + const { endo, Fp, a } = opts; + if (endo) { + if (!Fp.eql(a, Fp.ZERO)) { + throw new Error('Endomorphism can only be defined for Koblitz curves that have a=0'); + } + if (typeof endo !== 'object' || + typeof endo.beta !== 'bigint' || + typeof endo.splitScalar !== 'function') { + throw new Error('Expected endomorphism with beta: bigint and splitScalar: function'); + } + } + return Object.freeze({ ...opts }); +} +// ASN.1 DER encoding utilities +const { bytesToNumberBE: b2n, hexToBytes: h2b } = ut; +exports.DER = { + // asn.1 DER encoding utils + Err: class DERErr extends Error { + constructor(m = '') { + super(m); + } + }, + _parseInt(data) { + const { Err: E } = exports.DER; + if (data.length < 2 || data[0] !== 0x02) + throw new E('Invalid signature integer tag'); + const len = data[1]; + const res = data.subarray(2, len + 2); + if (!len || res.length !== len) + throw new E('Invalid signature integer: wrong length'); + // https://crypto.stackexchange.com/a/57734 Leftmost bit of first byte is 'negative' flag, + // since we always use positive integers here. 
It must always be empty: + // - add zero byte if exists + // - if next byte doesn't have a flag, leading zero is not allowed (minimal encoding) + if (res[0] & 0b10000000) + throw new E('Invalid signature integer: negative'); + if (res[0] === 0x00 && !(res[1] & 0b10000000)) + throw new E('Invalid signature integer: unnecessary leading zero'); + return { d: b2n(res), l: data.subarray(len + 2) }; // d is data, l is left + }, + toSig(hex) { + // parse DER signature + const { Err: E } = exports.DER; + const data = typeof hex === 'string' ? h2b(hex) : hex; + if (!ut.isBytes(data)) + throw new Error('ui8a expected'); + let l = data.length; + if (l < 2 || data[0] != 0x30) + throw new E('Invalid signature tag'); + if (data[1] !== l - 2) + throw new E('Invalid signature: incorrect length'); + const { d: r, l: sBytes } = exports.DER._parseInt(data.subarray(2)); + const { d: s, l: rBytesLeft } = exports.DER._parseInt(sBytes); + if (rBytesLeft.length) + throw new E('Invalid signature: left bytes after parsing'); + return { r, s }; + }, + hexFromSig(sig) { + // Add leading zero if first byte has negative bit enabled. More details in '_parseInt' + const slice = (s) => (Number.parseInt(s[0], 16) & 0b1000 ? '00' + s : s); + const h = (num) => { + const hex = num.toString(16); + return hex.length & 1 ? 
`0${hex}` : hex; + }; + const s = slice(h(sig.s)); + const r = slice(h(sig.r)); + const shl = s.length / 2; + const rhl = r.length / 2; + const sl = h(shl); + const rl = h(rhl); + return `30${h(rhl + shl + 4)}02${rl}${r}02${sl}${s}`; + }, +}; +// Be friendly to bad ECMAScript parsers by not using bigint literals +// prettier-ignore +const _0n = BigInt(0), _1n = BigInt(1), _2n = BigInt(2), _3n = BigInt(3), _4n = BigInt(4); +function weierstrassPoints(opts) { + const CURVE = validatePointOpts(opts); + const { Fp } = CURVE; // All curves has same field / group length as for now, but they can differ + const toBytes = CURVE.toBytes || + ((_c, point, _isCompressed) => { + const a = point.toAffine(); + return ut.concatBytes(Uint8Array.from([0x04]), Fp.toBytes(a.x), Fp.toBytes(a.y)); + }); + const fromBytes = CURVE.fromBytes || + ((bytes) => { + // const head = bytes[0]; + const tail = bytes.subarray(1); + // if (head !== 0x04) throw new Error('Only non-compressed encoding is supported'); + const x = Fp.fromBytes(tail.subarray(0, Fp.BYTES)); + const y = Fp.fromBytes(tail.subarray(Fp.BYTES, 2 * Fp.BYTES)); + return { x, y }; + }); + /** + * y² = x³ + ax + b: Short weierstrass curve formula + * @returns y² + */ + function weierstrassEquation(x) { + const { a, b } = CURVE; + const x2 = Fp.sqr(x); // x * x + const x3 = Fp.mul(x2, x); // x2 * x + return Fp.add(Fp.add(x3, Fp.mul(x, a)), b); // x3 + a * x + b + } + // Validate whether the passed curve params are valid. + // We check if curve equation works for generator point. + // `assertValidity()` won't work: `isTorsionFree()` is not available at this point in bls12-381. + // ProjectivePoint class has not been initialized yet. 
+ if (!Fp.eql(Fp.sqr(CURVE.Gy), weierstrassEquation(CURVE.Gx))) + throw new Error('bad generator point: equation left != right'); + // Valid group elements reside in range 1..n-1 + function isWithinCurveOrder(num) { + return typeof num === 'bigint' && _0n < num && num < CURVE.n; + } + function assertGE(num) { + if (!isWithinCurveOrder(num)) + throw new Error('Expected valid bigint: 0 < bigint < curve.n'); + } + // Validates if priv key is valid and converts it to bigint. + // Supports options allowedPrivateKeyLengths and wrapPrivateKey. + function normPrivateKeyToScalar(key) { + const { allowedPrivateKeyLengths: lengths, nByteLength, wrapPrivateKey, n } = CURVE; + if (lengths && typeof key !== 'bigint') { + if (ut.isBytes(key)) + key = ut.bytesToHex(key); + // Normalize to hex string, pad. E.g. P521 would norm 130-132 char hex to 132-char bytes + if (typeof key !== 'string' || !lengths.includes(key.length)) + throw new Error('Invalid key'); + key = key.padStart(nByteLength * 2, '0'); + } + let num; + try { + num = + typeof key === 'bigint' + ? key + : ut.bytesToNumberBE((0, utils_js_1.ensureBytes)('private key', key, nByteLength)); + } + catch (error) { + throw new Error(`private key must be ${nByteLength} bytes, hex or bigint, not ${typeof key}`); + } + if (wrapPrivateKey) + num = mod.mod(num, n); // disabled by default, enabled for BLS + assertGE(num); // num in range [1..N-1] + return num; + } + const pointPrecomputes = new Map(); + function assertPrjPoint(other) { + if (!(other instanceof Point)) + throw new Error('ProjectivePoint expected'); + } + /** + * Projective Point works in 3d / projective (homogeneous) coordinates: (x, y, z) ∋ (x=x/z, y=y/z) + * Default Point works in 2d / affine coordinates: (x, y) + * We're doing calculations in projective, because its operations don't require costly inversion. 
+ */ + class Point { + constructor(px, py, pz) { + this.px = px; + this.py = py; + this.pz = pz; + if (px == null || !Fp.isValid(px)) + throw new Error('x required'); + if (py == null || !Fp.isValid(py)) + throw new Error('y required'); + if (pz == null || !Fp.isValid(pz)) + throw new Error('z required'); + } + // Does not validate if the point is on-curve. + // Use fromHex instead, or call assertValidity() later. + static fromAffine(p) { + const { x, y } = p || {}; + if (!p || !Fp.isValid(x) || !Fp.isValid(y)) + throw new Error('invalid affine point'); + if (p instanceof Point) + throw new Error('projective point not allowed'); + const is0 = (i) => Fp.eql(i, Fp.ZERO); + // fromAffine(x:0, y:0) would produce (x:0, y:0, z:1), but we need (x:0, y:1, z:0) + if (is0(x) && is0(y)) + return Point.ZERO; + return new Point(x, y, Fp.ONE); + } + get x() { + return this.toAffine().x; + } + get y() { + return this.toAffine().y; + } + /** + * Takes a bunch of Projective Points but executes only one + * inversion on all of them. Inversion is very slow operation, + * so this improves performance massively. + * Optimization: converts a list of projective points to a list of identical points with Z=1. + */ + static normalizeZ(points) { + const toInv = Fp.invertBatch(points.map((p) => p.pz)); + return points.map((p, i) => p.toAffine(toInv[i])).map(Point.fromAffine); + } + /** + * Converts hash string or Uint8Array to Point. + * @param hex short/long ECDSA hex + */ + static fromHex(hex) { + const P = Point.fromAffine(fromBytes((0, utils_js_1.ensureBytes)('pointHex', hex))); + P.assertValidity(); + return P; + } + // Multiplies generator point by privateKey. + static fromPrivateKey(privateKey) { + return Point.BASE.multiply(normPrivateKeyToScalar(privateKey)); + } + // "Private method", don't use it directly + _setWindowSize(windowSize) { + this._WINDOW_SIZE = windowSize; + pointPrecomputes.delete(this); + } + // A point on curve is valid if it conforms to equation. 
+ assertValidity() { + if (this.is0()) { + // (0, 1, 0) aka ZERO is invalid in most contexts. + // In BLS, ZERO can be serialized, so we allow it. + // (0, 0, 0) is wrong representation of ZERO and is always invalid. + if (CURVE.allowInfinityPoint && !Fp.is0(this.py)) + return; + throw new Error('bad point: ZERO'); + } + // Some 3rd-party test vectors require different wording between here & `fromCompressedHex` + const { x, y } = this.toAffine(); + // Check if x, y are valid field elements + if (!Fp.isValid(x) || !Fp.isValid(y)) + throw new Error('bad point: x or y not FE'); + const left = Fp.sqr(y); // y² + const right = weierstrassEquation(x); // x³ + ax + b + if (!Fp.eql(left, right)) + throw new Error('bad point: equation left != right'); + if (!this.isTorsionFree()) + throw new Error('bad point: not in prime-order subgroup'); + } + hasEvenY() { + const { y } = this.toAffine(); + if (Fp.isOdd) + return !Fp.isOdd(y); + throw new Error("Field doesn't support isOdd"); + } + /** + * Compare one point to another. + */ + equals(other) { + assertPrjPoint(other); + const { px: X1, py: Y1, pz: Z1 } = this; + const { px: X2, py: Y2, pz: Z2 } = other; + const U1 = Fp.eql(Fp.mul(X1, Z2), Fp.mul(X2, Z1)); + const U2 = Fp.eql(Fp.mul(Y1, Z2), Fp.mul(Y2, Z1)); + return U1 && U2; + } + /** + * Flips point to one corresponding to (x, -y) in Affine coordinates. + */ + negate() { + return new Point(this.px, Fp.neg(this.py), this.pz); + } + // Renes-Costello-Batina exception-free doubling formula. + // There is 30% faster Jacobian formula, but it is not complete. + // https://eprint.iacr.org/2015/1060, algorithm 3 + // Cost: 8M + 3S + 3*a + 2*b3 + 15add. 
+ double() { + const { a, b } = CURVE; + const b3 = Fp.mul(b, _3n); + const { px: X1, py: Y1, pz: Z1 } = this; + let X3 = Fp.ZERO, Y3 = Fp.ZERO, Z3 = Fp.ZERO; // prettier-ignore + let t0 = Fp.mul(X1, X1); // step 1 + let t1 = Fp.mul(Y1, Y1); + let t2 = Fp.mul(Z1, Z1); + let t3 = Fp.mul(X1, Y1); + t3 = Fp.add(t3, t3); // step 5 + Z3 = Fp.mul(X1, Z1); + Z3 = Fp.add(Z3, Z3); + X3 = Fp.mul(a, Z3); + Y3 = Fp.mul(b3, t2); + Y3 = Fp.add(X3, Y3); // step 10 + X3 = Fp.sub(t1, Y3); + Y3 = Fp.add(t1, Y3); + Y3 = Fp.mul(X3, Y3); + X3 = Fp.mul(t3, X3); + Z3 = Fp.mul(b3, Z3); // step 15 + t2 = Fp.mul(a, t2); + t3 = Fp.sub(t0, t2); + t3 = Fp.mul(a, t3); + t3 = Fp.add(t3, Z3); + Z3 = Fp.add(t0, t0); // step 20 + t0 = Fp.add(Z3, t0); + t0 = Fp.add(t0, t2); + t0 = Fp.mul(t0, t3); + Y3 = Fp.add(Y3, t0); + t2 = Fp.mul(Y1, Z1); // step 25 + t2 = Fp.add(t2, t2); + t0 = Fp.mul(t2, t3); + X3 = Fp.sub(X3, t0); + Z3 = Fp.mul(t2, t1); + Z3 = Fp.add(Z3, Z3); // step 30 + Z3 = Fp.add(Z3, Z3); + return new Point(X3, Y3, Z3); + } + // Renes-Costello-Batina exception-free addition formula. + // There is 30% faster Jacobian formula, but it is not complete. + // https://eprint.iacr.org/2015/1060, algorithm 1 + // Cost: 12M + 0S + 3*a + 3*b3 + 23add. 
+ add(other) { + assertPrjPoint(other); + const { px: X1, py: Y1, pz: Z1 } = this; + const { px: X2, py: Y2, pz: Z2 } = other; + let X3 = Fp.ZERO, Y3 = Fp.ZERO, Z3 = Fp.ZERO; // prettier-ignore + const a = CURVE.a; + const b3 = Fp.mul(CURVE.b, _3n); + let t0 = Fp.mul(X1, X2); // step 1 + let t1 = Fp.mul(Y1, Y2); + let t2 = Fp.mul(Z1, Z2); + let t3 = Fp.add(X1, Y1); + let t4 = Fp.add(X2, Y2); // step 5 + t3 = Fp.mul(t3, t4); + t4 = Fp.add(t0, t1); + t3 = Fp.sub(t3, t4); + t4 = Fp.add(X1, Z1); + let t5 = Fp.add(X2, Z2); // step 10 + t4 = Fp.mul(t4, t5); + t5 = Fp.add(t0, t2); + t4 = Fp.sub(t4, t5); + t5 = Fp.add(Y1, Z1); + X3 = Fp.add(Y2, Z2); // step 15 + t5 = Fp.mul(t5, X3); + X3 = Fp.add(t1, t2); + t5 = Fp.sub(t5, X3); + Z3 = Fp.mul(a, t4); + X3 = Fp.mul(b3, t2); // step 20 + Z3 = Fp.add(X3, Z3); + X3 = Fp.sub(t1, Z3); + Z3 = Fp.add(t1, Z3); + Y3 = Fp.mul(X3, Z3); + t1 = Fp.add(t0, t0); // step 25 + t1 = Fp.add(t1, t0); + t2 = Fp.mul(a, t2); + t4 = Fp.mul(b3, t4); + t1 = Fp.add(t1, t2); + t2 = Fp.sub(t0, t2); // step 30 + t2 = Fp.mul(a, t2); + t4 = Fp.add(t4, t2); + t0 = Fp.mul(t1, t4); + Y3 = Fp.add(Y3, t0); + t0 = Fp.mul(t5, t4); // step 35 + X3 = Fp.mul(t3, X3); + X3 = Fp.sub(X3, t0); + t0 = Fp.mul(t3, t1); + Z3 = Fp.mul(t5, Z3); + Z3 = Fp.add(Z3, t0); // step 40 + return new Point(X3, Y3, Z3); + } + subtract(other) { + return this.add(other.negate()); + } + is0() { + return this.equals(Point.ZERO); + } + wNAF(n) { + return wnaf.wNAFCached(this, pointPrecomputes, n, (comp) => { + const toInv = Fp.invertBatch(comp.map((p) => p.pz)); + return comp.map((p, i) => p.toAffine(toInv[i])).map(Point.fromAffine); + }); + } + /** + * Non-constant-time multiplication. Uses double-and-add algorithm. + * It's faster, but should only be used when you don't care about + * an exposed private key e.g. sig verification, which works over *public* keys. 
+ */ + multiplyUnsafe(n) { + const I = Point.ZERO; + if (n === _0n) + return I; + assertGE(n); // Will throw on 0 + if (n === _1n) + return this; + const { endo } = CURVE; + if (!endo) + return wnaf.unsafeLadder(this, n); + // Apply endomorphism + let { k1neg, k1, k2neg, k2 } = endo.splitScalar(n); + let k1p = I; + let k2p = I; + let d = this; + while (k1 > _0n || k2 > _0n) { + if (k1 & _1n) + k1p = k1p.add(d); + if (k2 & _1n) + k2p = k2p.add(d); + d = d.double(); + k1 >>= _1n; + k2 >>= _1n; + } + if (k1neg) + k1p = k1p.negate(); + if (k2neg) + k2p = k2p.negate(); + k2p = new Point(Fp.mul(k2p.px, endo.beta), k2p.py, k2p.pz); + return k1p.add(k2p); + } + /** + * Constant time multiplication. + * Uses wNAF method. Windowed method may be 10% faster, + * but takes 2x longer to generate and consumes 2x memory. + * Uses precomputes when available. + * Uses endomorphism for Koblitz curves. + * @param scalar by which the point would be multiplied + * @returns New point + */ + multiply(scalar) { + assertGE(scalar); + let n = scalar; + let point, fake; // Fake point is used to const-time mult + const { endo } = CURVE; + if (endo) { + const { k1neg, k1, k2neg, k2 } = endo.splitScalar(n); + let { p: k1p, f: f1p } = this.wNAF(k1); + let { p: k2p, f: f2p } = this.wNAF(k2); + k1p = wnaf.constTimeNegate(k1neg, k1p); + k2p = wnaf.constTimeNegate(k2neg, k2p); + k2p = new Point(Fp.mul(k2p.px, endo.beta), k2p.py, k2p.pz); + point = k1p.add(k2p); + fake = f1p.add(f2p); + } + else { + const { p, f } = this.wNAF(n); + point = p; + fake = f; + } + // Normalize `z` for both points, but return only real one + return Point.normalizeZ([point, fake])[0]; + } + /** + * Efficiently calculate `aP + bQ`. Unsafe, can expose private key, if used incorrectly. + * Not using Strauss-Shamir trick: precomputation tables are faster. + * The trick could be useful if both P and Q are not G (not in our case). 
+ * @returns non-zero affine point + */ + multiplyAndAddUnsafe(Q, a, b) { + const G = Point.BASE; // No Strauss-Shamir trick: we have 10% faster G precomputes + const mul = (P, a // Select faster multiply() method + ) => (a === _0n || a === _1n || !P.equals(G) ? P.multiplyUnsafe(a) : P.multiply(a)); + const sum = mul(this, a).add(mul(Q, b)); + return sum.is0() ? undefined : sum; + } + // Converts Projective point to affine (x, y) coordinates. + // Can accept precomputed Z^-1 - for example, from invertBatch. + // (x, y, z) ∋ (x=x/z, y=y/z) + toAffine(iz) { + const { px: x, py: y, pz: z } = this; + const is0 = this.is0(); + // If invZ was 0, we return zero point. However we still want to execute + // all operations, so we replace invZ with a random number, 1. + if (iz == null) + iz = is0 ? Fp.ONE : Fp.inv(z); + const ax = Fp.mul(x, iz); + const ay = Fp.mul(y, iz); + const zz = Fp.mul(z, iz); + if (is0) + return { x: Fp.ZERO, y: Fp.ZERO }; + if (!Fp.eql(zz, Fp.ONE)) + throw new Error('invZ was invalid'); + return { x: ax, y: ay }; + } + isTorsionFree() { + const { h: cofactor, isTorsionFree } = CURVE; + if (cofactor === _1n) + return true; // No subgroups, always torsion-free + if (isTorsionFree) + return isTorsionFree(Point, this); + throw new Error('isTorsionFree() has not been declared for the elliptic curve'); + } + clearCofactor() { + const { h: cofactor, clearCofactor } = CURVE; + if (cofactor === _1n) + return this; // Fast-path + if (clearCofactor) + return clearCofactor(Point, this); + return this.multiplyUnsafe(CURVE.h); + } + toRawBytes(isCompressed = true) { + this.assertValidity(); + return toBytes(Point, this, isCompressed); + } + toHex(isCompressed = true) { + return ut.bytesToHex(this.toRawBytes(isCompressed)); + } + } + Point.BASE = new Point(CURVE.Gx, CURVE.Gy, Fp.ONE); + Point.ZERO = new Point(Fp.ZERO, Fp.ONE, Fp.ZERO); + const _bits = CURVE.nBitLength; + const wnaf = (0, curve_js_1.wNAF)(Point, CURVE.endo ? 
Math.ceil(_bits / 2) : _bits); + // Validate if generator point is on curve + return { + CURVE, + ProjectivePoint: Point, + normPrivateKeyToScalar, + weierstrassEquation, + isWithinCurveOrder, + }; +} +exports.weierstrassPoints = weierstrassPoints; +function validateOpts(curve) { + const opts = (0, curve_js_1.validateBasic)(curve); + ut.validateObject(opts, { + hash: 'hash', + hmac: 'function', + randomBytes: 'function', + }, { + bits2int: 'function', + bits2int_modN: 'function', + lowS: 'boolean', + }); + return Object.freeze({ lowS: true, ...opts }); +} +function weierstrass(curveDef) { + const CURVE = validateOpts(curveDef); + const { Fp, n: CURVE_ORDER } = CURVE; + const compressedLen = Fp.BYTES + 1; // e.g. 33 for 32 + const uncompressedLen = 2 * Fp.BYTES + 1; // e.g. 65 for 32 + function isValidFieldElement(num) { + return _0n < num && num < Fp.ORDER; // 0 is banned since it's not invertible FE + } + function modN(a) { + return mod.mod(a, CURVE_ORDER); + } + function invN(a) { + return mod.invert(a, CURVE_ORDER); + } + const { ProjectivePoint: Point, normPrivateKeyToScalar, weierstrassEquation, isWithinCurveOrder, } = weierstrassPoints({ + ...CURVE, + toBytes(_c, point, isCompressed) { + const a = point.toAffine(); + const x = Fp.toBytes(a.x); + const cat = ut.concatBytes; + if (isCompressed) { + return cat(Uint8Array.from([point.hasEvenY() ? 
0x02 : 0x03]), x); + } + else { + return cat(Uint8Array.from([0x04]), x, Fp.toBytes(a.y)); + } + }, + fromBytes(bytes) { + const len = bytes.length; + const head = bytes[0]; + const tail = bytes.subarray(1); + // this.assertValidity() is done inside of fromHex + if (len === compressedLen && (head === 0x02 || head === 0x03)) { + const x = ut.bytesToNumberBE(tail); + if (!isValidFieldElement(x)) + throw new Error('Point is not on curve'); + const y2 = weierstrassEquation(x); // y² = x³ + ax + b + let y = Fp.sqrt(y2); // y = y² ^ (p+1)/4 + const isYOdd = (y & _1n) === _1n; + // ECDSA + const isHeadOdd = (head & 1) === 1; + if (isHeadOdd !== isYOdd) + y = Fp.neg(y); + return { x, y }; + } + else if (len === uncompressedLen && head === 0x04) { + const x = Fp.fromBytes(tail.subarray(0, Fp.BYTES)); + const y = Fp.fromBytes(tail.subarray(Fp.BYTES, 2 * Fp.BYTES)); + return { x, y }; + } + else { + throw new Error(`Point of length ${len} was invalid. Expected ${compressedLen} compressed bytes or ${uncompressedLen} uncompressed bytes`); + } + }, + }); + const numToNByteStr = (num) => ut.bytesToHex(ut.numberToBytesBE(num, CURVE.nByteLength)); + function isBiggerThanHalfOrder(number) { + const HALF = CURVE_ORDER >> _1n; + return number > HALF; + } + function normalizeS(s) { + return isBiggerThanHalfOrder(s) ? modN(-s) : s; + } + // slice bytes num + const slcNum = (b, from, to) => ut.bytesToNumberBE(b.slice(from, to)); + /** + * ECDSA signature with its (r, s) properties. Supports DER & compact representations. 
+ */ + class Signature { + constructor(r, s, recovery) { + this.r = r; + this.s = s; + this.recovery = recovery; + this.assertValidity(); + } + // pair (bytes of r, bytes of s) + static fromCompact(hex) { + const l = CURVE.nByteLength; + hex = (0, utils_js_1.ensureBytes)('compactSignature', hex, l * 2); + return new Signature(slcNum(hex, 0, l), slcNum(hex, l, 2 * l)); + } + // DER encoded ECDSA signature + // https://bitcoin.stackexchange.com/questions/57644/what-are-the-parts-of-a-bitcoin-transaction-input-script + static fromDER(hex) { + const { r, s } = exports.DER.toSig((0, utils_js_1.ensureBytes)('DER', hex)); + return new Signature(r, s); + } + assertValidity() { + // can use assertGE here + if (!isWithinCurveOrder(this.r)) + throw new Error('r must be 0 < r < CURVE.n'); + if (!isWithinCurveOrder(this.s)) + throw new Error('s must be 0 < s < CURVE.n'); + } + addRecoveryBit(recovery) { + return new Signature(this.r, this.s, recovery); + } + recoverPublicKey(msgHash) { + const { r, s, recovery: rec } = this; + const h = bits2int_modN((0, utils_js_1.ensureBytes)('msgHash', msgHash)); // Truncate hash + if (rec == null || ![0, 1, 2, 3].includes(rec)) + throw new Error('recovery id invalid'); + const radj = rec === 2 || rec === 3 ? r + CURVE.n : r; + if (radj >= Fp.ORDER) + throw new Error('recovery id 2 or 3 invalid'); + const prefix = (rec & 1) === 0 ? '02' : '03'; + const R = Point.fromHex(prefix + numToNByteStr(radj)); + const ir = invN(radj); // r^-1 + const u1 = modN(-h * ir); // -hr^-1 + const u2 = modN(s * ir); // sr^-1 + const Q = Point.BASE.multiplyAndAddUnsafe(R, u1, u2); // (sr^-1)R-(hr^-1)G = -(hr^-1)G + (sr^-1) + if (!Q) + throw new Error('point at infinify'); // unsafe is fine: no priv data leaked + Q.assertValidity(); + return Q; + } + // Signatures should be low-s, to prevent malleability. + hasHighS() { + return isBiggerThanHalfOrder(this.s); + } + normalizeS() { + return this.hasHighS() ? 
new Signature(this.r, modN(-this.s), this.recovery) : this; + } + // DER-encoded + toDERRawBytes() { + return ut.hexToBytes(this.toDERHex()); + } + toDERHex() { + return exports.DER.hexFromSig({ r: this.r, s: this.s }); + } + // padded bytes of r, then padded bytes of s + toCompactRawBytes() { + return ut.hexToBytes(this.toCompactHex()); + } + toCompactHex() { + return numToNByteStr(this.r) + numToNByteStr(this.s); + } + } + const utils = { + isValidPrivateKey(privateKey) { + try { + normPrivateKeyToScalar(privateKey); + return true; + } + catch (error) { + return false; + } + }, + normPrivateKeyToScalar: normPrivateKeyToScalar, + /** + * Produces cryptographically secure private key from random of size + * (groupLen + ceil(groupLen / 2)) with modulo bias being negligible. + */ + randomPrivateKey: () => { + const length = mod.getMinHashLength(CURVE.n); + return mod.mapHashToField(CURVE.randomBytes(length), CURVE.n); + }, + /** + * Creates precompute table for an arbitrary EC point. Makes point "cached". + * Allows to massively speed-up `point.multiply(scalar)`. + * @returns cached point + * @example + * const fast = utils.precompute(8, ProjectivePoint.fromHex(someonesPubKey)); + * fast.multiply(privKey); // much faster ECDH now + */ + precompute(windowSize = 8, point = Point.BASE) { + point._setWindowSize(windowSize); + point.multiply(BigInt(3)); // 3 is arbitrary, just need any number here + return point; + }, + }; + /** + * Computes public key for a private key. Checks for validity of the private key. + * @param privateKey private key + * @param isCompressed whether to return compact (default), or full key + * @returns Public key, full when isCompressed=false; short when isCompressed=true + */ + function getPublicKey(privateKey, isCompressed = true) { + return Point.fromPrivateKey(privateKey).toRawBytes(isCompressed); + } + /** + * Quick and dirty check for item being public key. Does not validate hex, or being on-curve. 
+ */ + function isProbPub(item) { + const arr = ut.isBytes(item); + const str = typeof item === 'string'; + const len = (arr || str) && item.length; + if (arr) + return len === compressedLen || len === uncompressedLen; + if (str) + return len === 2 * compressedLen || len === 2 * uncompressedLen; + if (item instanceof Point) + return true; + return false; + } + /** + * ECDH (Elliptic Curve Diffie Hellman). + * Computes shared public key from private key and public key. + * Checks: 1) private key validity 2) shared key is on-curve. + * Does NOT hash the result. + * @param privateA private key + * @param publicB different public key + * @param isCompressed whether to return compact (default), or full key + * @returns shared public key + */ + function getSharedSecret(privateA, publicB, isCompressed = true) { + if (isProbPub(privateA)) + throw new Error('first arg must be private key'); + if (!isProbPub(publicB)) + throw new Error('second arg must be public key'); + const b = Point.fromHex(publicB); // check for being on-curve + return b.multiply(normPrivateKeyToScalar(privateA)).toRawBytes(isCompressed); + } + // RFC6979: ensure ECDSA msg is X bytes and < N. RFC suggests optional truncating via bits2octets. + // FIPS 186-4 4.6 suggests the leftmost min(nBitLen, outLen) bits, which matches bits2int. + // bits2int can produce res>N, we can do mod(res, N) since the bitLen is the same. + // int2octets can't be used; pads small msgs with 0: unacceptatble for trunc as per RFC vectors + const bits2int = CURVE.bits2int || + function (bytes) { + // For curves with nBitLength % 8 !== 0: bits2octets(bits2octets(m)) !== bits2octets(m) + // for some cases, since bytes.length * 8 is not actual bitLength. + const num = ut.bytesToNumberBE(bytes); // check for == u8 done here + const delta = bytes.length * 8 - CURVE.nBitLength; // truncate to nBitLength leftmost bits + return delta > 0 ? 
num >> BigInt(delta) : num; + }; + const bits2int_modN = CURVE.bits2int_modN || + function (bytes) { + return modN(bits2int(bytes)); // can't use bytesToNumberBE here + }; + // NOTE: pads output with zero as per spec + const ORDER_MASK = ut.bitMask(CURVE.nBitLength); + /** + * Converts to bytes. Checks if num in `[0..ORDER_MASK-1]` e.g.: `[0..2^256-1]`. + */ + function int2octets(num) { + if (typeof num !== 'bigint') + throw new Error('bigint expected'); + if (!(_0n <= num && num < ORDER_MASK)) + throw new Error(`bigint expected < 2^${CURVE.nBitLength}`); + // works with order, can have different size than numToField! + return ut.numberToBytesBE(num, CURVE.nByteLength); + } + // Steps A, D of RFC6979 3.2 + // Creates RFC6979 seed; converts msg/privKey to numbers. + // Used only in sign, not in verify. + // NOTE: we cannot assume here that msgHash has same amount of bytes as curve order, this will be wrong at least for P521. + // Also it can be bigger for P224 + SHA256 + function prepSig(msgHash, privateKey, opts = defaultSigOpts) { + if (['recovered', 'canonical'].some((k) => k in opts)) + throw new Error('sign() legacy options not supported'); + const { hash, randomBytes } = CURVE; + let { lowS, prehash, extraEntropy: ent } = opts; // generates low-s sigs by default + if (lowS == null) + lowS = true; // RFC6979 3.2: we skip step A, because we already provide hash + msgHash = (0, utils_js_1.ensureBytes)('msgHash', msgHash); + if (prehash) + msgHash = (0, utils_js_1.ensureBytes)('prehashed msgHash', hash(msgHash)); + // We can't later call bits2octets, since nested bits2int is broken for curves + // with nBitLength % 8 !== 0. Because of that, we unwrap it here as int2octets call. + // const bits2octets = (bits) => int2octets(bits2int_modN(bits)) + const h1int = bits2int_modN(msgHash); + const d = normPrivateKeyToScalar(privateKey); // validate private key, convert to bigint + const seedArgs = [int2octets(d), int2octets(h1int)]; + // extraEntropy. 
RFC6979 3.6: additional k' (optional). + if (ent != null) { + // K = HMAC_K(V || 0x00 || int2octets(x) || bits2octets(h1) || k') + const e = ent === true ? randomBytes(Fp.BYTES) : ent; // generate random bytes OR pass as-is + seedArgs.push((0, utils_js_1.ensureBytes)('extraEntropy', e)); // check for being bytes + } + const seed = ut.concatBytes(...seedArgs); // Step D of RFC6979 3.2 + const m = h1int; // NOTE: no need to call bits2int second time here, it is inside truncateHash! + // Converts signature params into point w r/s, checks result for validity. + function k2sig(kBytes) { + // RFC 6979 Section 3.2, step 3: k = bits2int(T) + const k = bits2int(kBytes); // Cannot use fields methods, since it is group element + if (!isWithinCurveOrder(k)) + return; // Important: all mod() calls here must be done over N + const ik = invN(k); // k^-1 mod n + const q = Point.BASE.multiply(k).toAffine(); // q = Gk + const r = modN(q.x); // r = q.x mod n + if (r === _0n) + return; + // Can use scalar blinding b^-1(bm + bdr) where b ∈ [1,q−1] according to + // https://tches.iacr.org/index.php/TCHES/article/view/7337/6509. We've decided against it: + // a) dependency on CSPRNG b) 15% slowdown c) doesn't really help since bigints are not CT + const s = modN(ik * modN(m + r * d)); // Not using blinding here + if (s === _0n) + return; + let recovery = (q.x === r ? 0 : 2) | Number(q.y & _1n); // recovery bit (2 or 3, when q.x > n) + let normS = s; + if (lowS && isBiggerThanHalfOrder(s)) { + normS = normalizeS(s); // if lowS was passed, ensure s is always + recovery ^= 1; // // in the bottom half of N + } + return new Signature(r, normS, recovery); // use normS, not s + } + return { seed, k2sig }; + } + const defaultSigOpts = { lowS: CURVE.lowS, prehash: false }; + const defaultVerOpts = { lowS: CURVE.lowS, prehash: false }; + /** + * Signs message hash with a private key. 
+ * ``` + * sign(m, d, k) where + * (x, y) = G × k + * r = x mod n + * s = (m + dr)/k mod n + * ``` + * @param msgHash NOT message. msg needs to be hashed to `msgHash`, or use `prehash`. + * @param privKey private key + * @param opts lowS for non-malleable sigs. extraEntropy for mixing randomness into k. prehash will hash first arg. + * @returns signature with recovery param + */ + function sign(msgHash, privKey, opts = defaultSigOpts) { + const { seed, k2sig } = prepSig(msgHash, privKey, opts); // Steps A, D of RFC6979 3.2. + const C = CURVE; + const drbg = ut.createHmacDrbg(C.hash.outputLen, C.nByteLength, C.hmac); + return drbg(seed, k2sig); // Steps B, C, D, E, F, G + } + // Enable precomputes. Slows down first publicKey computation by 20ms. + Point.BASE._setWindowSize(8); + // utils.precompute(8, ProjectivePoint.BASE) + /** + * Verifies a signature against message hash and public key. + * Rejects lowS signatures by default: to override, + * specify option `{lowS: false}`. Implements section 4.1.4 from https://www.secg.org/sec1-v2.pdf: + * + * ``` + * verify(r, s, h, P) where + * U1 = hs^-1 mod n + * U2 = rs^-1 mod n + * R = U1⋅G - U2⋅P + * mod(R.x, n) == r + * ``` + */ + function verify(signature, msgHash, publicKey, opts = defaultVerOpts) { + const sg = signature; + msgHash = (0, utils_js_1.ensureBytes)('msgHash', msgHash); + publicKey = (0, utils_js_1.ensureBytes)('publicKey', publicKey); + if ('strict' in opts) + throw new Error('options.strict was renamed to lowS'); + const { lowS, prehash } = opts; + let _sig = undefined; + let P; + try { + if (typeof sg === 'string' || ut.isBytes(sg)) { + // Signature can be represented in 2 ways: compact (2*nByteLength) & DER (variable-length). + // Since DER can also be 2*nByteLength bytes, we check for it first. 
+ try { + _sig = Signature.fromDER(sg); + } + catch (derError) { + if (!(derError instanceof exports.DER.Err)) + throw derError; + _sig = Signature.fromCompact(sg); + } + } + else if (typeof sg === 'object' && typeof sg.r === 'bigint' && typeof sg.s === 'bigint') { + const { r, s } = sg; + _sig = new Signature(r, s); + } + else { + throw new Error('PARSE'); + } + P = Point.fromHex(publicKey); + } + catch (error) { + if (error.message === 'PARSE') + throw new Error(`signature must be Signature instance, Uint8Array or hex string`); + return false; + } + if (lowS && _sig.hasHighS()) + return false; + if (prehash) + msgHash = CURVE.hash(msgHash); + const { r, s } = _sig; + const h = bits2int_modN(msgHash); // Cannot use fields methods, since it is group element + const is = invN(s); // s^-1 + const u1 = modN(h * is); // u1 = hs^-1 mod n + const u2 = modN(r * is); // u2 = rs^-1 mod n + const R = Point.BASE.multiplyAndAddUnsafe(P, u1, u2)?.toAffine(); // R = u1⋅G + u2⋅P + if (!R) + return false; + const v = modN(R.x); + return v === r; + } + return { + CURVE, + getPublicKey, + getSharedSecret, + sign, + verify, + ProjectivePoint: Point, + Signature, + utils, + }; +} +exports.weierstrass = weierstrass; +/** + * Implementation of the Shallue and van de Woestijne method for any weierstrass curve. + * TODO: check if there is a way to merge this with uvRatio in Edwards; move to modular. + * b = True and y = sqrt(u / v) if (u / v) is square in F, and + * b = False and y = sqrt(Z * (u / v)) otherwise. + * @param Fp + * @param Z + * @returns + */ +function SWUFpSqrtRatio(Fp, Z) { + // Generic implementation + const q = Fp.ORDER; + let l = _0n; + for (let o = q - _1n; o % _2n === _0n; o /= _2n) + l += _1n; + const c1 = l; // 1. c1, the largest integer such that 2^c1 divides q - 1. + // We need 2n ** c1 and 2n ** (c1-1). We can't use **; but we can use <<. 
+ // 2n ** c1 == 2n << (c1-1) + const _2n_pow_c1_1 = _2n << (c1 - _1n - _1n); + const _2n_pow_c1 = _2n_pow_c1_1 * _2n; + const c2 = (q - _1n) / _2n_pow_c1; // 2. c2 = (q - 1) / (2^c1) # Integer arithmetic + const c3 = (c2 - _1n) / _2n; // 3. c3 = (c2 - 1) / 2 # Integer arithmetic + const c4 = _2n_pow_c1 - _1n; // 4. c4 = 2^c1 - 1 # Integer arithmetic + const c5 = _2n_pow_c1_1; // 5. c5 = 2^(c1 - 1) # Integer arithmetic + const c6 = Fp.pow(Z, c2); // 6. c6 = Z^c2 + const c7 = Fp.pow(Z, (c2 + _1n) / _2n); // 7. c7 = Z^((c2 + 1) / 2) + let sqrtRatio = (u, v) => { + let tv1 = c6; // 1. tv1 = c6 + let tv2 = Fp.pow(v, c4); // 2. tv2 = v^c4 + let tv3 = Fp.sqr(tv2); // 3. tv3 = tv2^2 + tv3 = Fp.mul(tv3, v); // 4. tv3 = tv3 * v + let tv5 = Fp.mul(u, tv3); // 5. tv5 = u * tv3 + tv5 = Fp.pow(tv5, c3); // 6. tv5 = tv5^c3 + tv5 = Fp.mul(tv5, tv2); // 7. tv5 = tv5 * tv2 + tv2 = Fp.mul(tv5, v); // 8. tv2 = tv5 * v + tv3 = Fp.mul(tv5, u); // 9. tv3 = tv5 * u + let tv4 = Fp.mul(tv3, tv2); // 10. tv4 = tv3 * tv2 + tv5 = Fp.pow(tv4, c5); // 11. tv5 = tv4^c5 + let isQR = Fp.eql(tv5, Fp.ONE); // 12. isQR = tv5 == 1 + tv2 = Fp.mul(tv3, c7); // 13. tv2 = tv3 * c7 + tv5 = Fp.mul(tv4, tv1); // 14. tv5 = tv4 * tv1 + tv3 = Fp.cmov(tv2, tv3, isQR); // 15. tv3 = CMOV(tv2, tv3, isQR) + tv4 = Fp.cmov(tv5, tv4, isQR); // 16. tv4 = CMOV(tv5, tv4, isQR) + // 17. for i in (c1, c1 - 1, ..., 2): + for (let i = c1; i > _1n; i--) { + let tv5 = i - _2n; // 18. tv5 = i - 2 + tv5 = _2n << (tv5 - _1n); // 19. tv5 = 2^tv5 + let tvv5 = Fp.pow(tv4, tv5); // 20. tv5 = tv4^tv5 + const e1 = Fp.eql(tvv5, Fp.ONE); // 21. e1 = tv5 == 1 + tv2 = Fp.mul(tv3, tv1); // 22. tv2 = tv3 * tv1 + tv1 = Fp.mul(tv1, tv1); // 23. tv1 = tv1 * tv1 + tvv5 = Fp.mul(tv4, tv1); // 24. tv5 = tv4 * tv1 + tv3 = Fp.cmov(tv2, tv3, e1); // 25. tv3 = CMOV(tv2, tv3, e1) + tv4 = Fp.cmov(tvv5, tv4, e1); // 26. 
tv4 = CMOV(tv5, tv4, e1) + } + return { isValid: isQR, value: tv3 }; + }; + if (Fp.ORDER % _4n === _3n) { + // sqrt_ratio_3mod4(u, v) + const c1 = (Fp.ORDER - _3n) / _4n; // 1. c1 = (q - 3) / 4 # Integer arithmetic + const c2 = Fp.sqrt(Fp.neg(Z)); // 2. c2 = sqrt(-Z) + sqrtRatio = (u, v) => { + let tv1 = Fp.sqr(v); // 1. tv1 = v^2 + const tv2 = Fp.mul(u, v); // 2. tv2 = u * v + tv1 = Fp.mul(tv1, tv2); // 3. tv1 = tv1 * tv2 + let y1 = Fp.pow(tv1, c1); // 4. y1 = tv1^c1 + y1 = Fp.mul(y1, tv2); // 5. y1 = y1 * tv2 + const y2 = Fp.mul(y1, c2); // 6. y2 = y1 * c2 + const tv3 = Fp.mul(Fp.sqr(y1), v); // 7. tv3 = y1^2; 8. tv3 = tv3 * v + const isQR = Fp.eql(tv3, u); // 9. isQR = tv3 == u + let y = Fp.cmov(y2, y1, isQR); // 10. y = CMOV(y2, y1, isQR) + return { isValid: isQR, value: y }; // 11. return (isQR, y) isQR ? y : y*c2 + }; + } + // No curves uses that + // if (Fp.ORDER % _8n === _5n) // sqrt_ratio_5mod8 + return sqrtRatio; +} +exports.SWUFpSqrtRatio = SWUFpSqrtRatio; +/** + * Simplified Shallue-van de Woestijne-Ulas Method + * https://www.rfc-editor.org/rfc/rfc9380#section-6.6.2 + */ +function mapToCurveSimpleSWU(Fp, opts) { + mod.validateField(Fp); + if (!Fp.isValid(opts.A) || !Fp.isValid(opts.B) || !Fp.isValid(opts.Z)) + throw new Error('mapToCurveSimpleSWU: invalid opts'); + const sqrtRatio = SWUFpSqrtRatio(Fp, opts.Z); + if (!Fp.isOdd) + throw new Error('Fp.isOdd is not implemented!'); + // Input: u, an element of F. + // Output: (x, y), a point on E. + return (u) => { + // prettier-ignore + let tv1, tv2, tv3, tv4, tv5, tv6, x, y; + tv1 = Fp.sqr(u); // 1. tv1 = u^2 + tv1 = Fp.mul(tv1, opts.Z); // 2. tv1 = Z * tv1 + tv2 = Fp.sqr(tv1); // 3. tv2 = tv1^2 + tv2 = Fp.add(tv2, tv1); // 4. tv2 = tv2 + tv1 + tv3 = Fp.add(tv2, Fp.ONE); // 5. tv3 = tv2 + 1 + tv3 = Fp.mul(tv3, opts.B); // 6. tv3 = B * tv3 + tv4 = Fp.cmov(opts.Z, Fp.neg(tv2), !Fp.eql(tv2, Fp.ZERO)); // 7. tv4 = CMOV(Z, -tv2, tv2 != 0) + tv4 = Fp.mul(tv4, opts.A); // 8. 
tv4 = A * tv4 + tv2 = Fp.sqr(tv3); // 9. tv2 = tv3^2 + tv6 = Fp.sqr(tv4); // 10. tv6 = tv4^2 + tv5 = Fp.mul(tv6, opts.A); // 11. tv5 = A * tv6 + tv2 = Fp.add(tv2, tv5); // 12. tv2 = tv2 + tv5 + tv2 = Fp.mul(tv2, tv3); // 13. tv2 = tv2 * tv3 + tv6 = Fp.mul(tv6, tv4); // 14. tv6 = tv6 * tv4 + tv5 = Fp.mul(tv6, opts.B); // 15. tv5 = B * tv6 + tv2 = Fp.add(tv2, tv5); // 16. tv2 = tv2 + tv5 + x = Fp.mul(tv1, tv3); // 17. x = tv1 * tv3 + const { isValid, value } = sqrtRatio(tv2, tv6); // 18. (is_gx1_square, y1) = sqrt_ratio(tv2, tv6) + y = Fp.mul(tv1, u); // 19. y = tv1 * u -> Z * u^3 * y1 + y = Fp.mul(y, value); // 20. y = y * y1 + x = Fp.cmov(x, tv3, isValid); // 21. x = CMOV(x, tv3, is_gx1_square) + y = Fp.cmov(y, value, isValid); // 22. y = CMOV(y, y1, is_gx1_square) + const e1 = Fp.isOdd(u) === Fp.isOdd(y); // 23. e1 = sgn0(u) == sgn0(y) + y = Fp.cmov(Fp.neg(y), y, e1); // 24. y = CMOV(-y, y, e1) + x = Fp.div(x, tv4); // 25. x = x / tv4 + return { x, y }; + }; +} +exports.mapToCurveSimpleSWU = mapToCurveSimpleSWU; +//# sourceMappingURL=weierstrass.js.map + +/***/ }), + +/***/ 8510: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.encodeToCurve = exports.hashToCurve = exports.schnorr = exports.secp256k1 = void 0; +/*! 
noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +const sha256_1 = __webpack_require__(22623); +const utils_1 = __webpack_require__(99175); +const modular_js_1 = __webpack_require__(24967); +const weierstrass_js_1 = __webpack_require__(91705); +const utils_js_1 = __webpack_require__(91484); +const hash_to_curve_js_1 = __webpack_require__(71761); +const _shortw_utils_js_1 = __webpack_require__(73562); +const secp256k1P = BigInt('0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f'); +const secp256k1N = BigInt('0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141'); +const _1n = BigInt(1); +const _2n = BigInt(2); +const divNearest = (a, b) => (a + b / _2n) / b; +/** + * √n = n^((p+1)/4) for fields p = 3 mod 4. We unwrap the loop and multiply bit-by-bit. + * (P+1n/4n).toString(2) would produce bits [223x 1, 0, 22x 1, 4x 0, 11, 00] + */ +function sqrtMod(y) { + const P = secp256k1P; + // prettier-ignore + const _3n = BigInt(3), _6n = BigInt(6), _11n = BigInt(11), _22n = BigInt(22); + // prettier-ignore + const _23n = BigInt(23), _44n = BigInt(44), _88n = BigInt(88); + const b2 = (y * y * y) % P; // x^3, 11 + const b3 = (b2 * b2 * y) % P; // x^7 + const b6 = ((0, modular_js_1.pow2)(b3, _3n, P) * b3) % P; + const b9 = ((0, modular_js_1.pow2)(b6, _3n, P) * b3) % P; + const b11 = ((0, modular_js_1.pow2)(b9, _2n, P) * b2) % P; + const b22 = ((0, modular_js_1.pow2)(b11, _11n, P) * b11) % P; + const b44 = ((0, modular_js_1.pow2)(b22, _22n, P) * b22) % P; + const b88 = ((0, modular_js_1.pow2)(b44, _44n, P) * b44) % P; + const b176 = ((0, modular_js_1.pow2)(b88, _88n, P) * b88) % P; + const b220 = ((0, modular_js_1.pow2)(b176, _44n, P) * b44) % P; + const b223 = ((0, modular_js_1.pow2)(b220, _3n, P) * b3) % P; + const t1 = ((0, modular_js_1.pow2)(b223, _23n, P) * b22) % P; + const t2 = ((0, modular_js_1.pow2)(t1, _6n, P) * b2) % P; + const root = (0, modular_js_1.pow2)(t2, _2n, P); + if (!Fp.eql(Fp.sqr(root), y)) + throw new 
Error('Cannot find square root'); + return root; +} +const Fp = (0, modular_js_1.Field)(secp256k1P, undefined, undefined, { sqrt: sqrtMod }); +exports.secp256k1 = (0, _shortw_utils_js_1.createCurve)({ + a: BigInt(0), // equation params: a, b + b: BigInt(7), // Seem to be rigid: bitcointalk.org/index.php?topic=289795.msg3183975#msg3183975 + Fp, // Field's prime: 2n**256n - 2n**32n - 2n**9n - 2n**8n - 2n**7n - 2n**6n - 2n**4n - 1n + n: secp256k1N, // Curve order, total count of valid points in the field + // Base point (x, y) aka generator point + Gx: BigInt('55066263022277343669578718895168534326250603453777594175500187360389116729240'), + Gy: BigInt('32670510020758816978083085130507043184471273380659243275938904335757337482424'), + h: BigInt(1), // Cofactor + lowS: true, // Allow only low-S signatures by default in sign() and verify() + /** + * secp256k1 belongs to Koblitz curves: it has efficiently computable endomorphism. + * Endomorphism uses 2x less RAM, speeds up precomputation by 2x and ECDH / key recovery by 20%. + * For precomputed wNAF it trades off 1/2 init time & 1/3 ram for 20% perf hit. 
+ * Explanation: https://gist.github.com/paulmillr/eb670806793e84df628a7c434a873066 + */ + endo: { + beta: BigInt('0x7ae96a2b657c07106e64479eac3434e99cf0497512f58995c1396c28719501ee'), + splitScalar: (k) => { + const n = secp256k1N; + const a1 = BigInt('0x3086d221a7d46bcde86c90e49284eb15'); + const b1 = -_1n * BigInt('0xe4437ed6010e88286f547fa90abfe4c3'); + const a2 = BigInt('0x114ca50f7a8e2f3f657c1108d9d44cfd8'); + const b2 = a1; + const POW_2_128 = BigInt('0x100000000000000000000000000000000'); // (2n**128n).toString(16) + const c1 = divNearest(b2 * k, n); + const c2 = divNearest(-b1 * k, n); + let k1 = (0, modular_js_1.mod)(k - c1 * a1 - c2 * a2, n); + let k2 = (0, modular_js_1.mod)(-c1 * b1 - c2 * b2, n); + const k1neg = k1 > POW_2_128; + const k2neg = k2 > POW_2_128; + if (k1neg) + k1 = n - k1; + if (k2neg) + k2 = n - k2; + if (k1 > POW_2_128 || k2 > POW_2_128) { + throw new Error('splitScalar: Endomorphism failed, k=' + k); + } + return { k1neg, k1, k2neg, k2 }; + }, + }, +}, sha256_1.sha256); +// Schnorr signatures are superior to ECDSA from above. Below is Schnorr-specific BIP0340 code. +// https://github.com/bitcoin/bips/blob/master/bip-0340.mediawiki +const _0n = BigInt(0); +const fe = (x) => typeof x === 'bigint' && _0n < x && x < secp256k1P; +const ge = (x) => typeof x === 'bigint' && _0n < x && x < secp256k1N; +/** An object mapping tags to their tagged hash prefix of [SHA256(tag) | SHA256(tag)] */ +const TAGGED_HASH_PREFIXES = {}; +function taggedHash(tag, ...messages) { + let tagP = TAGGED_HASH_PREFIXES[tag]; + if (tagP === undefined) { + const tagH = (0, sha256_1.sha256)(Uint8Array.from(tag, (c) => c.charCodeAt(0))); + tagP = (0, utils_js_1.concatBytes)(tagH, tagH); + TAGGED_HASH_PREFIXES[tag] = tagP; + } + return (0, sha256_1.sha256)((0, utils_js_1.concatBytes)(tagP, ...messages)); +} +// ECDSA compact points are 33-byte. 
Schnorr is 32: we strip first byte 0x02 or 0x03 +const pointToBytes = (point) => point.toRawBytes(true).slice(1); +const numTo32b = (n) => (0, utils_js_1.numberToBytesBE)(n, 32); +const modP = (x) => (0, modular_js_1.mod)(x, secp256k1P); +const modN = (x) => (0, modular_js_1.mod)(x, secp256k1N); +const Point = exports.secp256k1.ProjectivePoint; +const GmulAdd = (Q, a, b) => Point.BASE.multiplyAndAddUnsafe(Q, a, b); +// Calculate point, scalar and bytes +function schnorrGetExtPubKey(priv) { + let d_ = exports.secp256k1.utils.normPrivateKeyToScalar(priv); // same method executed in fromPrivateKey + let p = Point.fromPrivateKey(d_); // P = d'⋅G; 0 < d' < n check is done inside + const scalar = p.hasEvenY() ? d_ : modN(-d_); + return { scalar: scalar, bytes: pointToBytes(p) }; +} +/** + * lift_x from BIP340. Convert 32-byte x coordinate to elliptic curve point. + * @returns valid point checked for being on-curve + */ +function lift_x(x) { + if (!fe(x)) + throw new Error('bad x: need 0 < x < p'); // Fail if x ≥ p. + const xx = modP(x * x); + const c = modP(xx * x + BigInt(7)); // Let c = x³ + 7 mod p. + let y = sqrtMod(c); // Let y = c^(p+1)/4 mod p. + if (y % _2n !== _0n) + y = modP(-y); // Return the unique point P such that x(P) = x and + const p = new Point(x, y, _1n); // y(P) = y if y mod 2 = 0 or y(P) = p-y otherwise. + p.assertValidity(); + return p; +} +/** + * Create tagged hash, convert it to bigint, reduce modulo-n. + */ +function challenge(...args) { + return modN((0, utils_js_1.bytesToNumberBE)(taggedHash('BIP0340/challenge', ...args))); +} +/** + * Schnorr public key is just `x` coordinate of Point as per BIP340. + */ +function schnorrGetPublicKey(privateKey) { + return schnorrGetExtPubKey(privateKey).bytes; // d'=int(sk). Fail if d'=0 or d'≥n. Ret bytes(d'⋅G) +} +/** + * Creates Schnorr signature as per BIP340. Verifies itself before returning anything. + * auxRand is optional and is not the sole source of k generation: bad CSPRNG won't be dangerous. 
+ */ +function schnorrSign(message, privateKey, auxRand = (0, utils_1.randomBytes)(32)) { + const m = (0, utils_js_1.ensureBytes)('message', message); + const { bytes: px, scalar: d } = schnorrGetExtPubKey(privateKey); // checks for isWithinCurveOrder + const a = (0, utils_js_1.ensureBytes)('auxRand', auxRand, 32); // Auxiliary random data a: a 32-byte array + const t = numTo32b(d ^ (0, utils_js_1.bytesToNumberBE)(taggedHash('BIP0340/aux', a))); // Let t be the byte-wise xor of bytes(d) and hash/aux(a) + const rand = taggedHash('BIP0340/nonce', t, px, m); // Let rand = hash/nonce(t || bytes(P) || m) + const k_ = modN((0, utils_js_1.bytesToNumberBE)(rand)); // Let k' = int(rand) mod n + if (k_ === _0n) + throw new Error('sign failed: k is zero'); // Fail if k' = 0. + const { bytes: rx, scalar: k } = schnorrGetExtPubKey(k_); // Let R = k'⋅G. + const e = challenge(rx, px, m); // Let e = int(hash/challenge(bytes(R) || bytes(P) || m)) mod n. + const sig = new Uint8Array(64); // Let sig = bytes(R) || bytes((k + ed) mod n). + sig.set(rx, 0); + sig.set(numTo32b(modN(k + e * d)), 32); + // If Verify(bytes(P), m, sig) (see below) returns failure, abort + if (!schnorrVerify(sig, m, px)) + throw new Error('sign: Invalid signature produced'); + return sig; +} +/** + * Verifies Schnorr signature. + * Will swallow errors & return false except for initial type validation of arguments. + */ +function schnorrVerify(signature, message, publicKey) { + const sig = (0, utils_js_1.ensureBytes)('signature', signature, 64); + const m = (0, utils_js_1.ensureBytes)('message', message); + const pub = (0, utils_js_1.ensureBytes)('publicKey', publicKey, 32); + try { + const P = lift_x((0, utils_js_1.bytesToNumberBE)(pub)); // P = lift_x(int(pk)); fail if that fails + const r = (0, utils_js_1.bytesToNumberBE)(sig.subarray(0, 32)); // Let r = int(sig[0:32]); fail if r ≥ p. 
+ if (!fe(r)) + return false; + const s = (0, utils_js_1.bytesToNumberBE)(sig.subarray(32, 64)); // Let s = int(sig[32:64]); fail if s ≥ n. + if (!ge(s)) + return false; + const e = challenge(numTo32b(r), pointToBytes(P), m); // int(challenge(bytes(r)||bytes(P)||m))%n + const R = GmulAdd(P, s, modN(-e)); // R = s⋅G - e⋅P + if (!R || !R.hasEvenY() || R.toAffine().x !== r) + return false; // -eP == (n-e)P + return true; // Fail if is_infinite(R) / not has_even_y(R) / x(R) ≠ r. + } + catch (error) { + return false; + } +} +exports.schnorr = (() => ({ + getPublicKey: schnorrGetPublicKey, + sign: schnorrSign, + verify: schnorrVerify, + utils: { + randomPrivateKey: exports.secp256k1.utils.randomPrivateKey, + lift_x, + pointToBytes, + numberToBytesBE: utils_js_1.numberToBytesBE, + bytesToNumberBE: utils_js_1.bytesToNumberBE, + taggedHash, + mod: modular_js_1.mod, + }, +}))(); +const isoMap = /* @__PURE__ */ (() => (0, hash_to_curve_js_1.isogenyMap)(Fp, [ + // xNum + [ + '0x8e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38daaaaa8c7', + '0x7d3d4c80bc321d5b9f315cea7fd44c5d595d2fc0bf63b92dfff1044f17c6581', + '0x534c328d23f234e6e2a413deca25caece4506144037c40314ecbd0b53d9dd262', + '0x8e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38daaaaa88c', + ], + // xDen + [ + '0xd35771193d94918a9ca34ccbb7b640dd86cd409542f8487d9fe6b745781eb49b', + '0xedadc6f64383dc1df7c4b2d51b54225406d36b641f5e41bbc52a56612a8c6d14', + '0x0000000000000000000000000000000000000000000000000000000000000001', // LAST 1 + ], + // yNum + [ + '0x4bda12f684bda12f684bda12f684bda12f684bda12f684bda12f684b8e38e23c', + '0xc75e0c32d5cb7c0fa9d0a54b12a0a6d5647ab046d686da6fdffc90fc201d71a3', + '0x29a6194691f91a73715209ef6512e576722830a201be2018a765e85a9ecee931', + '0x2f684bda12f684bda12f684bda12f684bda12f684bda12f684bda12f38e38d84', + ], + // yDen + [ + '0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffff93b', + '0x7a06534bb8bdb49fd5e9e6632722c2989467c1bfc8e8d978dfb425d2685c2573', + 
'0x6484aa716545ca2cf3a70c3fa8fe337e0a3d21162f0d6299a7bf8192bfd2a76f', + '0x0000000000000000000000000000000000000000000000000000000000000001', // LAST 1 + ], +].map((i) => i.map((j) => BigInt(j)))))(); +const mapSWU = /* @__PURE__ */ (() => (0, weierstrass_js_1.mapToCurveSimpleSWU)(Fp, { + A: BigInt('0x3f8731abdd661adca08a5558f0f5d272e953d363cb6f0e5d405447c01a444533'), + B: BigInt('1771'), + Z: Fp.create(BigInt('-11')), +}))(); +const htf = /* @__PURE__ */ (() => (0, hash_to_curve_js_1.createHasher)(exports.secp256k1.ProjectivePoint, (scalars) => { + const { x, y } = mapSWU(Fp.create(scalars[0])); + return isoMap(x, y); +}, { + DST: 'secp256k1_XMD:SHA-256_SSWU_RO_', + encodeDST: 'secp256k1_XMD:SHA-256_SSWU_NU_', + p: Fp.ORDER, + m: 1, + k: 128, + expand: 'xmd', + hash: sha256_1.sha256, +}))(); +exports.hashToCurve = (() => htf.hashToCurve)(); +exports.encodeToCurve = (() => htf.encodeToCurve)(); +//# sourceMappingURL=secp256k1.js.map + +/***/ }), + +/***/ 67557: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.output = exports.exists = exports.hash = exports.bytes = exports.bool = exports.number = void 0; +function number(n) { + if (!Number.isSafeInteger(n) || n < 0) + throw new Error(`Wrong positive integer: ${n}`); +} +exports.number = number; +function bool(b) { + if (typeof b !== 'boolean') + throw new Error(`Expected boolean, not ${b}`); +} +exports.bool = bool; +// copied from utils +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +function bytes(b, ...lengths) { + if (!isBytes(b)) + throw new Error('Expected Uint8Array'); + if (lengths.length > 0 && !lengths.includes(b.length)) + throw new Error(`Expected Uint8Array of length ${lengths}, not of length=${b.length}`); +} +exports.bytes = bytes; +function hash(hash) { + if (typeof hash !== 'function' || typeof hash.create !== 
'function') + throw new Error('Hash should be wrapped by utils.wrapConstructor'); + number(hash.outputLen); + number(hash.blockLen); +} +exports.hash = hash; +function exists(instance, checkFinished = true) { + if (instance.destroyed) + throw new Error('Hash instance has been destroyed'); + if (checkFinished && instance.finished) + throw new Error('Hash#digest() has already been called'); +} +exports.exists = exists; +function output(out, instance) { + bytes(out); + const min = instance.outputLen; + if (out.length < min) { + throw new Error(`digestInto() expects output buffer of length at least ${min}`); + } +} +exports.output = output; +const assert = { number, bool, bytes, hash, exists, output }; +exports["default"] = assert; +//# sourceMappingURL=_assert.js.map + +/***/ }), + +/***/ 90915: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SHA2 = void 0; +const _assert_js_1 = __webpack_require__(67557); +const utils_js_1 = __webpack_require__(99175); +// Polyfill for Safari 14 +function setBigUint64(view, byteOffset, value, isLE) { + if (typeof view.setBigUint64 === 'function') + return view.setBigUint64(byteOffset, value, isLE); + const _32n = BigInt(32); + const _u32_max = BigInt(0xffffffff); + const wh = Number((value >> _32n) & _u32_max); + const wl = Number(value & _u32_max); + const h = isLE ? 4 : 0; + const l = isLE ? 
0 : 4; + view.setUint32(byteOffset + h, wh, isLE); + view.setUint32(byteOffset + l, wl, isLE); +} +// Base SHA2 class (RFC 6234) +class SHA2 extends utils_js_1.Hash { + constructor(blockLen, outputLen, padOffset, isLE) { + super(); + this.blockLen = blockLen; + this.outputLen = outputLen; + this.padOffset = padOffset; + this.isLE = isLE; + this.finished = false; + this.length = 0; + this.pos = 0; + this.destroyed = false; + this.buffer = new Uint8Array(blockLen); + this.view = (0, utils_js_1.createView)(this.buffer); + } + update(data) { + (0, _assert_js_1.exists)(this); + const { view, buffer, blockLen } = this; + data = (0, utils_js_1.toBytes)(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + // Fast path: we have at least one block in input, cast it to view and process + if (take === blockLen) { + const dataView = (0, utils_js_1.createView)(data); + for (; blockLen <= len - pos; pos += blockLen) + this.process(dataView, pos); + continue; + } + buffer.set(data.subarray(pos, pos + take), this.pos); + this.pos += take; + pos += take; + if (this.pos === blockLen) { + this.process(view, 0); + this.pos = 0; + } + } + this.length += data.length; + this.roundClean(); + return this; + } + digestInto(out) { + (0, _assert_js_1.exists)(this); + (0, _assert_js_1.output)(out, this); + this.finished = true; + // Padding + // We can avoid allocation of buffer for padding completely if it + // was previously not allocated here. But it won't change performance. 
+ const { buffer, view, blockLen, isLE } = this; + let { pos } = this; + // append the bit '1' to the message + buffer[pos++] = 0b10000000; + this.buffer.subarray(pos).fill(0); + // we have less than padOffset left in buffer, so we cannot put length in current block, need process it and pad again + if (this.padOffset > blockLen - pos) { + this.process(view, 0); + pos = 0; + } + // Pad until full block byte with zeros + for (let i = pos; i < blockLen; i++) + buffer[i] = 0; + // Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that + // You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen. + // So we just write lowest 64 bits of that value. + setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE); + this.process(view, 0); + const oview = (0, utils_js_1.createView)(out); + const len = this.outputLen; + // NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT + if (len % 4) + throw new Error('_sha2: outputLen should be aligned to 32bit'); + const outLen = len / 4; + const state = this.get(); + if (outLen > state.length) + throw new Error('_sha2: outputLen bigger than state'); + for (let i = 0; i < outLen; i++) + oview.setUint32(4 * i, state[i], isLE); + } + digest() { + const { buffer, outputLen } = this; + this.digestInto(buffer); + const res = buffer.slice(0, outputLen); + this.destroy(); + return res; + } + _cloneInto(to) { + to || (to = new this.constructor()); + to.set(...this.get()); + const { blockLen, buffer, length, finished, destroyed, pos } = this; + to.length = length; + to.pos = pos; + to.finished = finished; + to.destroyed = destroyed; + if (length % blockLen) + to.buffer.set(buffer); + return to; + } +} +exports.SHA2 = SHA2; +//# sourceMappingURL=_sha2.js.map + +/***/ }), + +/***/ 22318: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.add5L = 
exports.add5H = exports.add4H = exports.add4L = exports.add3H = exports.add3L = exports.add = exports.rotlBL = exports.rotlBH = exports.rotlSL = exports.rotlSH = exports.rotr32L = exports.rotr32H = exports.rotrBL = exports.rotrBH = exports.rotrSL = exports.rotrSH = exports.shrSL = exports.shrSH = exports.toBig = exports.split = exports.fromBig = void 0; +const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); +const _32n = /* @__PURE__ */ BigInt(32); +// We are not using BigUint64Array, because they are extremely slow as per 2022 +function fromBig(n, le = false) { + if (le) + return { h: Number(n & U32_MASK64), l: Number((n >> _32n) & U32_MASK64) }; + return { h: Number((n >> _32n) & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 }; +} +exports.fromBig = fromBig; +function split(lst, le = false) { + let Ah = new Uint32Array(lst.length); + let Al = new Uint32Array(lst.length); + for (let i = 0; i < lst.length; i++) { + const { h, l } = fromBig(lst[i], le); + [Ah[i], Al[i]] = [h, l]; + } + return [Ah, Al]; +} +exports.split = split; +const toBig = (h, l) => (BigInt(h >>> 0) << _32n) | BigInt(l >>> 0); +exports.toBig = toBig; +// for Shift in [0, 32) +const shrSH = (h, _l, s) => h >>> s; +exports.shrSH = shrSH; +const shrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +exports.shrSL = shrSL; +// Right rotate for Shift in [1, 32) +const rotrSH = (h, l, s) => (h >>> s) | (l << (32 - s)); +exports.rotrSH = rotrSH; +const rotrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +exports.rotrSL = rotrSL; +// Right rotate for Shift in (32, 64), NOTE: 32 is special case. 
+const rotrBH = (h, l, s) => (h << (64 - s)) | (l >>> (s - 32)); +exports.rotrBH = rotrBH; +const rotrBL = (h, l, s) => (h >>> (s - 32)) | (l << (64 - s)); +exports.rotrBL = rotrBL; +// Right rotate for shift===32 (just swaps l&h) +const rotr32H = (_h, l) => l; +exports.rotr32H = rotr32H; +const rotr32L = (h, _l) => h; +exports.rotr32L = rotr32L; +// Left rotate for Shift in [1, 32) +const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s)); +exports.rotlSH = rotlSH; +const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s)); +exports.rotlSL = rotlSL; +// Left rotate for Shift in (32, 64), NOTE: 32 is special case. +const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); +exports.rotlBH = rotlBH; +const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); +exports.rotlBL = rotlBL; +// JS uses 32-bit signed integers for bitwise operations which means we cannot +// simple take carry out of low bit sum by shift, we need to use division. +function add(Ah, Al, Bh, Bl) { + const l = (Al >>> 0) + (Bl >>> 0); + return { h: (Ah + Bh + ((l / 2 ** 32) | 0)) | 0, l: l | 0 }; +} +exports.add = add; +// Addition with more than 2 elements +const add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0); +exports.add3L = add3L; +const add3H = (low, Ah, Bh, Ch) => (Ah + Bh + Ch + ((low / 2 ** 32) | 0)) | 0; +exports.add3H = add3H; +const add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0); +exports.add4L = add4L; +const add4H = (low, Ah, Bh, Ch, Dh) => (Ah + Bh + Ch + Dh + ((low / 2 ** 32) | 0)) | 0; +exports.add4H = add4H; +const add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0); +exports.add5L = add5L; +const add5H = (low, Ah, Bh, Ch, Dh, Eh) => (Ah + Bh + Ch + Dh + Eh + ((low / 2 ** 32) | 0)) | 0; +exports.add5H = add5H; +// prettier-ignore +const u64 = { + fromBig, split, toBig, + shrSH, shrSL, + rotrSH, rotrSL, rotrBH, rotrBL, + rotr32H, rotr32L, + rotlSH, rotlSL, rotlBH, rotlBL, + add, add3L, 
add3H, add4L, add4H, add5H, add5L, +}; +exports["default"] = u64; +//# sourceMappingURL=_u64.js.map + +/***/ }), + +/***/ 25145: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.crypto = void 0; +exports.crypto = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined; +//# sourceMappingURL=crypto.js.map + +/***/ }), + +/***/ 39615: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hmac = exports.HMAC = void 0; +const _assert_js_1 = __webpack_require__(67557); +const utils_js_1 = __webpack_require__(99175); +// HMAC (RFC 2104) +class HMAC extends utils_js_1.Hash { + constructor(hash, _key) { + super(); + this.finished = false; + this.destroyed = false; + (0, _assert_js_1.hash)(hash); + const key = (0, utils_js_1.toBytes)(_key); + this.iHash = hash.create(); + if (typeof this.iHash.update !== 'function') + throw new Error('Expected instance of class which extends utils.Hash'); + this.blockLen = this.iHash.blockLen; + this.outputLen = this.iHash.outputLen; + const blockLen = this.blockLen; + const pad = new Uint8Array(blockLen); + // blockLen can be bigger than outputLen + pad.set(key.length > blockLen ? 
hash.create().update(key).digest() : key); + for (let i = 0; i < pad.length; i++) + pad[i] ^= 0x36; + this.iHash.update(pad); + // By doing update (processing of first block) of outer hash here we can re-use it between multiple calls via clone + this.oHash = hash.create(); + // Undo internal XOR && apply outer XOR + for (let i = 0; i < pad.length; i++) + pad[i] ^= 0x36 ^ 0x5c; + this.oHash.update(pad); + pad.fill(0); + } + update(buf) { + (0, _assert_js_1.exists)(this); + this.iHash.update(buf); + return this; + } + digestInto(out) { + (0, _assert_js_1.exists)(this); + (0, _assert_js_1.bytes)(out, this.outputLen); + this.finished = true; + this.iHash.digestInto(out); + this.oHash.update(out); + this.oHash.digestInto(out); + this.destroy(); + } + digest() { + const out = new Uint8Array(this.oHash.outputLen); + this.digestInto(out); + return out; + } + _cloneInto(to) { + // Create new instance without calling constructor since key already in state and we don't know it. + to || (to = Object.create(Object.getPrototypeOf(this), {})); + const { oHash, iHash, finished, destroyed, blockLen, outputLen } = this; + to = to; + to.finished = finished; + to.destroyed = destroyed; + to.blockLen = blockLen; + to.outputLen = outputLen; + to.oHash = oHash._cloneInto(to.oHash); + to.iHash = iHash._cloneInto(to.iHash); + return to; + } + destroy() { + this.destroyed = true; + this.oHash.destroy(); + this.iHash.destroy(); + } +} +exports.HMAC = HMAC; +/** + * HMAC: RFC2104 message authentication code. + * @param hash - function that would be used e.g. 
sha256 + * @param key - message key + * @param message - message data + */ +const hmac = (hash, key, message) => new HMAC(hash, key).update(message).digest(); +exports.hmac = hmac; +exports.hmac.create = (hash, key) => new HMAC(hash, key); +//# sourceMappingURL=hmac.js.map + +/***/ }), + +/***/ 22623: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.sha224 = exports.sha256 = void 0; +const _sha2_js_1 = __webpack_require__(90915); +const utils_js_1 = __webpack_require__(99175); +// SHA2-256 need to try 2^128 hashes to execute birthday attack. +// BTC network is doing 2^67 hashes/sec as per early 2023. +// Choice: a ? b : c +const Chi = (a, b, c) => (a & b) ^ (~a & c); +// Majority function, true if any two inpust is true +const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c); +// Round constants: +// first 32 bits of the fractional parts of the cube roots of the first 64 primes 2..311) +// prettier-ignore +const SHA256_K = /* @__PURE__ */ new Uint32Array([ + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, + 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, + 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, + 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, + 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, + 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, + 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 +]); +// Initial state (first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19): +// prettier-ignore 
+const IV = /* @__PURE__ */ new Uint32Array([ + 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 +]); +// Temporary buffer, not used to store anything between runs +// Named this way because it matches specification. +const SHA256_W = /* @__PURE__ */ new Uint32Array(64); +class SHA256 extends _sha2_js_1.SHA2 { + constructor() { + super(64, 32, 8, false); + // We cannot use array here since array allows indexing by variable + // which means optimizer/compiler cannot use registers. + this.A = IV[0] | 0; + this.B = IV[1] | 0; + this.C = IV[2] | 0; + this.D = IV[3] | 0; + this.E = IV[4] | 0; + this.F = IV[5] | 0; + this.G = IV[6] | 0; + this.H = IV[7] | 0; + } + get() { + const { A, B, C, D, E, F, G, H } = this; + return [A, B, C, D, E, F, G, H]; + } + // prettier-ignore + set(A, B, C, D, E, F, G, H) { + this.A = A | 0; + this.B = B | 0; + this.C = C | 0; + this.D = D | 0; + this.E = E | 0; + this.F = F | 0; + this.G = G | 0; + this.H = H | 0; + } + process(view, offset) { + // Extend the first 16 words into the remaining 48 words w[16..63] of the message schedule array + for (let i = 0; i < 16; i++, offset += 4) + SHA256_W[i] = view.getUint32(offset, false); + for (let i = 16; i < 64; i++) { + const W15 = SHA256_W[i - 15]; + const W2 = SHA256_W[i - 2]; + const s0 = (0, utils_js_1.rotr)(W15, 7) ^ (0, utils_js_1.rotr)(W15, 18) ^ (W15 >>> 3); + const s1 = (0, utils_js_1.rotr)(W2, 17) ^ (0, utils_js_1.rotr)(W2, 19) ^ (W2 >>> 10); + SHA256_W[i] = (s1 + SHA256_W[i - 7] + s0 + SHA256_W[i - 16]) | 0; + } + // Compression function main loop, 64 rounds + let { A, B, C, D, E, F, G, H } = this; + for (let i = 0; i < 64; i++) { + const sigma1 = (0, utils_js_1.rotr)(E, 6) ^ (0, utils_js_1.rotr)(E, 11) ^ (0, utils_js_1.rotr)(E, 25); + const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0; + const sigma0 = (0, utils_js_1.rotr)(A, 2) ^ (0, utils_js_1.rotr)(A, 13) ^ (0, utils_js_1.rotr)(A, 22); + const T2 = (sigma0 + Maj(A, 
B, C)) | 0; + H = G; + G = F; + F = E; + E = (D + T1) | 0; + D = C; + C = B; + B = A; + A = (T1 + T2) | 0; + } + // Add the compressed chunk to the current hash value + A = (A + this.A) | 0; + B = (B + this.B) | 0; + C = (C + this.C) | 0; + D = (D + this.D) | 0; + E = (E + this.E) | 0; + F = (F + this.F) | 0; + G = (G + this.G) | 0; + H = (H + this.H) | 0; + this.set(A, B, C, D, E, F, G, H); + } + roundClean() { + SHA256_W.fill(0); + } + destroy() { + this.set(0, 0, 0, 0, 0, 0, 0, 0); + this.buffer.fill(0); + } +} +// Constants from https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf +class SHA224 extends SHA256 { + constructor() { + super(); + this.A = 0xc1059ed8 | 0; + this.B = 0x367cd507 | 0; + this.C = 0x3070dd17 | 0; + this.D = 0xf70e5939 | 0; + this.E = 0xffc00b31 | 0; + this.F = 0x68581511 | 0; + this.G = 0x64f98fa7 | 0; + this.H = 0xbefa4fa4 | 0; + this.outputLen = 28; + } +} +/** + * SHA2-256 hash function + * @param message - data that would be hashed + */ +exports.sha256 = (0, utils_js_1.wrapConstructor)(() => new SHA256()); +exports.sha224 = (0, utils_js_1.wrapConstructor)(() => new SHA224()); +//# sourceMappingURL=sha256.js.map + +/***/ }), + +/***/ 32955: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.shake256 = exports.shake128 = exports.keccak_512 = exports.keccak_384 = exports.keccak_256 = exports.keccak_224 = exports.sha3_512 = exports.sha3_384 = exports.sha3_256 = exports.sha3_224 = exports.Keccak = exports.keccakP = void 0; +const _assert_js_1 = __webpack_require__(67557); +const _u64_js_1 = __webpack_require__(22318); +const utils_js_1 = __webpack_require__(99175); +// SHA3 (keccak) is based on a new design: basically, the internal state is bigger than output size. +// It's called a sponge function. 
+// Various per round constants calculations +const [SHA3_PI, SHA3_ROTL, _SHA3_IOTA] = [[], [], []]; +const _0n = /* @__PURE__ */ BigInt(0); +const _1n = /* @__PURE__ */ BigInt(1); +const _2n = /* @__PURE__ */ BigInt(2); +const _7n = /* @__PURE__ */ BigInt(7); +const _256n = /* @__PURE__ */ BigInt(256); +const _0x71n = /* @__PURE__ */ BigInt(0x71); +for (let round = 0, R = _1n, x = 1, y = 0; round < 24; round++) { + // Pi + [x, y] = [y, (2 * x + 3 * y) % 5]; + SHA3_PI.push(2 * (5 * y + x)); + // Rotational + SHA3_ROTL.push((((round + 1) * (round + 2)) / 2) % 64); + // Iota + let t = _0n; + for (let j = 0; j < 7; j++) { + R = ((R << _1n) ^ ((R >> _7n) * _0x71n)) % _256n; + if (R & _2n) + t ^= _1n << ((_1n << /* @__PURE__ */ BigInt(j)) - _1n); + } + _SHA3_IOTA.push(t); +} +const [SHA3_IOTA_H, SHA3_IOTA_L] = /* @__PURE__ */ (0, _u64_js_1.split)(_SHA3_IOTA, true); +// Left rotation (without 0, 32, 64) +const rotlH = (h, l, s) => (s > 32 ? (0, _u64_js_1.rotlBH)(h, l, s) : (0, _u64_js_1.rotlSH)(h, l, s)); +const rotlL = (h, l, s) => (s > 32 ? 
(0, _u64_js_1.rotlBL)(h, l, s) : (0, _u64_js_1.rotlSL)(h, l, s)); +// Same as keccakf1600, but allows to skip some rounds +function keccakP(s, rounds = 24) { + const B = new Uint32Array(5 * 2); + // NOTE: all indices are x2 since we store state as u32 instead of u64 (bigints to slow in js) + for (let round = 24 - rounds; round < 24; round++) { + // Theta θ + for (let x = 0; x < 10; x++) + B[x] = s[x] ^ s[x + 10] ^ s[x + 20] ^ s[x + 30] ^ s[x + 40]; + for (let x = 0; x < 10; x += 2) { + const idx1 = (x + 8) % 10; + const idx0 = (x + 2) % 10; + const B0 = B[idx0]; + const B1 = B[idx0 + 1]; + const Th = rotlH(B0, B1, 1) ^ B[idx1]; + const Tl = rotlL(B0, B1, 1) ^ B[idx1 + 1]; + for (let y = 0; y < 50; y += 10) { + s[x + y] ^= Th; + s[x + y + 1] ^= Tl; + } + } + // Rho (ρ) and Pi (π) + let curH = s[2]; + let curL = s[3]; + for (let t = 0; t < 24; t++) { + const shift = SHA3_ROTL[t]; + const Th = rotlH(curH, curL, shift); + const Tl = rotlL(curH, curL, shift); + const PI = SHA3_PI[t]; + curH = s[PI]; + curL = s[PI + 1]; + s[PI] = Th; + s[PI + 1] = Tl; + } + // Chi (χ) + for (let y = 0; y < 50; y += 10) { + for (let x = 0; x < 10; x++) + B[x] = s[y + x]; + for (let x = 0; x < 10; x++) + s[y + x] ^= ~B[(x + 2) % 10] & B[(x + 4) % 10]; + } + // Iota (ι) + s[0] ^= SHA3_IOTA_H[round]; + s[1] ^= SHA3_IOTA_L[round]; + } + B.fill(0); +} +exports.keccakP = keccakP; +class Keccak extends utils_js_1.Hash { + // NOTE: we accept arguments in bytes instead of bits here. + constructor(blockLen, suffix, outputLen, enableXOF = false, rounds = 24) { + super(); + this.blockLen = blockLen; + this.suffix = suffix; + this.outputLen = outputLen; + this.enableXOF = enableXOF; + this.rounds = rounds; + this.pos = 0; + this.posOut = 0; + this.finished = false; + this.destroyed = false; + // Can be passed from user as dkLen + (0, _assert_js_1.number)(outputLen); + // 1600 = 5x5 matrix of 64bit. 
1600 bits === 200 bytes + if (0 >= this.blockLen || this.blockLen >= 200) + throw new Error('Sha3 supports only keccak-f1600 function'); + this.state = new Uint8Array(200); + this.state32 = (0, utils_js_1.u32)(this.state); + } + keccak() { + keccakP(this.state32, this.rounds); + this.posOut = 0; + this.pos = 0; + } + update(data) { + (0, _assert_js_1.exists)(this); + const { blockLen, state } = this; + data = (0, utils_js_1.toBytes)(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + for (let i = 0; i < take; i++) + state[this.pos++] ^= data[pos++]; + if (this.pos === blockLen) + this.keccak(); + } + return this; + } + finish() { + if (this.finished) + return; + this.finished = true; + const { state, suffix, pos, blockLen } = this; + // Do the padding + state[pos] ^= suffix; + if ((suffix & 0x80) !== 0 && pos === blockLen - 1) + this.keccak(); + state[blockLen - 1] ^= 0x80; + this.keccak(); + } + writeInto(out) { + (0, _assert_js_1.exists)(this, false); + (0, _assert_js_1.bytes)(out); + this.finish(); + const bufferOut = this.state; + const { blockLen } = this; + for (let pos = 0, len = out.length; pos < len;) { + if (this.posOut >= blockLen) + this.keccak(); + const take = Math.min(blockLen - this.posOut, len - pos); + out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos); + this.posOut += take; + pos += take; + } + return out; + } + xofInto(out) { + // Sha3/Keccak usage with XOF is probably mistake, only SHAKE instances can do XOF + if (!this.enableXOF) + throw new Error('XOF is not possible for this instance'); + return this.writeInto(out); + } + xof(bytes) { + (0, _assert_js_1.number)(bytes); + return this.xofInto(new Uint8Array(bytes)); + } + digestInto(out) { + (0, _assert_js_1.output)(out, this); + if (this.finished) + throw new Error('digest() was already called'); + this.writeInto(out); + this.destroy(); + return out; + } + digest() { + return this.digestInto(new 
Uint8Array(this.outputLen)); + } + destroy() { + this.destroyed = true; + this.state.fill(0); + } + _cloneInto(to) { + const { blockLen, suffix, outputLen, rounds, enableXOF } = this; + to || (to = new Keccak(blockLen, suffix, outputLen, enableXOF, rounds)); + to.state32.set(this.state32); + to.pos = this.pos; + to.posOut = this.posOut; + to.finished = this.finished; + to.rounds = rounds; + // Suffix can change in cSHAKE + to.suffix = suffix; + to.outputLen = outputLen; + to.enableXOF = enableXOF; + to.destroyed = this.destroyed; + return to; + } +} +exports.Keccak = Keccak; +const gen = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapConstructor)(() => new Keccak(blockLen, suffix, outputLen)); +exports.sha3_224 = gen(0x06, 144, 224 / 8); +/** + * SHA3-256 hash function + * @param message - that would be hashed + */ +exports.sha3_256 = gen(0x06, 136, 256 / 8); +exports.sha3_384 = gen(0x06, 104, 384 / 8); +exports.sha3_512 = gen(0x06, 72, 512 / 8); +exports.keccak_224 = gen(0x01, 144, 224 / 8); +/** + * keccak-256 hash function. Different from SHA3-256. + * @param message - that would be hashed + */ +exports.keccak_256 = gen(0x01, 136, 256 / 8); +exports.keccak_384 = gen(0x01, 104, 384 / 8); +exports.keccak_512 = gen(0x01, 72, 512 / 8); +const genShake = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapXOFConstructorWithOpts)((opts = {}) => new Keccak(blockLen, suffix, opts.dkLen === undefined ? outputLen : opts.dkLen, true)); +exports.shake128 = genShake(0x1f, 168, 128 / 8); +exports.shake256 = genShake(0x1f, 136, 256 / 8); +//# sourceMappingURL=sha3.js.map + +/***/ }), + +/***/ 99175: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +/*! 
noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.randomBytes = exports.wrapXOFConstructorWithOpts = exports.wrapConstructorWithOpts = exports.wrapConstructor = exports.checkOpts = exports.Hash = exports.concatBytes = exports.toBytes = exports.utf8ToBytes = exports.asyncLoop = exports.nextTick = exports.hexToBytes = exports.bytesToHex = exports.isLE = exports.rotr = exports.createView = exports.u32 = exports.u8 = void 0; +// We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. +// node.js versions earlier than v19 don't declare it in global scope. +// For node.js, package.json#exports field mapping rewrites import +// from `crypto` to `cryptoNode`, which imports native module. +// Makes the utils un-importable in browsers without a bundler. +// Once node.js 18 is deprecated (2025-04-30), we can just drop the import. +const crypto_1 = __webpack_require__(25145); +// Cast array to different type +const u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); +exports.u8 = u8; +const u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); +exports.u32 = u32; +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +// Cast array to view +const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); +exports.createView = createView; +// The rotate right (circular right shift) operation for uint32 +const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift); +exports.rotr = rotr; +// big-endian hardware is rare. Just in case someone still decides to run hashes: +// early-throw an error because we don't support BE yet. +// Other libraries would silently corrupt the data instead of throwing an error, +// when they don't support it. 
+exports.isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44; +if (!exports.isLE) + throw new Error('Non little-endian hardware is not supported'); +// Array where index 0xf0 (240) is mapped to string 'f0' +const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); +/** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ +function bytesToHex(bytes) { + if (!isBytes(bytes)) + throw new Error('Uint8Array expected'); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; +} +exports.bytesToHex = bytesToHex; +// We use optimized technique to convert hex string to byte array +const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 }; +function asciiToBase16(char) { + if (char >= asciis._0 && char <= asciis._9) + return char - asciis._0; + if (char >= asciis._A && char <= asciis._F) + return char - (asciis._A - 10); + if (char >= asciis._a && char <= asciis._f) + return char - (asciis._a - 10); + return; +} +/** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ +function hexToBytes(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const hl = hex.length; + const al = hl / 2; + if (hl % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + hl); + const array = new Uint8Array(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = asciiToBase16(hex.charCodeAt(hi)); + const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); + if (n1 === undefined || n2 === undefined) { + const char = hex[hi] + hex[hi + 1]; + throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); + } + array[ai] = n1 * 16 + n2; + } + return array; +} +exports.hexToBytes = hexToBytes; +// There is no setImmediate in browser and setTimeout is slow. 
+// call of async fn will return Promise, which will be fullfiled only on +// next scheduler queue processing step and this is exactly what we need. +const nextTick = async () => { }; +exports.nextTick = nextTick; +// Returns control to thread each 'tick' ms to avoid blocking +async function asyncLoop(iters, tick, cb) { + let ts = Date.now(); + for (let i = 0; i < iters; i++) { + cb(i); + // Date.now() is not monotonic, so in case if clock goes backwards we return return control too + const diff = Date.now() - ts; + if (diff >= 0 && diff < tick) + continue; + await (0, exports.nextTick)(); + ts += diff; + } +} +exports.asyncLoop = asyncLoop; +/** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ +function utf8ToBytes(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 +} +exports.utf8ToBytes = utf8ToBytes; +/** + * Normalizes (non-hex) string or Uint8Array to Uint8Array. + * Warning: when Uint8Array is passed, it would NOT get copied. + * Keep in mind for future mutable operations. + */ +function toBytes(data) { + if (typeof data === 'string') + data = utf8ToBytes(data); + if (!isBytes(data)) + throw new Error(`expected Uint8Array, got ${typeof data}`); + return data; +} +exports.toBytes = toBytes; +/** + * Copies several Uint8Arrays into one. 
+ */ +function concatBytes(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + if (!isBytes(a)) + throw new Error('Uint8Array expected'); + sum += a.length; + } + const res = new Uint8Array(sum); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; +} +exports.concatBytes = concatBytes; +// For runtime check if class implements interface +class Hash { + // Safe version that clones internal state + clone() { + return this._cloneInto(); + } +} +exports.Hash = Hash; +const toStr = {}.toString; +function checkOpts(defaults, opts) { + if (opts !== undefined && toStr.call(opts) !== '[object Object]') + throw new Error('Options should be object or undefined'); + const merged = Object.assign(defaults, opts); + return merged; +} +exports.checkOpts = checkOpts; +function wrapConstructor(hashCons) { + const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); + const tmp = hashCons(); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = () => hashCons(); + return hashC; +} +exports.wrapConstructor = wrapConstructor; +function wrapConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; +} +exports.wrapConstructorWithOpts = wrapConstructorWithOpts; +function wrapXOFConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; +} +exports.wrapXOFConstructorWithOpts = wrapXOFConstructorWithOpts; +/** + * Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS. 
+ */ +function randomBytes(bytesLength = 32) { + if (crypto_1.crypto && typeof crypto_1.crypto.getRandomValues === 'function') { + return crypto_1.crypto.getRandomValues(new Uint8Array(bytesLength)); + } + throw new Error('crypto.getRandomValues must be defined'); +} +exports.randomBytes = randomBytes; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 63203: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/*! scure-base - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.bytes = exports.stringToBytes = exports.str = exports.bytesToString = exports.hex = exports.utf8 = exports.bech32m = exports.bech32 = exports.base58check = exports.createBase58check = exports.base58xmr = exports.base58xrp = exports.base58flickr = exports.base58 = exports.base64urlnopad = exports.base64url = exports.base64nopad = exports.base64 = exports.base32crockford = exports.base32hex = exports.base32 = exports.base16 = exports.utils = exports.assertNumber = void 0; +// Utilities +/** + * @__NO_SIDE_EFFECTS__ + */ +function assertNumber(n) { + if (!Number.isSafeInteger(n)) + throw new Error(`Wrong integer: ${n}`); +} +exports.assertNumber = assertNumber; +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function chain(...args) { + const id = (a) => a; + // Wrap call in closure so JIT can inline calls + const wrap = (a, b) => (c) => a(b(c)); + // Construct chain of args[-1].encode(args[-2].encode([...])) + const encode = args.map((x) => x.encode).reduceRight(wrap, id); + // Construct chain of args[0].decode(args[1].decode(...)) + const decode = args.map((x) => x.decode).reduce(wrap, id); + return { encode, decode }; +} +/** + * Encodes integer radix representation to array of strings using alphabet and back + * @__NO_SIDE_EFFECTS__ + */ +function alphabet(alphabet) { 
+ return { + encode: (digits) => { + if (!Array.isArray(digits) || (digits.length && typeof digits[0] !== 'number')) + throw new Error('alphabet.encode input should be an array of numbers'); + return digits.map((i) => { + assertNumber(i); + if (i < 0 || i >= alphabet.length) + throw new Error(`Digit index outside alphabet: ${i} (alphabet: ${alphabet.length})`); + return alphabet[i]; + }); + }, + decode: (input) => { + if (!Array.isArray(input) || (input.length && typeof input[0] !== 'string')) + throw new Error('alphabet.decode input should be array of strings'); + return input.map((letter) => { + if (typeof letter !== 'string') + throw new Error(`alphabet.decode: not string element=${letter}`); + const index = alphabet.indexOf(letter); + if (index === -1) + throw new Error(`Unknown letter: "${letter}". Allowed: ${alphabet}`); + return index; + }); + }, + }; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function join(separator = '') { + if (typeof separator !== 'string') + throw new Error('join separator should be string'); + return { + encode: (from) => { + if (!Array.isArray(from) || (from.length && typeof from[0] !== 'string')) + throw new Error('join.encode input should be array of strings'); + for (let i of from) + if (typeof i !== 'string') + throw new Error(`join.encode: non-string input=${i}`); + return from.join(separator); + }, + decode: (to) => { + if (typeof to !== 'string') + throw new Error('join.decode input should be string'); + return to.split(separator); + }, + }; +} +/** + * Pad strings array so it has integer number of bits + * @__NO_SIDE_EFFECTS__ + */ +function padding(bits, chr = '=') { + assertNumber(bits); + if (typeof chr !== 'string') + throw new Error('padding chr should be string'); + return { + encode(data) { + if (!Array.isArray(data) || (data.length && typeof data[0] !== 'string')) + throw new Error('padding.encode input should be array of strings'); + for (let i of data) + if (typeof i !== 'string') + throw new Error(`padding.encode: 
non-string input=${i}`); + while ((data.length * bits) % 8) + data.push(chr); + return data; + }, + decode(input) { + if (!Array.isArray(input) || (input.length && typeof input[0] !== 'string')) + throw new Error('padding.encode input should be array of strings'); + for (let i of input) + if (typeof i !== 'string') + throw new Error(`padding.decode: non-string input=${i}`); + let end = input.length; + if ((end * bits) % 8) + throw new Error('Invalid padding: string should have whole number of bytes'); + for (; end > 0 && input[end - 1] === chr; end--) { + if (!(((end - 1) * bits) % 8)) + throw new Error('Invalid padding: string has too much padding'); + } + return input.slice(0, end); + }, + }; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function normalize(fn) { + if (typeof fn !== 'function') + throw new Error('normalize fn should be function'); + return { encode: (from) => from, decode: (to) => fn(to) }; +} +/** + * Slow: O(n^2) time complexity + * @__NO_SIDE_EFFECTS__ + */ +function convertRadix(data, from, to) { + // base 1 is impossible + if (from < 2) + throw new Error(`convertRadix: wrong from=${from}, base cannot be less than 2`); + if (to < 2) + throw new Error(`convertRadix: wrong to=${to}, base cannot be less than 2`); + if (!Array.isArray(data)) + throw new Error('convertRadix: data should be array'); + if (!data.length) + return []; + let pos = 0; + const res = []; + const digits = Array.from(data); + digits.forEach((d) => { + assertNumber(d); + if (d < 0 || d >= from) + throw new Error(`Wrong integer: ${d}`); + }); + while (true) { + let carry = 0; + let done = true; + for (let i = pos; i < digits.length; i++) { + const digit = digits[i]; + const digitBase = from * carry + digit; + if (!Number.isSafeInteger(digitBase) || + (from * carry) / from !== carry || + digitBase - digit !== from * carry) { + throw new Error('convertRadix: carry overflow'); + } + carry = digitBase % to; + const rounded = Math.floor(digitBase / to); + digits[i] = rounded; + if 
(!Number.isSafeInteger(rounded) || rounded * to + carry !== digitBase) + throw new Error('convertRadix: carry overflow'); + if (!done) + continue; + else if (!rounded) + pos = i; + else + done = false; + } + res.push(carry); + if (done) + break; + } + for (let i = 0; i < data.length - 1 && data[i] === 0; i++) + res.push(0); + return res.reverse(); +} +const gcd = /* @__NO_SIDE_EFFECTS__ */ (a, b) => (!b ? a : gcd(b, a % b)); +const radix2carry = /*@__NO_SIDE_EFFECTS__ */ (from, to) => from + (to - gcd(from, to)); +/** + * Implemented with numbers, because BigInt is 5x slower + * @__NO_SIDE_EFFECTS__ + */ +function convertRadix2(data, from, to, padding) { + if (!Array.isArray(data)) + throw new Error('convertRadix2: data should be array'); + if (from <= 0 || from > 32) + throw new Error(`convertRadix2: wrong from=${from}`); + if (to <= 0 || to > 32) + throw new Error(`convertRadix2: wrong to=${to}`); + if (radix2carry(from, to) > 32) { + throw new Error(`convertRadix2: carry overflow from=${from} to=${to} carryBits=${radix2carry(from, to)}`); + } + let carry = 0; + let pos = 0; // bitwise position in current element + const mask = 2 ** to - 1; + const res = []; + for (const n of data) { + assertNumber(n); + if (n >= 2 ** from) + throw new Error(`convertRadix2: invalid data word=${n} from=${from}`); + carry = (carry << from) | n; + if (pos + from > 32) + throw new Error(`convertRadix2: carry overflow pos=${pos} from=${from}`); + pos += from; + for (; pos >= to; pos -= to) + res.push(((carry >> (pos - to)) & mask) >>> 0); + carry &= 2 ** pos - 1; // clean carry, otherwise it will cause overflow + } + carry = (carry << (to - pos)) & mask; + if (!padding && pos >= from) + throw new Error('Excess padding'); + if (!padding && carry) + throw new Error(`Non-zero padding: ${carry}`); + if (padding && pos > 0) + res.push(carry >>> 0); + return res; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function radix(num) { + assertNumber(num); + return { + encode: (bytes) => { + if 
(!isBytes(bytes)) + throw new Error('radix.encode input should be Uint8Array'); + return convertRadix(Array.from(bytes), 2 ** 8, num); + }, + decode: (digits) => { + if (!Array.isArray(digits) || (digits.length && typeof digits[0] !== 'number')) + throw new Error('radix.decode input should be array of numbers'); + return Uint8Array.from(convertRadix(digits, num, 2 ** 8)); + }, + }; +} +/** + * If both bases are power of same number (like `2**8 <-> 2**64`), + * there is a linear algorithm. For now we have implementation for power-of-two bases only. + * @__NO_SIDE_EFFECTS__ + */ +function radix2(bits, revPadding = false) { + assertNumber(bits); + if (bits <= 0 || bits > 32) + throw new Error('radix2: bits should be in (0..32]'); + if (radix2carry(8, bits) > 32 || radix2carry(bits, 8) > 32) + throw new Error('radix2: carry overflow'); + return { + encode: (bytes) => { + if (!isBytes(bytes)) + throw new Error('radix2.encode input should be Uint8Array'); + return convertRadix2(Array.from(bytes), 8, bits, !revPadding); + }, + decode: (digits) => { + if (!Array.isArray(digits) || (digits.length && typeof digits[0] !== 'number')) + throw new Error('radix2.decode input should be array of numbers'); + return Uint8Array.from(convertRadix2(digits, bits, 8, revPadding)); + }, + }; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function unsafeWrapper(fn) { + if (typeof fn !== 'function') + throw new Error('unsafeWrapper fn should be function'); + return function (...args) { + try { + return fn.apply(null, args); + } + catch (e) { } + }; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function checksum(len, fn) { + assertNumber(len); + if (typeof fn !== 'function') + throw new Error('checksum fn should be function'); + return { + encode(data) { + if (!isBytes(data)) + throw new Error('checksum.encode: input should be Uint8Array'); + const checksum = fn(data).slice(0, len); + const res = new Uint8Array(data.length + len); + res.set(data); + res.set(checksum, data.length); + return res; + }, + 
decode(data) { + if (!isBytes(data)) + throw new Error('checksum.decode: input should be Uint8Array'); + const payload = data.slice(0, -len); + const newChecksum = fn(payload).slice(0, len); + const oldChecksum = data.slice(-len); + for (let i = 0; i < len; i++) + if (newChecksum[i] !== oldChecksum[i]) + throw new Error('Invalid checksum'); + return payload; + }, + }; +} +// prettier-ignore +exports.utils = { + alphabet, chain, checksum, convertRadix, convertRadix2, radix, radix2, join, padding, +}; +// RFC 4648 aka RFC 3548 +// --------------------- +exports.base16 = chain(radix2(4), alphabet('0123456789ABCDEF'), join('')); +exports.base32 = chain(radix2(5), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'), padding(5), join('')); +exports.base32hex = chain(radix2(5), alphabet('0123456789ABCDEFGHIJKLMNOPQRSTUV'), padding(5), join('')); +exports.base32crockford = chain(radix2(5), alphabet('0123456789ABCDEFGHJKMNPQRSTVWXYZ'), join(''), normalize((s) => s.toUpperCase().replace(/O/g, '0').replace(/[IL]/g, '1'))); +exports.base64 = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'), padding(6), join('')); +exports.base64nopad = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'), join('')); +exports.base64url = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'), padding(6), join('')); +exports.base64urlnopad = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'), join('')); +// base58 code +// ----------- +const genBase58 = (abc) => chain(radix(58), alphabet(abc), join('')); +exports.base58 = genBase58('123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'); +exports.base58flickr = genBase58('123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ'); +exports.base58xrp = genBase58('rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz'); +// xmr ver is done in 8-byte blocks (which equals 11 
chars in decoding). Last (non-full) block padded with '1' to size in XMR_BLOCK_LEN. +// Block encoding significantly reduces quadratic complexity of base58. +// Data len (index) -> encoded block len +const XMR_BLOCK_LEN = [0, 2, 3, 5, 6, 7, 9, 10, 11]; +exports.base58xmr = { + encode(data) { + let res = ''; + for (let i = 0; i < data.length; i += 8) { + const block = data.subarray(i, i + 8); + res += exports.base58.encode(block).padStart(XMR_BLOCK_LEN[block.length], '1'); + } + return res; + }, + decode(str) { + let res = []; + for (let i = 0; i < str.length; i += 11) { + const slice = str.slice(i, i + 11); + const blockLen = XMR_BLOCK_LEN.indexOf(slice.length); + const block = exports.base58.decode(slice); + for (let j = 0; j < block.length - blockLen; j++) { + if (block[j] !== 0) + throw new Error('base58xmr: wrong padding'); + } + res = res.concat(Array.from(block.slice(block.length - blockLen))); + } + return Uint8Array.from(res); + }, +}; +const createBase58check = (sha256) => chain(checksum(4, (data) => sha256(sha256(data))), exports.base58); +exports.createBase58check = createBase58check; +// legacy export, bad name +exports.base58check = exports.createBase58check; +const BECH_ALPHABET = /* @__PURE__ */ chain(alphabet('qpzry9x8gf2tvdw0s3jn54khce6mua7l'), join('')); +const POLYMOD_GENERATORS = [0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3]; +/** + * @__NO_SIDE_EFFECTS__ + */ +function bech32Polymod(pre) { + const b = pre >> 25; + let chk = (pre & 0x1ffffff) << 5; + for (let i = 0; i < POLYMOD_GENERATORS.length; i++) { + if (((b >> i) & 1) === 1) + chk ^= POLYMOD_GENERATORS[i]; + } + return chk; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function bechChecksum(prefix, words, encodingConst = 1) { + const len = prefix.length; + let chk = 1; + for (let i = 0; i < len; i++) { + const c = prefix.charCodeAt(i); + if (c < 33 || c > 126) + throw new Error(`Invalid prefix (${prefix})`); + chk = bech32Polymod(chk) ^ (c >> 5); + } + chk = bech32Polymod(chk); + 
for (let i = 0; i < len; i++) + chk = bech32Polymod(chk) ^ (prefix.charCodeAt(i) & 0x1f); + for (let v of words) + chk = bech32Polymod(chk) ^ v; + for (let i = 0; i < 6; i++) + chk = bech32Polymod(chk); + chk ^= encodingConst; + return BECH_ALPHABET.encode(convertRadix2([chk % 2 ** 30], 30, 5, false)); +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function genBech32(encoding) { + const ENCODING_CONST = encoding === 'bech32' ? 1 : 0x2bc830a3; + const _words = radix2(5); + const fromWords = _words.decode; + const toWords = _words.encode; + const fromWordsUnsafe = unsafeWrapper(fromWords); + function encode(prefix, words, limit = 90) { + if (typeof prefix !== 'string') + throw new Error(`bech32.encode prefix should be string, not ${typeof prefix}`); + if (!Array.isArray(words) || (words.length && typeof words[0] !== 'number')) + throw new Error(`bech32.encode words should be array of numbers, not ${typeof words}`); + if (prefix.length === 0) + throw new TypeError(`Invalid prefix length ${prefix.length}`); + const actualLength = prefix.length + 7 + words.length; + if (limit !== false && actualLength > limit) + throw new TypeError(`Length ${actualLength} exceeds limit ${limit}`); + const lowered = prefix.toLowerCase(); + const sum = bechChecksum(lowered, words, ENCODING_CONST); + return `${lowered}1${BECH_ALPHABET.encode(words)}${sum}`; + } + function decode(str, limit = 90) { + if (typeof str !== 'string') + throw new Error(`bech32.decode input should be string, not ${typeof str}`); + if (str.length < 8 || (limit !== false && str.length > limit)) + throw new TypeError(`Wrong string length: ${str.length} (${str}). 
Expected (8..${limit})`); + // don't allow mixed case + const lowered = str.toLowerCase(); + if (str !== lowered && str !== str.toUpperCase()) + throw new Error(`String must be lowercase or uppercase`); + const sepIndex = lowered.lastIndexOf('1'); + if (sepIndex === 0 || sepIndex === -1) + throw new Error(`Letter "1" must be present between prefix and data only`); + const prefix = lowered.slice(0, sepIndex); + const data = lowered.slice(sepIndex + 1); + if (data.length < 6) + throw new Error('Data must be at least 6 characters long'); + const words = BECH_ALPHABET.decode(data).slice(0, -6); + const sum = bechChecksum(prefix, words, ENCODING_CONST); + if (!data.endsWith(sum)) + throw new Error(`Invalid checksum in ${str}: expected "${sum}"`); + return { prefix, words }; + } + const decodeUnsafe = unsafeWrapper(decode); + function decodeToBytes(str) { + const { prefix, words } = decode(str, false); + return { prefix, words, bytes: fromWords(words) }; + } + return { encode, decode, decodeToBytes, decodeUnsafe, fromWords, fromWordsUnsafe, toWords }; +} +exports.bech32 = genBech32('bech32'); +exports.bech32m = genBech32('bech32m'); +exports.utf8 = { + encode: (data) => new TextDecoder().decode(data), + decode: (str) => new TextEncoder().encode(str), +}; +exports.hex = chain(radix2(4), alphabet('0123456789abcdef'), join(''), normalize((s) => { + if (typeof s !== 'string' || s.length % 2) + throw new TypeError(`hex.decode: expected string, got ${typeof s} with length ${s.length}`); + return s.toLowerCase(); +})); +// prettier-ignore +const CODERS = { + utf8: exports.utf8, hex: exports.hex, base16: exports.base16, base32: exports.base32, base64: exports.base64, base64url: exports.base64url, base58: exports.base58, base58xmr: exports.base58xmr +}; +const coderTypeError = 'Invalid encoding type. 
Available types: utf8, hex, base16, base32, base64, base64url, base58, base58xmr'; +const bytesToString = (type, bytes) => { + if (typeof type !== 'string' || !CODERS.hasOwnProperty(type)) + throw new TypeError(coderTypeError); + if (!isBytes(bytes)) + throw new TypeError('bytesToString() expects Uint8Array'); + return CODERS[type].encode(bytes); +}; +exports.bytesToString = bytesToString; +exports.str = exports.bytesToString; // as in python, but for bytes only +const stringToBytes = (type, str) => { + if (!CODERS.hasOwnProperty(type)) + throw new TypeError(coderTypeError); + if (typeof str !== 'string') + throw new TypeError('stringToBytes() expects string'); + return CODERS[type].decode(str); +}; +exports.stringToBytes = stringToBytes; +exports.bytes = exports.stringToBytes; +//# sourceMappingURL=index.js.map + +/***/ }), + /***/ 7736: /***/ ((__unused_webpack_module, exports) => { @@ -20183,6 +31352,245 @@ b2wasm.ready(function (err) { }) +/***/ }), + +/***/ 65403: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +var bloom = __webpack_require__(40710); +module.exports = bloom; + +/***/ }), + +/***/ 88217: +/***/ ((module) => { + +/** +* A simple bitview for Array buffer. +* @author: Joy Ghosh. +* @version: 0.0.1 +*/ + +var BitView = function(buffer){ + this.buffer = buffer; + this.unit8 = new Uint8Array(this.buffer); +} + +/** +* Returns the bit value at position 'index'. +*/ +BitView.prototype.get = function(index){ + var value = this.unit8[index >> 3]; + var offset = index & 0x7; + return ((value >> (7-offset)) & 1); +} + +/** +* Sets the bit value at specified position 'index'. +*/ +BitView.prototype.set = function(index){ + var offset = index & 0x7; + this.unit8[index >> 3] |= (0x80 >> offset); +} + +/** +* Clears the bit at position 'index'. +*/ +BitView.prototype.clear = function(index){ + var offset = index & 0x7; + this.unit8[index >> 3] &= ~(0x80 >> offset); +} + +/** +* Returns the byte length of this array buffer. 
+*/ +BitView.prototype.length = function(){ + return this.unit8.byteLength; +} + +/** +* Returns the array buffer. +*/ +BitView.prototype.view = function(){ + return this.unit8; +} + +module.exports = BitView; + + +/***/ }), + +/***/ 40710: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +/** +* Bloom filter. +* @author: Joy Ghosh +* @version: 0.0.1 +*/ + +var BitView = __webpack_require__(88217); +var fnv_1a = __webpack_require__(89897); +var one_at_a_time_hash = __webpack_require__(81397); + +//Constants. +const BITS_IN_BYTE = 8; +const FALSE_POSITIVE_TOLERANCE = 0.000001; + +/** +* Bloom filter object. +* n represents number of elements in this filter. +*/ +var BloomFilter = function(n, false_postive_tolerance = FALSE_POSITIVE_TOLERANCE){ + //Bits in Bloom filter. + this.m = Math.ceil((-2)*n*Math.log(false_postive_tolerance)); + //Number of hash functions. + this.k = Math.ceil(0.7*(this.m/n)); + + //Normalize size. + this.size = (this.m > BITS_IN_BYTE) ? (Math.ceil(this.m/BITS_IN_BYTE)) : 1; //default size is a byte. + + //Initialize bit array for filter. + this.bitview = new BitView(new ArrayBuffer(this.size)); +} + +//Generate hash value. +BloomFilter.prototype.calculateHash = function(x,m,i){ + //Double hash technique. + return ((fnv_1a(x) + (i*one_at_a_time_hash(x)))%m); +} + +//Looks for membership. +BloomFilter.prototype.test = function(data){ + var hash = data; + for(var i=0; i { + +/** +Fowler-Noll-Vo hash function. +@author: Joy Ghosh +@version: 0.0.1 +*/ + +//FNV constants. +const FNV_PRIME = 16777619; +const FNV_OFFSET_BASIS = 2166136261; + +/** +FNV hash function. (32-bit version) +FNV step 1: hash = hash XOR byte_of_data. +FNV step 2: hash = hash * FNV_Prime. +*/ +function fnv_1a(value){ + + var hash = FNV_OFFSET_BASIS; + for(var i=0; i>> 0; +} + +//FNV step 1:hash = hash XOR byte_of_data. +function fnv_xor(hash, byte_of_data){ + return (hash ^ byte_of_data); +} + +//FNV step 2: hash = hash * FNV_Prime. 
+function fnv_multiply(hash){ + hash += (hash << 1) + (hash << 4) + (hash << 7) + (hash << 8) + (hash << 24); + return hash; +} + +module.exports = fnv_1a; + +/***/ }), + +/***/ 81397: +/***/ ((module) => { + +/** +Jenkins one_at_a_time hash function. +@author: Joy Ghosh +@version: 0.0.1 +*/ + +/** +* Jenkins's one at a time hash function. +*/ +function one_at_a_time_hash(key){ + + var hash = 0; + for(var i=0;i> 6); + } + + hash += (hash << 3); + hash = hash ^ (hash >> 11); + hash += (hash << 15); + return hash; +} + +module.exports = one_at_a_time_hash; + /***/ }), /***/ 39404: @@ -28190,6 +39598,1041 @@ SafeBuffer.allocUnsafeSlow = function (size) { } +/***/ }), + +/***/ 5974: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +/* provided dependency */ var process = __webpack_require__(65606); + +/* eslint camelcase: "off" */ + +var assert = __webpack_require__(94148); + +var Zstream = __webpack_require__(44442); +var zlib_deflate = __webpack_require__(58411); +var zlib_inflate = __webpack_require__(71447); +var constants = __webpack_require__(19681); + +for (var key in constants) { + exports[key] = constants[key]; +} + +// zlib modes +exports.NONE = 0; +exports.DEFLATE = 1; +exports.INFLATE = 2; +exports.GZIP = 3; +exports.GUNZIP = 4; +exports.DEFLATERAW = 5; +exports.INFLATERAW = 6; +exports.UNZIP = 7; + +var GZIP_HEADER_ID1 = 0x1f; +var GZIP_HEADER_ID2 = 0x8b; + +/** + * Emulate Node's zlib C++ layer for use by the JS layer in index.js + */ +function Zlib(mode) { + if (typeof mode !== 'number' || mode < exports.DEFLATE || mode > exports.UNZIP) { + throw new TypeError('Bad argument'); + } + + this.dictionary = null; + this.err = 0; + this.flush = 0; + this.init_done = false; + this.level = 0; + this.memLevel = 0; + this.mode = mode; + this.strategy = 0; + this.windowBits = 0; + this.write_in_progress = false; + this.pending_close = false; + 
this.gzip_id_bytes_read = 0; +} + +Zlib.prototype.close = function () { + if (this.write_in_progress) { + this.pending_close = true; + return; + } + + this.pending_close = false; + + assert(this.init_done, 'close before init'); + assert(this.mode <= exports.UNZIP); + + if (this.mode === exports.DEFLATE || this.mode === exports.GZIP || this.mode === exports.DEFLATERAW) { + zlib_deflate.deflateEnd(this.strm); + } else if (this.mode === exports.INFLATE || this.mode === exports.GUNZIP || this.mode === exports.INFLATERAW || this.mode === exports.UNZIP) { + zlib_inflate.inflateEnd(this.strm); + } + + this.mode = exports.NONE; + + this.dictionary = null; +}; + +Zlib.prototype.write = function (flush, input, in_off, in_len, out, out_off, out_len) { + return this._write(true, flush, input, in_off, in_len, out, out_off, out_len); +}; + +Zlib.prototype.writeSync = function (flush, input, in_off, in_len, out, out_off, out_len) { + return this._write(false, flush, input, in_off, in_len, out, out_off, out_len); +}; + +Zlib.prototype._write = function (async, flush, input, in_off, in_len, out, out_off, out_len) { + assert.equal(arguments.length, 8); + + assert(this.init_done, 'write before init'); + assert(this.mode !== exports.NONE, 'already finalized'); + assert.equal(false, this.write_in_progress, 'write already in progress'); + assert.equal(false, this.pending_close, 'close is pending'); + + this.write_in_progress = true; + + assert.equal(false, flush === undefined, 'must provide flush value'); + + this.write_in_progress = true; + + if (flush !== exports.Z_NO_FLUSH && flush !== exports.Z_PARTIAL_FLUSH && flush !== exports.Z_SYNC_FLUSH && flush !== exports.Z_FULL_FLUSH && flush !== exports.Z_FINISH && flush !== exports.Z_BLOCK) { + throw new Error('Invalid flush value'); + } + + if (input == null) { + input = Buffer.alloc(0); + in_len = 0; + in_off = 0; + } + + this.strm.avail_in = in_len; + this.strm.input = input; + this.strm.next_in = in_off; + this.strm.avail_out = 
out_len; + this.strm.output = out; + this.strm.next_out = out_off; + this.flush = flush; + + if (!async) { + // sync version + this._process(); + + if (this._checkError()) { + return this._afterSync(); + } + return; + } + + // async version + var self = this; + process.nextTick(function () { + self._process(); + self._after(); + }); + + return this; +}; + +Zlib.prototype._afterSync = function () { + var avail_out = this.strm.avail_out; + var avail_in = this.strm.avail_in; + + this.write_in_progress = false; + + return [avail_in, avail_out]; +}; + +Zlib.prototype._process = function () { + var next_expected_header_byte = null; + + // If the avail_out is left at 0, then it means that it ran out + // of room. If there was avail_out left over, then it means + // that all of the input was consumed. + switch (this.mode) { + case exports.DEFLATE: + case exports.GZIP: + case exports.DEFLATERAW: + this.err = zlib_deflate.deflate(this.strm, this.flush); + break; + case exports.UNZIP: + if (this.strm.avail_in > 0) { + next_expected_header_byte = this.strm.next_in; + } + + switch (this.gzip_id_bytes_read) { + case 0: + if (next_expected_header_byte === null) { + break; + } + + if (this.strm.input[next_expected_header_byte] === GZIP_HEADER_ID1) { + this.gzip_id_bytes_read = 1; + next_expected_header_byte++; + + if (this.strm.avail_in === 1) { + // The only available byte was already read. + break; + } + } else { + this.mode = exports.INFLATE; + break; + } + + // fallthrough + case 1: + if (next_expected_header_byte === null) { + break; + } + + if (this.strm.input[next_expected_header_byte] === GZIP_HEADER_ID2) { + this.gzip_id_bytes_read = 2; + this.mode = exports.GUNZIP; + } else { + // There is no actual difference between INFLATE and INFLATERAW + // (after initialization). 
+ this.mode = exports.INFLATE; + } + + break; + default: + throw new Error('invalid number of gzip magic number bytes read'); + } + + // fallthrough + case exports.INFLATE: + case exports.GUNZIP: + case exports.INFLATERAW: + this.err = zlib_inflate.inflate(this.strm, this.flush + + // If data was encoded with dictionary + );if (this.err === exports.Z_NEED_DICT && this.dictionary) { + // Load it + this.err = zlib_inflate.inflateSetDictionary(this.strm, this.dictionary); + if (this.err === exports.Z_OK) { + // And try to decode again + this.err = zlib_inflate.inflate(this.strm, this.flush); + } else if (this.err === exports.Z_DATA_ERROR) { + // Both inflateSetDictionary() and inflate() return Z_DATA_ERROR. + // Make it possible for After() to tell a bad dictionary from bad + // input. + this.err = exports.Z_NEED_DICT; + } + } + while (this.strm.avail_in > 0 && this.mode === exports.GUNZIP && this.err === exports.Z_STREAM_END && this.strm.next_in[0] !== 0x00) { + // Bytes remain in input buffer. Perhaps this is another compressed + // member in the same archive, or just trailing garbage. + // Trailing zero bytes are okay, though, since they are frequently + // used for padding. + + this.reset(); + this.err = zlib_inflate.inflate(this.strm, this.flush); + } + break; + default: + throw new Error('Unknown mode ' + this.mode); + } +}; + +Zlib.prototype._checkError = function () { + // Acceptable error states depend on the type of zlib stream. + switch (this.err) { + case exports.Z_OK: + case exports.Z_BUF_ERROR: + if (this.strm.avail_out !== 0 && this.flush === exports.Z_FINISH) { + this._error('unexpected end of file'); + return false; + } + break; + case exports.Z_STREAM_END: + // normal statuses, not fatal + break; + case exports.Z_NEED_DICT: + if (this.dictionary == null) { + this._error('Missing dictionary'); + } else { + this._error('Bad dictionary'); + } + return false; + default: + // something else. 
+ this._error('Zlib error'); + return false; + } + + return true; +}; + +Zlib.prototype._after = function () { + if (!this._checkError()) { + return; + } + + var avail_out = this.strm.avail_out; + var avail_in = this.strm.avail_in; + + this.write_in_progress = false; + + // call the write() cb + this.callback(avail_in, avail_out); + + if (this.pending_close) { + this.close(); + } +}; + +Zlib.prototype._error = function (message) { + if (this.strm.msg) { + message = this.strm.msg; + } + this.onerror(message, this.err + + // no hope of rescue. + );this.write_in_progress = false; + if (this.pending_close) { + this.close(); + } +}; + +Zlib.prototype.init = function (windowBits, level, memLevel, strategy, dictionary) { + assert(arguments.length === 4 || arguments.length === 5, 'init(windowBits, level, memLevel, strategy, [dictionary])'); + + assert(windowBits >= 8 && windowBits <= 15, 'invalid windowBits'); + assert(level >= -1 && level <= 9, 'invalid compression level'); + + assert(memLevel >= 1 && memLevel <= 9, 'invalid memlevel'); + + assert(strategy === exports.Z_FILTERED || strategy === exports.Z_HUFFMAN_ONLY || strategy === exports.Z_RLE || strategy === exports.Z_FIXED || strategy === exports.Z_DEFAULT_STRATEGY, 'invalid strategy'); + + this._init(level, windowBits, memLevel, strategy, dictionary); + this._setDictionary(); +}; + +Zlib.prototype.params = function () { + throw new Error('deflateParams Not supported'); +}; + +Zlib.prototype.reset = function () { + this._reset(); + this._setDictionary(); +}; + +Zlib.prototype._init = function (level, windowBits, memLevel, strategy, dictionary) { + this.level = level; + this.windowBits = windowBits; + this.memLevel = memLevel; + this.strategy = strategy; + + this.flush = exports.Z_NO_FLUSH; + + this.err = exports.Z_OK; + + if (this.mode === exports.GZIP || this.mode === exports.GUNZIP) { + this.windowBits += 16; + } + + if (this.mode === exports.UNZIP) { + this.windowBits += 32; + } + + if (this.mode === 
exports.DEFLATERAW || this.mode === exports.INFLATERAW) { + this.windowBits = -1 * this.windowBits; + } + + this.strm = new Zstream(); + + switch (this.mode) { + case exports.DEFLATE: + case exports.GZIP: + case exports.DEFLATERAW: + this.err = zlib_deflate.deflateInit2(this.strm, this.level, exports.Z_DEFLATED, this.windowBits, this.memLevel, this.strategy); + break; + case exports.INFLATE: + case exports.GUNZIP: + case exports.INFLATERAW: + case exports.UNZIP: + this.err = zlib_inflate.inflateInit2(this.strm, this.windowBits); + break; + default: + throw new Error('Unknown mode ' + this.mode); + } + + if (this.err !== exports.Z_OK) { + this._error('Init error'); + } + + this.dictionary = dictionary; + + this.write_in_progress = false; + this.init_done = true; +}; + +Zlib.prototype._setDictionary = function () { + if (this.dictionary == null) { + return; + } + + this.err = exports.Z_OK; + + switch (this.mode) { + case exports.DEFLATE: + case exports.DEFLATERAW: + this.err = zlib_deflate.deflateSetDictionary(this.strm, this.dictionary); + break; + default: + break; + } + + if (this.err !== exports.Z_OK) { + this._error('Failed to set dictionary'); + } +}; + +Zlib.prototype._reset = function () { + this.err = exports.Z_OK; + + switch (this.mode) { + case exports.DEFLATE: + case exports.DEFLATERAW: + case exports.GZIP: + this.err = zlib_deflate.deflateReset(this.strm); + break; + case exports.INFLATE: + case exports.INFLATERAW: + case exports.GUNZIP: + this.err = zlib_inflate.inflateReset(this.strm); + break; + default: + break; + } + + if (this.err !== exports.Z_OK) { + this._error('Failed to reset stream'); + } +}; + +exports.Zlib = Zlib; + +/***/ }), + +/***/ 78559: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var process = __webpack_require__(65606); + + +var Buffer = (__webpack_require__(48287).Buffer); +var Transform = (__webpack_require__(88310).Transform); +var binding = 
__webpack_require__(5974); +var util = __webpack_require__(40537); +var assert = (__webpack_require__(94148).ok); +var kMaxLength = (__webpack_require__(48287).kMaxLength); +var kRangeErrorMessage = 'Cannot create final Buffer. It would be larger ' + 'than 0x' + kMaxLength.toString(16) + ' bytes'; + +// zlib doesn't provide these, so kludge them in following the same +// const naming scheme zlib uses. +binding.Z_MIN_WINDOWBITS = 8; +binding.Z_MAX_WINDOWBITS = 15; +binding.Z_DEFAULT_WINDOWBITS = 15; + +// fewer than 64 bytes per chunk is stupid. +// technically it could work with as few as 8, but even 64 bytes +// is absurdly low. Usually a MB or more is best. +binding.Z_MIN_CHUNK = 64; +binding.Z_MAX_CHUNK = Infinity; +binding.Z_DEFAULT_CHUNK = 16 * 1024; + +binding.Z_MIN_MEMLEVEL = 1; +binding.Z_MAX_MEMLEVEL = 9; +binding.Z_DEFAULT_MEMLEVEL = 8; + +binding.Z_MIN_LEVEL = -1; +binding.Z_MAX_LEVEL = 9; +binding.Z_DEFAULT_LEVEL = binding.Z_DEFAULT_COMPRESSION; + +// expose all the zlib constants +var bkeys = Object.keys(binding); +for (var bk = 0; bk < bkeys.length; bk++) { + var bkey = bkeys[bk]; + if (bkey.match(/^Z/)) { + Object.defineProperty(exports, bkey, { + enumerable: true, value: binding[bkey], writable: false + }); + } +} + +// translation table for return codes. 
+var codes = { + Z_OK: binding.Z_OK, + Z_STREAM_END: binding.Z_STREAM_END, + Z_NEED_DICT: binding.Z_NEED_DICT, + Z_ERRNO: binding.Z_ERRNO, + Z_STREAM_ERROR: binding.Z_STREAM_ERROR, + Z_DATA_ERROR: binding.Z_DATA_ERROR, + Z_MEM_ERROR: binding.Z_MEM_ERROR, + Z_BUF_ERROR: binding.Z_BUF_ERROR, + Z_VERSION_ERROR: binding.Z_VERSION_ERROR +}; + +var ckeys = Object.keys(codes); +for (var ck = 0; ck < ckeys.length; ck++) { + var ckey = ckeys[ck]; + codes[codes[ckey]] = ckey; +} + +Object.defineProperty(exports, "codes", ({ + enumerable: true, value: Object.freeze(codes), writable: false +})); + +exports.Deflate = Deflate; +exports.Inflate = Inflate; +exports.Gzip = Gzip; +exports.Gunzip = Gunzip; +exports.DeflateRaw = DeflateRaw; +exports.InflateRaw = InflateRaw; +exports.Unzip = Unzip; + +exports.createDeflate = function (o) { + return new Deflate(o); +}; + +exports.createInflate = function (o) { + return new Inflate(o); +}; + +exports.createDeflateRaw = function (o) { + return new DeflateRaw(o); +}; + +exports.createInflateRaw = function (o) { + return new InflateRaw(o); +}; + +exports.createGzip = function (o) { + return new Gzip(o); +}; + +exports.createGunzip = function (o) { + return new Gunzip(o); +}; + +exports.createUnzip = function (o) { + return new Unzip(o); +}; + +// Convenience methods. +// compress/decompress a string or buffer in one step. 
+exports.deflate = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new Deflate(opts), buffer, callback); +}; + +exports.deflateSync = function (buffer, opts) { + return zlibBufferSync(new Deflate(opts), buffer); +}; + +exports.gzip = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new Gzip(opts), buffer, callback); +}; + +exports.gzipSync = function (buffer, opts) { + return zlibBufferSync(new Gzip(opts), buffer); +}; + +exports.deflateRaw = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new DeflateRaw(opts), buffer, callback); +}; + +exports.deflateRawSync = function (buffer, opts) { + return zlibBufferSync(new DeflateRaw(opts), buffer); +}; + +exports.unzip = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new Unzip(opts), buffer, callback); +}; + +exports.unzipSync = function (buffer, opts) { + return zlibBufferSync(new Unzip(opts), buffer); +}; + +exports.inflate = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new Inflate(opts), buffer, callback); +}; + +exports.inflateSync = function (buffer, opts) { + return zlibBufferSync(new Inflate(opts), buffer); +}; + +exports.gunzip = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new Gunzip(opts), buffer, callback); +}; + +exports.gunzipSync = function (buffer, opts) { + return zlibBufferSync(new Gunzip(opts), buffer); +}; + +exports.inflateRaw = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new InflateRaw(opts), buffer, callback); +}; + +exports.inflateRawSync = 
function (buffer, opts) { + return zlibBufferSync(new InflateRaw(opts), buffer); +}; + +function zlibBuffer(engine, buffer, callback) { + var buffers = []; + var nread = 0; + + engine.on('error', onError); + engine.on('end', onEnd); + + engine.end(buffer); + flow(); + + function flow() { + var chunk; + while (null !== (chunk = engine.read())) { + buffers.push(chunk); + nread += chunk.length; + } + engine.once('readable', flow); + } + + function onError(err) { + engine.removeListener('end', onEnd); + engine.removeListener('readable', flow); + callback(err); + } + + function onEnd() { + var buf; + var err = null; + + if (nread >= kMaxLength) { + err = new RangeError(kRangeErrorMessage); + } else { + buf = Buffer.concat(buffers, nread); + } + + buffers = []; + engine.close(); + callback(err, buf); + } +} + +function zlibBufferSync(engine, buffer) { + if (typeof buffer === 'string') buffer = Buffer.from(buffer); + + if (!Buffer.isBuffer(buffer)) throw new TypeError('Not a string or buffer'); + + var flushFlag = engine._finishFlushFlag; + + return engine._processChunk(buffer, flushFlag); +} + +// generic zlib +// minimal 2-byte header +function Deflate(opts) { + if (!(this instanceof Deflate)) return new Deflate(opts); + Zlib.call(this, opts, binding.DEFLATE); +} + +function Inflate(opts) { + if (!(this instanceof Inflate)) return new Inflate(opts); + Zlib.call(this, opts, binding.INFLATE); +} + +// gzip - bigger header, same deflate compression +function Gzip(opts) { + if (!(this instanceof Gzip)) return new Gzip(opts); + Zlib.call(this, opts, binding.GZIP); +} + +function Gunzip(opts) { + if (!(this instanceof Gunzip)) return new Gunzip(opts); + Zlib.call(this, opts, binding.GUNZIP); +} + +// raw - no header +function DeflateRaw(opts) { + if (!(this instanceof DeflateRaw)) return new DeflateRaw(opts); + Zlib.call(this, opts, binding.DEFLATERAW); +} + +function InflateRaw(opts) { + if (!(this instanceof InflateRaw)) return new InflateRaw(opts); + Zlib.call(this, opts, 
binding.INFLATERAW); +} + +// auto-detect header. +function Unzip(opts) { + if (!(this instanceof Unzip)) return new Unzip(opts); + Zlib.call(this, opts, binding.UNZIP); +} + +function isValidFlushFlag(flag) { + return flag === binding.Z_NO_FLUSH || flag === binding.Z_PARTIAL_FLUSH || flag === binding.Z_SYNC_FLUSH || flag === binding.Z_FULL_FLUSH || flag === binding.Z_FINISH || flag === binding.Z_BLOCK; +} + +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. + +function Zlib(opts, mode) { + var _this = this; + + this._opts = opts = opts || {}; + this._chunkSize = opts.chunkSize || exports.Z_DEFAULT_CHUNK; + + Transform.call(this, opts); + + if (opts.flush && !isValidFlushFlag(opts.flush)) { + throw new Error('Invalid flush flag: ' + opts.flush); + } + if (opts.finishFlush && !isValidFlushFlag(opts.finishFlush)) { + throw new Error('Invalid flush flag: ' + opts.finishFlush); + } + + this._flushFlag = opts.flush || binding.Z_NO_FLUSH; + this._finishFlushFlag = typeof opts.finishFlush !== 'undefined' ? 
opts.finishFlush : binding.Z_FINISH; + + if (opts.chunkSize) { + if (opts.chunkSize < exports.Z_MIN_CHUNK || opts.chunkSize > exports.Z_MAX_CHUNK) { + throw new Error('Invalid chunk size: ' + opts.chunkSize); + } + } + + if (opts.windowBits) { + if (opts.windowBits < exports.Z_MIN_WINDOWBITS || opts.windowBits > exports.Z_MAX_WINDOWBITS) { + throw new Error('Invalid windowBits: ' + opts.windowBits); + } + } + + if (opts.level) { + if (opts.level < exports.Z_MIN_LEVEL || opts.level > exports.Z_MAX_LEVEL) { + throw new Error('Invalid compression level: ' + opts.level); + } + } + + if (opts.memLevel) { + if (opts.memLevel < exports.Z_MIN_MEMLEVEL || opts.memLevel > exports.Z_MAX_MEMLEVEL) { + throw new Error('Invalid memLevel: ' + opts.memLevel); + } + } + + if (opts.strategy) { + if (opts.strategy != exports.Z_FILTERED && opts.strategy != exports.Z_HUFFMAN_ONLY && opts.strategy != exports.Z_RLE && opts.strategy != exports.Z_FIXED && opts.strategy != exports.Z_DEFAULT_STRATEGY) { + throw new Error('Invalid strategy: ' + opts.strategy); + } + } + + if (opts.dictionary) { + if (!Buffer.isBuffer(opts.dictionary)) { + throw new Error('Invalid dictionary: it should be a Buffer instance'); + } + } + + this._handle = new binding.Zlib(mode); + + var self = this; + this._hadError = false; + this._handle.onerror = function (message, errno) { + // there is no way to cleanly recover. + // continuing only obscures problems. 
+ _close(self); + self._hadError = true; + + var error = new Error(message); + error.errno = errno; + error.code = exports.codes[errno]; + self.emit('error', error); + }; + + var level = exports.Z_DEFAULT_COMPRESSION; + if (typeof opts.level === 'number') level = opts.level; + + var strategy = exports.Z_DEFAULT_STRATEGY; + if (typeof opts.strategy === 'number') strategy = opts.strategy; + + this._handle.init(opts.windowBits || exports.Z_DEFAULT_WINDOWBITS, level, opts.memLevel || exports.Z_DEFAULT_MEMLEVEL, strategy, opts.dictionary); + + this._buffer = Buffer.allocUnsafe(this._chunkSize); + this._offset = 0; + this._level = level; + this._strategy = strategy; + + this.once('end', this.close); + + Object.defineProperty(this, '_closed', { + get: function () { + return !_this._handle; + }, + configurable: true, + enumerable: true + }); +} + +util.inherits(Zlib, Transform); + +Zlib.prototype.params = function (level, strategy, callback) { + if (level < exports.Z_MIN_LEVEL || level > exports.Z_MAX_LEVEL) { + throw new RangeError('Invalid compression level: ' + level); + } + if (strategy != exports.Z_FILTERED && strategy != exports.Z_HUFFMAN_ONLY && strategy != exports.Z_RLE && strategy != exports.Z_FIXED && strategy != exports.Z_DEFAULT_STRATEGY) { + throw new TypeError('Invalid strategy: ' + strategy); + } + + if (this._level !== level || this._strategy !== strategy) { + var self = this; + this.flush(binding.Z_SYNC_FLUSH, function () { + assert(self._handle, 'zlib binding closed'); + self._handle.params(level, strategy); + if (!self._hadError) { + self._level = level; + self._strategy = strategy; + if (callback) callback(); + } + }); + } else { + process.nextTick(callback); + } +}; + +Zlib.prototype.reset = function () { + assert(this._handle, 'zlib binding closed'); + return this._handle.reset(); +}; + +// This is the _flush function called by the transform class, +// internally, when the last chunk has been written. 
+Zlib.prototype._flush = function (callback) { + this._transform(Buffer.alloc(0), '', callback); +}; + +Zlib.prototype.flush = function (kind, callback) { + var _this2 = this; + + var ws = this._writableState; + + if (typeof kind === 'function' || kind === undefined && !callback) { + callback = kind; + kind = binding.Z_FULL_FLUSH; + } + + if (ws.ended) { + if (callback) process.nextTick(callback); + } else if (ws.ending) { + if (callback) this.once('end', callback); + } else if (ws.needDrain) { + if (callback) { + this.once('drain', function () { + return _this2.flush(kind, callback); + }); + } + } else { + this._flushFlag = kind; + this.write(Buffer.alloc(0), '', callback); + } +}; + +Zlib.prototype.close = function (callback) { + _close(this, callback); + process.nextTick(emitCloseNT, this); +}; + +function _close(engine, callback) { + if (callback) process.nextTick(callback); + + // Caller may invoke .close after a zlib error (which will null _handle). + if (!engine._handle) return; + + engine._handle.close(); + engine._handle = null; +} + +function emitCloseNT(self) { + self.emit('close'); +} + +Zlib.prototype._transform = function (chunk, encoding, cb) { + var flushFlag; + var ws = this._writableState; + var ending = ws.ending || ws.ended; + var last = ending && (!chunk || ws.length === chunk.length); + + if (chunk !== null && !Buffer.isBuffer(chunk)) return cb(new Error('invalid input')); + + if (!this._handle) return cb(new Error('zlib binding closed')); + + // If it's the last chunk, or a final flush, we use the Z_FINISH flush flag + // (or whatever flag was provided using opts.finishFlush). + // If it's explicitly flushing at some other time, then we use + // Z_FULL_FLUSH. Otherwise, use Z_NO_FLUSH for maximum compression + // goodness. + if (last) flushFlag = this._finishFlushFlag;else { + flushFlag = this._flushFlag; + // once we've flushed the last of the queue, stop flushing and + // go back to the normal behavior. 
+ if (chunk.length >= ws.length) { + this._flushFlag = this._opts.flush || binding.Z_NO_FLUSH; + } + } + + this._processChunk(chunk, flushFlag, cb); +}; + +Zlib.prototype._processChunk = function (chunk, flushFlag, cb) { + var availInBefore = chunk && chunk.length; + var availOutBefore = this._chunkSize - this._offset; + var inOff = 0; + + var self = this; + + var async = typeof cb === 'function'; + + if (!async) { + var buffers = []; + var nread = 0; + + var error; + this.on('error', function (er) { + error = er; + }); + + assert(this._handle, 'zlib binding closed'); + do { + var res = this._handle.writeSync(flushFlag, chunk, // in + inOff, // in_off + availInBefore, // in_len + this._buffer, // out + this._offset, //out_off + availOutBefore); // out_len + } while (!this._hadError && callback(res[0], res[1])); + + if (this._hadError) { + throw error; + } + + if (nread >= kMaxLength) { + _close(this); + throw new RangeError(kRangeErrorMessage); + } + + var buf = Buffer.concat(buffers, nread); + _close(this); + + return buf; + } + + assert(this._handle, 'zlib binding closed'); + var req = this._handle.write(flushFlag, chunk, // in + inOff, // in_off + availInBefore, // in_len + this._buffer, // out + this._offset, //out_off + availOutBefore); // out_len + + req.buffer = chunk; + req.callback = callback; + + function callback(availInAfter, availOutAfter) { + // When the callback is used in an async write, the callback's + // context is the `req` object that was created. The req object + // is === this._handle, and that's why it's important to null + // out the values after they are done being used. `this._handle` + // can stay in memory longer than the callback and buffer are needed. 
+ if (this) { + this.buffer = null; + this.callback = null; + } + + if (self._hadError) return; + + var have = availOutBefore - availOutAfter; + assert(have >= 0, 'have should not go down'); + + if (have > 0) { + var out = self._buffer.slice(self._offset, self._offset + have); + self._offset += have; + // serve some output to the consumer. + if (async) { + self.push(out); + } else { + buffers.push(out); + nread += out.length; + } + } + + // exhausted the output buffer, or used all the input create a new one. + if (availOutAfter === 0 || self._offset >= self._chunkSize) { + availOutBefore = self._chunkSize; + self._offset = 0; + self._buffer = Buffer.allocUnsafe(self._chunkSize); + } + + if (availOutAfter === 0) { + // Not actually done. Need to reprocess. + // Also, update the availInBefore to the availInAfter value, + // so that if we have to hit it a third (fourth, etc.) time, + // it'll have the correct byte counts. + inOff += availInBefore - availInAfter; + availInBefore = availInAfter; + + if (!async) return true; + + var newReq = self._handle.write(flushFlag, chunk, inOff, availInBefore, self._buffer, self._offset, self._chunkSize); + newReq.callback = callback; // this same function + newReq.buffer = chunk; + return; + } + + if (!async) return false; + + // finished with the chunk. 
+ cb(); + } +}; + +util.inherits(Deflate, Zlib); +util.inherits(Inflate, Zlib); +util.inherits(Gzip, Zlib); +util.inherits(Gunzip, Zlib); +util.inherits(DeflateRaw, Zlib); +util.inherits(InflateRaw, Zlib); +util.inherits(Unzip, Zlib); + /***/ }), /***/ 30295: @@ -30323,6 +42766,77 @@ function BufferBigIntNotDefined () { } +/***/ }), + +/***/ 86866: +/***/ ((module) => { + +module.exports = { + "100": "Continue", + "101": "Switching Protocols", + "102": "Processing", + "200": "OK", + "201": "Created", + "202": "Accepted", + "203": "Non-Authoritative Information", + "204": "No Content", + "205": "Reset Content", + "206": "Partial Content", + "207": "Multi-Status", + "208": "Already Reported", + "226": "IM Used", + "300": "Multiple Choices", + "301": "Moved Permanently", + "302": "Found", + "303": "See Other", + "304": "Not Modified", + "305": "Use Proxy", + "307": "Temporary Redirect", + "308": "Permanent Redirect", + "400": "Bad Request", + "401": "Unauthorized", + "402": "Payment Required", + "403": "Forbidden", + "404": "Not Found", + "405": "Method Not Allowed", + "406": "Not Acceptable", + "407": "Proxy Authentication Required", + "408": "Request Timeout", + "409": "Conflict", + "410": "Gone", + "411": "Length Required", + "412": "Precondition Failed", + "413": "Payload Too Large", + "414": "URI Too Long", + "415": "Unsupported Media Type", + "416": "Range Not Satisfiable", + "417": "Expectation Failed", + "418": "I'm a teapot", + "421": "Misdirected Request", + "422": "Unprocessable Entity", + "423": "Locked", + "424": "Failed Dependency", + "425": "Unordered Collection", + "426": "Upgrade Required", + "428": "Precondition Required", + "429": "Too Many Requests", + "431": "Request Header Fields Too Large", + "451": "Unavailable For Legal Reasons", + "500": "Internal Server Error", + "501": "Not Implemented", + "502": "Bad Gateway", + "503": "Service Unavailable", + "504": "Gateway Timeout", + "505": "HTTP Version Not Supported", + "506": "Variant Also 
Negotiates", + "507": "Insufficient Storage", + "508": "Loop Detected", + "509": "Bandwidth Limit Exceeded", + "510": "Not Extended", + "511": "Network Authentication Required" +} + + /***/ }), /***/ 38075: @@ -35222,6 +47736,566 @@ exports.constants = { } +/***/ }), + +/***/ 17833: +/***/ ((module, exports, __webpack_require__) => { + +/* provided dependency */ var console = __webpack_require__(96763); +/* provided dependency */ var process = __webpack_require__(65606); +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. 
+ */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? 
http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". 
+ * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = __webpack_require__(40736)(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. 
+ */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + + +/***/ }), + +/***/ 40736: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +/* provided dependency */ var console = __webpack_require__(96763); + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __webpack_require__(6585); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. 
+ * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
+ + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. 
+ * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; + + /***/ }), /***/ 30041: @@ -47657,7 +60731,7 @@ __webpack_require__.d(__webpack_exports__, { hn: () => (/* binding */ existsAsync), sw: () => (/* binding */ loadCachedEvents), rx: () => (/* binding */ loadSavedEvents), - xU: () => (/* binding */ saveEvents), + jj: () => (/* binding */ saveUserFile), fY: () => (/* binding */ unzipAsync), a8: () => (/* binding */ zipAsync) }); @@ -50392,23 +63466,22 @@ function unzipAsync(data) { }); }); } -function saveEvents(_0) { +function saveUserFile(_0) { return __async(this, arguments, function* ({ - name, + fileName, userDirectory, - events + dataString }) { - const fileName = `${name}.json`.toLowerCase(); + fileName = fileName.toLowerCase(); const filePath = path_ignored_default().join(userDirectory, fileName); - const stringEvents = JSON.stringify(events, null, 2) + "\n"; const payload = yield zipAsync({ - [fileName]: new TextEncoder().encode(stringEvents) + [fileName]: new TextEncoder().encode(dataString) }); if (!(yield existsAsync(userDirectory))) { yield (0,promises_ignored_.mkdir)(userDirectory, { recursive: true }); } yield (0,promises_ignored_.writeFile)(filePath + ".zip", payload); - yield (0,promises_ignored_.writeFile)(filePath, stringEvents); + yield (0,promises_ignored_.writeFile)(filePath, dataString); }); } function loadSavedEvents(_0) { @@ -50593,7 +63666,7 @@ class Deposit { const newDeposit = new Deposit({ currency: currency.toLowerCase(), amount, - netId: Number(netId), + netId, note: `tornado-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.noteHex}`, noteHex: depositObject.noteHex, invoice: `tornadoInvoice-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.commitmentHex}`, @@ -50670,6 +63743,149 @@ class Invoice { } +/***/ }), + +/***/ 87987: +/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => 
{ + +"use strict"; +/* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ Ad: () => (/* binding */ NoteAccount), +/* harmony export */ Fr: () => (/* binding */ packEncryptedMessage), +/* harmony export */ ol: () => (/* binding */ unpackEncryptedMessage) +/* harmony export */ }); +/* harmony import */ var _metamask_eth_sig_util__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(51594); +/* harmony import */ var _metamask_eth_sig_util__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_metamask_eth_sig_util__WEBPACK_IMPORTED_MODULE_0__); +/* harmony import */ var ethers__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(20415); +/* harmony import */ var ethers__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(30031); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(91401); + + + + +function packEncryptedMessage({ nonce, ephemPublicKey, ciphertext }) { + const nonceBuf = (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .toFixedHex */ .$W)((0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .bytesToHex */ .My)((0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .base64ToBytes */ .Kp)(nonce)), 24); + const ephemPublicKeyBuf = (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .toFixedHex */ .$W)((0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .bytesToHex */ .My)((0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .base64ToBytes */ .Kp)(ephemPublicKey)), 32); + const ciphertextBuf = (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .bytesToHex */ .My)((0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .base64ToBytes */ .Kp)(ciphertext)); + const messageBuff = (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .concatBytes */ .Id)((0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .hexToBytes */ .aT)(nonceBuf), (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .hexToBytes */ .aT)(ephemPublicKeyBuf), (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .hexToBytes */ .aT)(ciphertextBuf)); + return (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .bytesToHex */ .My)(messageBuff); +} +function 
unpackEncryptedMessage(encryptedMessage) { + const messageBuff = (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .hexToBytes */ .aT)(encryptedMessage); + const nonceBuf = (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .bytesToBase64 */ ["if"])(messageBuff.slice(0, 24)); + const ephemPublicKeyBuf = (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .bytesToBase64 */ ["if"])(messageBuff.slice(24, 56)); + const ciphertextBuf = (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .bytesToBase64 */ ["if"])(messageBuff.slice(56)); + return { + messageBuff: (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .bytesToHex */ .My)(messageBuff), + version: "x25519-xsalsa20-poly1305", + nonce: nonceBuf, + ephemPublicKey: ephemPublicKeyBuf, + ciphertext: ciphertextBuf + }; +} +class NoteAccount { + constructor({ netId, blockNumber, recoveryKey, Echoer: Echoer2 }) { + if (!recoveryKey) { + recoveryKey = (0,_utils__WEBPACK_IMPORTED_MODULE_1__/* .bytesToHex */ .My)(_utils__WEBPACK_IMPORTED_MODULE_1__/* .crypto */ .Et.getRandomValues(new Uint8Array(32))).slice(2); + } + this.netId = Math.floor(Number(netId)); + this.blockNumber = blockNumber; + this.recoveryKey = recoveryKey; + this.recoveryAddress = (0,ethers__WEBPACK_IMPORTED_MODULE_2__/* .computeAddress */ .K)("0x" + recoveryKey); + this.recoveryPublicKey = (0,_metamask_eth_sig_util__WEBPACK_IMPORTED_MODULE_0__.getEncryptionPublicKey)(recoveryKey); + this.Echoer = Echoer2; + } + /** + * Intends to mock eth_getEncryptionPublicKey behavior from MetaMask + * In order to make the recoveryKey retrival from Echoer possible from the bare private key + */ + static getWalletPublicKey(wallet) { + let { privateKey } = wallet; + if (privateKey.startsWith("0x")) { + privateKey = privateKey.replace("0x", ""); + } + return (0,_metamask_eth_sig_util__WEBPACK_IMPORTED_MODULE_0__.getEncryptionPublicKey)(privateKey); + } + // This function intends to provide an encrypted value of recoveryKey for an on-chain Echoer backup purpose + // Thus, the pubKey should be derived by a Wallet instance or 
from Web3 wallets + // pubKey: base64 encoded 32 bytes key from https://docs.metamask.io/wallet/reference/eth_getencryptionpublickey/ + getEncryptedAccount(walletPublicKey) { + const encryptedData = (0,_metamask_eth_sig_util__WEBPACK_IMPORTED_MODULE_0__.encrypt)({ + publicKey: walletPublicKey, + data: this.recoveryKey, + version: "x25519-xsalsa20-poly1305" + }); + const data = packEncryptedMessage(encryptedData); + return { + // Use this later to save hexPrivateKey generated with + // Buffer.from(JSON.stringify(encryptedData)).toString('hex') + // As we don't use buffer with this library we should leave UI to do the rest + encryptedData, + // Data that could be used as an echo(data) params + data + }; + } + /** + * Decrypt Echoer backuped note encryption account with private keys + */ + decryptAccountsWithWallet(wallet, events) { + let { privateKey } = wallet; + if (privateKey.startsWith("0x")) { + privateKey = privateKey.replace("0x", ""); + } + const decryptedEvents = []; + for (const event of events) { + try { + const unpackedMessage = unpackEncryptedMessage(event.encryptedAccount); + const recoveryKey = (0,_metamask_eth_sig_util__WEBPACK_IMPORTED_MODULE_0__.decrypt)({ + encryptedData: unpackedMessage, + privateKey + }); + decryptedEvents.push( + new NoteAccount({ + netId: this.netId, + blockNumber: event.blockNumber, + recoveryKey, + Echoer: this.Echoer + }) + ); + } catch (e) { + continue; + } + } + return decryptedEvents; + } + decryptNotes(events) { + const decryptedEvents = []; + for (const event of events) { + try { + const unpackedMessage = unpackEncryptedMessage(event.encryptedNote); + const [address, noteHex] = (0,_metamask_eth_sig_util__WEBPACK_IMPORTED_MODULE_0__.decrypt)({ + encryptedData: unpackedMessage, + privateKey: this.recoveryKey + }).split("-"); + decryptedEvents.push({ + blockNumber: event.blockNumber, + address: (0,ethers__WEBPACK_IMPORTED_MODULE_3__/* .getAddress */ .b)(address), + noteHex + }); + } catch (e) { + continue; + } + } + return 
decryptedEvents; + } + encryptNote({ address, noteHex }) { + const encryptedData = (0,_metamask_eth_sig_util__WEBPACK_IMPORTED_MODULE_0__.encrypt)({ + publicKey: this.recoveryPublicKey, + data: `${address}-${noteHex}`, + version: "x25519-xsalsa20-poly1305" + }); + return packEncryptedMessage(encryptedData); + } +} + + /***/ }), /***/ 27819: @@ -50677,6 +63893,7 @@ class Invoice { "use strict"; /* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ GS: () => (/* binding */ BaseEchoService), /* harmony export */ JJ: () => (/* binding */ BaseGovernanceService), /* harmony export */ Lx: () => (/* binding */ DEPOSIT), /* harmony export */ O_: () => (/* binding */ BaseEncryptedNotesService), @@ -51044,6 +64261,57 @@ class BaseDepositsService extends BaseEventsService { } } } +class BaseEchoService extends BaseEventsService { + constructor({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions: fetchDataOptions2 + }) { + super({ netId, provider, graphApi, subgraphName, contract: Echoer, deployedBlock, fetchDataOptions: fetchDataOptions2 }); + } + getInstanceName() { + return `echo_${this.netId}`; + } + getType() { + return "Echo"; + } + getGraphMethod() { + return "getAllGraphEchoEvents"; + } + formatEvents(events) { + return __async(this, null, function* () { + return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { + const { who, data } = args; + if (who && data) { + const eventObjects = { + blockNumber, + logIndex, + transactionHash + }; + return __spreadProps(__spreadValues({}, eventObjects), { + address: who, + encryptedAccount: data + }); + } + }).filter((e) => e); + }); + } + getEventsFromGraph(_0) { + return __async(this, arguments, function* ({ fromBlock }) { + if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { + return { + events: [], + lastBlock: fromBlock + }; + } + return __superGet(BaseEchoService.prototype, this, 
"getEventsFromGraph").call(this, { fromBlock }); + }); + } +} class BaseEncryptedNotesService extends BaseEventsService { constructor({ netId, @@ -51106,11 +64374,15 @@ class BaseGovernanceService extends BaseEventsService { return "*"; } getGraphMethod() { - return "governanceEvents"; + return "getAllGovernanceEvents"; } formatEvents(events) { return __async(this, null, function* () { - const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args, eventName: event }) => { + const proposalEvents = []; + const votedEvents = []; + const delegatedEvents = []; + const undelegatedEvents = []; + events.forEach(({ blockNumber, index: logIndex, transactionHash, args, eventName: event }) => { const eventObjects = { blockNumber, logIndex, @@ -51119,60 +64391,61 @@ class BaseGovernanceService extends BaseEventsService { }; if (event === "ProposalCreated") { const { id, proposer, target, startTime, endTime, description } = args; - return __spreadProps(__spreadValues({}, eventObjects), { - id, + proposalEvents.push(__spreadProps(__spreadValues({}, eventObjects), { + id: Number(id), proposer, target, - startTime, - endTime, + startTime: Number(startTime), + endTime: Number(endTime), description - }); + })); } if (event === "Voted") { const { proposalId, voter, support, votes } = args; - return __spreadProps(__spreadValues({}, eventObjects), { - proposalId, + votedEvents.push(__spreadProps(__spreadValues({}, eventObjects), { + proposalId: Number(proposalId), voter, support, - votes - }); + votes, + from: "", + input: "" + })); } if (event === "Delegated") { const { account, to: delegateTo } = args; - return __spreadProps(__spreadValues({}, eventObjects), { + delegatedEvents.push(__spreadProps(__spreadValues({}, eventObjects), { account, delegateTo - }); + })); } if (event === "Undelegated") { const { account, from: delegateFrom } = args; - return __spreadProps(__spreadValues({}, eventObjects), { + 
undelegatedEvents.push(__spreadProps(__spreadValues({}, eventObjects), { account, delegateFrom - }); + })); } - }).filter((e) => e); - const votedEvents = formattedEvents.map((event, index) => __spreadProps(__spreadValues({}, event), { index })).filter(({ event }) => event === "Voted"); + }); if (votedEvents.length) { this.updateTransactionProgress({ percentage: 0 }); const txs = yield this.batchTransactionService.getBatchTransactions([ ...new Set(votedEvents.map(({ transactionHash }) => transactionHash)) ]); - votedEvents.forEach((event) => { + votedEvents.forEach((event, index) => { let { data: input, from } = txs.find((t) => t.hash === event.transactionHash); if (!input || input.length > 2048) { input = ""; } - formattedEvents[event.index].from = from; - formattedEvents[event.index].input = input; + votedEvents[index].from = from; + votedEvents[index].input = input; }); } - return formattedEvents; + return [...proposalEvents, ...votedEvents, ...delegatedEvents, ...undelegatedEvents]; }); } getEventsFromGraph(_0) { return __async(this, arguments, function* ({ fromBlock }) { - if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { + if (!this.graphApi || !this.subgraphName || this.graphApi.includes("api.thegraph.com")) { return { events: [], lastBlock: fromBlock @@ -51237,12 +64510,14 @@ class BaseRegistryService extends BaseEventsService { __webpack_require__.r(__webpack_exports__); /* harmony export */ __webpack_require__.d(__webpack_exports__, { /* harmony export */ BaseDepositsService: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.qD), +/* harmony export */ BaseEchoService: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.GS), /* harmony export */ BaseEncryptedNotesService: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.O_), /* harmony export */ BaseEventsService: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.uw), /* harmony export */ BaseGovernanceService: () => (/* reexport safe */ 
_base__WEBPACK_IMPORTED_MODULE_1__.JJ), /* harmony export */ BaseRegistryService: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.cE), /* harmony export */ DEPOSIT: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.Lx), /* harmony export */ NodeDepositsService: () => (/* reexport safe */ _node__WEBPACK_IMPORTED_MODULE_2__.fD), +/* harmony export */ NodeEchoService: () => (/* reexport safe */ _node__WEBPACK_IMPORTED_MODULE_2__.Rl), /* harmony export */ NodeEncryptedNotesService: () => (/* reexport safe */ _node__WEBPACK_IMPORTED_MODULE_2__.l5), /* harmony export */ NodeGovernanceService: () => (/* reexport safe */ _node__WEBPACK_IMPORTED_MODULE_2__.Om), /* harmony export */ NodeRegistryService: () => (/* reexport safe */ _node__WEBPACK_IMPORTED_MODULE_2__.b5), @@ -51269,6 +64544,7 @@ __webpack_require__.r(__webpack_exports__); "use strict"; /* harmony export */ __webpack_require__.d(__webpack_exports__, { /* harmony export */ Om: () => (/* binding */ NodeGovernanceService), +/* harmony export */ Rl: () => (/* binding */ NodeEchoService), /* harmony export */ b5: () => (/* binding */ NodeRegistryService), /* harmony export */ fD: () => (/* binding */ NodeDepositsService), /* harmony export */ l5: () => (/* binding */ NodeEncryptedNotesService) @@ -51444,10 +64720,136 @@ class NodeDepositsService extends _base__WEBPACK_IMPORTED_MODULE_3__/* .BaseDepo ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .saveEvents */ .xU)({ - name: instanceName, + yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .saveUserFile */ .jj)({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" + }); + } + }); + } +} +class NodeEchoService extends _base__WEBPACK_IMPORTED_MODULE_3__/* .BaseEchoService */ .GS { + constructor({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions, + 
cacheDirectory, + userDirectory + }) { + super({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions + }); + this.cacheDirectory = cacheDirectory; + this.userDirectory = userDirectory; + } + updateEventProgress({ type, fromBlock, toBlock, count }) { + if (toBlock) { + console.log(`fromBlock - ${fromBlock}`); + console.log(`toBlock - ${toBlock}`); + if (count) { + console.log(`downloaded ${type} events count - ${count}`); + console.log("____________________________________________"); + console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock} +`); + } + } + } + updateGraphProgress({ type, fromBlock, toBlock, count }) { + if (toBlock) { + console.log(`fromBlock - ${fromBlock}`); + console.log(`toBlock - ${toBlock}`); + if (count) { + console.log(`downloaded ${type} events from graph node count - ${count}`); + console.log("____________________________________________"); + console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock} +`); + } + } + } + getEventsFromDB() { + return __async(this, null, function* () { + if (!this.userDirectory) { + console.log(`Updating events for ${this.netId} chain echo events +`); + console.log(`savedEvents count - ${0}`); + console.log(`savedEvents lastBlock - ${this.deployedBlock} +`); + return { + events: [], + lastBlock: this.deployedBlock + }; + } + const savedEvents = yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .loadSavedEvents */ .rx)({ + name: this.getInstanceName(), + userDirectory: this.userDirectory, + deployedBlock: this.deployedBlock + }); + console.log(`Updating events for ${this.netId} chain echo events +`); + console.log(`savedEvents count - ${savedEvents.events.length}`); + console.log(`savedEvents lastBlock - ${savedEvents.lastBlock} +`); + return savedEvents; + }); + } + getEventsFromCache() { + return __async(this, null, function* () { + if (!this.cacheDirectory) { + console.log(`cachedEvents count - ${0}`); + console.log(`cachedEvents lastBlock 
- ${this.deployedBlock} +`); + return { + events: [], + lastBlock: this.deployedBlock + }; + } + const cachedEvents = yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .loadCachedEvents */ .sw)({ + name: this.getInstanceName(), + cacheDirectory: this.cacheDirectory, + deployedBlock: this.deployedBlock + }); + console.log(`cachedEvents count - ${cachedEvents.events.length}`); + console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock} +`); + return cachedEvents; + }); + } + saveEvents(_0) { + return __async(this, arguments, function* ({ events, lastBlock }) { + const instanceName = this.getInstanceName(); + console.log("\ntotalEvents count - ", events.length); + console.log( + `totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock} +` + ); + const eventTable = new (cli_table3__WEBPACK_IMPORTED_MODULE_0___default())(); + eventTable.push( + [{ colSpan: 2, content: "Echo Accounts", hAlign: "center" }], + ["Network", `${this.netId} chain`], + ["Events", `${events.length} events`], + [{ colSpan: 2, content: "Latest events" }], + ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => { + const eventIndex = events.length - index; + return [eventIndex, blockNumber]; + }) + ); + console.log(eventTable.toString() + "\n"); + if (this.userDirectory) { + yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .saveUserFile */ .jj)({ + fileName: instanceName + ".json", + userDirectory: this.userDirectory, + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -51570,10 +64972,10 @@ class NodeEncryptedNotesService extends _base__WEBPACK_IMPORTED_MODULE_3__/* .Ba ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .saveEvents */ .xU)({ - name: instanceName, + yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .saveUserFile */ .jj)({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: 
JSON.stringify(events, null, 2) + "\n" }); } }); @@ -51701,10 +65103,10 @@ class NodeGovernanceService extends _base__WEBPACK_IMPORTED_MODULE_3__/* .BaseGo ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .saveEvents */ .xU)({ - name: instanceName, + yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .saveUserFile */ .jj)({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -51827,10 +65229,10 @@ class NodeRegistryService extends _base__WEBPACK_IMPORTED_MODULE_3__/* .BaseRegi ); console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .saveEvents */ .xU)({ - name: instanceName, + yield (0,_data__WEBPACK_IMPORTED_MODULE_2__/* .saveUserFile */ .jj)({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -51949,7 +65351,10 @@ __webpack_require__.r(__webpack_exports__); // EXPORTS __webpack_require__.d(__webpack_exports__, { GET_DEPOSITS: () => (/* reexport */ GET_DEPOSITS), + GET_ECHO_EVENTS: () => (/* reexport */ GET_ECHO_EVENTS), GET_ENCRYPTED_NOTES: () => (/* reexport */ GET_ENCRYPTED_NOTES), + GET_GOVERNANCE_APY: () => (/* reexport */ GET_GOVERNANCE_APY), + GET_GOVERNANCE_EVENTS: () => (/* reexport */ GET_GOVERNANCE_EVENTS), GET_NOTE_ACCOUNTS: () => (/* reexport */ GET_NOTE_ACCOUNTS), GET_REGISTERED: () => (/* reexport */ GET_REGISTERED), GET_STATISTIC: () => (/* reexport */ GET_STATISTIC), @@ -51957,10 +65362,14 @@ __webpack_require__.d(__webpack_exports__, { _META: () => (/* reexport */ _META), getAllDeposits: () => (/* binding */ getAllDeposits), getAllEncryptedNotes: () => (/* binding */ getAllEncryptedNotes), + getAllGovernanceEvents: () => (/* binding */ getAllGovernanceEvents), + getAllGraphEchoEvents: () => (/* binding */ getAllGraphEchoEvents), 
getAllRegisters: () => (/* binding */ getAllRegisters), getAllWithdrawals: () => (/* binding */ getAllWithdrawals), getDeposits: () => (/* binding */ getDeposits), getEncryptedNotes: () => (/* binding */ getEncryptedNotes), + getGovernanceEvents: () => (/* binding */ getGovernanceEvents), + getGraphEchoEvents: () => (/* binding */ getGraphEchoEvents), getMeta: () => (/* binding */ getMeta), getNoteAccounts: () => (/* binding */ getNoteAccounts), getRegisters: () => (/* binding */ getRegisters), @@ -51972,7 +65381,7 @@ __webpack_require__.d(__webpack_exports__, { // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/address/address.js var address_address = __webpack_require__(30031); // EXTERNAL MODULE: ./src/services/providers.ts + 40 modules -var providers = __webpack_require__(54389); +var providers = __webpack_require__(32875); ;// CONCATENATED MODULE: ./src/services/graphql/queries.ts const GET_STATISTIC = ` @@ -52078,6 +65487,22 @@ const GET_NOTE_ACCOUNTS = ` } } `; +const GET_ECHO_EVENTS = ` + query getNoteAccounts($first: Int, $fromBlock: Int) { + noteAccounts(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + id + blockNumber + address + encryptedAccount + } + _meta { + block { + number + } + hasIndexingErrors + } + } +`; const GET_ENCRYPTED_NOTES = ` query getEncryptedNotes($first: Int, $fromBlock: Int) { encryptedNotes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { @@ -52094,6 +65519,59 @@ const GET_ENCRYPTED_NOTES = ` } } `; +const GET_GOVERNANCE_EVENTS = ` + query getGovernanceEvents($first: Int, $fromBlock: Int) { + proposals(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + proposalId + proposer + target + startTime + endTime + description + } + votes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + 
blockNumber + logIndex + transactionHash + proposalId + voter + support + votes + from + input + } + delegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + account + delegateTo + } + undelegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + account + delegateFrom + } + _meta { + block { + number + } + hasIndexingErrors + } + } +`; +const GET_GOVERNANCE_APY = ` + stakeDailyBurns(first: 30, orderBy: date, orderDirection: desc) { + id + date + dailyAmountBurned + } +`; ;// CONCATENATED MODULE: ./src/services/graphql/index.ts /* provided dependency */ var console = __webpack_require__(96763); @@ -52544,7 +66022,7 @@ function getNoteAccounts(_0) { subgraphName, query: GET_NOTE_ACCOUNTS, variables: { - address + address: address.toLowerCase() }, fetchDataOptions: fetchDataOptions2 }); @@ -52562,6 +66040,95 @@ function getNoteAccounts(_0) { } }); } +function getGraphEchoEvents({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2 +}) { + return queryGraph({ + graphApi, + subgraphName, + query: GET_ECHO_EVENTS, + variables: { + first, + fromBlock + }, + fetchDataOptions: fetchDataOptions2 + }); +} +function getAllGraphEchoEvents(_0) { + return __async(this, arguments, function* ({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2, + onProgress + }) { + try { + const events = []; + let lastSyncBlock = fromBlock; + while (true) { + let { + noteAccounts: result2, + _meta: { + // eslint-disable-next-line prefer-const + block: { number: currentBlock } + } + } = yield getGraphEchoEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions: fetchDataOptions2 }); + lastSyncBlock = currentBlock; + if (isEmptyArray(result2)) { + break; + } + const [firstEvent] = result2; + const [lastEvent2] = result2.slice(-1); + if (typeof 
onProgress === "function") { + onProgress({ + type: "EchoEvents", + fromBlock: Number(firstEvent.blockNumber), + toBlock: Number(lastEvent2.blockNumber), + count: result2.length + }); + } + if (result2.length < 900) { + events.push(...result2); + break; + } + result2 = result2.filter(({ blockNumber }) => blockNumber !== lastEvent2.blockNumber); + fromBlock = Number(lastEvent2.blockNumber); + events.push(...result2); + } + if (!events.length) { + return { + events: [], + lastSyncBlock + }; + } + const result = events.map((e) => { + const [transactionHash, logIndex] = e.id.split("-"); + return { + blockNumber: Number(e.blockNumber), + logIndex: Number(logIndex), + transactionHash, + address: (0,address_address/* getAddress */.b)(e.address), + encryptedAccount: e.encryptedAccount + }; + }); + const [lastEvent] = result.slice(-1); + return { + events: result, + lastSyncBlock: lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock + }; + } catch (err) { + console.log("Error from getAllGraphEchoEvents query"); + console.log(err); + return { + events: [], + lastSyncBlock: fromBlock + }; + } + }); +} function getEncryptedNotes({ graphApi, subgraphName, @@ -52647,6 +66214,152 @@ function getAllEncryptedNotes(_0) { } }); } +function getGovernanceEvents({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2 +}) { + return queryGraph({ + graphApi, + subgraphName, + query: GET_GOVERNANCE_EVENTS, + variables: { + first, + fromBlock + }, + fetchDataOptions: fetchDataOptions2 + }); +} +function getAllGovernanceEvents(_0) { + return __async(this, arguments, function* ({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2, + onProgress + }) { + try { + const result = []; + let lastSyncBlock = fromBlock; + while (true) { + const { + proposals, + votes, + delegates, + undelegates, + _meta: { + block: { number: currentBlock } + } + } = yield getGovernanceEvents({ graphApi, subgraphName, 
fromBlock, fetchDataOptions: fetchDataOptions2 }); + lastSyncBlock = currentBlock; + const eventsLength = proposals.length + votes.length + delegates.length + undelegates.length; + if (eventsLength === 0) { + break; + } + const formattedProposals = proposals.map( + ({ blockNumber, logIndex, transactionHash, proposalId, proposer, target, startTime, endTime, description }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "ProposalCreated", + id: Number(proposalId), + proposer: (0,address_address/* getAddress */.b)(proposer), + target: (0,address_address/* getAddress */.b)(target), + startTime: Number(startTime), + endTime: Number(endTime), + description + }; + } + ); + const formattedVotes = votes.map( + ({ blockNumber, logIndex, transactionHash, proposalId, voter, support, votes: votes2, from, input }) => { + if (!input || input.length > 2048) { + input = ""; + } + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Voted", + proposalId: Number(proposalId), + voter: (0,address_address/* getAddress */.b)(voter), + support, + votes: votes2, + from: (0,address_address/* getAddress */.b)(from), + input + }; + } + ); + const formattedDelegates = delegates.map( + ({ blockNumber, logIndex, transactionHash, account, delegateTo }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Delegated", + account: (0,address_address/* getAddress */.b)(account), + delegateTo: (0,address_address/* getAddress */.b)(delegateTo) + }; + } + ); + const formattedUndelegates = undelegates.map( + ({ blockNumber, logIndex, transactionHash, account, delegateFrom }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Undelegated", + account: (0,address_address/* getAddress */.b)(account), + delegateFrom: (0,address_address/* getAddress */.b)(delegateFrom) + }; + } + ); + 
let formattedEvents = [ + ...formattedProposals, + ...formattedVotes, + ...formattedDelegates, + ...formattedUndelegates + ].sort((a, b) => { + if (a.blockNumber === b.blockNumber) { + return a.logIndex - b.logIndex; + } + return a.blockNumber - b.blockNumber; + }); + if (eventsLength < 900) { + result.push(...formattedEvents); + break; + } + const [firstEvent] = formattedEvents; + const [lastEvent2] = formattedEvents.slice(-1); + if (typeof onProgress === "function") { + onProgress({ + type: "Governance Events", + fromBlock: Number(firstEvent.blockNumber), + toBlock: Number(lastEvent2.blockNumber), + count: eventsLength + }); + } + formattedEvents = formattedEvents.filter(({ blockNumber }) => blockNumber !== lastEvent2.blockNumber); + fromBlock = Number(lastEvent2.blockNumber); + result.push(...formattedEvents); + } + const [lastEvent] = result.slice(-1); + return { + events: result, + lastSyncBlock: lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock + }; + } catch (err) { + console.log("Error from getAllGovernance query"); + console.log(err); + return { + events: [], + lastSyncBlock: fromBlock + }; + } + }); +} /***/ }), @@ -52662,94 +66375,115 @@ __webpack_require__.r(__webpack_exports__); /* harmony export */ BatchTransactionService: () => (/* reexport safe */ _batch__WEBPACK_IMPORTED_MODULE_3__.AF), /* harmony export */ Deposit: () => (/* reexport safe */ _deposits__WEBPACK_IMPORTED_MODULE_5__.dA), /* harmony export */ GET_DEPOSITS: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_DEPOSITS), +/* harmony export */ GET_ECHO_EVENTS: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_ECHO_EVENTS), /* harmony export */ GET_ENCRYPTED_NOTES: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_ENCRYPTED_NOTES), +/* harmony export */ GET_GOVERNANCE_APY: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_GOVERNANCE_APY), +/* harmony export */ 
GET_GOVERNANCE_EVENTS: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_GOVERNANCE_EVENTS), /* harmony export */ GET_NOTE_ACCOUNTS: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_NOTE_ACCOUNTS), /* harmony export */ GET_REGISTERED: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_REGISTERED), /* harmony export */ GET_STATISTIC: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_STATISTIC), /* harmony export */ GET_WITHDRAWALS: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_WITHDRAWALS), /* harmony export */ Invoice: () => (/* reexport safe */ _deposits__WEBPACK_IMPORTED_MODULE_5__.qO), -/* harmony export */ MIN_STAKE_BALANCE: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.pO), -/* harmony export */ MerkleTreeService: () => (/* reexport safe */ _merkleTree__WEBPACK_IMPORTED_MODULE_7__.s), -/* harmony export */ Mimc: () => (/* reexport safe */ _mimc__WEBPACK_IMPORTED_MODULE_8__.p), -/* harmony export */ Pedersen: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_12__.Hr), -/* harmony export */ RelayerClient: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.OR), -/* harmony export */ TokenPriceOracle: () => (/* reexport safe */ _prices__WEBPACK_IMPORTED_MODULE_13__.T), -/* harmony export */ TornadoBrowserProvider: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.D2), -/* harmony export */ TornadoFeeOracle: () => (/* reexport safe */ _fees__WEBPACK_IMPORTED_MODULE_6__.o), -/* harmony export */ TornadoRpcSigner: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.Vr), -/* harmony export */ TornadoVoidSigner: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.Gd), -/* harmony export */ TornadoWallet: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.nA), +/* harmony export */ MIN_STAKE_BALANCE: () => (/* reexport safe */ 
_relayerClient__WEBPACK_IMPORTED_MODULE_16__.pO), +/* harmony export */ MerkleTreeService: () => (/* reexport safe */ _merkleTree__WEBPACK_IMPORTED_MODULE_8__.s), +/* harmony export */ Mimc: () => (/* reexport safe */ _mimc__WEBPACK_IMPORTED_MODULE_9__.p), +/* harmony export */ NetId: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.zr), +/* harmony export */ NoteAccount: () => (/* reexport safe */ _encryptedNotes__WEBPACK_IMPORTED_MODULE_6__.Ad), +/* harmony export */ Pedersen: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_13__.Hr), +/* harmony export */ RelayerClient: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.OR), +/* harmony export */ TokenPriceOracle: () => (/* reexport safe */ _prices__WEBPACK_IMPORTED_MODULE_14__.T), +/* harmony export */ TornadoBrowserProvider: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.D2), +/* harmony export */ TornadoFeeOracle: () => (/* reexport safe */ _fees__WEBPACK_IMPORTED_MODULE_7__.o), +/* harmony export */ TornadoRpcSigner: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.Vr), +/* harmony export */ TornadoVoidSigner: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.Gd), +/* harmony export */ TornadoWallet: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.nA), +/* harmony export */ TreeCache: () => (/* reexport safe */ _treeCache__WEBPACK_IMPORTED_MODULE_18__.v), /* harmony export */ _META: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__._META), +/* harmony export */ addNetwork: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.AE), /* harmony export */ ajv: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.SS), -/* harmony export */ base64ToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.Kp), -/* harmony export */ bigIntReplacer: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.gn), -/* 
harmony export */ blockSyncInterval: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_10__.W6), -/* harmony export */ bnToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.jm), -/* harmony export */ buffPedersenHash: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_12__.UB), -/* harmony export */ bufferToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.lY), -/* harmony export */ bytesToBN: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.Ju), -/* harmony export */ bytesToBase64: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__["if"]), -/* harmony export */ bytesToHex: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.My), -/* harmony export */ calculateScore: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.zy), -/* harmony export */ calculateSnarkProof: () => (/* reexport safe */ _websnark__WEBPACK_IMPORTED_MODULE_18__.i), -/* harmony export */ chunk: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.iv), -/* harmony export */ convertETHToTokenAmount: () => (/* reexport safe */ _fees__WEBPACK_IMPORTED_MODULE_6__.N), +/* harmony export */ base64ToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.Kp), +/* harmony export */ bigIntReplacer: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.gn), +/* harmony export */ bnToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.jm), +/* harmony export */ buffPedersenHash: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_13__.UB), +/* harmony export */ bufferToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.lY), +/* harmony export */ bytesToBN: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.Ju), +/* harmony export */ bytesToBase64: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__["if"]), +/* harmony export */ bytesToHex: () => (/* reexport safe */ 
_utils__WEBPACK_IMPORTED_MODULE_19__.My), +/* harmony export */ calculateScore: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.zy), +/* harmony export */ calculateSnarkProof: () => (/* reexport safe */ _websnark__WEBPACK_IMPORTED_MODULE_20__.i), +/* harmony export */ chunk: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.iv), +/* harmony export */ concatBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.Id), +/* harmony export */ convertETHToTokenAmount: () => (/* reexport safe */ _fees__WEBPACK_IMPORTED_MODULE_7__.N), /* harmony export */ createDeposit: () => (/* reexport safe */ _deposits__WEBPACK_IMPORTED_MODULE_5__.Hr), -/* harmony export */ defaultUserAgent: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.mJ), +/* harmony export */ crypto: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.Et), +/* harmony export */ customConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.cX), +/* harmony export */ defaultConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.sb), +/* harmony export */ defaultUserAgent: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.mJ), /* harmony export */ download: () => (/* reexport safe */ _data__WEBPACK_IMPORTED_MODULE_4__.RG), -/* harmony export */ enabledChains: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_10__.Af), +/* harmony export */ enabledChains: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.Af), /* harmony export */ existsAsync: () => (/* reexport safe */ _data__WEBPACK_IMPORTED_MODULE_4__.hn), -/* harmony export */ fetch: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.hd), -/* harmony export */ fetchData: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.Fd), -/* harmony export */ fetchGetUrlFunc: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.uY), +/* 
harmony export */ fetch: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.hd), +/* harmony export */ fetchData: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.Fd), +/* harmony export */ fetchGetUrlFunc: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.uY), /* harmony export */ getAllDeposits: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllDeposits), /* harmony export */ getAllEncryptedNotes: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllEncryptedNotes), +/* harmony export */ getAllGovernanceEvents: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllGovernanceEvents), +/* harmony export */ getAllGraphEchoEvents: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllGraphEchoEvents), /* harmony export */ getAllRegisters: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllRegisters), /* harmony export */ getAllWithdrawals: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllWithdrawals), +/* harmony export */ getConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.zj), /* harmony export */ getDeposits: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getDeposits), /* harmony export */ getEncryptedNotes: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getEncryptedNotes), -/* harmony export */ getGasOraclePlugin: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.bD), -/* harmony export */ getHttpAgent: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.WU), +/* harmony export */ getGasOraclePlugin: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.bD), +/* harmony export */ getGovernanceEvents: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getGovernanceEvents), +/* harmony export */ getGraphEchoEvents: () => (/* reexport safe */ 
_graphql__WEBPACK_IMPORTED_MODULE_1__.getGraphEchoEvents), +/* harmony export */ getHttpAgent: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.WU), +/* harmony export */ getInstanceByAddress: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.Zh), /* harmony export */ getMeta: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getMeta), +/* harmony export */ getNetworkConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.RY), /* harmony export */ getNoteAccounts: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getNoteAccounts), -/* harmony export */ getProvider: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.sO), -/* harmony export */ getProviderWithNetId: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.MF), +/* harmony export */ getProvider: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.sO), +/* harmony export */ getProviderWithNetId: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.MF), /* harmony export */ getRegisters: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getRegisters), /* harmony export */ getStatistic: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getStatistic), /* harmony export */ getStatusSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.c_), -/* harmony export */ getTokenBalances: () => (/* reexport safe */ _tokens__WEBPACK_IMPORTED_MODULE_16__.H), -/* harmony export */ getWeightRandom: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.c$), +/* harmony export */ getSubdomains: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.cF), +/* harmony export */ getSupportedInstances: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.XF), +/* harmony export */ getTokenBalances: () => (/* reexport safe */ _tokens__WEBPACK_IMPORTED_MODULE_17__.H), +/* 
harmony export */ getWeightRandom: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.c$), /* harmony export */ getWithdrawals: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getWithdrawals), -/* harmony export */ isNode: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.Ll), -/* harmony export */ isRelayerUpdated: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.mU), +/* harmony export */ hexToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.aT), +/* harmony export */ isNode: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.Ll), +/* harmony export */ isRelayerUpdated: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.mU), /* harmony export */ jobsSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.Us), -/* harmony export */ leBuff2Int: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.ae), -/* harmony export */ leInt2Buff: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.EI), +/* harmony export */ leBuff2Int: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.ae), +/* harmony export */ leInt2Buff: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.EI), /* harmony export */ loadCachedEvents: () => (/* reexport safe */ _data__WEBPACK_IMPORTED_MODULE_4__.sw), /* harmony export */ loadSavedEvents: () => (/* reexport safe */ _data__WEBPACK_IMPORTED_MODULE_4__.rx), -/* harmony export */ mimc: () => (/* reexport safe */ _mimc__WEBPACK_IMPORTED_MODULE_8__.f), -/* harmony export */ multicall: () => (/* reexport safe */ _multicall__WEBPACK_IMPORTED_MODULE_9__.C), -/* harmony export */ networkConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_10__.L4), -/* harmony export */ parseAddress: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_11__.or), -/* harmony export */ parseKey: () => (/* reexport safe */ 
_parser__WEBPACK_IMPORTED_MODULE_11__.Do), -/* harmony export */ parseMnemonic: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_11__.qc), -/* harmony export */ parseNumber: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_11__.$J), -/* harmony export */ parseRelayer: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_11__.yJ), -/* harmony export */ parseSemanticVersion: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.qo), -/* harmony export */ parseUrl: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_11__.Dl), -/* harmony export */ pedersen: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_12__.NO), -/* harmony export */ pickWeightedRandomRelayer: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.sN), -/* harmony export */ populateTransaction: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.zr), +/* harmony export */ mimc: () => (/* reexport safe */ _mimc__WEBPACK_IMPORTED_MODULE_9__.f), +/* harmony export */ multicall: () => (/* reexport safe */ _multicall__WEBPACK_IMPORTED_MODULE_10__.C), +/* harmony export */ packEncryptedMessage: () => (/* reexport safe */ _encryptedNotes__WEBPACK_IMPORTED_MODULE_6__.Fr), +/* harmony export */ parseAddress: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_12__.or), +/* harmony export */ parseKey: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_12__.Do), +/* harmony export */ parseMnemonic: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_12__.qc), +/* harmony export */ parseNumber: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_12__.$J), +/* harmony export */ parseRecoveryKey: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_12__._p), +/* harmony export */ parseRelayer: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_12__.yJ), +/* harmony export */ parseSemanticVersion: () => (/* reexport safe */ 
_relayerClient__WEBPACK_IMPORTED_MODULE_16__.qo), +/* harmony export */ parseUrl: () => (/* reexport safe */ _parser__WEBPACK_IMPORTED_MODULE_12__.Dl), +/* harmony export */ pedersen: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_13__.NO), +/* harmony export */ pickWeightedRandomRelayer: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.sN), +/* harmony export */ populateTransaction: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.zr), /* harmony export */ queryGraph: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.queryGraph), -/* harmony export */ rBigInt: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.ib), -/* harmony export */ saveEvents: () => (/* reexport safe */ _data__WEBPACK_IMPORTED_MODULE_4__.xU), -/* harmony export */ sleep: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.yy), -/* harmony export */ subdomains: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_10__.a7), -/* harmony export */ substring: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.uU), -/* harmony export */ toFixedHex: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.$W), -/* harmony export */ toFixedLength: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.sY), +/* harmony export */ rBigInt: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.ib), +/* harmony export */ saveUserFile: () => (/* reexport safe */ _data__WEBPACK_IMPORTED_MODULE_4__.jj), +/* harmony export */ sleep: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.yy), +/* harmony export */ substring: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.uU), +/* harmony export */ toFixedHex: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.$W), +/* harmony export */ toFixedLength: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.sY), +/* harmony export */ unpackEncryptedMessage: () => (/* 
reexport safe */ _encryptedNotes__WEBPACK_IMPORTED_MODULE_6__.ol), /* harmony export */ unzipAsync: () => (/* reexport safe */ _data__WEBPACK_IMPORTED_MODULE_4__.fY), -/* harmony export */ validateUrl: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_17__.wv), +/* harmony export */ validateUrl: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.wv), /* harmony export */ zipAsync: () => (/* reexport safe */ _data__WEBPACK_IMPORTED_MODULE_4__.a8) /* harmony export */ }); /* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(89148); @@ -52761,19 +66495,23 @@ __webpack_require__.r(__webpack_exports__); /* harmony import */ var _batch__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(47320); /* harmony import */ var _data__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(30438); /* harmony import */ var _deposits__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(13449); -/* harmony import */ var _fees__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(28723); -/* harmony import */ var _merkleTree__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(57704); -/* harmony import */ var _mimc__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(83536); -/* harmony import */ var _multicall__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(95977); -/* harmony import */ var _networkConfig__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(63852); -/* harmony import */ var _parser__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(89073); -/* harmony import */ var _pedersen__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(36554); -/* harmony import */ var _prices__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(78488); -/* harmony import */ var _providers__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(54389); -/* harmony import */ var _relayerClient__WEBPACK_IMPORTED_MODULE_15__ = __webpack_require__(83693); -/* harmony import */ var _tokens__WEBPACK_IMPORTED_MODULE_16__ = __webpack_require__(10556); -/* harmony 
import */ var _utils__WEBPACK_IMPORTED_MODULE_17__ = __webpack_require__(91401); -/* harmony import */ var _websnark__WEBPACK_IMPORTED_MODULE_18__ = __webpack_require__(49019); +/* harmony import */ var _encryptedNotes__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(87987); +/* harmony import */ var _fees__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(28723); +/* harmony import */ var _merkleTree__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(57704); +/* harmony import */ var _mimc__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(83536); +/* harmony import */ var _multicall__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(95977); +/* harmony import */ var _networkConfig__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(63852); +/* harmony import */ var _parser__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(89073); +/* harmony import */ var _pedersen__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(36554); +/* harmony import */ var _prices__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(78488); +/* harmony import */ var _providers__WEBPACK_IMPORTED_MODULE_15__ = __webpack_require__(32875); +/* harmony import */ var _relayerClient__WEBPACK_IMPORTED_MODULE_16__ = __webpack_require__(83693); +/* harmony import */ var _tokens__WEBPACK_IMPORTED_MODULE_17__ = __webpack_require__(10556); +/* harmony import */ var _treeCache__WEBPACK_IMPORTED_MODULE_18__ = __webpack_require__(36334); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_19__ = __webpack_require__(91401); +/* harmony import */ var _websnark__WEBPACK_IMPORTED_MODULE_20__ = __webpack_require__(49019); + + @@ -52843,7 +66581,7 @@ class MerkleTreeService { amount, currency, Tornado, - commitment, + commitmentHex, merkleTreeHeight = 20, emptyElement = "21663839004416932945382355908790599225266501822907911457504978515578255421292", merkleWorkerPath @@ -52854,13 +66592,13 @@ class MerkleTreeService { this.netId = Number(netId); this.Tornado = Tornado; this.instanceName = 
instanceName; - this.commitment = commitment; + this.commitmentHex = commitmentHex; this.merkleTreeHeight = merkleTreeHeight; this.emptyElement = emptyElement; this.merkleWorkerPath = merkleWorkerPath; } - createTree(_0) { - return __async(this, arguments, function* ({ events }) { + createTree(events) { + return __async(this, null, function* () { const { hash: hashFunction } = yield _mimc__WEBPACK_IMPORTED_MODULE_3__/* .mimc */ .f.getHash(); if (this.merkleWorkerPath) { console.log("Using merkleWorker\n"); @@ -52911,15 +66649,69 @@ class MerkleTreeService { }); }); } - verifyTree(_0) { - return __async(this, arguments, function* ({ events }) { + createPartialTree(_0) { + return __async(this, arguments, function* ({ edge, elements }) { + const { hash: hashFunction } = yield _mimc__WEBPACK_IMPORTED_MODULE_3__/* .mimc */ .f.getHash(); + if (this.merkleWorkerPath) { + console.log("Using merkleWorker\n"); + try { + if (_utils__WEBPACK_IMPORTED_MODULE_2__/* .isNode */ .Ll) { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new worker_threads__WEBPACK_IMPORTED_MODULE_0__.Worker(this.merkleWorkerPath, { + workerData: { + merkleTreeHeight: this.merkleTreeHeight, + edge, + elements, + zeroElement: this.emptyElement + } + }); + worker.on("message", resolve); + worker.on("error", reject); + worker.on("exit", (code) => { + if (code !== 0) { + reject(new Error(`Worker stopped with exit code ${code}`)); + } + }); + }); + return _tornado_fixed_merkle_tree__WEBPACK_IMPORTED_MODULE_1__.PartialMerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } else { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new Worker(this.merkleWorkerPath); + worker.onmessage = (e) => { + resolve(e.data); + }; + worker.onerror = (e) => { + reject(e); + }; + worker.postMessage({ + merkleTreeHeight: this.merkleTreeHeight, + edge, + elements, + zeroElement: this.emptyElement + }); + }); + return 
_tornado_fixed_merkle_tree__WEBPACK_IMPORTED_MODULE_1__.PartialMerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } + } catch (err) { + console.log("merkleWorker failed, falling back to synchronous merkle tree"); + console.log(err); + } + } + return new _tornado_fixed_merkle_tree__WEBPACK_IMPORTED_MODULE_1__.PartialMerkleTree(this.merkleTreeHeight, edge, elements, { + zeroElement: this.emptyElement, + hashFunction + }); + }); + } + verifyTree(events) { + return __async(this, null, function* () { console.log( ` Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCase()} would take a while ` ); console.time("Created tree in"); - const tree = yield this.createTree({ events: events.map(({ commitment }) => BigInt(commitment).toString()) }); + const tree = yield this.createTree(events.map(({ commitment }) => commitment)); console.timeEnd("Created tree in"); console.log(""); const isKnownRoot = yield this.Tornado.isKnownRoot((0,_utils__WEBPACK_IMPORTED_MODULE_2__/* .toFixedHex */ .$W)(BigInt(tree.root))); @@ -53054,14 +66846,44 @@ function multicall(Multicall2, calls) { "use strict"; /* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ AE: () => (/* binding */ addNetwork), /* harmony export */ Af: () => (/* binding */ enabledChains), -/* harmony export */ L4: () => (/* binding */ networkConfig), -/* harmony export */ W6: () => (/* binding */ blockSyncInterval), -/* harmony export */ a7: () => (/* binding */ subdomains) +/* harmony export */ RY: () => (/* binding */ getNetworkConfig), +/* harmony export */ Zh: () => (/* binding */ getInstanceByAddress), +/* harmony export */ cF: () => (/* binding */ getSubdomains), +/* harmony export */ cX: () => (/* binding */ customConfig), +/* harmony export */ sb: () => (/* binding */ defaultConfig), +/* harmony export */ zj: () => (/* binding */ getConfig), +/* harmony export */ zr: () => (/* binding */ NetId) /* harmony export */ }); -const 
blockSyncInterval = 1e4; -const enabledChains = ["1", "10", "56", "100", "137", "42161", "43114", "11155111"]; +var __defProp = Object.defineProperty; +var __getOwnPropSymbols = Object.getOwnPropertySymbols; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __propIsEnum = Object.prototype.propertyIsEnumerable; +var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp.call(b, prop)) + __defNormalProp(a, prop, b[prop]); + if (__getOwnPropSymbols) + for (var prop of __getOwnPropSymbols(b)) { + if (__propIsEnum.call(b, prop)) + __defNormalProp(a, prop, b[prop]); + } + return a; +}; +var NetId = /* @__PURE__ */ ((NetId2) => { + NetId2[NetId2["MAINNET"] = 1] = "MAINNET"; + NetId2[NetId2["BSC"] = 56] = "BSC"; + NetId2[NetId2["POLYGON"] = 137] = "POLYGON"; + NetId2[NetId2["OPTIMISM"] = 10] = "OPTIMISM"; + NetId2[NetId2["ARBITRUM"] = 42161] = "ARBITRUM"; + NetId2[NetId2["GNOSIS"] = 100] = "GNOSIS"; + NetId2[NetId2["AVALANCHE"] = 43114] = "AVALANCHE"; + NetId2[NetId2["SEPOLIA"] = 11155111] = "SEPOLIA"; + return NetId2; +})(NetId || {}); const theGraph = { name: "Hosted Graph", url: "https://api.thegraph.com" @@ -53070,8 +66892,8 @@ const tornado = { name: "Tornado Subgraphs", url: "https://tornadocash-rpc.com" }; -const networkConfig = { - netId1: { +const defaultConfig = { + [1 /* MAINNET */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 80, @@ -53081,11 +66903,7 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "ETH", - explorerUrl: { - tx: "https://etherscan.io/tx/", - address: "https://etherscan.io/address/", - block: "https://etherscan.io/block/" - }, + explorerUrl: "https://etherscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Ethereum Mainnet", @@ -53096,7 
+66914,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, mevblockerRPC: { @@ -53124,14 +66942,19 @@ const networkConfig = { url: "https://1rpc.io/eth" } }, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", - registryContract: "0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2", echoContract: "0x9B27DD5Bb15d42DC224FCD0B7caEbBe16161Df42", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornContract: "0x77777FeDdddFfC19Ff86DB637967013e6C6A116C", + governanceContract: "0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce", + stakingRewardsContract: "0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29", + registryContract: "0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2", aggregatorContract: "0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49", reverseRecordsContract: "0x3671aE578E63FdF66ad4F3E12CC0c0d71Ac7510C", tornadoSubgraph: "tornadocash/mainnet-tornado-subgraph", registrySubgraph: "tornadocash/tornado-relayer-registry", + governanceSubgraph: "tornadocash/tornado-governance", subgraphs: { tornado, theGraph @@ -53213,16 +67036,12 @@ const networkConfig = { constants: { GOVERNANCE_BLOCK: 11474695, NOTE_ACCOUNT_BLOCK: 11842486, - ENCRYPTED_NOTES_BLOCK: 14248730, + ENCRYPTED_NOTES_BLOCK: 12143762, REGISTRY_BLOCK: 14173129, MINING_BLOCK_TIME: 15 - }, - "torn.contract.tornadocash.eth": "0x77777FeDdddFfC19Ff86DB637967013e6C6A116C", - "governance.contract.tornadocash.eth": "0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce", - "tornado-router.contract.tornadocash.eth": "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", - "staking-rewards.contract.tornadocash.eth": "0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29" + } }, - netId56: { + [56 /* BSC */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 5, @@ 
-53232,18 +67051,15 @@ const networkConfig = { }, nativeCurrency: "bnb", currencyName: "BNB", - explorerUrl: { - tx: "https://bscscan.com/tx/", - address: "https://bscscan.com/address/", - block: "https://bscscan.com/block/" - }, + explorerUrl: "https://bscscan.com", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Binance Smart Chain", deployedBlock: 8158799, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/bsc-tornado-subgraph", subgraphs: { tornado, @@ -53255,7 +67071,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/bsc" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://bsc-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, stackup: { @@ -53288,10 +67104,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 8159269, ENCRYPTED_NOTES_BLOCK: 8159269 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId137: { + [137 /* POLYGON */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 100, @@ -53301,18 +67116,15 @@ const networkConfig = { }, nativeCurrency: "matic", currencyName: "MATIC", - explorerUrl: { - tx: "https://polygonscan.com/tx/", - address: "https://polygonscan.com/address/", - block: "https://polygonscan.com/block/" - }, + explorerUrl: "https://polygonscan.com", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Polygon (Matic) Network", deployedBlock: 16257962, - multicall: 
"0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", gasPriceOracleContract: "0xF81A8D8D3581985D3969fe53bFA67074aDFa8F3C", tornadoSubgraph: "tornadocash/matic-tornado-subgraph", subgraphs: { @@ -53350,10 +67162,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 16257996, ENCRYPTED_NOTES_BLOCK: 16257996 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId10: { + [10 /* OPTIMISM */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 1e-3, @@ -53363,18 +67174,15 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "ETH", - explorerUrl: { - tx: "https://optimistic.etherscan.io/tx/", - address: "https://optimistic.etherscan.io/address/", - block: "https://optimistic.etherscan.io/block/" - }, + explorerUrl: "https://optimistic.etherscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Optimism", deployedBlock: 2243689, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", ovmGasPriceOracleContract: "0x420000000000000000000000000000000000000F", tornadoSubgraph: "tornadocash/optimism-tornado-subgraph", subgraphs: { @@ -53387,7 +67195,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/op" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", 
url: "https://optimism-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, optimism: { @@ -53420,10 +67228,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 2243694, ENCRYPTED_NOTES_BLOCK: 2243694 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId42161: { + [42161 /* ARBITRUM */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 4, @@ -53433,18 +67240,15 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "ETH", - explorerUrl: { - tx: "https://arbiscan.io/tx/", - address: "https://arbiscan.io/address/", - block: "https://arbiscan.io/block/" - }, + explorerUrl: "https://arbiscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Arbitrum One", deployedBlock: 3430648, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/arbitrum-tornado-subgraph", subgraphs: { tornado, @@ -53456,7 +67260,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/arbitrum" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://arbitrum-one.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, arbitrum: { @@ -53489,10 +67293,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 3430605, ENCRYPTED_NOTES_BLOCK: 3430605 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId100: { + [100 /* GNOSIS */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 6, @@ -53502,18 +67305,15 @@ const networkConfig = { }, nativeCurrency: "xdai", currencyName: "xDAI", - 
explorerUrl: { - tx: "https://blockscout.com/xdai/mainnet/tx/", - address: "https://blockscout.com/xdai/mainnet/address/", - block: "https://blockscout.com/xdai/mainnet/block/" - }, + explorerUrl: "https://gnosisscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Gnosis Chain", deployedBlock: 17754561, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/xdai-tornado-subgraph", subgraphs: { tornado, @@ -53525,7 +67325,7 @@ const networkConfig = { url: "https://tornadocash-rpc.com/gnosis" }, chainnodes: { - name: "Tornado RPC", + name: "Chainnodes RPC", url: "https://gnosis-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" }, gnosis: { @@ -53558,10 +67358,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 17754564, ENCRYPTED_NOTES_BLOCK: 17754564 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId43114: { + [43114 /* AVALANCHE */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 225, @@ -53571,18 +67370,15 @@ const networkConfig = { }, nativeCurrency: "avax", currencyName: "AVAX", - explorerUrl: { - tx: "https://snowtrace.io/tx/", - address: "https://snowtrace.io/address/", - block: "https://snowtrace.io/block/" - }, + explorerUrl: "https://snowtrace.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Avalanche Mainnet", deployedBlock: 4429818, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: 
"0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", tornadoSubgraph: "tornadocash/avalanche-tornado-subgraph", subgraphs: { theGraph @@ -53617,10 +67413,9 @@ const networkConfig = { constants: { NOTE_ACCOUNT_BLOCK: 4429813, ENCRYPTED_NOTES_BLOCK: 4429813 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" + } }, - netId11155111: { + [11155111 /* SEPOLIA */]: { rpcCallRetryAttempt: 15, gasPrices: { instant: 2, @@ -53630,19 +67425,18 @@ const networkConfig = { }, nativeCurrency: "eth", currencyName: "SepoliaETH", - explorerUrl: { - tx: "https://sepolia.etherscan.io/tx/", - address: "https://sepolia.etherscan.io/address/", - block: "https://sepolia.etherscan.io/block/" - }, + explorerUrl: "https://sepolia.etherscan.io", merkleTreeHeight: 20, emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", networkName: "Ethereum Sepolia", deployedBlock: 5594395, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", routerContract: "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + tornContract: "0x3AE6667167C0f44394106E197904519D808323cA", + governanceContract: "0xe5324cD7602eeb387418e594B87aCADee08aeCAD", + stakingRewardsContract: "0x6d0018890751Efd31feb8166711B16732E2b496b", registryContract: "0x1428e5d2356b13778A13108b10c440C83011dfB8", - echoContract: "0xcDD1fc3F5ac2782D83449d3AbE80D6b7B273B0e5", aggregatorContract: "0x4088712AC9fad39ea133cdb9130E465d235e9642", reverseRecordsContract: "0xEc29700C0283e5Be64AcdFe8077d6cC95dE23C23", tornadoSubgraph: "tornadocash/sepolia-tornado-subgraph", @@ -53695,14 +67489,50 
@@ const networkConfig = { NOTE_ACCOUNT_BLOCK: 5594395, ENCRYPTED_NOTES_BLOCK: 5594395, MINING_BLOCK_TIME: 15 - }, - "torn.contract.tornadocash.eth": "0x3AE6667167C0f44394106E197904519D808323cA", - "governance.contract.tornadocash.eth": "0xe5324cD7602eeb387418e594B87aCADee08aeCAD", - "tornado-router.contract.tornadocash.eth": "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee" + } } }; -const subdomains = enabledChains.map((chain) => networkConfig[`netId${chain}`].ensSubdomainKey); -/* unused harmony default export */ var __WEBPACK_DEFAULT_EXPORT__ = ((/* unused pure expression or super */ null && (networkConfig))); +const enabledChains = Object.values(NetId); +let customConfig = {}; +function addNetwork(newConfig) { + enabledChains.push( + ...Object.keys(newConfig).map((netId) => Number(netId)).filter((netId) => !enabledChains.includes(netId)) + ); + customConfig = __spreadValues(__spreadValues({}, customConfig), newConfig); +} +function getNetworkConfig() { + const allConfig = __spreadValues(__spreadValues({}, defaultConfig), customConfig); + return enabledChains.reduce((acc, curr) => { + acc[curr] = allConfig[curr]; + return acc; + }, {}); +} +function getConfig(netId) { + const allConfig = getNetworkConfig(); + const chainConfig = allConfig[netId]; + if (!chainConfig) { + const errMsg = `No config found for network ${netId}!`; + throw new Error(errMsg); + } + return chainConfig; +} +function getInstanceByAddress({ netId, address }) { + const { tokens } = getConfig(netId); + for (const [currency, { instanceAddress }] of Object.entries(tokens)) { + for (const [amount, instance] of Object.entries(instanceAddress)) { + if (instance === address) { + return { + amount, + currency + }; + } + } + } +} +function getSubdomains() { + const allConfig = getNetworkConfig(); + return enabledChains.map((chain) => allConfig[chain].ensSubdomainKey); +} /***/ }), @@ -53715,6 +67545,7 @@ const subdomains = enabledChains.map((chain) => networkConfig[`netId${chain}`].e /* harmony export */ 
$J: () => (/* binding */ parseNumber), /* harmony export */ Dl: () => (/* binding */ parseUrl), /* harmony export */ Do: () => (/* binding */ parseKey), +/* harmony export */ _p: () => (/* binding */ parseRecoveryKey), /* harmony export */ or: () => (/* binding */ parseAddress), /* harmony export */ qc: () => (/* binding */ parseMnemonic), /* harmony export */ yJ: () => (/* binding */ parseRelayer) @@ -53782,6 +67613,17 @@ function parseKey(value) { } return value; } +function parseRecoveryKey(value) { + if (!value) { + throw new commander__WEBPACK_IMPORTED_MODULE_0__.InvalidArgumentError("Invalid Recovery Key"); + } + try { + (0,ethers__WEBPACK_IMPORTED_MODULE_4__/* .computeAddress */ .K)("0x" + value); + } catch (e) { + throw new commander__WEBPACK_IMPORTED_MODULE_0__.InvalidArgumentError("Invalid Recovery Key"); + } + return value; +} /***/ }), @@ -53887,7 +67729,7 @@ class TokenPriceOracle { /***/ }), -/***/ 54389: +/***/ 32875: /***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => { "use strict"; @@ -55063,7 +68905,7 @@ var address_address = __webpack_require__(30031); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/address/checks.js var checks = __webpack_require__(41442); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/crypto/keccak.js + 1 modules -var keccak = __webpack_require__(2011); +var keccak = __webpack_require__(15539); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/utils/maths.js var maths = __webpack_require__(27033); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/hash/id.js @@ -59912,7 +73754,7 @@ function spelunkMessage(value) { } //# sourceMappingURL=provider-jsonrpc.js.map // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/crypto/signing-key.js + 6 modules -var signing_key = __webpack_require__(72588); +var signing_key = __webpack_require__(15496); ;// CONCATENATED MODULE: ./node_modules/ethers/lib.esm/constants/strings.js // NFKC (composed) // (decomposed) /** @@ -60087,7 +73929,7 @@ class BaseWallet extends 
AbstractSigner { // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/crypto/sha2.js var sha2 = __webpack_require__(68650); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/crypto/crypto-browser.js + 1 modules -var crypto_browser = __webpack_require__(68682); +var crypto_browser = __webpack_require__(8180); ;// CONCATENATED MODULE: ./node_modules/ethers/lib.esm/crypto/hmac.js /** * An **HMAC** enables verification that a given key was used @@ -60136,11 +73978,11 @@ computeHmac.register = function (func) { }; Object.freeze(computeHmac); //# sourceMappingURL=hmac.js.map -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/_sha2.js -var _sha2 = __webpack_require__(6800); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/utils.js + 1 modules -var utils = __webpack_require__(32531); -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/ripemd160.js +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/_sha2.js +var _sha2 = __webpack_require__(37171); +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/utils.js + 1 modules +var utils = __webpack_require__(10750); +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/ripemd160.js // https://homes.esat.kuleuven.be/~bosselae/ripemd160.html @@ -60886,13 +74728,13 @@ function pkcs7Strip(data) { //# sourceMappingURL=index.js.map // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/crypto/pbkdf2.js var pbkdf2 = __webpack_require__(29851); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/_assert.js -var _assert = __webpack_require__(89190); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/sha256.js -var sha256 = __webpack_require__(78226); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/pbkdf2.js -var esm_pbkdf2 = __webpack_require__(2200); -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/scrypt.js +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/_assert.js +var _assert = __webpack_require__(27125); +// 
EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/sha256.js +var sha256 = __webpack_require__(3439); +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/pbkdf2.js +var esm_pbkdf2 = __webpack_require__(84877); +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/scrypt.js @@ -62958,6 +76800,7 @@ class TornadoBrowserProvider extends BrowserProvider { "use strict"; /* harmony export */ __webpack_require__.d(__webpack_exports__, { /* harmony export */ OR: () => (/* binding */ RelayerClient), +/* harmony export */ XF: () => (/* binding */ getSupportedInstances), /* harmony export */ c$: () => (/* binding */ getWeightRandom), /* harmony export */ mU: () => (/* binding */ isRelayerUpdated), /* harmony export */ pO: () => (/* binding */ MIN_STAKE_BALANCE), @@ -62965,11 +76808,13 @@ class TornadoBrowserProvider extends BrowserProvider { /* harmony export */ sN: () => (/* binding */ pickWeightedRandomRelayer), /* harmony export */ zy: () => (/* binding */ calculateScore) /* harmony export */ }); -/* harmony import */ var ethers__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(99770); -/* harmony import */ var ethers__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(64563); +/* harmony import */ var ethers__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(99770); +/* harmony import */ var ethers__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(30031); +/* harmony import */ var ethers__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(64563); /* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(91401); -/* harmony import */ var _providers__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(54389); -/* harmony import */ var _schemas__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(64984); +/* harmony import */ var _networkConfig__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(63852); +/* harmony import */ var _providers__WEBPACK_IMPORTED_MODULE_2__ = 
__webpack_require__(32875); +/* harmony import */ var _schemas__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(64984); /* provided dependency */ var console = __webpack_require__(96763); var __defProp = Object.defineProperty; @@ -63015,7 +76860,8 @@ var __async = (__this, __arguments, generator) => { -const MIN_STAKE_BALANCE = (0,ethers__WEBPACK_IMPORTED_MODULE_3__/* .parseEther */ .g5)("500"); + +const MIN_STAKE_BALANCE = (0,ethers__WEBPACK_IMPORTED_MODULE_4__/* .parseEther */ .g5)("500"); const semVerRegex = new RegExp("^(?0|[1-9]\\d*)\\.(?0|[1-9]\\d*)\\.(?0|[1-9]\\d*)(?:-(?(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+(?[0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$"); function parseSemanticVersion(version) { const { groups } = semVerRegex.exec(version); @@ -63023,11 +76869,11 @@ function parseSemanticVersion(version) { } function isRelayerUpdated(relayerVersion, netId) { const { major, patch, prerelease } = parseSemanticVersion(relayerVersion); - const requiredMajor = netId === 1 ? "4" : "5"; + const requiredMajor = netId === _networkConfig__WEBPACK_IMPORTED_MODULE_1__/* .NetId */ .zr.MAINNET ? 
"4" : "5"; const isUpdatedMajor = major === requiredMajor; if (prerelease) return false; - return isUpdatedMajor && (Number(patch) >= 5 || Number(netId) !== 1); + return isUpdatedMajor && (Number(patch) >= 5 || netId !== _networkConfig__WEBPACK_IMPORTED_MODULE_1__/* .NetId */ .zr.MAINNET); } function calculateScore({ stakeBalance, tornadoServiceFee }, minFee = 0.33, maxFee = 0.53) { if (tornadoServiceFee < minFee) { @@ -63049,9 +76895,15 @@ function getWeightRandom(weightsScores, random) { } return Math.floor(Math.random() * weightsScores.length); } +function getSupportedInstances(instanceList) { + const rawList = Object.values(instanceList).map(({ instanceAddress }) => { + return Object.values(instanceAddress); + }).flat(); + return rawList.map((l) => (0,ethers__WEBPACK_IMPORTED_MODULE_5__/* .getAddress */ .b)(l)); +} function pickWeightedRandomRelayer(relayers, netId) { let minFee, maxFee; - if (Number(netId) !== 1) { + if (netId !== _networkConfig__WEBPACK_IMPORTED_MODULE_1__/* .NetId */ .zr.MAINNET) { minFee = 0.01; maxFee = 0.3; } @@ -63065,7 +76917,7 @@ function pickWeightedRandomRelayer(relayers, netId) { } class RelayerClient { constructor({ netId, config, Aggregator, fetchDataOptions: fetchDataOptions2 }) { - this.netId = Number(netId); + this.netId = netId; this.config = config; this.Aggregator = Aggregator; this.fetchDataOptions = fetchDataOptions2; @@ -63077,14 +76929,14 @@ class RelayerClient { }) { var _a, _b; const url = `https://${!hostname.endsWith("/") ? hostname + "/" : hostname}`; - const rawStatus = yield (0,_providers__WEBPACK_IMPORTED_MODULE_1__/* .fetchData */ .Fd)(`${url}status`, __spreadProps(__spreadValues({}, this.fetchDataOptions), { + const rawStatus = yield (0,_providers__WEBPACK_IMPORTED_MODULE_2__/* .fetchData */ .Fd)(`${url}status`, __spreadProps(__spreadValues({}, this.fetchDataOptions), { headers: { "Content-Type": "application/json, application/x-www-form-urlencoded" }, timeout: ((_a = this.fetchDataOptions) == null ? 
void 0 : _a.torPort) ? 1e4 : 3e3, maxRetry: ((_b = this.fetchDataOptions) == null ? void 0 : _b.torPort) ? 2 : 0 })); - const statusValidator = _schemas__WEBPACK_IMPORTED_MODULE_2__/* .ajv */ .SS.compile((0,_schemas__WEBPACK_IMPORTED_MODULE_2__/* .getStatusSchema */ .c_)(this.netId, this.config)); + const statusValidator = _schemas__WEBPACK_IMPORTED_MODULE_3__/* .ajv */ .SS.compile((0,_schemas__WEBPACK_IMPORTED_MODULE_3__/* .getStatusSchema */ .c_)(this.netId, this.config)); if (!statusValidator(rawStatus)) { throw new Error("Invalid status schema"); } @@ -63097,7 +76949,7 @@ class RelayerClient { if (status.netId !== this.netId) { throw new Error("This relayer serves a different network"); } - if (relayerAddress && this.netId === 1 && status.rewardAccount !== relayerAddress) { + if (relayerAddress && this.netId === _networkConfig__WEBPACK_IMPORTED_MODULE_1__/* .NetId */ .zr.MAINNET && status.rewardAccount !== relayerAddress) { throw new Error("The Relayer reward address must match registered address"); } if (!isRelayerUpdated(status.version, this.netId)) { @@ -63108,6 +76960,7 @@ class RelayerClient { } filterRelayer(curr, relayer, subdomains, debugRelayer = false) { return __async(this, null, function* () { + var _a; const { ensSubdomainKey } = this.config; const subdomainIndex = subdomains.indexOf(ensSubdomainKey); const mainnetSubdomain = curr.records[0]; @@ -63128,7 +76981,9 @@ class RelayerClient { ensName, stakeBalance, relayerAddress, - rewardAccount: status.rewardAccount, + rewardAccount: (0,ethers__WEBPACK_IMPORTED_MODULE_5__/* .getAddress */ .b)(status.rewardAccount), + instances: getSupportedInstances(status.instances), + gasPrice: (_a = status.gasPrices) == null ? 
void 0 : _a.fast, ethPrices: status.ethPrices, currentQueue: status.currentQueue, tornadoServiceFee: status.tornadoServiceFee @@ -63166,7 +77021,7 @@ class RelayerClient { } return false; }); - const relayerNameHashes = uniqueRelayers.map((r) => (0,ethers__WEBPACK_IMPORTED_MODULE_4__/* .namehash */ .kM)(r.ensName)); + const relayerNameHashes = uniqueRelayers.map((r) => (0,ethers__WEBPACK_IMPORTED_MODULE_6__/* .namehash */ .kM)(r.ensName)); const relayersData = yield this.Aggregator.relayersData.staticCall(relayerNameHashes, subdomains); const invalidRelayers = []; const validRelayers = (yield Promise.all( @@ -63190,7 +77045,7 @@ class RelayerClient { tornadoWithdraw(_0) { return __async(this, arguments, function* ({ contract, proof, args }) { const { url } = this.selectedRelayer; - const withdrawResponse = yield (0,_providers__WEBPACK_IMPORTED_MODULE_1__/* .fetchData */ .Fd)(`${url}v1/tornadoWithdraw`, __spreadProps(__spreadValues({}, this.fetchDataOptions), { + const withdrawResponse = yield (0,_providers__WEBPACK_IMPORTED_MODULE_2__/* .fetchData */ .Fd)(`${url}v1/tornadoWithdraw`, __spreadProps(__spreadValues({}, this.fetchDataOptions), { method: "POST", headers: { "Content-Type": "application/json" @@ -63210,7 +77065,7 @@ class RelayerClient { console.log(`Job submitted: ${jobUrl} `); while (!relayerStatus || !["FAILED", "CONFIRMED"].includes(relayerStatus)) { - const jobResponse = yield (0,_providers__WEBPACK_IMPORTED_MODULE_1__/* .fetchData */ .Fd)(jobUrl, __spreadProps(__spreadValues({}, this.fetchDataOptions), { + const jobResponse = yield (0,_providers__WEBPACK_IMPORTED_MODULE_2__/* .fetchData */ .Fd)(jobUrl, __spreadProps(__spreadValues({}, this.fetchDataOptions), { method: "GET", headers: { "Content-Type": "application/json" @@ -63219,7 +77074,7 @@ class RelayerClient { if (jobResponse.error) { throw new Error(error); } - const jobValidator = _schemas__WEBPACK_IMPORTED_MODULE_2__/* .ajv */ .SS.compile(_schemas__WEBPACK_IMPORTED_MODULE_2__/* .jobsSchema */ 
.Us); + const jobValidator = _schemas__WEBPACK_IMPORTED_MODULE_3__/* .ajv */ .SS.compile(_schemas__WEBPACK_IMPORTED_MODULE_3__/* .jobsSchema */ .Us); if (!jobValidator(jobResponse)) { const errMsg = `${jobUrl} has an invalid job response`; throw new Error(errMsg); @@ -63268,8 +77123,11 @@ __webpack_require__.d(__webpack_exports__, { // EXTERNAL MODULE: ./node_modules/ajv/dist/ajv.js var ajv = __webpack_require__(63282); var ajv_default = /*#__PURE__*/__webpack_require__.n(ajv); +// EXTERNAL MODULE: ./src/services/networkConfig.ts +var networkConfig = __webpack_require__(63852); ;// CONCATENATED MODULE: ./src/services/schemas/status.ts + const addressType = { type: "string", pattern: "^0x[a-fA-F0-9]{40}$" }; const bnType = { type: "string", BN: true }; const statusSchema = { @@ -63344,7 +77202,7 @@ function getStatusSchema(netId, config) { } ); schema.properties.instances = instances; - if (Number(netId) === 1) { + if (netId === networkConfig/* NetId */.zr.MAINNET) { const _tokens = Object.keys(tokens).filter((t) => t !== nativeCurrency); const ethPrices = { type: "object", @@ -63505,6 +77363,137 @@ function getTokenBalances(_0) { } +/***/ }), + +/***/ 36334: +/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => { + +"use strict"; +/* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ v: () => (/* binding */ TreeCache) +/* harmony export */ }); +/* harmony import */ var bloomfilter_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(65403); +/* harmony import */ var bloomfilter_js__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(bloomfilter_js__WEBPACK_IMPORTED_MODULE_0__); +/* harmony import */ var _data__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(30438); +/* provided dependency */ var console = __webpack_require__(96763); + +var __defProp = Object.defineProperty; +var __defProps = Object.defineProperties; +var __getOwnPropDescs = Object.getOwnPropertyDescriptors; +var 
__getOwnPropSymbols = Object.getOwnPropertySymbols; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __propIsEnum = Object.prototype.propertyIsEnumerable; +var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp.call(b, prop)) + __defNormalProp(a, prop, b[prop]); + if (__getOwnPropSymbols) + for (var prop of __getOwnPropSymbols(b)) { + if (__propIsEnum.call(b, prop)) + __defNormalProp(a, prop, b[prop]); + } + return a; +}; +var __spreadProps = (a, b) => __defProps(a, __getOwnPropDescs(b)); +var __objRest = (source, exclude) => { + var target = {}; + for (var prop in source) + if (__hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0) + target[prop] = source[prop]; + if (source != null && __getOwnPropSymbols) + for (var prop of __getOwnPropSymbols(source)) { + if (exclude.indexOf(prop) < 0 && __propIsEnum.call(source, prop)) + target[prop] = source[prop]; + } + return target; +}; +var __async = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; + + +class TreeCache { + constructor({ netId, amount, currency, userDirectory, PARTS_COUNT = 4 }) { + this.netId = netId; + this.amount = amount; + this.currency = currency; + this.userDirectory = userDirectory; + this.PARTS_COUNT = PARTS_COUNT; + } + getInstanceName() { + return `deposits_${this.netId}_${this.currency}_${this.amount}`; + } + createTree(events, tree) { + return __async(this, null, function* () { + const bloom = new (bloomfilter_js__WEBPACK_IMPORTED_MODULE_0___default())(events.length); + console.log(`Creating cached tree for ${this.getInstanceName()} +`); + const eventsData = events.reduce( + (acc, _a, i) => { + var _b = _a, { leafIndex, commitment } = _b, rest = __objRest(_b, ["leafIndex", "commitment"]); + if (leafIndex !== i) { + throw new Error(`leafIndex (${leafIndex}) !== i (${i})`); + } + acc[commitment] = __spreadProps(__spreadValues({}, rest), { leafIndex }); + return acc; + }, + {} + ); + const slices = tree.getTreeSlices(this.PARTS_COUNT); + yield Promise.all( + slices.map((slice, index) => __async(this, null, function* () { + const metadata = slice.elements.reduce((acc, curr) => { + if (index < this.PARTS_COUNT - 1) { + bloom.add(curr); + } + acc.push(eventsData[curr]); + return acc; + }, []); + const dataString2 = JSON.stringify( + __spreadProps(__spreadValues({}, slice), { + metadata + }), + null, + 2 + ) + "\n"; + const fileName2 = `${this.getInstanceName()}_slice${index + 1}.json`; + yield (0,_data__WEBPACK_IMPORTED_MODULE_1__/* .saveUserFile */ .jj)({ + fileName: fileName2, + userDirectory: this.userDirectory, + dataString: dataString2 + }); + })) + ); + const dataString = bloom.serialize() + "\n"; + const fileName = `${this.getInstanceName()}_bloom.json`; + yield (0,_data__WEBPACK_IMPORTED_MODULE_1__/* .saveUserFile */ .jj)({ + fileName, + userDirectory: this.userDirectory, + 
dataString + }); + }); + } +} + + /***/ }), /***/ 91401: @@ -63514,10 +77503,13 @@ function getTokenBalances(_0) { /* harmony export */ __webpack_require__.d(__webpack_exports__, { /* harmony export */ $W: () => (/* binding */ toFixedHex), /* harmony export */ EI: () => (/* binding */ leInt2Buff), +/* harmony export */ Et: () => (/* binding */ crypto), +/* harmony export */ Id: () => (/* binding */ concatBytes), /* harmony export */ Ju: () => (/* binding */ bytesToBN), /* harmony export */ Kp: () => (/* binding */ base64ToBytes), /* harmony export */ Ll: () => (/* binding */ isNode), /* harmony export */ My: () => (/* binding */ bytesToHex), +/* harmony export */ aT: () => (/* binding */ hexToBytes), /* harmony export */ ae: () => (/* binding */ leBuff2Int), /* harmony export */ gn: () => (/* binding */ bigIntReplacer), /* harmony export */ ib: () => (/* binding */ rBigInt), @@ -63530,8 +77522,7 @@ function getTokenBalances(_0) { /* harmony export */ wv: () => (/* binding */ validateUrl), /* harmony export */ yy: () => (/* binding */ sleep) /* harmony export */ }); -/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(40262); -/* harmony import */ var url__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(url__WEBPACK_IMPORTED_MODULE_0__); +/* harmony import */ var crypto__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(91565); /* harmony import */ var bn_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(39404); /* harmony import */ var bn_js__WEBPACK_IMPORTED_MODULE_1___default = /*#__PURE__*/__webpack_require__.n(bn_js__WEBPACK_IMPORTED_MODULE_1__); /* provided dependency */ var process = __webpack_require__(65606); @@ -63542,13 +77533,14 @@ BigInt.prototype.toJSON = function() { return this.toString(); }; const isNode = !process.browser && typeof globalThis.window === "undefined"; +const crypto = isNode ? 
crypto__WEBPACK_IMPORTED_MODULE_0__.webcrypto : globalThis.crypto; const chunk = (arr, size) => [...Array(Math.ceil(arr.length / size))].map((_, i) => arr.slice(size * i, size + size * i)); function sleep(ms) { return new Promise((resolve) => setTimeout(resolve, ms)); } function validateUrl(url, protocols) { try { - const parsedUrl = new url__WEBPACK_IMPORTED_MODULE_0__.URL(url); + const parsedUrl = new URL(url); if (protocols && protocols.length) { return protocols.map((p) => p.toLowerCase()).includes(parsedUrl.protocol); } @@ -63557,28 +77549,36 @@ function validateUrl(url, protocols) { return false; } } +function concatBytes(...arrays) { + const totalSize = arrays.reduce((acc, e) => acc + e.length, 0); + const merged = new Uint8Array(totalSize); + arrays.forEach((array, i, arrays2) => { + const offset = arrays2.slice(0, i).reduce((acc, e) => acc + e.length, 0); + merged.set(array, offset); + }); + return merged; +} function bufferToBytes(b) { return new Uint8Array(b.buffer); } function bytesToBase64(bytes) { - let binary = ""; - const len = bytes.byteLength; - for (let i = 0; i < len; ++i) { - binary += String.fromCharCode(bytes[i]); - } - return btoa(binary); + return btoa(String.fromCharCode.apply(null, Array.from(bytes))); } function base64ToBytes(base64) { - const binaryString = atob(base64); - const bytes = new Uint8Array(binaryString.length); - for (let i = 0; i < binaryString.length; i++) { - bytes[i] = binaryString.charCodeAt(i); - } - return bytes; + return Uint8Array.from(atob(base64), (c) => c.charCodeAt(0)); } function bytesToHex(bytes) { return "0x" + Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join(""); } +function hexToBytes(hexString) { + if (hexString.slice(0, 2) === "0x") { + hexString = hexString.replace("0x", ""); + } + if (hexString.length % 2 !== 0) { + hexString = "0" + hexString; + } + return Uint8Array.from(hexString.match(/.{1,2}/g).map((byte) => parseInt(byte, 16))); +} function bytesToBN(bytes) { return 
BigInt(bytesToHex(bytes)); } @@ -66303,6 +80303,112 @@ ReverseRecords__factory.abi = ReverseRecords_factory_abi; +/***/ }), + +/***/ 32019: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.keccak512 = exports.keccak384 = exports.keccak256 = exports.keccak224 = void 0; +const sha3_1 = __webpack_require__(32955); +const utils_js_1 = __webpack_require__(82672); +exports.keccak224 = (0, utils_js_1.wrapHash)(sha3_1.keccak_224); +exports.keccak256 = (() => { + const k = (0, utils_js_1.wrapHash)(sha3_1.keccak_256); + k.create = sha3_1.keccak_256.create; + return k; +})(); +exports.keccak384 = (0, utils_js_1.wrapHash)(sha3_1.keccak_384); +exports.keccak512 = (0, utils_js_1.wrapHash)(sha3_1.keccak_512); + + +/***/ }), + +/***/ 26513: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.secp256k1 = void 0; +var secp256k1_1 = __webpack_require__(8510); +Object.defineProperty(exports, "secp256k1", ({ enumerable: true, get: function () { return secp256k1_1.secp256k1; } })); + + +/***/ }), + +/***/ 82672: +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* module decorator */ module = __webpack_require__.nmd(module); + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.crypto = exports.wrapHash = exports.equalsBytes = exports.hexToBytes = exports.bytesToUtf8 = exports.utf8ToBytes = exports.createView = exports.concatBytes = exports.toHex = exports.bytesToHex = exports.assertBytes = exports.assertBool = void 0; +const _assert_1 = __importDefault(__webpack_require__(67557)); +const utils_1 = __webpack_require__(99175); +const assertBool = _assert_1.default.bool; +exports.assertBool = assertBool; +const assertBytes = _assert_1.default.bytes; +exports.assertBytes = assertBytes; +var utils_2 = __webpack_require__(99175); +Object.defineProperty(exports, "bytesToHex", ({ enumerable: true, get: function () { return utils_2.bytesToHex; } })); +Object.defineProperty(exports, "toHex", ({ enumerable: true, get: function () { return utils_2.bytesToHex; } })); +Object.defineProperty(exports, "concatBytes", ({ enumerable: true, get: function () { return utils_2.concatBytes; } })); +Object.defineProperty(exports, "createView", ({ enumerable: true, get: function () { return utils_2.createView; } })); +Object.defineProperty(exports, "utf8ToBytes", ({ enumerable: true, get: function () { return utils_2.utf8ToBytes; } })); +// buf.toString('utf8') -> bytesToUtf8(buf) +function bytesToUtf8(data) { + if (!(data instanceof Uint8Array)) { + throw new TypeError(`bytesToUtf8 expected Uint8Array, got ${typeof data}`); + } + return new TextDecoder().decode(data); +} +exports.bytesToUtf8 = bytesToUtf8; +function hexToBytes(data) { + const sliced = data.startsWith("0x") ? 
data.substring(2) : data; + return (0, utils_1.hexToBytes)(sliced); +} +exports.hexToBytes = hexToBytes; +// buf.equals(buf2) -> equalsBytes(buf, buf2) +function equalsBytes(a, b) { + if (a.length !== b.length) { + return false; + } + for (let i = 0; i < a.length; i++) { + if (a[i] !== b[i]) { + return false; + } + } + return true; +} +exports.equalsBytes = equalsBytes; +// Internal utils +function wrapHash(hash) { + return (msg) => { + _assert_1.default.bytes(msg); + return hash(msg); + }; +} +exports.wrapHash = wrapHash; +// TODO(v3): switch away from node crypto, remove this unnecessary variable. +exports.crypto = (() => { + const webCrypto = typeof globalThis === "object" && "crypto" in globalThis ? globalThis.crypto : undefined; + const nodeRequire = true && + typeof module.require === "function" && + module.require.bind(module); + return { + node: nodeRequire && !webCrypto ? nodeRequire("crypto") : undefined, + web: webCrypto + }; +})(); + + /***/ }), /***/ 37007: @@ -69061,6 +83167,44 @@ HmacDRBG.prototype.generate = function generate(len, enc, add, addEnc) { }; +/***/ }), + +/***/ 11083: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +var http = __webpack_require__(11568) +var url = __webpack_require__(23276) + +var https = module.exports + +for (var key in http) { + if (http.hasOwnProperty(key)) https[key] = http[key] +} + +https.request = function (params, cb) { + params = validateParams(params) + return http.request.call(this, params, cb) +} + +https.get = function (params, cb) { + params = validateParams(params) + return http.get.call(this, params, cb) +} + +function validateParams (params) { + if (typeof params === 'string') { + params = url.parse(params) + } + if (!params.protocol) { + params.protocol = 'https:' + } + if (params.protocol !== 'https:') { + throw new Error('Protocol "' + params.protocol + '" not supported. 
Expected "https:"') + } + return params +} + + /***/ }), /***/ 251: @@ -70250,6 +84394,348 @@ function escapeJsonPtr(str) { } +/***/ }), + +/***/ 69749: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + + +// A linked list to keep track of recently-used-ness +const Yallist = __webpack_require__(28799) + +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') + +const naiveLength = () => 1 + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? 
naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. 
+ set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? 
Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = 
self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. + const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache + + /***/ }), /***/ 88276: @@ -70507,6 +84993,233 @@ HashBase.prototype._digest = function () { module.exports = HashBase +/***/ }), + +/***/ 6215: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +/* provided dependency */ var process = __webpack_require__(65606); + +Object.defineProperty(exports, "__esModule", 
({ value: true })); +exports.InvalidStatusCodeError = exports.InvalidCertError = void 0; +const DEFAULT_OPT = Object.freeze({ + redirect: true, + expectStatusCode: 200, + headers: {}, + full: false, + keepAlive: true, + cors: false, + referrer: false, + sslAllowSelfSigned: false, + _redirectCount: 0, +}); +class InvalidCertError extends Error { + constructor(msg, fingerprint256) { + super(msg); + this.fingerprint256 = fingerprint256; + } +} +exports.InvalidCertError = InvalidCertError; +class InvalidStatusCodeError extends Error { + constructor(statusCode) { + super(`Request Failed. Status Code: ${statusCode}`); + this.statusCode = statusCode; + } +} +exports.InvalidStatusCodeError = InvalidStatusCodeError; +function detectType(b, type) { + if (!type || type === 'text' || type === 'json') { + try { + let text = new TextDecoder('utf8', { fatal: true }).decode(b); + if (type === 'text') + return text; + try { + return JSON.parse(text); + } + catch (err) { + if (type === 'json') + throw err; + return text; + } + } + catch (err) { + if (type === 'text' || type === 'json') + throw err; + } + } + return b; +} +let agents = {}; +function fetchNode(url, _options) { + let options = { ...DEFAULT_OPT, ..._options }; + const http = __webpack_require__(11568); + const https = __webpack_require__(11083); + const zlib = __webpack_require__(78559); + const { promisify } = __webpack_require__(40537); + const { resolve: urlResolve } = __webpack_require__(59676); + const isSecure = !!/^https/.test(url); + let opts = { + method: options.method || 'GET', + headers: { 'Accept-Encoding': 'gzip, deflate, br' }, + }; + const compactFP = (s) => s.replace(/:| /g, '').toLowerCase(); + if (options.keepAlive) { + const agentOpt = { + keepAlive: true, + keepAliveMsecs: 30 * 1000, + maxFreeSockets: 1024, + maxCachedSessions: 1024, + }; + const agentKey = [ + isSecure, + isSecure && options.sslPinnedCertificates?.map((i) => compactFP(i)).sort(), + ].join(); + opts.agent = + agents[agentKey] || 
(agents[agentKey] = new (isSecure ? https : http).Agent(agentOpt)); + } + if (options.type === 'json') + opts.headers['Content-Type'] = 'application/json'; + if (options.data) { + if (!options.method) + opts.method = 'POST'; + opts.body = options.type === 'json' ? JSON.stringify(options.data) : options.data; + } + opts.headers = { ...opts.headers, ...options.headers }; + if (options.sslAllowSelfSigned) + opts.rejectUnauthorized = false; + const handleRes = async (res) => { + const status = res.statusCode; + if (options.redirect && 300 <= status && status < 400 && res.headers['location']) { + if (options._redirectCount == 10) + throw new Error('Request failed. Too much redirects.'); + options._redirectCount += 1; + return await fetchNode(urlResolve(url, res.headers['location']), options); + } + if (options.expectStatusCode && status !== options.expectStatusCode) { + res.resume(); + throw new InvalidStatusCodeError(status); + } + let buf = []; + for await (const chunk of res) + buf.push(chunk); + let bytes = Buffer.concat(buf); + const encoding = res.headers['content-encoding']; + if (encoding === 'br') + bytes = await promisify(zlib.brotliDecompress)(bytes); + if (encoding === 'gzip' || encoding === 'deflate') + bytes = await promisify(zlib.unzip)(bytes); + const body = detectType(bytes, options.type); + if (options.full) + return { headers: res.headers, status, body }; + return body; + }; + return new Promise((resolve, reject) => { + const handleError = async (err) => { + if (err && err.code === 'DEPTH_ZERO_SELF_SIGNED_CERT') { + try { + await fetchNode(url, { ...options, sslAllowSelfSigned: true, sslPinnedCertificates: [] }); + } + catch (e) { + if (e && e.fingerprint256) { + err = new InvalidCertError(`Self-signed SSL certificate: ${e.fingerprint256}`, e.fingerprint256); + } + } + } + reject(err); + }; + const req = (isSecure ? 
https : http).request(url, opts, (res) => { + res.on('error', handleError); + (async () => { + try { + resolve(await handleRes(res)); + } + catch (error) { + reject(error); + } + })(); + }); + req.on('error', handleError); + const pinned = options.sslPinnedCertificates?.map((i) => compactFP(i)); + const mfetchSecureConnect = (socket) => { + const fp256 = compactFP(socket.getPeerCertificate()?.fingerprint256 || ''); + if (!fp256 && socket.isSessionReused()) + return; + if (pinned.includes(fp256)) + return; + req.emit('error', new InvalidCertError(`Invalid SSL certificate: ${fp256} Expected: ${pinned}`, fp256)); + return req.abort(); + }; + if (options.sslPinnedCertificates) { + req.on('socket', (socket) => { + const hasListeners = socket + .listeners('secureConnect') + .map((i) => (i.name || '').replace('bound ', '')) + .includes('mfetchSecureConnect'); + if (hasListeners) + return; + socket.on('secureConnect', mfetchSecureConnect.bind(null, socket)); + }); + } + if (options.keepAlive) + req.setNoDelay(true); + if (opts.body) + req.write(opts.body); + req.end(); + }); +} +const SAFE_HEADERS = new Set(['Accept', 'Accept-Language', 'Content-Language', 'Content-Type'].map((i) => i.toLowerCase())); +const FORBIDDEN_HEADERS = new Set(['Accept-Charset', 'Accept-Encoding', 'Access-Control-Request-Headers', 'Access-Control-Request-Method', + 'Connection', 'Content-Length', 'Cookie', 'Cookie2', 'Date', 'DNT', 'Expect', 'Host', 'Keep-Alive', 'Origin', 'Referer', 'TE', 'Trailer', + 'Transfer-Encoding', 'Upgrade', 'Via'].map((i) => i.toLowerCase())); +async function fetchBrowser(url, _options) { + let options = { ...DEFAULT_OPT, ..._options }; + const headers = new Headers(); + if (options.type === 'json') + headers.set('Content-Type', 'application/json'); + let parsed = new URL(url); + if (parsed.username) { + const auth = btoa(`${parsed.username}:${parsed.password}`); + headers.set('Authorization', `Basic ${auth}`); + parsed.username = ''; + parsed.password = ''; + } + url = 
'' + parsed; + for (let k in options.headers) { + const name = k.toLowerCase(); + if (SAFE_HEADERS.has(name) || (options.cors && !FORBIDDEN_HEADERS.has(name))) + headers.set(k, options.headers[k]); + } + let opts = { headers, redirect: options.redirect ? 'follow' : 'manual' }; + if (!options.referrer) + opts.referrerPolicy = 'no-referrer'; + if (options.cors) + opts.mode = 'cors'; + if (options.data) { + if (!options.method) + opts.method = 'POST'; + opts.body = options.type === 'json' ? JSON.stringify(options.data) : options.data; + } + const res = await fetch(url, opts); + if (options.expectStatusCode && res.status !== options.expectStatusCode) + throw new InvalidStatusCodeError(res.status); + const body = detectType(new Uint8Array(await res.arrayBuffer()), options.type); + if (options.full) + return { headers: Object.fromEntries(res.headers.entries()), status: res.status, body }; + return body; +} +const IS_NODE = !!(typeof process == 'object' && + process.versions && + process.versions.node && + process.versions.v8); +function fetchUrl(url, options) { + const fn = IS_NODE ? fetchNode : fetchBrowser; + return fn(url, options); +} +exports["default"] = fetchUrl; + + /***/ }), /***/ 52244: @@ -95623,6 +110336,175 @@ webpackContext.id = 35358; }))); +/***/ }), + +/***/ 6585: +/***/ ((module) => { + +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? 
fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. 
+ * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} + + /***/ }), /***/ 86889: @@ -96106,6 +110988,5796 @@ exports.homedir = function () { }; +/***/ }), + +/***/ 9805: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + + +var TYPED_OK = (typeof Uint8Array !== 'undefined') && + (typeof Uint16Array !== 'undefined') && + (typeof Int32Array !== 'undefined'); + +function _has(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +exports.assign = function (obj /*from1, from2, from3, ...*/) { + var sources = Array.prototype.slice.call(arguments, 1); + while (sources.length) { + var source = sources.shift(); + if (!source) { continue; } + + if (typeof source !== 'object') { + throw new TypeError(source + 'must be non-object'); + } + + for (var p in source) { + if (_has(source, p)) { + obj[p] = source[p]; + } + } + } + + return obj; +}; + + +// reduce buffer size, avoiding mem copy +exports.shrinkBuf = function (buf, size) { + if (buf.length === size) { return buf; } + if (buf.subarray) { return buf.subarray(0, size); } + buf.length = size; + return buf; +}; + + +var fnTyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + if (src.subarray && dest.subarray) { + dest.set(src.subarray(src_offs, src_offs + len), dest_offs); + return; + } + // Fallback to ordinary array + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. 
+ flattenChunks: function (chunks) { + var i, l, len, pos, chunk, result; + + // calculate data length + len = 0; + for (i = 0, l = chunks.length; i < l; i++) { + len += chunks[i].length; + } + + // join chunks + result = new Uint8Array(len); + pos = 0; + for (i = 0, l = chunks.length; i < l; i++) { + chunk = chunks[i]; + result.set(chunk, pos); + pos += chunk.length; + } + + return result; + } +}; + +var fnUntyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. + flattenChunks: function (chunks) { + return [].concat.apply([], chunks); + } +}; + + +// Enable/Disable typed arrays use, for testing +// +exports.setTyped = function (on) { + if (on) { + exports.Buf8 = Uint8Array; + exports.Buf16 = Uint16Array; + exports.Buf32 = Int32Array; + exports.assign(exports, fnTyped); + } else { + exports.Buf8 = Array; + exports.Buf16 = Array; + exports.Buf32 = Array; + exports.assign(exports, fnUntyped); + } +}; + +exports.setTyped(TYPED_OK); + + +/***/ }), + +/***/ 53269: +/***/ ((module) => { + +"use strict"; + + +// Note: adler32 takes 12% for level 0 and 2% for level 6. +// It isn't worth it to make additional optimizations as in original. +// Small size is preferable. + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. 
If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +function adler32(adler, buf, len, pos) { + var s1 = (adler & 0xffff) |0, + s2 = ((adler >>> 16) & 0xffff) |0, + n = 0; + + while (len !== 0) { + // Set limit ~ twice less than 5552, to keep + // s2 in 31-bits, because we force signed ints. + // in other case %= will fail. + n = len > 2000 ? 2000 : len; + len -= n; + + do { + s1 = (s1 + buf[pos++]) |0; + s2 = (s2 + s1) |0; + } while (--n); + + s1 %= 65521; + s2 %= 65521; + } + + return (s1 | (s2 << 16)) |0; +} + + +module.exports = adler32; + + +/***/ }), + +/***/ 19681: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. 
+ +module.exports = { + + /* Allowed flush values; see deflate() and inflate() below for details */ + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_TREES: 6, + + /* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + //Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + //Z_VERSION_ERROR: -6, + + /* compression levels */ + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + + + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + + /* Possible values of the data_type field (though see inflate()) */ + Z_BINARY: 0, + Z_TEXT: 1, + //Z_ASCII: 1, // = Z_TEXT (deprecated) + Z_UNKNOWN: 2, + + /* The deflate compression method */ + Z_DEFLATED: 8 + //Z_NULL: null // Use -1 or null inline, depending on var type +}; + + +/***/ }), + +/***/ 14823: +/***/ ((module) => { + +"use strict"; + + +// Note: we can't get significant speed boost here. +// So write code to minimize size - no pregenerated tables +// and array tools dependencies. + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. 
If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +// Use ordinary array, since untyped makes no boost here +function makeTable() { + var c, table = []; + + for (var n = 0; n < 256; n++) { + c = n; + for (var k = 0; k < 8; k++) { + c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); + } + table[n] = c; + } + + return table; +} + +// Create table on load. Just 255 signed longs. Not a problem. +var crcTable = makeTable(); + + +function crc32(crc, buf, len, pos) { + var t = crcTable, + end = pos + len; + + crc ^= -1; + + for (var i = pos; i < end; i++) { + crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF]; + } + + return (crc ^ (-1)); // >>> 0; +} + + +module.exports = crc32; + + +/***/ }), + +/***/ 58411: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. 
This notice may not be removed or altered from any source distribution. + +var utils = __webpack_require__(9805); +var trees = __webpack_require__(23665); +var adler32 = __webpack_require__(53269); +var crc32 = __webpack_require__(14823); +var msg = __webpack_require__(54674); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +var Z_NO_FLUSH = 0; +var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +//var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ +var Z_OK = 0; +var Z_STREAM_END = 1; +//var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +//var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + + +/* compression levels */ +//var Z_NO_COMPRESSION = 0; +//var Z_BEST_SPEED = 1; +//var Z_BEST_COMPRESSION = 9; +var Z_DEFAULT_COMPRESSION = -1; + + +var Z_FILTERED = 1; +var Z_HUFFMAN_ONLY = 2; +var Z_RLE = 3; +var Z_FIXED = 4; +var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +//var Z_BINARY = 0; +//var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + + +/* The deflate compression method */ +var Z_DEFLATED = 8; + +/*============================================================================*/ + + +var MAX_MEM_LEVEL = 9; +/* Maximum value for memLevel in deflateInit2 */ +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_MEM_LEVEL = 8; + + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ +var LITERALS = 256; +/* number of literal bytes 0..255 */ +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length 
codes, including the END_BLOCK code */ +var D_CODES = 30; +/* number of distance codes */ +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +var MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1); + +var PRESET_DICT = 0x20; + +var INIT_STATE = 42; +var EXTRA_STATE = 69; +var NAME_STATE = 73; +var COMMENT_STATE = 91; +var HCRC_STATE = 103; +var BUSY_STATE = 113; +var FINISH_STATE = 666; + +var BS_NEED_MORE = 1; /* block not completed, need more input or more output */ +var BS_BLOCK_DONE = 2; /* block flush performed */ +var BS_FINISH_STARTED = 3; /* finish started, need only more output at next deflate */ +var BS_FINISH_DONE = 4; /* finish done, accept no more input or output */ + +var OS_CODE = 0x03; // Unix :) . Don't detect, use this default. + +function err(strm, errorCode) { + strm.msg = msg[errorCode]; + return errorCode; +} + +function rank(f) { + return ((f) << 1) - ((f) > 4 ? 9 : 0); +} + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + + +/* ========================================================================= + * Flush as much pending output as possible. All deflate() output goes + * through this function so some applications may wish to modify it + * to avoid allocating a large strm->output buffer and copying into it. + * (See also read_buf()). 
+ */ +function flush_pending(strm) { + var s = strm.state; + + //_tr_flush_bits(s); + var len = s.pending; + if (len > strm.avail_out) { + len = strm.avail_out; + } + if (len === 0) { return; } + + utils.arraySet(strm.output, s.pending_buf, s.pending_out, len, strm.next_out); + strm.next_out += len; + s.pending_out += len; + strm.total_out += len; + strm.avail_out -= len; + s.pending -= len; + if (s.pending === 0) { + s.pending_out = 0; + } +} + + +function flush_block_only(s, last) { + trees._tr_flush_block(s, (s.block_start >= 0 ? s.block_start : -1), s.strstart - s.block_start, last); + s.block_start = s.strstart; + flush_pending(s.strm); +} + + +function put_byte(s, b) { + s.pending_buf[s.pending++] = b; +} + + +/* ========================================================================= + * Put a short in the pending buffer. The 16-bit value is put in MSB order. + * IN assertion: the stream state is correct and there is enough room in + * pending_buf. + */ +function putShortMSB(s, b) { +// put_byte(s, (Byte)(b >> 8)); +// put_byte(s, (Byte)(b & 0xff)); + s.pending_buf[s.pending++] = (b >>> 8) & 0xff; + s.pending_buf[s.pending++] = b & 0xff; +} + + +/* =========================================================================== + * Read a new buffer from the current input stream, update the adler32 + * and total number of bytes read. All deflate() input goes through + * this function so some applications may wish to modify it to avoid + * allocating a large strm->input buffer and copying from it. + * (See also flush_pending()). 
+ */ +function read_buf(strm, buf, start, size) { + var len = strm.avail_in; + + if (len > size) { len = size; } + if (len === 0) { return 0; } + + strm.avail_in -= len; + + // zmemcpy(buf, strm->next_in, len); + utils.arraySet(buf, strm.input, strm.next_in, len, start); + if (strm.state.wrap === 1) { + strm.adler = adler32(strm.adler, buf, len, start); + } + + else if (strm.state.wrap === 2) { + strm.adler = crc32(strm.adler, buf, len, start); + } + + strm.next_in += len; + strm.total_in += len; + + return len; +} + + +/* =========================================================================== + * Set match_start to the longest match starting at the given string and + * return its length. Matches shorter or equal to prev_length are discarded, + * in which case the result is equal to prev_length and match_start is + * garbage. + * IN assertions: cur_match is the head of the hash chain for the current + * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 + * OUT assertion: the match length is not greater than s->lookahead. + */ +function longest_match(s, cur_match) { + var chain_length = s.max_chain_length; /* max hash chain length */ + var scan = s.strstart; /* current string */ + var match; /* matched string */ + var len; /* length of current match */ + var best_len = s.prev_length; /* best match length so far */ + var nice_match = s.nice_match; /* stop if match long enough */ + var limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD)) ? + s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0/*NIL*/; + + var _win = s.window; // shortcut + + var wmask = s.w_mask; + var prev = s.prev; + + /* Stop when cur_match becomes <= limit. To simplify the code, + * we prevent matches with the string of window index 0. + */ + + var strend = s.strstart + MAX_MATCH; + var scan_end1 = _win[scan + best_len - 1]; + var scan_end = _win[scan + best_len]; + + /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16. 
+ * It is easy to get rid of this optimization if necessary. + */ + // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); + + /* Do not waste too much time if we already have a good match: */ + if (s.prev_length >= s.good_match) { + chain_length >>= 2; + } + /* Do not look for matches beyond the end of the input. This is necessary + * to make deflate deterministic. + */ + if (nice_match > s.lookahead) { nice_match = s.lookahead; } + + // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); + + do { + // Assert(cur_match < s->strstart, "no future"); + match = cur_match; + + /* Skip to next match if the match length cannot increase + * or if the match length is less than 2. Note that the checks below + * for insufficient lookahead only occur occasionally for performance + * reasons. Therefore uninitialized memory will be accessed, and + * conditional jumps will be made that depend on those values. + * However the length of the match is limited to the lookahead, so + * the output of deflate is not affected by the uninitialized values. + */ + + if (_win[match + best_len] !== scan_end || + _win[match + best_len - 1] !== scan_end1 || + _win[match] !== _win[scan] || + _win[++match] !== _win[scan + 1]) { + continue; + } + + /* The check at best_len-1 can be removed because it will be made + * again later. (This heuristic is not always a win.) + * It is not necessary to compare scan[2] and match[2] since they + * are always equal when the other bytes match, given that + * the hash keys are equal and that HASH_BITS >= 8. + */ + scan += 2; + match++; + // Assert(*scan == *match, "match[2]?"); + + /* We check for insufficient lookahead only every 8th comparison; + * the 256th check will be made at strstart+258. 
+ */ + do { + /*jshint noempty:false*/ + } while (_win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + scan < strend); + + // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); + + len = MAX_MATCH - (strend - scan); + scan = strend - MAX_MATCH; + + if (len > best_len) { + s.match_start = cur_match; + best_len = len; + if (len >= nice_match) { + break; + } + scan_end1 = _win[scan + best_len - 1]; + scan_end = _win[scan + best_len]; + } + } while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0); + + if (best_len <= s.lookahead) { + return best_len; + } + return s.lookahead; +} + + +/* =========================================================================== + * Fill the window when the lookahead becomes insufficient. + * Updates strstart and lookahead. + * + * IN assertion: lookahead < MIN_LOOKAHEAD + * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD + * At least one byte has been read, or avail_in == 0; reads are + * performed for at least two bytes (required for the zip translate_eol + * option -- not supported here). 
+ */ +function fill_window(s) { + var _w_size = s.w_size; + var p, n, m, more, str; + + //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead"); + + do { + more = s.window_size - s.lookahead - s.strstart; + + // JS ints have 32 bit, block below not needed + /* Deal with !@#$% 64K limit: */ + //if (sizeof(int) <= 2) { + // if (more == 0 && s->strstart == 0 && s->lookahead == 0) { + // more = wsize; + // + // } else if (more == (unsigned)(-1)) { + // /* Very unlikely, but possible on 16 bit machine if + // * strstart == 0 && lookahead == 1 (input done a byte at time) + // */ + // more--; + // } + //} + + + /* If the window is almost full and there is insufficient lookahead, + * move the upper half to the lower one to make room in the upper half. + */ + if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) { + + utils.arraySet(s.window, s.window, _w_size, _w_size, 0); + s.match_start -= _w_size; + s.strstart -= _w_size; + /* we now have strstart >= MAX_DIST */ + s.block_start -= _w_size; + + /* Slide the hash table (could be avoided with 32 bit values + at the expense of memory usage). We slide even when level == 0 + to keep the hash table consistent if we switch back to level > 0 + later. (Using level 0 permanently is not an optimal usage of + zlib, so we don't care about this pathological case.) + */ + + n = s.hash_size; + p = n; + do { + m = s.head[--p]; + s.head[p] = (m >= _w_size ? m - _w_size : 0); + } while (--n); + + n = _w_size; + p = n; + do { + m = s.prev[--p]; + s.prev[p] = (m >= _w_size ? m - _w_size : 0); + /* If n is not on any hash chain, prev[n] is garbage but + * its value will never be used. 
+ */ + } while (--n); + + more += _w_size; + } + if (s.strm.avail_in === 0) { + break; + } + + /* If there was no sliding: + * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && + * more == window_size - lookahead - strstart + * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) + * => more >= window_size - 2*WSIZE + 2 + * In the BIG_MEM or MMAP case (not yet supported), + * window_size == input_size + MIN_LOOKAHEAD && + * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. + * Otherwise, window_size == 2*WSIZE so more >= 2. + * If there was sliding, more >= WSIZE. So in all cases, more >= 2. + */ + //Assert(more >= 2, "more < 2"); + n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more); + s.lookahead += n; + + /* Initialize the hash value now that we have some input: */ + if (s.lookahead + s.insert >= MIN_MATCH) { + str = s.strstart - s.insert; + s.ins_h = s.window[str]; + + /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + 1]) & s.hash_mask; +//#if MIN_MATCH != 3 +// Call update_hash() MIN_MATCH-3 more times +//#endif + while (s.insert) { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = str; + str++; + s.insert--; + if (s.lookahead + s.insert < MIN_MATCH) { + break; + } + } + } + /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, + * but this is not important since only literal bytes will be emitted. + */ + + } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0); + + /* If the WIN_INIT bytes after the end of the current data have never been + * written, then zero those bytes in order to avoid memory check reports of + * the use of uninitialized (or uninitialised as Julian writes) bytes by + * the longest match routines. 
Update the high water mark for the next + * time through here. WIN_INIT is set to MAX_MATCH since the longest match + * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead. + */ +// if (s.high_water < s.window_size) { +// var curr = s.strstart + s.lookahead; +// var init = 0; +// +// if (s.high_water < curr) { +// /* Previous high water mark below current data -- zero WIN_INIT +// * bytes or up to end of window, whichever is less. +// */ +// init = s.window_size - curr; +// if (init > WIN_INIT) +// init = WIN_INIT; +// zmemzero(s->window + curr, (unsigned)init); +// s->high_water = curr + init; +// } +// else if (s->high_water < (ulg)curr + WIN_INIT) { +// /* High water mark at or above current data, but below current data +// * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up +// * to end of window, whichever is less. +// */ +// init = (ulg)curr + WIN_INIT - s->high_water; +// if (init > s->window_size - s->high_water) +// init = s->window_size - s->high_water; +// zmemzero(s->window + s->high_water, (unsigned)init); +// s->high_water += init; +// } +// } +// +// Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, +// "not enough room for search"); +} + +/* =========================================================================== + * Copy without compression as much as possible from the input stream, return + * the current block state. + * This function does not insert new strings in the dictionary since + * uncompressible data is probably not useful. This function is used + * only for the level=0 compression option. + * NOTE: this function should be optimized to avoid extra copying from + * window to pending_buf. 
+ */ +function deflate_stored(s, flush) { + /* Stored blocks are limited to 0xffff bytes, pending_buf is limited + * to pending_buf_size, and each stored block has a 5 byte header: + */ + var max_block_size = 0xffff; + + if (max_block_size > s.pending_buf_size - 5) { + max_block_size = s.pending_buf_size - 5; + } + + /* Copy as much as possible from input to output: */ + for (;;) { + /* Fill the window as much as possible: */ + if (s.lookahead <= 1) { + + //Assert(s->strstart < s->w_size+MAX_DIST(s) || + // s->block_start >= (long)s->w_size, "slide too late"); +// if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) || +// s.block_start >= s.w_size)) { +// throw new Error("slide too late"); +// } + + fill_window(s); + if (s.lookahead === 0 && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + + if (s.lookahead === 0) { + break; + } + /* flush the current block */ + } + //Assert(s->block_start >= 0L, "block gone"); +// if (s.block_start < 0) throw new Error("block gone"); + + s.strstart += s.lookahead; + s.lookahead = 0; + + /* Emit a stored block if pending_buf will be full: */ + var max_start = s.block_start + max_block_size; + + if (s.strstart === 0 || s.strstart >= max_start) { + /* strstart == 0 is possible when wraparound on 16-bit machine */ + s.lookahead = s.strstart - max_start; + s.strstart = max_start; + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + + + } + /* Flush if we may have to slide, otherwise block_start may become + * negative and the data will be gone: + */ + if (s.strstart - s.block_start >= (s.w_size - MIN_LOOKAHEAD)) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + + s.insert = 0; + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + 
} + + if (s.strstart > s.block_start) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_NEED_MORE; +} + +/* =========================================================================== + * Compress as much as possible from the input stream, return the current + * block state. + * This function does not perform lazy evaluation of matches and inserts + * new strings in the dictionary only for unmatched strings or for short + * matches. It is used only for the fast compression options. + */ +function deflate_fast(s, flush) { + var hash_head; /* head of the hash chain */ + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { + break; /* flush the current block */ + } + } + + /* Insert the string window[strstart .. strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. 
+ * At this point we have always match_length < MIN_MATCH + */ + if (hash_head !== 0/*NIL*/ && ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + } + if (s.match_length >= MIN_MATCH) { + // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only + + /*** _tr_tally_dist(s, s.strstart - s.match_start, + s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, s.strstart - s.match_start, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + + /* Insert new strings in the hash table only if the match length + * is not too large. This saves time but degrades compression. + */ + if (s.match_length <= s.max_lazy_match/*max_insert_length*/ && s.lookahead >= MIN_MATCH) { + s.match_length--; /* string at strstart already in table */ + do { + s.strstart++; + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + /* strstart never exceeds WSIZE-MAX_MATCH, so there are + * always MIN_MATCH bytes ahead. + */ + } while (--s.match_length !== 0); + s.strstart++; + } else + { + s.strstart += s.match_length; + s.match_length = 0; + s.ins_h = s.window[s.strstart]; + /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + 1]) & s.hash_mask; + +//#if MIN_MATCH != 3 +// Call UPDATE_HASH() MIN_MATCH-3 more times +//#endif + /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not + * matter since it will be recomputed at next deflate call. 
+ */ + } + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s.window[s.strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = ((s.strstart < (MIN_MATCH - 1)) ? s.strstart : MIN_MATCH - 1); + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * Same as above, but achieves better compression. We use a lazy + * evaluation for matches: a match is finally adopted only if there is + * no better match at the next window position. + */ +function deflate_slow(s, flush) { + var hash_head; /* head of hash chain */ + var bflush; /* set if current block must be flushed */ + + var max_insert; + + /* Process the input block. */ + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* Insert the string window[strstart .. 
strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + */ + s.prev_length = s.match_length; + s.prev_match = s.match_start; + s.match_length = MIN_MATCH - 1; + + if (hash_head !== 0/*NIL*/ && s.prev_length < s.max_lazy_match && + s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD)/*MAX_DIST(s)*/) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + + if (s.match_length <= 5 && + (s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH && s.strstart - s.match_start > 4096/*TOO_FAR*/))) { + + /* If prev_match is also MIN_MATCH, match_start is garbage + * but we will ignore the current match anyway. + */ + s.match_length = MIN_MATCH - 1; + } + } + /* If there was a match at the previous step and the current + * match is not better, output the previous match: + */ + if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) { + max_insert = s.strstart + s.lookahead - MIN_MATCH; + /* Do not insert strings in hash table beyond this. */ + + //check_match(s, s.strstart-1, s.prev_match, s.prev_length); + + /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match, + s.prev_length - MIN_MATCH, bflush);***/ + bflush = trees._tr_tally(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH); + /* Insert in hash table all strings up to the end of the match. + * strstart-1 and strstart are already inserted. 
If there is not + * enough lookahead, the last two strings are not inserted in + * the hash table. + */ + s.lookahead -= s.prev_length - 1; + s.prev_length -= 2; + do { + if (++s.strstart <= max_insert) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + } while (--s.prev_length !== 0); + s.match_available = 0; + s.match_length = MIN_MATCH - 1; + s.strstart++; + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + } else if (s.match_available) { + /* If there was no match at the previous position, output a + * single literal. If there was a match but the current match + * is longer, truncate the previous match to a single literal. + */ + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + if (bflush) { + /*** FLUSH_BLOCK_ONLY(s, 0) ***/ + flush_block_only(s, false); + /***/ + } + s.strstart++; + s.lookahead--; + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + } else { + /* There is no previous match to compare with, wait for + * the next step to decide. + */ + s.match_available = 1; + s.strstart++; + s.lookahead--; + } + } + //Assert (flush != Z_NO_FLUSH, "no flush?"); + if (s.match_available) { + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + s.match_available = 0; + } + s.insert = s.strstart < MIN_MATCH - 1 ? 
s.strstart : MIN_MATCH - 1; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_BLOCK_DONE; +} + + +/* =========================================================================== + * For Z_RLE, simply look for runs of bytes, generate matches only of distance + * one. Do not maintain a hash table. (It will be regenerated if this run of + * deflate switches away from Z_RLE.) + */ +function deflate_rle(s, flush) { + var bflush; /* set if current block must be flushed */ + var prev; /* byte at distance one to match */ + var scan, strend; /* scan goes up to strend for length of run */ + + var _win = s.window; + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the longest run, plus one for the unrolled loop. 
+ */ + if (s.lookahead <= MAX_MATCH) { + fill_window(s); + if (s.lookahead <= MAX_MATCH && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* See how many times the previous byte repeats */ + s.match_length = 0; + if (s.lookahead >= MIN_MATCH && s.strstart > 0) { + scan = s.strstart - 1; + prev = _win[scan]; + if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) { + strend = s.strstart + MAX_MATCH; + do { + /*jshint noempty:false*/ + } while (prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + scan < strend); + s.match_length = MAX_MATCH - (strend - scan); + if (s.match_length > s.lookahead) { + s.match_length = s.lookahead; + } + } + //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan"); + } + + /* Emit match if have run of MIN_MATCH or longer, else emit literal */ + if (s.match_length >= MIN_MATCH) { + //check_match(s, s.strstart, s.strstart - 1, s.match_length); + + /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, 1, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + s.strstart += s.match_length; + s.match_length = 0; + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if 
(s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table. + * (It will be regenerated if this run of deflate switches away from Huffman.) + */ +function deflate_huff(s, flush) { + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we have a literal to write. */ + if (s.lookahead === 0) { + fill_window(s); + if (s.lookahead === 0) { + if (flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + break; /* flush the current block */ + } + } + + /* Output a literal byte */ + s.match_length = 0; + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + s.lookahead--; + s.strstart++; + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* Values for max_lazy_match, good_match and max_chain_length, depending on + * the desired pack level (0..9). The values given below have been tuned to + * exclude worst case performance for pathological files. Better values may be + * found for specific files. 
+ */ +function Config(good_length, max_lazy, nice_length, max_chain, func) { + this.good_length = good_length; + this.max_lazy = max_lazy; + this.nice_length = nice_length; + this.max_chain = max_chain; + this.func = func; +} + +var configuration_table; + +configuration_table = [ + /* good lazy nice chain */ + new Config(0, 0, 0, 0, deflate_stored), /* 0 store only */ + new Config(4, 4, 8, 4, deflate_fast), /* 1 max speed, no lazy matches */ + new Config(4, 5, 16, 8, deflate_fast), /* 2 */ + new Config(4, 6, 32, 32, deflate_fast), /* 3 */ + + new Config(4, 4, 16, 16, deflate_slow), /* 4 lazy matches */ + new Config(8, 16, 32, 32, deflate_slow), /* 5 */ + new Config(8, 16, 128, 128, deflate_slow), /* 6 */ + new Config(8, 32, 128, 256, deflate_slow), /* 7 */ + new Config(32, 128, 258, 1024, deflate_slow), /* 8 */ + new Config(32, 258, 258, 4096, deflate_slow) /* 9 max compression */ +]; + + +/* =========================================================================== + * Initialize the "longest match" routines for a new zlib stream + */ +function lm_init(s) { + s.window_size = 2 * s.w_size; + + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + + /* Set the default configuration parameters: + */ + s.max_lazy_match = configuration_table[s.level].max_lazy; + s.good_match = configuration_table[s.level].good_length; + s.nice_match = configuration_table[s.level].nice_length; + s.max_chain_length = configuration_table[s.level].max_chain; + + s.strstart = 0; + s.block_start = 0; + s.lookahead = 0; + s.insert = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + s.ins_h = 0; +} + + +function DeflateState() { + this.strm = null; /* pointer back to this zlib stream */ + this.status = 0; /* as the name implies */ + this.pending_buf = null; /* output still pending */ + this.pending_buf_size = 0; /* size of pending_buf */ + this.pending_out = 0; /* next pending byte to output to the stream */ + this.pending = 0; /* nb of bytes in the 
pending buffer */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.gzhead = null; /* gzip header information to write */ + this.gzindex = 0; /* where in extra, name, or comment */ + this.method = Z_DEFLATED; /* can only be DEFLATED */ + this.last_flush = -1; /* value of flush param for previous deflate call */ + + this.w_size = 0; /* LZ77 window size (32K by default) */ + this.w_bits = 0; /* log2(w_size) (8..16) */ + this.w_mask = 0; /* w_size - 1 */ + + this.window = null; + /* Sliding window. Input bytes are read into the second half of the window, + * and move to the first half later to keep a dictionary of at least wSize + * bytes. With this organization, matches are limited to a distance of + * wSize-MAX_MATCH bytes, but this ensures that IO is always + * performed with a length multiple of the block size. + */ + + this.window_size = 0; + /* Actual size of window: 2*wSize, except when the user input buffer + * is directly used as sliding window. + */ + + this.prev = null; + /* Link to older string with same hash index. To limit the size of this + * array to 64K, this link is maintained only for the last 32K strings. + * An index in this array is thus a window index modulo 32K. + */ + + this.head = null; /* Heads of the hash chains or NIL. */ + + this.ins_h = 0; /* hash index of string to be inserted */ + this.hash_size = 0; /* number of elements in hash table */ + this.hash_bits = 0; /* log2(hash_size) */ + this.hash_mask = 0; /* hash_size-1 */ + + this.hash_shift = 0; + /* Number of bits by which ins_h must be shifted at each input + * step. It must be such that after MIN_MATCH steps, the oldest + * byte no longer takes part in the hash key, that is: + * hash_shift * MIN_MATCH >= hash_bits + */ + + this.block_start = 0; + /* Window position at the beginning of the current output block. Gets + * negative when the window is moved backwards. 
+ */ + + this.match_length = 0; /* length of best match */ + this.prev_match = 0; /* previous match */ + this.match_available = 0; /* set if previous match exists */ + this.strstart = 0; /* start of string to insert */ + this.match_start = 0; /* start of matching string */ + this.lookahead = 0; /* number of valid bytes ahead in window */ + + this.prev_length = 0; + /* Length of the best match at previous step. Matches not greater than this + * are discarded. This is used in the lazy match evaluation. + */ + + this.max_chain_length = 0; + /* To speed up deflation, hash chains are never searched beyond this + * length. A higher limit improves compression ratio but degrades the + * speed. + */ + + this.max_lazy_match = 0; + /* Attempt to find a better match only when the current match is strictly + * smaller than this value. This mechanism is used only for compression + * levels >= 4. + */ + // That's alias to max_lazy_match, don't use directly + //this.max_insert_length = 0; + /* Insert new strings in the hash table only if the match length is not + * greater than this length. This saves time but degrades compression. + * max_insert_length is used only for compression levels <= 3. 
+ */ + + this.level = 0; /* compression level (1..9) */ + this.strategy = 0; /* favor or force Huffman coding*/ + + this.good_match = 0; + /* Use a faster search when the previous match is longer than this */ + + this.nice_match = 0; /* Stop searching when current match exceeds this */ + + /* used by trees.c: */ + + /* Didn't use ct_data typedef below to suppress compiler warning */ + + // struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */ + // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */ + // struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */ + + // Use flat array of DOUBLE size, with interleaved fata, + // because JS does not support effective + this.dyn_ltree = new utils.Buf16(HEAP_SIZE * 2); + this.dyn_dtree = new utils.Buf16((2 * D_CODES + 1) * 2); + this.bl_tree = new utils.Buf16((2 * BL_CODES + 1) * 2); + zero(this.dyn_ltree); + zero(this.dyn_dtree); + zero(this.bl_tree); + + this.l_desc = null; /* desc. for literal tree */ + this.d_desc = null; /* desc. for distance tree */ + this.bl_desc = null; /* desc. for bit length tree */ + + //ush bl_count[MAX_BITS+1]; + this.bl_count = new utils.Buf16(MAX_BITS + 1); + /* number of codes at each bit length for an optimal tree */ + + //int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */ + this.heap = new utils.Buf16(2 * L_CODES + 1); /* heap used to build the Huffman trees */ + zero(this.heap); + + this.heap_len = 0; /* number of elements in the heap */ + this.heap_max = 0; /* element of largest frequency */ + /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used. + * The same heap array is used to build all trees. 
+ */ + + this.depth = new utils.Buf16(2 * L_CODES + 1); //uch depth[2*L_CODES+1]; + zero(this.depth); + /* Depth of each subtree used as tie breaker for trees of equal frequency + */ + + this.l_buf = 0; /* buffer index for literals or lengths */ + + this.lit_bufsize = 0; + /* Size of match buffer for literals/lengths. There are 4 reasons for + * limiting lit_bufsize to 64K: + * - frequencies can be kept in 16 bit counters + * - if compression is not successful for the first block, all input + * data is still in the window so we can still emit a stored block even + * when input comes from standard input. (This can also be done for + * all blocks if lit_bufsize is not greater than 32K.) + * - if compression is not successful for a file smaller than 64K, we can + * even emit a stored file instead of a stored block (saving 5 bytes). + * This is applicable only for zip (not gzip or zlib). + * - creating new Huffman trees less frequently may not provide fast + * adaptation to changes in the input data statistics. (Take for + * example a binary file with poorly compressible code followed by + * a highly compressible string table.) Smaller buffer sizes give + * fast adaptation but have of course the overhead of transmitting + * trees more frequently. + * - I can't count above 4 + */ + + this.last_lit = 0; /* running index in l_buf */ + + this.d_buf = 0; + /* Buffer index for distances. To simplify the code, d_buf and l_buf have + * the same number of elements. To use different lengths, an extra flag + * array would be necessary. + */ + + this.opt_len = 0; /* bit length of current block with optimal trees */ + this.static_len = 0; /* bit length of current block with static trees */ + this.matches = 0; /* number of string matches in current block */ + this.insert = 0; /* bytes at end of window left to insert */ + + + this.bi_buf = 0; + /* Output buffer. bits are inserted starting at the bottom (least + * significant bits). 
+ */ + this.bi_valid = 0; + /* Number of valid bits in bi_buf. All bits above the last valid bit + * are always zero. + */ + + // Used for window memory init. We safely ignore it for JS. That makes + // sense only for pointers and memory check tools. + //this.high_water = 0; + /* High water mark offset in window for initialized bytes -- bytes above + * this are set to zero in order to avoid memory check warnings when + * longest match routines access bytes past the input. This is then + * updated to the new high water mark. + */ +} + + +function deflateResetKeep(strm) { + var s; + + if (!strm || !strm.state) { + return err(strm, Z_STREAM_ERROR); + } + + strm.total_in = strm.total_out = 0; + strm.data_type = Z_UNKNOWN; + + s = strm.state; + s.pending = 0; + s.pending_out = 0; + + if (s.wrap < 0) { + s.wrap = -s.wrap; + /* was made negative by deflate(..., Z_FINISH); */ + } + s.status = (s.wrap ? INIT_STATE : BUSY_STATE); + strm.adler = (s.wrap === 2) ? + 0 // crc32(0, Z_NULL, 0) + : + 1; // adler32(0, Z_NULL, 0) + s.last_flush = Z_NO_FLUSH; + trees._tr_init(s); + return Z_OK; +} + + +function deflateReset(strm) { + var ret = deflateResetKeep(strm); + if (ret === Z_OK) { + lm_init(strm.state); + } + return ret; +} + + +function deflateSetHeader(strm, head) { + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + if (strm.state.wrap !== 2) { return Z_STREAM_ERROR; } + strm.state.gzhead = head; + return Z_OK; +} + + +function deflateInit2(strm, level, method, windowBits, memLevel, strategy) { + if (!strm) { // === Z_NULL + return Z_STREAM_ERROR; + } + var wrap = 1; + + if (level === Z_DEFAULT_COMPRESSION) { + level = 6; + } + + if (windowBits < 0) { /* suppress zlib wrapper */ + wrap = 0; + windowBits = -windowBits; + } + + else if (windowBits > 15) { + wrap = 2; /* write gzip wrapper instead */ + windowBits -= 16; + } + + + if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED || + windowBits < 8 || windowBits > 15 || level < 0 || level > 9 || + 
strategy < 0 || strategy > Z_FIXED) { + return err(strm, Z_STREAM_ERROR); + } + + + if (windowBits === 8) { + windowBits = 9; + } + /* until 256-byte window bug fixed */ + + var s = new DeflateState(); + + strm.state = s; + s.strm = strm; + + s.wrap = wrap; + s.gzhead = null; + s.w_bits = windowBits; + s.w_size = 1 << s.w_bits; + s.w_mask = s.w_size - 1; + + s.hash_bits = memLevel + 7; + s.hash_size = 1 << s.hash_bits; + s.hash_mask = s.hash_size - 1; + s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH); + + s.window = new utils.Buf8(s.w_size * 2); + s.head = new utils.Buf16(s.hash_size); + s.prev = new utils.Buf16(s.w_size); + + // Don't need mem init magic for JS. + //s.high_water = 0; /* nothing written to s->window yet */ + + s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */ + + s.pending_buf_size = s.lit_bufsize * 4; + + //overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2); + //s->pending_buf = (uchf *) overlay; + s.pending_buf = new utils.Buf8(s.pending_buf_size); + + // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`) + //s->d_buf = overlay + s->lit_bufsize/sizeof(ush); + s.d_buf = 1 * s.lit_bufsize; + + //s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize; + s.l_buf = (1 + 2) * s.lit_bufsize; + + s.level = level; + s.strategy = strategy; + s.method = method; + + return deflateReset(strm); +} + +function deflateInit(strm, level) { + return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY); +} + + +function deflate(strm, flush) { + var old_flush, s; + var beg, val; // for gzip header write only + + if (!strm || !strm.state || + flush > Z_BLOCK || flush < 0) { + return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR; + } + + s = strm.state; + + if (!strm.output || + (!strm.input && strm.avail_in !== 0) || + (s.status === FINISH_STATE && flush !== Z_FINISH)) { + return err(strm, (strm.avail_out === 0) ? 
Z_BUF_ERROR : Z_STREAM_ERROR); + } + + s.strm = strm; /* just in case */ + old_flush = s.last_flush; + s.last_flush = flush; + + /* Write the header */ + if (s.status === INIT_STATE) { + + if (s.wrap === 2) { // GZIP header + strm.adler = 0; //crc32(0L, Z_NULL, 0); + put_byte(s, 31); + put_byte(s, 139); + put_byte(s, 8); + if (!s.gzhead) { // s->gzhead == Z_NULL + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? + 4 : 0)); + put_byte(s, OS_CODE); + s.status = BUSY_STATE; + } + else { + put_byte(s, (s.gzhead.text ? 1 : 0) + + (s.gzhead.hcrc ? 2 : 0) + + (!s.gzhead.extra ? 0 : 4) + + (!s.gzhead.name ? 0 : 8) + + (!s.gzhead.comment ? 0 : 16) + ); + put_byte(s, s.gzhead.time & 0xff); + put_byte(s, (s.gzhead.time >> 8) & 0xff); + put_byte(s, (s.gzhead.time >> 16) & 0xff); + put_byte(s, (s.gzhead.time >> 24) & 0xff); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 
+ 4 : 0)); + put_byte(s, s.gzhead.os & 0xff); + if (s.gzhead.extra && s.gzhead.extra.length) { + put_byte(s, s.gzhead.extra.length & 0xff); + put_byte(s, (s.gzhead.extra.length >> 8) & 0xff); + } + if (s.gzhead.hcrc) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0); + } + s.gzindex = 0; + s.status = EXTRA_STATE; + } + } + else // DEFLATE header + { + var header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8; + var level_flags = -1; + + if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) { + level_flags = 0; + } else if (s.level < 6) { + level_flags = 1; + } else if (s.level === 6) { + level_flags = 2; + } else { + level_flags = 3; + } + header |= (level_flags << 6); + if (s.strstart !== 0) { header |= PRESET_DICT; } + header += 31 - (header % 31); + + s.status = BUSY_STATE; + putShortMSB(s, header); + + /* Save the adler32 of the preset dictionary: */ + if (s.strstart !== 0) { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + strm.adler = 1; // adler32(0L, Z_NULL, 0); + } + } + +//#ifdef GZIP + if (s.status === EXTRA_STATE) { + if (s.gzhead.extra/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + + while (s.gzindex < (s.gzhead.extra.length & 0xffff)) { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + break; + } + } + put_byte(s, s.gzhead.extra[s.gzindex] & 0xff); + s.gzindex++; + } + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (s.gzindex === s.gzhead.extra.length) { + s.gzindex = 0; + s.status = NAME_STATE; + } + } + else { + s.status = NAME_STATE; + } + } + if (s.status === NAME_STATE) { + if (s.gzhead.name/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === 
s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.name.length) { + val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.gzindex = 0; + s.status = COMMENT_STATE; + } + } + else { + s.status = COMMENT_STATE; + } + } + if (s.status === COMMENT_STATE) { + if (s.gzhead.comment/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.comment.length) { + val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.status = HCRC_STATE; + } + } + else { + s.status = HCRC_STATE; + } + } + if (s.status === HCRC_STATE) { + if (s.gzhead.hcrc) { + if (s.pending + 2 > s.pending_buf_size) { + flush_pending(strm); + } + if (s.pending + 2 <= s.pending_buf_size) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + strm.adler = 0; //crc32(0L, Z_NULL, 0); + s.status = BUSY_STATE; + } + } + else { + s.status = BUSY_STATE; + } + } +//#endif + + /* Flush as much 
pending output as possible */ + if (s.pending !== 0) { + flush_pending(strm); + if (strm.avail_out === 0) { + /* Since avail_out is 0, deflate will be called again with + * more output space, but possibly with both pending and + * avail_in equal to zero. There won't be anything to do, + * but this is not an error situation so make sure we + * return OK instead of BUF_ERROR at next call of deflate: + */ + s.last_flush = -1; + return Z_OK; + } + + /* Make sure there is something to do and avoid duplicate consecutive + * flushes. For repeated and useless calls with Z_FINISH, we keep + * returning Z_STREAM_END instead of Z_BUF_ERROR. + */ + } else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) && + flush !== Z_FINISH) { + return err(strm, Z_BUF_ERROR); + } + + /* User must not provide more input after the first FINISH: */ + if (s.status === FINISH_STATE && strm.avail_in !== 0) { + return err(strm, Z_BUF_ERROR); + } + + /* Start a new block or continue the current one. + */ + if (strm.avail_in !== 0 || s.lookahead !== 0 || + (flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) { + var bstate = (s.strategy === Z_HUFFMAN_ONLY) ? deflate_huff(s, flush) : + (s.strategy === Z_RLE ? deflate_rle(s, flush) : + configuration_table[s.level].func(s, flush)); + + if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) { + s.status = FINISH_STATE; + } + if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) { + if (strm.avail_out === 0) { + s.last_flush = -1; + /* avoid BUF_ERROR next call, see above */ + } + return Z_OK; + /* If flush != Z_NO_FLUSH && avail_out == 0, the next call + * of deflate should use the same flush parameter to make sure + * that the flush is complete. So we don't have to output an + * empty block here, this will be done at next call. This also + * ensures that for a very small output buffer, we emit at most + * one empty block. 
+ */ + } + if (bstate === BS_BLOCK_DONE) { + if (flush === Z_PARTIAL_FLUSH) { + trees._tr_align(s); + } + else if (flush !== Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */ + + trees._tr_stored_block(s, 0, 0, false); + /* For a full flush, this empty block will be recognized + * as a special marker by inflate_sync(). + */ + if (flush === Z_FULL_FLUSH) { + /*** CLEAR_HASH(s); ***/ /* forget history */ + zero(s.head); // Fill with NIL (= 0); + + if (s.lookahead === 0) { + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + } + } + flush_pending(strm); + if (strm.avail_out === 0) { + s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */ + return Z_OK; + } + } + } + //Assert(strm->avail_out > 0, "bug2"); + //if (strm.avail_out <= 0) { throw new Error("bug2");} + + if (flush !== Z_FINISH) { return Z_OK; } + if (s.wrap <= 0) { return Z_STREAM_END; } + + /* Write the trailer */ + if (s.wrap === 2) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + put_byte(s, (strm.adler >> 16) & 0xff); + put_byte(s, (strm.adler >> 24) & 0xff); + put_byte(s, strm.total_in & 0xff); + put_byte(s, (strm.total_in >> 8) & 0xff); + put_byte(s, (strm.total_in >> 16) & 0xff); + put_byte(s, (strm.total_in >> 24) & 0xff); + } + else + { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + + flush_pending(strm); + /* If avail_out is zero, the application will call deflate again + * to flush the rest. + */ + if (s.wrap > 0) { s.wrap = -s.wrap; } + /* write the trailer only once! */ + return s.pending !== 0 ? 
Z_OK : Z_STREAM_END; +} + +function deflateEnd(strm) { + var status; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + status = strm.state.status; + if (status !== INIT_STATE && + status !== EXTRA_STATE && + status !== NAME_STATE && + status !== COMMENT_STATE && + status !== HCRC_STATE && + status !== BUSY_STATE && + status !== FINISH_STATE + ) { + return err(strm, Z_STREAM_ERROR); + } + + strm.state = null; + + return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK; +} + + +/* ========================================================================= + * Initializes the compression dictionary from the given byte + * sequence without producing any compressed output. + */ +function deflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var s; + var str, n; + var wrap; + var avail; + var next; + var input; + var tmpDict; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + s = strm.state; + wrap = s.wrap; + + if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) { + return Z_STREAM_ERROR; + } + + /* when using zlib wrappers, compute Adler-32 for provided dictionary */ + if (wrap === 1) { + /* adler32(strm->adler, dictionary, dictLength); */ + strm.adler = adler32(strm.adler, dictionary, dictLength, 0); + } + + s.wrap = 0; /* avoid computing Adler-32 in read_buf */ + + /* if dictionary would fill window, just replace the history */ + if (dictLength >= s.w_size) { + if (wrap === 0) { /* already empty otherwise */ + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + /* use the tail */ + // dictionary = dictionary.slice(dictLength - s.w_size); + tmpDict = new utils.Buf8(s.w_size); + utils.arraySet(tmpDict, dictionary, dictLength - s.w_size, s.w_size, 0); + dictionary = tmpDict; + dictLength = s.w_size; + } + /* insert dictionary into window and hash */ + avail = 
strm.avail_in; + next = strm.next_in; + input = strm.input; + strm.avail_in = dictLength; + strm.next_in = 0; + strm.input = dictionary; + fill_window(s); + while (s.lookahead >= MIN_MATCH) { + str = s.strstart; + n = s.lookahead - (MIN_MATCH - 1); + do { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + + s.head[s.ins_h] = str; + str++; + } while (--n); + s.strstart = str; + s.lookahead = MIN_MATCH - 1; + fill_window(s); + } + s.strstart += s.lookahead; + s.block_start = s.strstart; + s.insert = s.lookahead; + s.lookahead = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + strm.next_in = next; + strm.input = input; + strm.avail_in = avail; + s.wrap = wrap; + return Z_OK; +} + + +exports.deflateInit = deflateInit; +exports.deflateInit2 = deflateInit2; +exports.deflateReset = deflateReset; +exports.deflateResetKeep = deflateResetKeep; +exports.deflateSetHeader = deflateSetHeader; +exports.deflate = deflate; +exports.deflateEnd = deflateEnd; +exports.deflateSetDictionary = deflateSetDictionary; +exports.deflateInfo = 'pako deflate (from Nodeca project)'; + +/* Not implemented +exports.deflateBound = deflateBound; +exports.deflateCopy = deflateCopy; +exports.deflateParams = deflateParams; +exports.deflatePending = deflatePending; +exports.deflatePrime = deflatePrime; +exports.deflateTune = deflateTune; +*/ + + +/***/ }), + +/***/ 47293: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. 
+// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +// See state defs from inflate.js +var BAD = 30; /* got a data error -- remain here until reset */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ + +/* + Decode literal, length, and distance codes and write out the resulting + literal and match bytes until either not enough input or output is + available, an end-of-block is encountered, or a data error is encountered. + When large enough input and output buffers are supplied to inflate(), for + example, a 16K input buffer and a 64K output buffer, more than 95% of the + inflate execution time is spent in this routine. + + Entry assumptions: + + state.mode === LEN + strm.avail_in >= 6 + strm.avail_out >= 258 + start >= strm.avail_out + state.bits < 8 + + On return, state.mode is one of: + + LEN -- ran out of enough output space or enough available input + TYPE -- reached end of block code, inflate() to interpret next block + BAD -- error in block data + + Notes: + + - The maximum input bits used by a length/distance pair is 15 bits for the + length code, 5 bits for the length extra, 15 bits for the distance code, + and 13 bits for the distance extra. This totals 48 bits, or six bytes. + Therefore if strm.avail_in >= 6, then there is enough input to avoid + checking for available input while decoding. 
+ + - The maximum bytes that a single length/distance pair can output is 258 + bytes, which is the maximum length that can be coded. inflate_fast() + requires strm.avail_out >= 258 for each loop to avoid checking for + output space. + */ +module.exports = function inflate_fast(strm, start) { + var state; + var _in; /* local strm.input */ + var last; /* have enough input while in < last */ + var _out; /* local strm.output */ + var beg; /* inflate()'s initial strm.output */ + var end; /* while out < end, enough space available */ +//#ifdef INFLATE_STRICT + var dmax; /* maximum distance from zlib header */ +//#endif + var wsize; /* window size or zero if not using window */ + var whave; /* valid bytes in the window */ + var wnext; /* window write index */ + // Use `s_window` instead `window`, avoid conflict with instrumentation tools + var s_window; /* allocated sliding window, if wsize != 0 */ + var hold; /* local strm.hold */ + var bits; /* local strm.bits */ + var lcode; /* local strm.lencode */ + var dcode; /* local strm.distcode */ + var lmask; /* mask for first level of length codes */ + var dmask; /* mask for first level of distance codes */ + var here; /* retrieved table entry */ + var op; /* code bits, operation, extra bits, or */ + /* window position, window bytes to copy */ + var len; /* match length, unused bytes */ + var dist; /* match distance */ + var from; /* where to copy match from */ + var from_source; + + + var input, output; // JS specific, because we have no pointers + + /* copy state to local variables */ + state = strm.state; + //here = state.here; + _in = strm.next_in; + input = strm.input; + last = _in + (strm.avail_in - 5); + _out = strm.next_out; + output = strm.output; + beg = _out - (start - strm.avail_out); + end = _out + (strm.avail_out - 257); +//#ifdef INFLATE_STRICT + dmax = state.dmax; +//#endif + wsize = state.wsize; + whave = state.whave; + wnext = state.wnext; + s_window = state.window; + hold = state.hold; + bits = state.bits; + 
lcode = state.lencode; + dcode = state.distcode; + lmask = (1 << state.lenbits) - 1; + dmask = (1 << state.distbits) - 1; + + + /* decode literals and length/distances until end-of-block or not enough + input data or output space */ + + top: + do { + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + + here = lcode[hold & lmask]; + + dolen: + for (;;) { // Goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + if (op === 0) { /* literal */ + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? + // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + output[_out++] = here & 0xffff/*here.val*/; + } + else if (op & 16) { /* length base */ + len = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (op) { + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + len += hold & ((1 << op) - 1); + hold >>>= op; + bits -= op; + } + //Tracevv((stderr, "inflate: length %u\n", len)); + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + here = dcode[hold & dmask]; + + dodist: + for (;;) { // goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + + if (op & 16) { /* distance base */ + dist = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + } + dist += hold & ((1 << op) - 1); +//#ifdef INFLATE_STRICT + if (dist > dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } +//#endif + hold >>>= op; + bits -= op; + //Tracevv((stderr, "inflate: distance %u\n", dist)); + op = _out - beg; /* max distance in output */ + if (dist > op) { /* see if copy from window */ + op = dist - op; /* distance 
back in window */ + if (op > whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } + +// (!) This block is disabled in zlib defaults, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// if (len <= op - whave) { +// do { +// output[_out++] = 0; +// } while (--len); +// continue top; +// } +// len -= op - whave; +// do { +// output[_out++] = 0; +// } while (--op > whave); +// if (op === 0) { +// from = _out - dist; +// do { +// output[_out++] = output[from++]; +// } while (--len); +// continue top; +// } +//#endif + } + from = 0; // window index + from_source = s_window; + if (wnext === 0) { /* very common case */ + from += wsize - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + else if (wnext < op) { /* wrap around window */ + from += wsize + wnext - op; + op -= wnext; + if (op < len) { /* some from end of window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = 0; + if (wnext < len) { /* some from start of window */ + op = wnext; + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + } + else { /* contiguous in window */ + from += wnext - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + while (len > 2) { + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + len -= 3; + } + if (len) { + output[_out++] = from_source[from++]; + if (len > 1) { + output[_out++] = from_source[from++]; + } + } + } + else { + from = _out - dist; /* copy direct from output */ + do { /* 
minimum length is three */ + output[_out++] = output[from++]; + output[_out++] = output[from++]; + output[_out++] = output[from++]; + len -= 3; + } while (len > 2); + if (len) { + output[_out++] = output[from++]; + if (len > 1) { + output[_out++] = output[from++]; + } + } + } + } + else if ((op & 64) === 0) { /* 2nd level distance code */ + here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dodist; + } + else { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } + else if ((op & 64) === 0) { /* 2nd level length code */ + here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dolen; + } + else if (op & 32) { /* end-of-block */ + //Tracevv((stderr, "inflate: end of block\n")); + state.mode = TYPE; + break top; + } + else { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } while (_in < last && _out < end); + + /* return unused bytes (on entry, bits < 8, so in won't go too far back) */ + len = bits >> 3; + _in -= len; + bits -= len << 3; + hold &= (1 << bits) - 1; + + /* update state and return */ + strm.next_in = _in; + strm.next_out = _out; + strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last)); + strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end)); + state.hold = hold; + state.bits = bits; + return; +}; + + +/***/ }), + +/***/ 71447: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. 
+// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +var utils = __webpack_require__(9805); +var adler32 = __webpack_require__(53269); +var crc32 = __webpack_require__(14823); +var inflate_fast = __webpack_require__(47293); +var inflate_table = __webpack_require__(21998); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +//var Z_NO_FLUSH = 0; +//var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +//var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. 
+ */ +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + +/* The deflate compression method */ +var Z_DEFLATED = 8; + + +/* STATES ====================================================================*/ +/* ===========================================================================*/ + + +var HEAD = 1; /* i: waiting for magic header */ +var FLAGS = 2; /* i: waiting for method and flags (gzip) */ +var TIME = 3; /* i: waiting for modification time (gzip) */ +var OS = 4; /* i: waiting for extra flags and operating system (gzip) */ +var EXLEN = 5; /* i: waiting for extra length (gzip) */ +var EXTRA = 6; /* i: waiting for extra bytes (gzip) */ +var NAME = 7; /* i: waiting for end of file name (gzip) */ +var COMMENT = 8; /* i: waiting for end of comment (gzip) */ +var HCRC = 9; /* i: waiting for header crc (gzip) */ +var DICTID = 10; /* i: waiting for dictionary check value */ +var DICT = 11; /* waiting for inflateSetDictionary() call */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ +var TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */ +var STORED = 14; /* i: waiting for stored size (length and complement) */ +var COPY_ = 15; /* i/o: same as COPY below, but only first time in */ +var COPY = 16; /* i/o: waiting for input or output to copy stored block */ +var TABLE = 17; /* i: waiting for dynamic block table lengths */ +var LENLENS = 18; /* i: waiting for code length code lengths */ +var CODELENS = 19; /* i: waiting for length/lit and distance code lengths */ +var LEN_ = 20; /* i: same as LEN below, but only first time in */ +var LEN = 21; /* i: waiting for length/lit/eob code */ +var LENEXT = 22; /* i: waiting for length extra bits */ +var DIST = 23; /* i: waiting for distance code */ +var DISTEXT = 24; /* i: waiting for distance extra bits */ +var MATCH = 25; /* o: 
waiting for output space to copy string */ +var LIT = 26; /* o: waiting for output space to write literal */ +var CHECK = 27; /* i: waiting for 32-bit check value */ +var LENGTH = 28; /* i: waiting for 32-bit length (gzip) */ +var DONE = 29; /* finished check, done -- remain here until reset */ +var BAD = 30; /* got a data error -- remain here until reset */ +var MEM = 31; /* got an inflate() memory error -- remain here until reset */ +var SYNC = 32; /* looking for synchronization bytes to restart inflate() */ + +/* ===========================================================================*/ + + + +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_WBITS = MAX_WBITS; + + +function zswap32(q) { + return (((q >>> 24) & 0xff) + + ((q >>> 8) & 0xff00) + + ((q & 0xff00) << 8) + + ((q & 0xff) << 24)); +} + + +function InflateState() { + this.mode = 0; /* current inflate mode */ + this.last = false; /* true if processing last block */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.havedict = false; /* true if dictionary provided */ + this.flags = 0; /* gzip header method and flags (0 if zlib) */ + this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */ + this.check = 0; /* protected copy of check value */ + this.total = 0; /* protected copy of output count */ + // TODO: may be {} + this.head = null; /* where to save gzip header information */ + + /* sliding window */ + this.wbits = 0; /* log base 2 of requested window size */ + this.wsize = 0; /* window size or zero if not using window */ + this.whave = 0; /* valid bytes in the window */ + this.wnext = 0; /* window write index */ + this.window = null; /* allocated sliding window, if needed */ + + /* bit accumulator */ + this.hold = 0; /* input bit accumulator */ + this.bits = 0; /* number of bits in "in" */ + + /* for string and stored block copying */ + this.length = 0; /* literal or length of 
data to copy */ + this.offset = 0; /* distance back to copy string from */ + + /* for table and code decoding */ + this.extra = 0; /* extra bits needed */ + + /* fixed and dynamic code tables */ + this.lencode = null; /* starting table for length/literal codes */ + this.distcode = null; /* starting table for distance codes */ + this.lenbits = 0; /* index bits for lencode */ + this.distbits = 0; /* index bits for distcode */ + + /* dynamic table building */ + this.ncode = 0; /* number of code length code lengths */ + this.nlen = 0; /* number of length code lengths */ + this.ndist = 0; /* number of distance code lengths */ + this.have = 0; /* number of code lengths in lens[] */ + this.next = null; /* next available space in codes[] */ + + this.lens = new utils.Buf16(320); /* temporary storage for code lengths */ + this.work = new utils.Buf16(288); /* work area for code table building */ + + /* + because we don't have pointers in js, we use lencode and distcode directly + as buffers so we don't need codes + */ + //this.codes = new utils.Buf32(ENOUGH); /* space for code tables */ + this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */ + this.distdyn = null; /* dynamic table for distance codes (JS specific) */ + this.sane = 0; /* if false, allow invalid distance too far */ + this.back = 0; /* bits back of last unprocessed length/lit */ + this.was = 0; /* initial length of match */ +} + +function inflateResetKeep(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + strm.total_in = strm.total_out = state.total = 0; + strm.msg = ''; /*Z_NULL*/ + if (state.wrap) { /* to support ill-conceived Java test suite */ + strm.adler = state.wrap & 1; + } + state.mode = HEAD; + state.last = 0; + state.havedict = 0; + state.dmax = 32768; + state.head = null/*Z_NULL*/; + state.hold = 0; + state.bits = 0; + //state.lencode = state.distcode = state.next = state.codes; + state.lencode = state.lendyn = new 
utils.Buf32(ENOUGH_LENS); + state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS); + + state.sane = 1; + state.back = -1; + //Tracev((stderr, "inflate: reset\n")); + return Z_OK; +} + +function inflateReset(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + state.wsize = 0; + state.whave = 0; + state.wnext = 0; + return inflateResetKeep(strm); + +} + +function inflateReset2(strm, windowBits) { + var wrap; + var state; + + /* get the state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + + /* extract wrap request from windowBits parameter */ + if (windowBits < 0) { + wrap = 0; + windowBits = -windowBits; + } + else { + wrap = (windowBits >> 4) + 1; + if (windowBits < 48) { + windowBits &= 15; + } + } + + /* set number of window bits, free window if different */ + if (windowBits && (windowBits < 8 || windowBits > 15)) { + return Z_STREAM_ERROR; + } + if (state.window !== null && state.wbits !== windowBits) { + state.window = null; + } + + /* update state and reset the rest of it */ + state.wrap = wrap; + state.wbits = windowBits; + return inflateReset(strm); +} + +function inflateInit2(strm, windowBits) { + var ret; + var state; + + if (!strm) { return Z_STREAM_ERROR; } + //strm.msg = Z_NULL; /* in case we return an error */ + + state = new InflateState(); + + //if (state === Z_NULL) return Z_MEM_ERROR; + //Tracev((stderr, "inflate: allocated\n")); + strm.state = state; + state.window = null/*Z_NULL*/; + ret = inflateReset2(strm, windowBits); + if (ret !== Z_OK) { + strm.state = null/*Z_NULL*/; + } + return ret; +} + +function inflateInit(strm) { + return inflateInit2(strm, DEF_WBITS); +} + + +/* + Return state with length and distance decoding tables and index sizes set to + fixed code decoding. Normally this returns fixed tables from inffixed.h. 
+ If BUILDFIXED is defined, then instead this routine builds the tables the + first time it's called, and returns those tables the first time and + thereafter. This reduces the size of the code by about 2K bytes, in + exchange for a little execution time. However, BUILDFIXED should not be + used for threaded applications, since the rewriting of the tables and virgin + may not be thread-safe. + */ +var virgin = true; + +var lenfix, distfix; // We have no pointers in JS, so keep tables separate + +function fixedtables(state) { + /* build fixed huffman tables if first call (may not be thread safe) */ + if (virgin) { + var sym; + + lenfix = new utils.Buf32(512); + distfix = new utils.Buf32(32); + + /* literal/length table */ + sym = 0; + while (sym < 144) { state.lens[sym++] = 8; } + while (sym < 256) { state.lens[sym++] = 9; } + while (sym < 280) { state.lens[sym++] = 7; } + while (sym < 288) { state.lens[sym++] = 8; } + + inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 }); + + /* distance table */ + sym = 0; + while (sym < 32) { state.lens[sym++] = 5; } + + inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 }); + + /* do this just once */ + virgin = false; + } + + state.lencode = lenfix; + state.lenbits = 9; + state.distcode = distfix; + state.distbits = 5; +} + + +/* + Update the window with the last wsize (normally 32K) bytes written before + returning. If window does not exist yet, create it. This is only called + when a window is already in use, or when output has been written during this + inflate call, but the end of the deflate stream has not been reached yet. + It is also called to create a window for dictionary data when a dictionary + is loaded. 
+ + Providing output buffers larger than 32K to inflate() should provide a speed + advantage, since only the last 32K of output is copied to the sliding window + upon return from inflate(), and since all distances after the first 32K of + output will fall in the output data, making match copies simpler and faster. + The advantage may be dependent on the size of the processor's data caches. + */ +function updatewindow(strm, src, end, copy) { + var dist; + var state = strm.state; + + /* if it hasn't been done already, allocate space for the window */ + if (state.window === null) { + state.wsize = 1 << state.wbits; + state.wnext = 0; + state.whave = 0; + + state.window = new utils.Buf8(state.wsize); + } + + /* copy state->wsize or less output bytes into the circular window */ + if (copy >= state.wsize) { + utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0); + state.wnext = 0; + state.whave = state.wsize; + } + else { + dist = state.wsize - state.wnext; + if (dist > copy) { + dist = copy; + } + //zmemcpy(state->window + state->wnext, end - copy, dist); + utils.arraySet(state.window, src, end - copy, dist, state.wnext); + copy -= dist; + if (copy) { + //zmemcpy(state->window, end - copy, copy); + utils.arraySet(state.window, src, end - copy, copy, 0); + state.wnext = copy; + state.whave = state.wsize; + } + else { + state.wnext += dist; + if (state.wnext === state.wsize) { state.wnext = 0; } + if (state.whave < state.wsize) { state.whave += dist; } + } + } + return 0; +} + +function inflate(strm, flush) { + var state; + var input, output; // input/output buffers + var next; /* next input INDEX */ + var put; /* next output INDEX */ + var have, left; /* available input and output */ + var hold; /* bit buffer */ + var bits; /* bits in bit buffer */ + var _in, _out; /* save starting available input and output */ + var copy; /* number of stored or match bytes to copy */ + var from; /* where to copy match bytes from */ + var from_source; + var here = 0; /* 
current decoding table entry */ + var here_bits, here_op, here_val; // paked "here" denormalized (JS specific) + //var last; /* parent table entry */ + var last_bits, last_op, last_val; // paked "last" denormalized (JS specific) + var len; /* length to copy for repeats, bits to drop */ + var ret; /* return code */ + var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */ + var opts; + + var n; // temporary var for NEED_BITS + + var order = /* permutation of code lengths */ + [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]; + + + if (!strm || !strm.state || !strm.output || + (!strm.input && strm.avail_in !== 0)) { + return Z_STREAM_ERROR; + } + + state = strm.state; + if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */ + + + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + _in = have; + _out = left; + ret = Z_OK; + + inf_leave: // goto emulation + for (;;) { + switch (state.mode) { + case HEAD: + if (state.wrap === 0) { + state.mode = TYPEDO; + break; + } + //=== NEEDBITS(16); + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */ + state.check = 0/*crc32(0L, Z_NULL, 0)*/; + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = FLAGS; + break; + } + state.flags = 0; /* expect zlib header */ + if (state.head) { + state.head.done = false; + } + if (!(state.wrap & 1) || /* check if zlib header allowed */ + (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) { + strm.msg = 'incorrect header check'; + state.mode = BAD; + break; + } + if ((hold & 
0x0f)/*BITS(4)*/ !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// + len = (hold & 0x0f)/*BITS(4)*/ + 8; + if (state.wbits === 0) { + state.wbits = len; + } + else if (len > state.wbits) { + strm.msg = 'invalid window size'; + state.mode = BAD; + break; + } + state.dmax = 1 << len; + //Tracev((stderr, "inflate: zlib header ok\n")); + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = hold & 0x200 ? DICTID : TYPE; + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + break; + case FLAGS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.flags = hold; + if ((state.flags & 0xff) !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + if (state.flags & 0xe000) { + strm.msg = 'unknown header flags set'; + state.mode = BAD; + break; + } + if (state.head) { + state.head.text = ((hold >> 8) & 1); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = TIME; + /* falls through */ + case TIME: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.time = hold; + } + if (state.flags & 0x0200) { + //=== CRC4(state.check, hold) + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + hbuf[2] = (hold >>> 16) & 0xff; + hbuf[3] = (hold >>> 24) & 0xff; + state.check = crc32(state.check, hbuf, 4, 0); + //=== + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = OS; + /* falls through */ + case OS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have 
=== 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.xflags = (hold & 0xff); + state.head.os = (hold >> 8); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = EXLEN; + /* falls through */ + case EXLEN: + if (state.flags & 0x0400) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length = hold; + if (state.head) { + state.head.extra_len = hold; + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + else if (state.head) { + state.head.extra = null/*Z_NULL*/; + } + state.mode = EXTRA; + /* falls through */ + case EXTRA: + if (state.flags & 0x0400) { + copy = state.length; + if (copy > have) { copy = have; } + if (copy) { + if (state.head) { + len = state.head.extra_len - state.length; + if (!state.head.extra) { + // Use untyped array for more convenient processing later + state.head.extra = new Array(state.head.extra_len); + } + utils.arraySet( + state.head.extra, + input, + next, + // extra field is limited to 65536 bytes + // - no need for additional size check + copy, + /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/ + len + ); + //zmemcpy(state.head.extra + len, next, + // len + copy > state.head.extra_max ? 
+ // state.head.extra_max - len : copy); + } + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + state.length -= copy; + } + if (state.length) { break inf_leave; } + } + state.length = 0; + state.mode = NAME; + /* falls through */ + case NAME: + if (state.flags & 0x0800) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + // TODO: 2 or 1 bytes? + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.name_max*/)) { + state.head.name += String.fromCharCode(len); + } + } while (len && copy < have); + + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.name = null; + } + state.length = 0; + state.mode = COMMENT; + /* falls through */ + case COMMENT: + if (state.flags & 0x1000) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.comm_max*/)) { + state.head.comment += String.fromCharCode(len); + } + } while (len && copy < have); + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.comment = null; + } + state.mode = HCRC; + /* falls through */ + case HCRC: + if (state.flags & 0x0200) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.check & 0xffff)) { + strm.msg = 'header crc mismatch'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + if (state.head) { + 
state.head.hcrc = ((state.flags >> 9) & 1); + state.head.done = true; + } + strm.adler = state.check = 0; + state.mode = TYPE; + break; + case DICTID: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + strm.adler = state.check = zswap32(hold); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = DICT; + /* falls through */ + case DICT: + if (state.havedict === 0) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + return Z_NEED_DICT; + } + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = TYPE; + /* falls through */ + case TYPE: + if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; } + /* falls through */ + case TYPEDO: + if (state.last) { + //--- BYTEBITS() ---// + hold >>>= bits & 7; + bits -= bits & 7; + //---// + state.mode = CHECK; + break; + } + //=== NEEDBITS(3); */ + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.last = (hold & 0x01)/*BITS(1)*/; + //--- DROPBITS(1) ---// + hold >>>= 1; + bits -= 1; + //---// + + switch ((hold & 0x03)/*BITS(2)*/) { + case 0: /* stored block */ + //Tracev((stderr, "inflate: stored block%s\n", + // state.last ? " (last)" : "")); + state.mode = STORED; + break; + case 1: /* fixed block */ + fixedtables(state); + //Tracev((stderr, "inflate: fixed codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = LEN_; /* decode codes */ + if (flush === Z_TREES) { + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break inf_leave; + } + break; + case 2: /* dynamic block */ + //Tracev((stderr, "inflate: dynamic codes block%s\n", + // state.last ? 
" (last)" : "")); + state.mode = TABLE; + break; + case 3: + strm.msg = 'invalid block type'; + state.mode = BAD; + } + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break; + case STORED: + //--- BYTEBITS() ---// /* go to byte boundary */ + hold >>>= bits & 7; + bits -= bits & 7; + //---// + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) { + strm.msg = 'invalid stored block lengths'; + state.mode = BAD; + break; + } + state.length = hold & 0xffff; + //Tracev((stderr, "inflate: stored length %u\n", + // state.length)); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = COPY_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case COPY_: + state.mode = COPY; + /* falls through */ + case COPY: + copy = state.length; + if (copy) { + if (copy > have) { copy = have; } + if (copy > left) { copy = left; } + if (copy === 0) { break inf_leave; } + //--- zmemcpy(put, next, copy); --- + utils.arraySet(output, input, next, copy, put); + //---// + have -= copy; + next += copy; + left -= copy; + put += copy; + state.length -= copy; + break; + } + //Tracev((stderr, "inflate: stored end\n")); + state.mode = TYPE; + break; + case TABLE: + //=== NEEDBITS(14); */ + while (bits < 14) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4; + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// +//#ifndef PKZIP_BUG_WORKAROUND + if (state.nlen > 286 || state.ndist > 30) { + strm.msg = 'too many length or distance symbols'; + state.mode = BAD; + break; + 
} +//#endif + //Tracev((stderr, "inflate: table sizes ok\n")); + state.have = 0; + state.mode = LENLENS; + /* falls through */ + case LENLENS: + while (state.have < state.ncode) { + //=== NEEDBITS(3); + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.lens[order[state.have++]] = (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + while (state.have < 19) { + state.lens[order[state.have++]] = 0; + } + // We have separate tables & no pointers. 2 commented lines below not needed. + //state.next = state.codes; + //state.lencode = state.next; + // Switch to use dynamic table + state.lencode = state.lendyn; + state.lenbits = 7; + + opts = { bits: state.lenbits }; + ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts); + state.lenbits = opts.bits; + + if (ret) { + strm.msg = 'invalid code lengths set'; + state.mode = BAD; + break; + } + //Tracev((stderr, "inflate: code lengths ok\n")); + state.have = 0; + state.mode = CODELENS; + /* falls through */ + case CODELENS: + while (state.have < state.nlen + state.ndist) { + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_val < 16) { + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.lens[state.have++] = here_val; + } + else { + if (here_val === 16) { + //=== NEEDBITS(here.bits + 2); + n = here_bits + 2; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// 
+ if (state.have === 0) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + len = state.lens[state.have - 1]; + copy = 3 + (hold & 0x03);//BITS(2); + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + } + else if (here_val === 17) { + //=== NEEDBITS(here.bits + 3); + n = here_bits + 3; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 3 + (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + else { + //=== NEEDBITS(here.bits + 7); + n = here_bits + 7; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 11 + (hold & 0x7f);//BITS(7); + //--- DROPBITS(7) ---// + hold >>>= 7; + bits -= 7; + //---// + } + if (state.have + copy > state.nlen + state.ndist) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + while (copy--) { + state.lens[state.have++] = len; + } + } + } + + /* handle error breaks in while */ + if (state.mode === BAD) { break; } + + /* check for end-of-block code (better have one) */ + if (state.lens[256] === 0) { + strm.msg = 'invalid code -- missing end-of-block'; + state.mode = BAD; + break; + } + + /* build code tables -- note: do not change the lenbits or distbits + values here (9 and 6) without reading the comments in inftrees.h + concerning the ENOUGH constants, which depend on those values */ + state.lenbits = 9; + + opts = { bits: state.lenbits }; + ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. 
+ // state.next_index = opts.table_index; + state.lenbits = opts.bits; + // state.lencode = state.next; + + if (ret) { + strm.msg = 'invalid literal/lengths set'; + state.mode = BAD; + break; + } + + state.distbits = 6; + //state.distcode.copy(state.codes); + // Switch to use dynamic table + state.distcode = state.distdyn; + opts = { bits: state.distbits }; + ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. + // state.next_index = opts.table_index; + state.distbits = opts.bits; + // state.distcode = state.next; + + if (ret) { + strm.msg = 'invalid distances set'; + state.mode = BAD; + break; + } + //Tracev((stderr, 'inflate: codes ok\n')); + state.mode = LEN_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case LEN_: + state.mode = LEN; + /* falls through */ + case LEN: + if (have >= 6 && left >= 258) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + inflate_fast(strm, _out); + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + if (state.mode === TYPE) { + state.back = -1; + } + break; + } + state.back = 0; + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if (here_bits <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_op && (here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.lencode[last_val + + ((hold & ((1 << 
(last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + state.length = here_val; + if (here_op === 0) { + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? + // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + state.mode = LIT; + break; + } + if (here_op & 32) { + //Tracevv((stderr, "inflate: end of block\n")); + state.back = -1; + state.mode = TYPE; + break; + } + if (here_op & 64) { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break; + } + state.extra = here_op & 15; + state.mode = LENEXT; + /* falls through */ + case LENEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } + //Tracevv((stderr, "inflate: length %u\n", state.length)); + state.was = state.length; + state.mode = DIST; + /* falls through */ + case DIST: + for (;;) { + here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + 
bits += 8; + //---// + } + if ((here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.distcode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + if (here_op & 64) { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break; + } + state.offset = here_val; + state.extra = (here_op) & 15; + state.mode = DISTEXT; + /* falls through */ + case DISTEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } +//#ifdef INFLATE_STRICT + if (state.offset > state.dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +//#endif + //Tracevv((stderr, "inflate: distance %u\n", state.offset)); + state.mode = MATCH; + /* falls through */ + case MATCH: + if (left === 0) { break inf_leave; } + copy = _out - left; + if (state.offset > copy) { /* copy from window */ + copy = state.offset - copy; + if (copy > state.whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +// (!) 
This block is disabled in zlib defaults, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// Trace((stderr, "inflate.c too far\n")); +// copy -= state.whave; +// if (copy > state.length) { copy = state.length; } +// if (copy > left) { copy = left; } +// left -= copy; +// state.length -= copy; +// do { +// output[put++] = 0; +// } while (--copy); +// if (state.length === 0) { state.mode = LEN; } +// break; +//#endif + } + if (copy > state.wnext) { + copy -= state.wnext; + from = state.wsize - copy; + } + else { + from = state.wnext - copy; + } + if (copy > state.length) { copy = state.length; } + from_source = state.window; + } + else { /* copy from output */ + from_source = output; + from = put - state.offset; + copy = state.length; + } + if (copy > left) { copy = left; } + left -= copy; + state.length -= copy; + do { + output[put++] = from_source[from++]; + } while (--copy); + if (state.length === 0) { state.mode = LEN; } + break; + case LIT: + if (left === 0) { break inf_leave; } + output[put++] = state.length; + left--; + state.mode = LEN; + break; + case CHECK: + if (state.wrap) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + // Use '|' instead of '+' to make sure that result is signed + hold |= input[next++] << bits; + bits += 8; + } + //===// + _out -= left; + strm.total_out += _out; + state.total += _out; + if (_out) { + strm.adler = state.check = + /*UPDATE(state.check, put - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out)); + + } + _out = left; + // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too + if ((state.flags ? 
hold : zswap32(hold)) !== state.check) { + strm.msg = 'incorrect data check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: check matches trailer\n")); + } + state.mode = LENGTH; + /* falls through */ + case LENGTH: + if (state.wrap && state.flags) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.total & 0xffffffff)) { + strm.msg = 'incorrect length check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: length matches trailer\n")); + } + state.mode = DONE; + /* falls through */ + case DONE: + ret = Z_STREAM_END; + break inf_leave; + case BAD: + ret = Z_DATA_ERROR; + break inf_leave; + case MEM: + return Z_MEM_ERROR; + case SYNC: + /* falls through */ + default: + return Z_STREAM_ERROR; + } + } + + // inf_leave <- here is real place for "goto inf_leave", emulated via "break inf_leave" + + /* + Return from inflate(), updating the total counts and the check value. + If there was no progress during the inflate() call, return a buffer + error. Call updatewindow() to create and/or update the window state. + Note: a memory error from inflate() is non-recoverable. 
+ */ + + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + + if (state.wsize || (_out !== strm.avail_out && state.mode < BAD && + (state.mode < CHECK || flush !== Z_FINISH))) { + if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) { + state.mode = MEM; + return Z_MEM_ERROR; + } + } + _in -= strm.avail_in; + _out -= strm.avail_out; + strm.total_in += _in; + strm.total_out += _out; + state.total += _out; + if (state.wrap && _out) { + strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out)); + } + strm.data_type = state.bits + (state.last ? 64 : 0) + + (state.mode === TYPE ? 128 : 0) + + (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0); + if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) { + ret = Z_BUF_ERROR; + } + return ret; +} + +function inflateEnd(strm) { + + if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) { + return Z_STREAM_ERROR; + } + + var state = strm.state; + if (state.window) { + state.window = null; + } + strm.state = null; + return Z_OK; +} + +function inflateGetHeader(strm, head) { + var state; + + /* check state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; } + + /* save header structure */ + state.head = head; + head.done = false; + return Z_OK; +} + +function inflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var state; + var dictid; + var ret; + + /* check state */ + if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; } + state = strm.state; + + if (state.wrap !== 0 && state.mode !== DICT) { + return Z_STREAM_ERROR; + } + + /* check for correct dictionary identifier */ 
+ if (state.mode === DICT) { + dictid = 1; /* adler32(0, null, 0)*/ + /* dictid = adler32(dictid, dictionary, dictLength); */ + dictid = adler32(dictid, dictionary, dictLength, 0); + if (dictid !== state.check) { + return Z_DATA_ERROR; + } + } + /* copy dictionary to window using updatewindow(), which will amend the + existing dictionary if appropriate */ + ret = updatewindow(strm, dictionary, dictLength, dictLength); + if (ret) { + state.mode = MEM; + return Z_MEM_ERROR; + } + state.havedict = 1; + // Tracev((stderr, "inflate: dictionary set\n")); + return Z_OK; +} + +exports.inflateReset = inflateReset; +exports.inflateReset2 = inflateReset2; +exports.inflateResetKeep = inflateResetKeep; +exports.inflateInit = inflateInit; +exports.inflateInit2 = inflateInit2; +exports.inflate = inflate; +exports.inflateEnd = inflateEnd; +exports.inflateGetHeader = inflateGetHeader; +exports.inflateSetDictionary = inflateSetDictionary; +exports.inflateInfo = 'pako inflate (from Nodeca project)'; + +/* Not implemented +exports.inflateCopy = inflateCopy; +exports.inflateGetDictionary = inflateGetDictionary; +exports.inflateMark = inflateMark; +exports.inflatePrime = inflatePrime; +exports.inflateSync = inflateSync; +exports.inflateSyncPoint = inflateSyncPoint; +exports.inflateUndermine = inflateUndermine; +*/ + + +/***/ }), + +/***/ 21998: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. 
The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +var utils = __webpack_require__(9805); + +var MAXBITS = 15; +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +var lbase = [ /* Length codes 257..285 base */ + 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, + 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0 +]; + +var lext = [ /* Length codes 257..285 extra */ + 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, + 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78 +]; + +var dbase = [ /* Distance codes 0..29 base */ + 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, + 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, + 8193, 12289, 16385, 24577, 0, 0 +]; + +var dext = [ /* Distance codes 0..29 extra */ + 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, + 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, + 28, 28, 29, 29, 64, 64 +]; + +module.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts) +{ + var bits = opts.bits; + //here = opts.here; /* table entry for duplication */ + + var len = 0; /* a code's length in bits */ + var sym = 0; /* index of code symbols */ + var min = 0, max = 0; /* minimum and maximum code lengths */ + var root = 0; /* number of index bits for root table */ + var curr = 0; /* number of index bits for current table */ + var drop = 0; /* code bits to drop for sub-table */ + var left = 0; /* number of prefix codes available */ + var used 
= 0; /* code entries in table used */ + var huff = 0; /* Huffman code */ + var incr; /* for incrementing code, index */ + var fill; /* index for replicating entries */ + var low; /* low bits for current root entry */ + var mask; /* mask for low root bits */ + var next; /* next available space in table */ + var base = null; /* base value table to use */ + var base_index = 0; +// var shoextra; /* extra bits table to use */ + var end; /* use base and extra for symbol > end */ + var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */ + var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */ + var extra = null; + var extra_index = 0; + + var here_bits, here_op, here_val; + + /* + Process a set of code lengths to create a canonical Huffman code. The + code lengths are lens[0..codes-1]. Each length corresponds to the + symbols 0..codes-1. The Huffman code is generated by first sorting the + symbols by length from short to long, and retaining the symbol order + for codes with equal lengths. Then the code starts with all zero bits + for the first code of the shortest length, and the codes are integer + increments for the same length, and zeros are appended as the length + increases. For the deflate format, these bits are stored backwards + from their more natural integer increment ordering, and so when the + decoding tables are built in the large loop below, the integer codes + are incremented backwards. + + This routine assumes, but does not check, that all of the entries in + lens[] are in the range 0..MAXBITS. The caller must assure this. + 1..MAXBITS is interpreted as that code length. zero means that that + symbol does not occur in this code. + + The codes are sorted by computing a count of codes for each length, + creating from that a table of starting indices for each length in the + sorted table, and then entering the symbols in order in the sorted + table. 
The sorted table is work[], with that space being provided by + the caller. + + The length counts are used for other purposes as well, i.e. finding + the minimum and maximum length codes, determining if there are any + codes at all, checking for a valid set of lengths, and looking ahead + at length counts to determine sub-table sizes when building the + decoding tables. + */ + + /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */ + for (len = 0; len <= MAXBITS; len++) { + count[len] = 0; + } + for (sym = 0; sym < codes; sym++) { + count[lens[lens_index + sym]]++; + } + + /* bound code lengths, force root to be within code lengths */ + root = bits; + for (max = MAXBITS; max >= 1; max--) { + if (count[max] !== 0) { break; } + } + if (root > max) { + root = max; + } + if (max === 0) { /* no symbols to code at all */ + //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */ + //table.bits[opts.table_index] = 1; //here.bits = (var char)1; + //table.val[opts.table_index++] = 0; //here.val = (var short)0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + + //table.op[opts.table_index] = 64; + //table.bits[opts.table_index] = 1; + //table.val[opts.table_index++] = 0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + opts.bits = 1; + return 0; /* no symbols, but wait for decoding to report error */ + } + for (min = 1; min < max; min++) { + if (count[min] !== 0) { break; } + } + if (root < min) { + root = min; + } + + /* check for an over-subscribed or incomplete set of lengths */ + left = 1; + for (len = 1; len <= MAXBITS; len++) { + left <<= 1; + left -= count[len]; + if (left < 0) { + return -1; + } /* over-subscribed */ + } + if (left > 0 && (type === CODES || max !== 1)) { + return -1; /* incomplete set */ + } + + /* generate offsets into symbol table for each length for sorting */ + offs[1] = 0; + for (len = 1; len < MAXBITS; len++) { + offs[len + 1] = offs[len] + count[len]; + } + + /* sort symbols by 
length, by symbol order within each length */ + for (sym = 0; sym < codes; sym++) { + if (lens[lens_index + sym] !== 0) { + work[offs[lens[lens_index + sym]]++] = sym; + } + } + + /* + Create and fill in decoding tables. In this loop, the table being + filled is at next and has curr index bits. The code being used is huff + with length len. That code is converted to an index by dropping drop + bits off of the bottom. For codes where len is less than drop + curr, + those top drop + curr - len bits are incremented through all values to + fill the table with replicated entries. + + root is the number of index bits for the root table. When len exceeds + root, sub-tables are created pointed to by the root entry with an index + of the low root bits of huff. This is saved in low to check for when a + new sub-table should be started. drop is zero when the root table is + being filled, and drop is root when sub-tables are being filled. + + When a new sub-table is needed, it is necessary to look ahead in the + code lengths to determine what size sub-table is needed. The length + counts are used for this, and so count[] is decremented as codes are + entered in the tables. + + used keeps track of how many table entries have been allocated from the + provided *table space. It is checked for LENS and DIST tables against + the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in + the initial root table size constants. See the comments in inftrees.h + for more information. + + sym increments through all symbols, and the loop terminates when + all codes of length max, i.e. all codes, have been processed. This + routine permits incomplete codes, so another loop after this one fills + in the rest of the decoding tables with invalid code markers. 
+ */ + + /* set up for code type */ + // poor man optimization - use if-else instead of switch, + // to avoid deopts in old v8 + if (type === CODES) { + base = extra = work; /* dummy value--not used */ + end = 19; + + } else if (type === LENS) { + base = lbase; + base_index -= 257; + extra = lext; + extra_index -= 257; + end = 256; + + } else { /* DISTS */ + base = dbase; + extra = dext; + end = -1; + } + + /* initialize opts for loop */ + huff = 0; /* starting code */ + sym = 0; /* starting code symbol */ + len = min; /* starting code length */ + next = table_index; /* current table to fill in */ + curr = root; /* current table index bits */ + drop = 0; /* current bits to drop from code for index */ + low = -1; /* trigger new sub-table when len > root */ + used = 1 << root; /* use root table entries */ + mask = used - 1; /* mask for comparing low */ + + /* check available table space */ + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* process all codes and make table entries */ + for (;;) { + /* create table entry */ + here_bits = len - drop; + if (work[sym] < end) { + here_op = 0; + here_val = work[sym]; + } + else if (work[sym] > end) { + here_op = extra[extra_index + work[sym]]; + here_val = base[base_index + work[sym]]; + } + else { + here_op = 32 + 64; /* end of block */ + here_val = 0; + } + + /* replicate for those indices with low len bits equal to huff */ + incr = 1 << (len - drop); + fill = 1 << curr; + min = fill; /* save offset to next table */ + do { + fill -= incr; + table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0; + } while (fill !== 0); + + /* backwards increment the len-bit code huff */ + incr = 1 << (len - 1); + while (huff & incr) { + incr >>= 1; + } + if (incr !== 0) { + huff &= incr - 1; + huff += incr; + } else { + huff = 0; + } + + /* go to next symbol, update count, len */ + sym++; + if (--count[len] === 0) { + if (len === max) { break; 
} + len = lens[lens_index + work[sym]]; + } + + /* create new sub-table if needed */ + if (len > root && (huff & mask) !== low) { + /* if first time, transition to sub-tables */ + if (drop === 0) { + drop = root; + } + + /* increment past last table */ + next += min; /* here min is 1 << curr */ + + /* determine length of next table */ + curr = len - drop; + left = 1 << curr; + while (curr + drop < max) { + left -= count[curr + drop]; + if (left <= 0) { break; } + curr++; + left <<= 1; + } + + /* check for enough space */ + used += 1 << curr; + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* point entry in root table to sub-table */ + low = huff & mask; + /*table.op[low] = curr; + table.bits[low] = root; + table.val[low] = next - opts.table_index;*/ + table[low] = (root << 24) | (curr << 16) | (next - table_index) |0; + } + } + + /* fill in remaining table entry if code is incomplete (guaranteed to have + at most one remaining entry, since if the code is incomplete, the + maximum code length that was allowed to get this far is one bit) */ + if (huff !== 0) { + //table.op[next + huff] = 64; /* invalid code marker */ + //table.bits[next + huff] = len - drop; + //table.val[next + huff] = 0; + table[next + huff] = ((len - drop) << 24) | (64 << 16) |0; + } + + /* set return parameters */ + //opts.table_index += used; + opts.bits = root; + return 0; +}; + + +/***/ }), + +/***/ 54674: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. 
+// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +module.exports = { + 2: 'need dictionary', /* Z_NEED_DICT 2 */ + 1: 'stream end', /* Z_STREAM_END 1 */ + 0: '', /* Z_OK 0 */ + '-1': 'file error', /* Z_ERRNO (-1) */ + '-2': 'stream error', /* Z_STREAM_ERROR (-2) */ + '-3': 'data error', /* Z_DATA_ERROR (-3) */ + '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */ + '-5': 'buffer error', /* Z_BUF_ERROR (-5) */ + '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */ +}; + + +/***/ }), + +/***/ 23665: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. 
Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +/* eslint-disable space-unary-ops */ + +var utils = __webpack_require__(9805); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +//var Z_FILTERED = 1; +//var Z_HUFFMAN_ONLY = 2; +//var Z_RLE = 3; +var Z_FIXED = 4; +//var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +var Z_BINARY = 0; +var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + +/*============================================================================*/ + + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + +// From zutil.h + +var STORED_BLOCK = 0; +var STATIC_TREES = 1; +var DYN_TREES = 2; +/* The three kinds of block type */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +/* The minimum and maximum match lengths */ + +// From deflate.h +/* =========================================================================== + * Internal compression state. 
+ */ + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ + +var LITERALS = 256; +/* number of literal bytes 0..255 */ + +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ + +var D_CODES = 30; +/* number of distance codes */ + +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ + +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ + +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var Buf_size = 16; +/* size of bit buffer in bi_buf */ + + +/* =========================================================================== + * Constants + */ + +var MAX_BL_BITS = 7; +/* Bit length codes must not exceed MAX_BL_BITS bits */ + +var END_BLOCK = 256; +/* end of block literal code */ + +var REP_3_6 = 16; +/* repeat previous bit length 3-6 times (2 bits of repeat count) */ + +var REPZ_3_10 = 17; +/* repeat a zero length 3-10 times (3 bits of repeat count) */ + +var REPZ_11_138 = 18; +/* repeat a zero length 11-138 times (7 bits of repeat count) */ + +/* eslint-disable comma-spacing,array-bracket-spacing */ +var extra_lbits = /* extra bits for each length code */ + [0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0]; + +var extra_dbits = /* extra bits for each distance code */ + [0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13]; + +var extra_blbits = /* extra bits for each bit length code */ + [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7]; + +var bl_order = + [16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15]; +/* eslint-enable comma-spacing,array-bracket-spacing */ + +/* The lengths of the bit length codes are sent in order of decreasing + * probability, to avoid transmitting the lengths for unused bit length codes. + */ + +/* =========================================================================== + * Local data. These are initialized only once. 
+ */ + +// We pre-fill arrays with 0 to avoid uninitialized gaps + +var DIST_CODE_LEN = 512; /* see definition of array dist_code below */ + +// !!!! Use flat array instead of structure, Freq = i*2, Len = i*2+1 +var static_ltree = new Array((L_CODES + 2) * 2); +zero(static_ltree); +/* The static literal tree. Since the bit lengths are imposed, there is no + * need for the L_CODES extra codes used during heap construction. However + * The codes 286 and 287 are needed to build a canonical tree (see _tr_init + * below). + */ + +var static_dtree = new Array(D_CODES * 2); +zero(static_dtree); +/* The static distance tree. (Actually a trivial tree since all codes use + * 5 bits.) + */ + +var _dist_code = new Array(DIST_CODE_LEN); +zero(_dist_code); +/* Distance codes. The first 256 values correspond to the distances + * 3 .. 258, the last 256 values correspond to the top 8 bits of + * the 15 bit distances. + */ + +var _length_code = new Array(MAX_MATCH - MIN_MATCH + 1); +zero(_length_code); +/* length code for each normalized match length (0 == MIN_MATCH) */ + +var base_length = new Array(LENGTH_CODES); +zero(base_length); +/* First normalized length for each code (0 = MIN_MATCH) */ + +var base_dist = new Array(D_CODES); +zero(base_dist); +/* First normalized distance for each code (0 = distance of 1) */ + + +function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) { + + this.static_tree = static_tree; /* static tree or NULL */ + this.extra_bits = extra_bits; /* extra bits for each code or NULL */ + this.extra_base = extra_base; /* base index for extra_bits */ + this.elems = elems; /* max number of elements in the tree */ + this.max_length = max_length; /* max bit length for the codes */ + + // show if `static_tree` has data or dummy - needed for monomorphic objects + this.has_stree = static_tree && static_tree.length; +} + + +var static_l_desc; +var static_d_desc; +var static_bl_desc; + + +function TreeDesc(dyn_tree, stat_desc) { + this.dyn_tree = 
dyn_tree; /* the dynamic tree */ + this.max_code = 0; /* largest code with non zero frequency */ + this.stat_desc = stat_desc; /* the corresponding static tree */ +} + + + +function d_code(dist) { + return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)]; +} + + +/* =========================================================================== + * Output a short LSB first on the stream. + * IN assertion: there is enough room in pendingBuf. + */ +function put_short(s, w) { +// put_byte(s, (uch)((w) & 0xff)); +// put_byte(s, (uch)((ush)(w) >> 8)); + s.pending_buf[s.pending++] = (w) & 0xff; + s.pending_buf[s.pending++] = (w >>> 8) & 0xff; +} + + +/* =========================================================================== + * Send a value on a given number of bits. + * IN assertion: length <= 16 and value fits in length bits. + */ +function send_bits(s, value, length) { + if (s.bi_valid > (Buf_size - length)) { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + put_short(s, s.bi_buf); + s.bi_buf = value >> (Buf_size - s.bi_valid); + s.bi_valid += length - Buf_size; + } else { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + s.bi_valid += length; + } +} + + +function send_code(s, c, tree) { + send_bits(s, tree[c * 2]/*.Code*/, tree[c * 2 + 1]/*.Len*/); +} + + +/* =========================================================================== + * Reverse the first len bits of a code, using straightforward code (a faster + * method would use a table) + * IN assertion: 1 <= len <= 15 + */ +function bi_reverse(code, len) { + var res = 0; + do { + res |= code & 1; + code >>>= 1; + res <<= 1; + } while (--len > 0); + return res >>> 1; +} + + +/* =========================================================================== + * Flush the bit buffer, keeping at most 7 bits in it. 
+ */ +function bi_flush(s) { + if (s.bi_valid === 16) { + put_short(s, s.bi_buf); + s.bi_buf = 0; + s.bi_valid = 0; + + } else if (s.bi_valid >= 8) { + s.pending_buf[s.pending++] = s.bi_buf & 0xff; + s.bi_buf >>= 8; + s.bi_valid -= 8; + } +} + + +/* =========================================================================== + * Compute the optimal bit lengths for a tree and update the total bit length + * for the current block. + * IN assertion: the fields freq and dad are set, heap[heap_max] and + * above are the tree nodes sorted by increasing frequency. + * OUT assertions: the field len is set to the optimal bit length, the + * array bl_count contains the frequencies for each bit length. + * The length opt_len is updated; static_len is also updated if stree is + * not null. + */ +function gen_bitlen(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var max_code = desc.max_code; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var extra = desc.stat_desc.extra_bits; + var base = desc.stat_desc.extra_base; + var max_length = desc.stat_desc.max_length; + var h; /* heap index */ + var n, m; /* iterate over the tree elements */ + var bits; /* bit length */ + var xbits; /* extra bits */ + var f; /* frequency */ + var overflow = 0; /* number of elements with bit length too large */ + + for (bits = 0; bits <= MAX_BITS; bits++) { + s.bl_count[bits] = 0; + } + + /* In a first pass, compute the optimal bit lengths (which may + * overflow in the case of the bit length tree). 
+ */ + tree[s.heap[s.heap_max] * 2 + 1]/*.Len*/ = 0; /* root of the heap */ + + for (h = s.heap_max + 1; h < HEAP_SIZE; h++) { + n = s.heap[h]; + bits = tree[tree[n * 2 + 1]/*.Dad*/ * 2 + 1]/*.Len*/ + 1; + if (bits > max_length) { + bits = max_length; + overflow++; + } + tree[n * 2 + 1]/*.Len*/ = bits; + /* We overwrite tree[n].Dad which is no longer needed */ + + if (n > max_code) { continue; } /* not a leaf node */ + + s.bl_count[bits]++; + xbits = 0; + if (n >= base) { + xbits = extra[n - base]; + } + f = tree[n * 2]/*.Freq*/; + s.opt_len += f * (bits + xbits); + if (has_stree) { + s.static_len += f * (stree[n * 2 + 1]/*.Len*/ + xbits); + } + } + if (overflow === 0) { return; } + + // Trace((stderr,"\nbit length overflow\n")); + /* This happens for example on obj2 and pic of the Calgary corpus */ + + /* Find the first bit length which could increase: */ + do { + bits = max_length - 1; + while (s.bl_count[bits] === 0) { bits--; } + s.bl_count[bits]--; /* move one leaf down the tree */ + s.bl_count[bits + 1] += 2; /* move one overflow item as its brother */ + s.bl_count[max_length]--; + /* The brother of the overflow item also moves one step up, + * but this does not affect bl_count[max_length] + */ + overflow -= 2; + } while (overflow > 0); + + /* Now recompute all bit lengths, scanning in increasing frequency. + * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all + * lengths instead of fixing only the wrong ones. This idea is taken + * from 'ar' written by Haruhiko Okumura.) 
+ */ + for (bits = max_length; bits !== 0; bits--) { + n = s.bl_count[bits]; + while (n !== 0) { + m = s.heap[--h]; + if (m > max_code) { continue; } + if (tree[m * 2 + 1]/*.Len*/ !== bits) { + // Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); + s.opt_len += (bits - tree[m * 2 + 1]/*.Len*/) * tree[m * 2]/*.Freq*/; + tree[m * 2 + 1]/*.Len*/ = bits; + } + n--; + } + } +} + + +/* =========================================================================== + * Generate the codes for a given tree and bit counts (which need not be + * optimal). + * IN assertion: the array bl_count contains the bit length statistics for + * the given tree and the field len is set for all tree elements. + * OUT assertion: the field code is set for all tree elements of non + * zero code length. + */ +function gen_codes(tree, max_code, bl_count) +// ct_data *tree; /* the tree to decorate */ +// int max_code; /* largest code with non zero frequency */ +// ushf *bl_count; /* number of codes at each bit length */ +{ + var next_code = new Array(MAX_BITS + 1); /* next code value for each bit length */ + var code = 0; /* running code value */ + var bits; /* bit index */ + var n; /* code index */ + + /* The distribution counts are first used to generate the code values + * without bit reversal. + */ + for (bits = 1; bits <= MAX_BITS; bits++) { + next_code[bits] = code = (code + bl_count[bits - 1]) << 1; + } + /* Check that the bit counts in bl_count are consistent. The last code + * must be all ones. 
+ */ + //Assert (code + bl_count[MAX_BITS]-1 == (1< length code (0..28) */ + length = 0; + for (code = 0; code < LENGTH_CODES - 1; code++) { + base_length[code] = length; + for (n = 0; n < (1 << extra_lbits[code]); n++) { + _length_code[length++] = code; + } + } + //Assert (length == 256, "tr_static_init: length != 256"); + /* Note that the length 255 (match length 258) can be represented + * in two different ways: code 284 + 5 bits or code 285, so we + * overwrite length_code[255] to use the best encoding: + */ + _length_code[length - 1] = code; + + /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ + dist = 0; + for (code = 0; code < 16; code++) { + base_dist[code] = dist; + for (n = 0; n < (1 << extra_dbits[code]); n++) { + _dist_code[dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: dist != 256"); + dist >>= 7; /* from now on, all distances are divided by 128 */ + for (; code < D_CODES; code++) { + base_dist[code] = dist << 7; + for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { + _dist_code[256 + dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: 256+dist != 512"); + + /* Construct the codes of the static literal tree */ + for (bits = 0; bits <= MAX_BITS; bits++) { + bl_count[bits] = 0; + } + + n = 0; + while (n <= 143) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + while (n <= 255) { + static_ltree[n * 2 + 1]/*.Len*/ = 9; + n++; + bl_count[9]++; + } + while (n <= 279) { + static_ltree[n * 2 + 1]/*.Len*/ = 7; + n++; + bl_count[7]++; + } + while (n <= 287) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + /* Codes 286 and 287 do not exist, but we must include them in the + * tree construction to get a canonical Huffman tree (longest code + * all ones) + */ + gen_codes(static_ltree, L_CODES + 1, bl_count); + + /* The static distance tree is trivial: */ + for (n = 0; n < D_CODES; n++) { + static_dtree[n * 2 + 1]/*.Len*/ = 5; + static_dtree[n * 2]/*.Code*/ = bi_reverse(n, 
5); + } + + // Now data ready and we can init static trees + static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS); + static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS); + static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS); + + //static_init_done = true; +} + + +/* =========================================================================== + * Initialize a new block. + */ +function init_block(s) { + var n; /* iterates over tree elements */ + + /* Initialize the trees. */ + for (n = 0; n < L_CODES; n++) { s.dyn_ltree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < D_CODES; n++) { s.dyn_dtree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < BL_CODES; n++) { s.bl_tree[n * 2]/*.Freq*/ = 0; } + + s.dyn_ltree[END_BLOCK * 2]/*.Freq*/ = 1; + s.opt_len = s.static_len = 0; + s.last_lit = s.matches = 0; +} + + +/* =========================================================================== + * Flush the bit buffer and align the output on a byte boundary + */ +function bi_windup(s) +{ + if (s.bi_valid > 8) { + put_short(s, s.bi_buf); + } else if (s.bi_valid > 0) { + //put_byte(s, (Byte)s->bi_buf); + s.pending_buf[s.pending++] = s.bi_buf; + } + s.bi_buf = 0; + s.bi_valid = 0; +} + +/* =========================================================================== + * Copy a stored block, storing first the length and its + * one's complement if requested. 
+ */ +function copy_block(s, buf, len, header) +//DeflateState *s; +//charf *buf; /* the input data */ +//unsigned len; /* its length */ +//int header; /* true if block header must be written */ +{ + bi_windup(s); /* align on byte boundary */ + + if (header) { + put_short(s, len); + put_short(s, ~len); + } +// while (len--) { +// put_byte(s, *buf++); +// } + utils.arraySet(s.pending_buf, s.window, buf, len, s.pending); + s.pending += len; +} + +/* =========================================================================== + * Compares to subtrees, using the tree depth as tie breaker when + * the subtrees have equal frequency. This minimizes the worst case length. + */ +function smaller(tree, n, m, depth) { + var _n2 = n * 2; + var _m2 = m * 2; + return (tree[_n2]/*.Freq*/ < tree[_m2]/*.Freq*/ || + (tree[_n2]/*.Freq*/ === tree[_m2]/*.Freq*/ && depth[n] <= depth[m])); +} + +/* =========================================================================== + * Restore the heap property by moving down the tree starting at node k, + * exchanging a node with the smallest of its two sons if necessary, stopping + * when the heap property is re-established (each father smaller than its + * two sons). 
+ */ +function pqdownheap(s, tree, k) +// deflate_state *s; +// ct_data *tree; /* the tree to restore */ +// int k; /* node to move down */ +{ + var v = s.heap[k]; + var j = k << 1; /* left son of k */ + while (j <= s.heap_len) { + /* Set j to the smallest of the two sons: */ + if (j < s.heap_len && + smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) { + j++; + } + /* Exit if v is smaller than both sons */ + if (smaller(tree, v, s.heap[j], s.depth)) { break; } + + /* Exchange v with the smallest son */ + s.heap[k] = s.heap[j]; + k = j; + + /* And continue down the tree, setting j to the left son of k */ + j <<= 1; + } + s.heap[k] = v; +} + + +// inlined manually +// var SMALLEST = 1; + +/* =========================================================================== + * Send the block data compressed using the given Huffman trees + */ +function compress_block(s, ltree, dtree) +// deflate_state *s; +// const ct_data *ltree; /* literal tree */ +// const ct_data *dtree; /* distance tree */ +{ + var dist; /* distance of matched string */ + var lc; /* match length or unmatched char (if dist == 0) */ + var lx = 0; /* running index in l_buf */ + var code; /* the code to send */ + var extra; /* number of extra bits to send */ + + if (s.last_lit !== 0) { + do { + dist = (s.pending_buf[s.d_buf + lx * 2] << 8) | (s.pending_buf[s.d_buf + lx * 2 + 1]); + lc = s.pending_buf[s.l_buf + lx]; + lx++; + + if (dist === 0) { + send_code(s, lc, ltree); /* send a literal byte */ + //Tracecv(isgraph(lc), (stderr," '%c' ", lc)); + } else { + /* Here, lc is the match length - MIN_MATCH */ + code = _length_code[lc]; + send_code(s, code + LITERALS + 1, ltree); /* send the length code */ + extra = extra_lbits[code]; + if (extra !== 0) { + lc -= base_length[code]; + send_bits(s, lc, extra); /* send the extra length bits */ + } + dist--; /* dist is now the match distance - 1 */ + code = d_code(dist); + //Assert (code < D_CODES, "bad d_code"); + + send_code(s, code, dtree); /* send the distance 
code */ + extra = extra_dbits[code]; + if (extra !== 0) { + dist -= base_dist[code]; + send_bits(s, dist, extra); /* send the extra distance bits */ + } + } /* literal or match pair ? */ + + /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ + //Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, + // "pendingBuf overflow"); + + } while (lx < s.last_lit); + } + + send_code(s, END_BLOCK, ltree); +} + + +/* =========================================================================== + * Construct one Huffman tree and assigns the code bit strings and lengths. + * Update the total bit length for the current block. + * IN assertion: the field freq is set for all tree elements. + * OUT assertions: the fields len and code are set to the optimal bit length + * and corresponding code. The length opt_len is updated; static_len is + * also updated if stree is not null. The field max_code is set. + */ +function build_tree(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var elems = desc.stat_desc.elems; + var n, m; /* iterate over heap elements */ + var max_code = -1; /* largest code with non zero frequency */ + var node; /* new node being created */ + + /* Construct the initial heap, with least frequent element in + * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. + * heap[0] is not used. + */ + s.heap_len = 0; + s.heap_max = HEAP_SIZE; + + for (n = 0; n < elems; n++) { + if (tree[n * 2]/*.Freq*/ !== 0) { + s.heap[++s.heap_len] = max_code = n; + s.depth[n] = 0; + + } else { + tree[n * 2 + 1]/*.Len*/ = 0; + } + } + + /* The pkzip format requires that at least one distance code exists, + * and that at least one bit should be sent even if there is only one + * possible code. So to avoid special checks later on we force at least + * two codes of non zero frequency. 
+ */ + while (s.heap_len < 2) { + node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0); + tree[node * 2]/*.Freq*/ = 1; + s.depth[node] = 0; + s.opt_len--; + + if (has_stree) { + s.static_len -= stree[node * 2 + 1]/*.Len*/; + } + /* node is 0 or 1 so it does not have extra bits */ + } + desc.max_code = max_code; + + /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, + * establish sub-heaps of increasing lengths: + */ + for (n = (s.heap_len >> 1/*int /2*/); n >= 1; n--) { pqdownheap(s, tree, n); } + + /* Construct the Huffman tree by repeatedly combining the least two + * frequent nodes. + */ + node = elems; /* next internal node of the tree */ + do { + //pqremove(s, tree, n); /* n = node of least frequency */ + /*** pqremove ***/ + n = s.heap[1/*SMALLEST*/]; + s.heap[1/*SMALLEST*/] = s.heap[s.heap_len--]; + pqdownheap(s, tree, 1/*SMALLEST*/); + /***/ + + m = s.heap[1/*SMALLEST*/]; /* m = node of next least frequency */ + + s.heap[--s.heap_max] = n; /* keep the nodes sorted by frequency */ + s.heap[--s.heap_max] = m; + + /* Create a new node father of n and m */ + tree[node * 2]/*.Freq*/ = tree[n * 2]/*.Freq*/ + tree[m * 2]/*.Freq*/; + s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1; + tree[n * 2 + 1]/*.Dad*/ = tree[m * 2 + 1]/*.Dad*/ = node; + + /* and insert the new node in the heap */ + s.heap[1/*SMALLEST*/] = node++; + pqdownheap(s, tree, 1/*SMALLEST*/); + + } while (s.heap_len >= 2); + + s.heap[--s.heap_max] = s.heap[1/*SMALLEST*/]; + + /* At this point, the fields freq and dad are set. We can now + * generate the bit lengths. + */ + gen_bitlen(s, desc); + + /* The field len is now set, we can generate the bit codes */ + gen_codes(tree, max_code, s.bl_count); +} + + +/* =========================================================================== + * Scan a literal or distance tree to determine the frequencies of the codes + * in the bit length tree. 
+ */ +function scan_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + tree[(max_code + 1) * 2 + 1]/*.Len*/ = 0xffff; /* guard */ + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + s.bl_tree[curlen * 2]/*.Freq*/ += count; + + } else if (curlen !== 0) { + + if (curlen !== prevlen) { s.bl_tree[curlen * 2]/*.Freq*/++; } + s.bl_tree[REP_3_6 * 2]/*.Freq*/++; + + } else if (count <= 10) { + s.bl_tree[REPZ_3_10 * 2]/*.Freq*/++; + + } else { + s.bl_tree[REPZ_11_138 * 2]/*.Freq*/++; + } + + count = 0; + prevlen = curlen; + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Send a literal or distance tree in compressed form, using the codes in + * bl_tree. 
+ */ +function send_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + /* tree[max_code+1].Len = -1; */ /* guard already set */ + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + do { send_code(s, curlen, s.bl_tree); } while (--count !== 0); + + } else if (curlen !== 0) { + if (curlen !== prevlen) { + send_code(s, curlen, s.bl_tree); + count--; + } + //Assert(count >= 3 && count <= 6, " 3_6?"); + send_code(s, REP_3_6, s.bl_tree); + send_bits(s, count - 3, 2); + + } else if (count <= 10) { + send_code(s, REPZ_3_10, s.bl_tree); + send_bits(s, count - 3, 3); + + } else { + send_code(s, REPZ_11_138, s.bl_tree); + send_bits(s, count - 11, 7); + } + + count = 0; + prevlen = curlen; + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Construct the Huffman tree for the bit lengths and return the index in + * bl_order of the last bit length code to send. 
+ */ +function build_bl_tree(s) { + var max_blindex; /* index of last bit length code of non zero freq */ + + /* Determine the bit length frequencies for literal and distance trees */ + scan_tree(s, s.dyn_ltree, s.l_desc.max_code); + scan_tree(s, s.dyn_dtree, s.d_desc.max_code); + + /* Build the bit length tree: */ + build_tree(s, s.bl_desc); + /* opt_len now includes the length of the tree representations, except + * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. + */ + + /* Determine the number of bit length codes to send. The pkzip format + * requires that at least 4 bit length codes be sent. (appnote.txt says + * 3 but the actual value used is 4.) + */ + for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { + if (s.bl_tree[bl_order[max_blindex] * 2 + 1]/*.Len*/ !== 0) { + break; + } + } + /* Update opt_len to include the bit length tree and counts */ + s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; + //Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", + // s->opt_len, s->static_len)); + + return max_blindex; +} + + +/* =========================================================================== + * Send the header for a block using dynamic Huffman trees: the counts, the + * lengths of the bit length codes, the literal tree and the distance tree. + * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. 
+ */ +function send_all_trees(s, lcodes, dcodes, blcodes) +// deflate_state *s; +// int lcodes, dcodes, blcodes; /* number of codes for each tree */ +{ + var rank; /* index in bl_order */ + + //Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); + //Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, + // "too many codes"); + //Tracev((stderr, "\nbl counts: ")); + send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ + send_bits(s, dcodes - 1, 5); + send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ + for (rank = 0; rank < blcodes; rank++) { + //Tracev((stderr, "\nbl code %2d ", bl_order[rank])); + send_bits(s, s.bl_tree[bl_order[rank] * 2 + 1]/*.Len*/, 3); + } + //Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_ltree, lcodes - 1); /* literal tree */ + //Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_dtree, dcodes - 1); /* distance tree */ + //Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); +} + + +/* =========================================================================== + * Check if the data type is TEXT or BINARY, using the following algorithm: + * - TEXT if the two conditions below are satisfied: + * a) There are no non-portable control characters belonging to the + * "black list" (0..6, 14..25, 28..31). + * b) There is at least one printable character belonging to the + * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). + * - BINARY otherwise. + * - The following partially-portable control characters form a + * "gray list" that is ignored in this detection algorithm: + * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). + * IN assertion: the fields Freq of dyn_ltree are set. 
+ */ +function detect_data_type(s) { + /* black_mask is the bit mask of black-listed bytes + * set bits 0..6, 14..25, and 28..31 + * 0xf3ffc07f = binary 11110011111111111100000001111111 + */ + var black_mask = 0xf3ffc07f; + var n; + + /* Check for non-textual ("black-listed") bytes. */ + for (n = 0; n <= 31; n++, black_mask >>>= 1) { + if ((black_mask & 1) && (s.dyn_ltree[n * 2]/*.Freq*/ !== 0)) { + return Z_BINARY; + } + } + + /* Check for textual ("white-listed") bytes. */ + if (s.dyn_ltree[9 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[10 * 2]/*.Freq*/ !== 0 || + s.dyn_ltree[13 * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + for (n = 32; n < LITERALS; n++) { + if (s.dyn_ltree[n * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + } + + /* There are no "black-listed" or "white-listed" bytes: + * this stream either is empty or has tolerated ("gray-listed") bytes only. + */ + return Z_BINARY; +} + + +var static_init_done = false; + +/* =========================================================================== + * Initialize the tree data structures for a new zlib stream. + */ +function _tr_init(s) +{ + + if (!static_init_done) { + tr_static_init(); + static_init_done = true; + } + + s.l_desc = new TreeDesc(s.dyn_ltree, static_l_desc); + s.d_desc = new TreeDesc(s.dyn_dtree, static_d_desc); + s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc); + + s.bi_buf = 0; + s.bi_valid = 0; + + /* Initialize the first block of the first file: */ + init_block(s); +} + + +/* =========================================================================== + * Send a stored block + */ +function _tr_stored_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + send_bits(s, (STORED_BLOCK << 1) + (last ? 
1 : 0), 3); /* send block type */ + copy_block(s, buf, stored_len, true); /* with header */ +} + + +/* =========================================================================== + * Send one empty static block to give enough lookahead for inflate. + * This takes 10 bits, of which 7 may remain in the bit buffer. + */ +function _tr_align(s) { + send_bits(s, STATIC_TREES << 1, 3); + send_code(s, END_BLOCK, static_ltree); + bi_flush(s); +} + + +/* =========================================================================== + * Determine the best encoding for the current block: dynamic trees, static + * trees or store, and output the encoded block to the zip file. + */ +function _tr_flush_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block, or NULL if too old */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + var opt_lenb, static_lenb; /* opt_len and static_len in bytes */ + var max_blindex = 0; /* index of last bit length code of non zero freq */ + + /* Build the Huffman trees unless a stored block is forced */ + if (s.level > 0) { + + /* Check if the file is binary or text */ + if (s.strm.data_type === Z_UNKNOWN) { + s.strm.data_type = detect_data_type(s); + } + + /* Construct the literal and distance trees */ + build_tree(s, s.l_desc); + // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + + build_tree(s, s.d_desc); + // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + /* At this point, opt_len and static_len are the total bit lengths of + * the compressed block data, excluding the tree representations. + */ + + /* Build the bit length tree for the above two trees, and get the index + * in bl_order of the last bit length code to send. + */ + max_blindex = build_bl_tree(s); + + /* Determine the best encoding. Compute the block lengths in bytes. 
*/ + opt_lenb = (s.opt_len + 3 + 7) >>> 3; + static_lenb = (s.static_len + 3 + 7) >>> 3; + + // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", + // opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, + // s->last_lit)); + + if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; } + + } else { + // Assert(buf != (char*)0, "lost buf"); + opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ + } + + if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) { + /* 4: two words for the lengths */ + + /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. + * Otherwise we can't have processed more than WSIZE input bytes since + * the last block flush, because compression would have been + * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to + * transform a block into a stored block. + */ + _tr_stored_block(s, buf, stored_len, last); + + } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) { + + send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3); + compress_block(s, static_ltree, static_dtree); + + } else { + send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3); + send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1); + compress_block(s, s.dyn_ltree, s.dyn_dtree); + } + // Assert (s->compressed_len == s->bits_sent, "bad compressed size"); + /* The above check is made mod 2^32, for files larger than 512 MB + * and uLong implemented on 32 bits. + */ + init_block(s); + + if (last) { + bi_windup(s); + } + // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, + // s->compressed_len-7*last)); +} + +/* =========================================================================== + * Save the match info and tally the frequency counts. Return true if + * the current block must be flushed. 
+ */ +function _tr_tally(s, dist, lc) +// deflate_state *s; +// unsigned dist; /* distance of matched string */ +// unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ +{ + //var out_length, in_length, dcode; + + s.pending_buf[s.d_buf + s.last_lit * 2] = (dist >>> 8) & 0xff; + s.pending_buf[s.d_buf + s.last_lit * 2 + 1] = dist & 0xff; + + s.pending_buf[s.l_buf + s.last_lit] = lc & 0xff; + s.last_lit++; + + if (dist === 0) { + /* lc is the unmatched char */ + s.dyn_ltree[lc * 2]/*.Freq*/++; + } else { + s.matches++; + /* Here, lc is the match length - MIN_MATCH */ + dist--; /* dist = match distance - 1 */ + //Assert((ush)dist < (ush)MAX_DIST(s) && + // (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && + // (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); + + s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2]/*.Freq*/++; + s.dyn_dtree[d_code(dist) * 2]/*.Freq*/++; + } + +// (!) This block is disabled in zlib defaults, +// don't enable it for binary compatibility + +//#ifdef TRUNCATE_BLOCK +// /* Try to guess if it is profitable to stop the current block here */ +// if ((s.last_lit & 0x1fff) === 0 && s.level > 2) { +// /* Compute an upper bound for the compressed length */ +// out_length = s.last_lit*8; +// in_length = s.strstart - s.block_start; +// +// for (dcode = 0; dcode < D_CODES; dcode++) { +// out_length += s.dyn_dtree[dcode*2]/*.Freq*/ * (5 + extra_dbits[dcode]); +// } +// out_length >>>= 3; +// //Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", +// // s->last_lit, in_length, out_length, +// // 100L - out_length*100L/in_length)); +// if (s.matches < (s.last_lit>>1)/*int /2*/ && out_length < (in_length>>1)/*int /2*/) { +// return true; +// } +// } +//#endif + + return (s.last_lit === s.lit_bufsize - 1); + /* We avoid equality with lit_bufsize because of wraparound at 64K + * on 16 bit machines and because stored blocks are restricted to + * 64K-1 bytes. 
+ */ +} + +exports._tr_init = _tr_init; +exports._tr_stored_block = _tr_stored_block; +exports._tr_flush_block = _tr_flush_block; +exports._tr_tally = _tr_tally; +exports._tr_align = _tr_align; + + +/***/ }), + +/***/ 44442: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. 
+ +function ZStream() { + /* next input byte */ + this.input = null; // JS specific, because we have no pointers + this.next_in = 0; + /* number of bytes available at input */ + this.avail_in = 0; + /* total number of input bytes read so far */ + this.total_in = 0; + /* next output byte should be put there */ + this.output = null; // JS specific, because we have no pointers + this.next_out = 0; + /* remaining free space at output */ + this.avail_out = 0; + /* total number of bytes output so far */ + this.total_out = 0; + /* last error message, NULL if no error */ + this.msg = ''/*Z_NULL*/; + /* not visible by applications */ + this.state = null; + /* best guess about the data type: binary or text */ + this.data_type = 2/*Z_UNKNOWN*/; + /* adler32 value of the uncompressed data */ + this.adler = 0; +} + +module.exports = ZStream; + + /***/ }), /***/ 21137: @@ -96806,6 +117478,216 @@ module.exports = function (thing, encoding, name) { } +/***/ }), + +/***/ 71843: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + + +const { ErrorWithCause } = __webpack_require__(75832); // linemod-replace-with: export { ErrorWithCause } from './lib/error-with-cause.mjs'; + +const { // linemod-replace-with: export { + findCauseByReference, + getErrorCause, + messageWithCauses, + stackWithCauses, +} = __webpack_require__(94306); // linemod-replace-with: } from './lib/helpers.mjs'; + +module.exports = { // linemod-remove + ErrorWithCause, // linemod-remove + findCauseByReference, // linemod-remove + getErrorCause, // linemod-remove + stackWithCauses, // linemod-remove + messageWithCauses, // linemod-remove +}; // linemod-remove + + +/***/ }), + +/***/ 75832: +/***/ ((module) => { + +"use strict"; + + +/** @template [T=undefined] */ +class ErrorWithCause extends Error { // linemod-prefix-with: export + /** + * @param {string} message + * @param {{ cause?: T }} options + */ + constructor (message, { cause } = {}) { + super(message); + + /** @type 
{string} */ + this.name = ErrorWithCause.name; + if (cause) { + /** @type {T} */ + this.cause = cause; + } + /** @type {string} */ + this.message = message; + } +} + +module.exports = { // linemod-remove + ErrorWithCause, // linemod-remove +}; // linemod-remove + + +/***/ }), + +/***/ 94306: +/***/ ((module) => { + +"use strict"; + + +/** + * @template {Error} T + * @param {unknown} err + * @param {new(...args: any[]) => T} reference + * @returns {T|undefined} + */ +const findCauseByReference = (err, reference) => { // linemod-prefix-with: export + if (!err || !reference) return; + if (!(err instanceof Error)) return; + if ( + !(reference.prototype instanceof Error) && + // @ts-ignore + reference !== Error + ) return; + + /** + * Ensures we don't go circular + * + * @type {Set} + */ + const seen = new Set(); + + /** @type {Error|undefined} */ + let currentErr = err; + + while (currentErr && !seen.has(currentErr)) { + seen.add(currentErr); + + if (currentErr instanceof reference) { + return currentErr; + } + + currentErr = getErrorCause(currentErr); + } +}; + +/** + * @param {Error|{ cause?: unknown|(()=>err)}} err + * @returns {Error|undefined} + */ +const getErrorCause = (err) => { // linemod-prefix-with: export + if (!err || typeof err !== 'object' || !('cause' in err)) { + return; + } + + // VError / NError style causes + if (typeof err.cause === 'function') { + const causeResult = err.cause(); + + return causeResult instanceof Error + ? causeResult + : undefined; + } else { + return err.cause instanceof Error + ? 
err.cause + : undefined; + } +}; + +/** + * Internal method that keeps a track of which error we have already added, to avoid circular recursion + * + * @private + * @param {Error} err + * @param {Set} seen + * @returns {string} + */ +const _stackWithCauses = (err, seen) => { + if (!(err instanceof Error)) return ''; + + const stack = err.stack || ''; + + // Ensure we don't go circular or crazily deep + if (seen.has(err)) { + return stack + '\ncauses have become circular...'; + } + + const cause = getErrorCause(err); + + // TODO: Follow up in https://github.com/nodejs/node/issues/38725#issuecomment-920309092 on how to log stuff + + if (cause) { + seen.add(err); + return (stack + '\ncaused by: ' + _stackWithCauses(cause, seen)); + } else { + return stack; + } +}; + +/** + * @param {Error} err + * @returns {string} + */ +const stackWithCauses = (err) => _stackWithCauses(err, new Set()); // linemod-prefix-with: export + +/** + * Internal method that keeps a track of which error we have already added, to avoid circular recursion + * + * @private + * @param {Error} err + * @param {Set} seen + * @param {boolean} [skip] + * @returns {string} + */ +const _messageWithCauses = (err, seen, skip) => { + if (!(err instanceof Error)) return ''; + + const message = skip ? '' : (err.message || ''); + + // Ensure we don't go circular or crazily deep + if (seen.has(err)) { + return message + ': ...'; + } + + const cause = getErrorCause(err); + + if (cause) { + seen.add(err); + + const skipIfVErrorStyleCause = 'cause' in err && typeof err.cause === 'function'; + + return (message + + (skipIfVErrorStyleCause ? 
'' : ': ') + + _messageWithCauses(cause, seen, skipIfVErrorStyleCause)); + } else { + return message; + } +}; + +/** + * @param {Error} err + * @returns {string} + */ +const messageWithCauses = (err) => _messageWithCauses(err, new Set()); // linemod-prefix-with: export + +module.exports = { // linemod-remove + findCauseByReference, // linemod-remove + getErrorCause, // linemod-remove + stackWithCauses, // linemod-remove + messageWithCauses, // linemod-remove +}; // linemod-remove + + /***/ }), /***/ 76578: @@ -104260,6 +125142,2495 @@ SafeBuffer.allocUnsafeSlow = function (size) { } +/***/ }), + +/***/ 93904: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + options = parseOptions(options) + + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } + + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + this.operator.startsWith('<') && 
comp.operator.startsWith('>')) { + return true + } + return false + } +} + +module.exports = Comparator + +const parseOptions = __webpack_require__(98587) +const { safeRe: re, t } = __webpack_require__(99718) +const cmp = __webpack_require__(72111) +const debug = __webpack_require__(57272) +const SemVer = __webpack_require__(53908) +const Range = __webpack_require__(78311) + + +/***/ }), + +/***/ 78311: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.format() + return this + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } + + // if we have any that are not the null set, throw out null sets. 
+ if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.format() + } + + format () { + this.range = this.set + .map((comps) => comps.join(' ').trim()) + .join('||') + .trim() + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE) + const memoKey = memoOpts + ':' + range + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + debug('tilde trim', range) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + debug('caret trim', range) + + // At this point, the range is completely trimmed and + // ready to be split into comparators. 
+ + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} + +module.exports = Range + +const 
LRU = __webpack_require__(69749) +const cache = new LRU({ max: 1000 }) + +const parseOptions = __webpack_require__(98587) +const Comparator = __webpack_require__(93904) +const debug = __webpack_require__(57272) +const SemVer = __webpack_require__(53908) +const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = __webpack_require__(99718) +const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = __webpack_require__(16874) + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+const parseComparator = (comp, options) => { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} + +const replaceCaret = (comp, options) => { + debug('caret', comp, 
options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? 
'-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? 
'-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return `${from} ${to}`.trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. 
+ return false + } + + return true +} + + +/***/ }), + +/***/ 53908: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const debug = __webpack_require__(57272) +const { MAX_LENGTH, MAX_SAFE_INTEGER } = __webpack_require__(16874) +const { safeRe: re, t } = __webpack_require__(99718) + +const parseOptions = __webpack_require__(98587) +const { compareIdentifiers } = __webpack_require__(61123) +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? 
re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if 
(!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier, identifierBase) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier, identifierBase) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier, identifierBase) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier, identifierBase) + this.inc('pre', identifier, identifierBase) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. 
+ case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase) + } + this.inc('pre', identifier, identifierBase) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 
1 : 0 + + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } + + if (this.prerelease.length === 0) { + this.prerelease = [base] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base] + if (identifierBase === false) { + prerelease = [identifier] + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease + } + } else { + this.prerelease = prerelease + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } + return this + } +} + +module.exports = SemVer + + +/***/ }), + +/***/ 57414: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const parse = __webpack_require__(30144) +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean + + +/***/ }), + +/***/ 72111: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const eq = __webpack_require__(94641) +const neq = __webpack_require__(13999) +const gt = __webpack_require__(35580) +const gte = __webpack_require__(54089) +const lt = __webpack_require__(7059) +const lte = __webpack_require__(25200) + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp + + +/***/ }), + +/***/ 46170: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const parse = __webpack_require__(30144) +const { safeRe: re, t } = __webpack_require__(99718) + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. 
+ // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL] + let next + while ((next = coerceRtlRegex.exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + coerceRtlRegex.lastIndex = -1 + } + + if (match === null) { + return null + } + + const major = match[2] + const minor = match[3] || '0' + const patch = match[4] || '0' + const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : '' + const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : '' + + return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options) +} +module.exports = coerce + + +/***/ }), + +/***/ 40909: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild + + +/***/ }), + +/***/ 11763: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose + + +/***/ }), + +/***/ 50560: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare + + +/***/ }), + +/***/ 51832: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const parse = __webpack_require__(30144) + +const diff = (version1, version2) => { + const v1 = parse(version1, null, true) + const v2 = parse(version2, null, true) + const comparison = v1.compare(v2) + + if (comparison === 0) { + return null + } + + const v1Higher = comparison > 0 + const highVersion = v1Higher ? v1 : v2 + const lowVersion = v1Higher ? 
v2 : v1 + const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // Otherwise it can be determined by checking the high version + + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' + } + + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 'pre' : '' + + if (v1.major !== v2.major) { + return prefix + 'major' + } + + if (v1.minor !== v2.minor) { + return prefix + 'minor' + } + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' +} + +module.exports = diff + + +/***/ }), + +/***/ 94641: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq + + +/***/ }), + +/***/ 35580: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt + + +/***/ }), + +/***/ 54089: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte + + +/***/ }), + +/***/ 93007: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const 
SemVer = __webpack_require__(53908) + +const inc = (version, release, options, identifier, identifierBase) => { + if (typeof (options) === 'string') { + identifierBase = identifier + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null + } +} +module.exports = inc + + +/***/ }), + +/***/ 7059: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt + + +/***/ }), + +/***/ 25200: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte + + +/***/ }), + +/***/ 32938: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major + + +/***/ }), + +/***/ 46254: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor + + +/***/ }), + +/***/ 13999: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq + + +/***/ }), + +/***/ 30144: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const parse = (version, options, throwErrors = false) => { + if (version instanceof SemVer) { + return version + } + try { + return new SemVer(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er + } +} + +module.exports = parse + + +/***/ }), + +/***/ 24493: 
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch + + +/***/ }), + +/***/ 31729: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const parse = __webpack_require__(30144) +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} +module.exports = prerelease + + +/***/ }), + +/***/ 9970: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare + + +/***/ }), + +/***/ 74277: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compareBuild = __webpack_require__(40909) +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort + + +/***/ }), + +/***/ 97638: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const Range = __webpack_require__(78311) +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies + + +/***/ }), + +/***/ 43927: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compareBuild = __webpack_require__(40909) +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort + + +/***/ }), + +/***/ 56953: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const parse = __webpack_require__(30144) +const valid = (version, options) => { + const v = parse(version, options) + return v ? 
v.version : null +} +module.exports = valid + + +/***/ }), + +/***/ 99589: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +// just pre-load all the stuff that index.js lazily exports +const internalRe = __webpack_require__(99718) +const constants = __webpack_require__(16874) +const SemVer = __webpack_require__(53908) +const identifiers = __webpack_require__(61123) +const parse = __webpack_require__(30144) +const valid = __webpack_require__(56953) +const clean = __webpack_require__(57414) +const inc = __webpack_require__(93007) +const diff = __webpack_require__(51832) +const major = __webpack_require__(32938) +const minor = __webpack_require__(46254) +const patch = __webpack_require__(24493) +const prerelease = __webpack_require__(31729) +const compare = __webpack_require__(50560) +const rcompare = __webpack_require__(9970) +const compareLoose = __webpack_require__(11763) +const compareBuild = __webpack_require__(40909) +const sort = __webpack_require__(43927) +const rsort = __webpack_require__(74277) +const gt = __webpack_require__(35580) +const lt = __webpack_require__(7059) +const eq = __webpack_require__(94641) +const neq = __webpack_require__(13999) +const gte = __webpack_require__(54089) +const lte = __webpack_require__(25200) +const cmp = __webpack_require__(72111) +const coerce = __webpack_require__(46170) +const Comparator = __webpack_require__(93904) +const Range = __webpack_require__(78311) +const satisfies = __webpack_require__(97638) +const toComparators = __webpack_require__(77631) +const maxSatisfying = __webpack_require__(19628) +const minSatisfying = __webpack_require__(270) +const minVersion = __webpack_require__(41261) +const validRange = __webpack_require__(13874) +const outside = __webpack_require__(97075) +const gtr = __webpack_require__(75571) +const ltr = __webpack_require__(5342) +const intersects = __webpack_require__(76780) +const simplifyRange = __webpack_require__(72525) +const subset = __webpack_require__(75032) 
+module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +} + + +/***/ }), + +/***/ 16874: +/***/ ((module) => { + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16 + +// Max safe length for a build identifier. The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. 
+const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +] + +module.exports = { + MAX_LENGTH, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +} + + +/***/ }), + +/***/ 57272: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +/* provided dependency */ var process = __webpack_require__(65606); +/* provided dependency */ var console = __webpack_require__(96763); +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug + + +/***/ }), + +/***/ 61123: +/***/ ((module) => { + +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} + + +/***/ }), + +/***/ 98587: +/***/ ((module) => { + +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }) +const emptyOpts = Object.freeze({ }) +const parseOptions = options => { + if (!options) { + return emptyOpts + } + + if (typeof options !== 'object') { + return looseOption + } + + return options +} +module.exports = parseOptions + + +/***/ }), + +/***/ 99718: +/***/ ((module, exports, __webpack_require__) => { + +const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, +} = __webpack_require__(16874) +const debug = __webpack_require__(57272) +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const safeRe = exports.safeRe = [] +const src = exports.src = [] +const t = exports.t = {} +let R = 0 + +const LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. 
+const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`) + } + return value +} + +const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value) + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '\\d+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. 
+ +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. 
+createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCEPLAIN', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`) +createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`) +createToken('COERCEFULL', src[t.COERCEPLAIN] + + `(?:${src[t.PRERELEASE]})?` + + `(?:${src[t.BUILD]})?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) +createToken('COERCERTLFULL', src[t.COERCEFULL], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. +createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') + + +/***/ }), + +/***/ 75571: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +// Determine if version is greater than all the versions possible in the range. 
+const outside = __webpack_require__(97075) +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr + + +/***/ }), + +/***/ 76780: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const Range = __webpack_require__(78311) +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2, options) +} +module.exports = intersects + + +/***/ }), + +/***/ 5342: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const outside = __webpack_require__(97075) +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr + + +/***/ }), + +/***/ 19628: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const Range = __webpack_require__(78311) + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying + + +/***/ }), + +/***/ 270: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const Range = __webpack_require__(78311) +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new 
SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying + + +/***/ }), + +/***/ 41261: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const Range = __webpack_require__(78311) +const gt = __webpack_require__(35580) + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. + const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion + + +/***/ }), + +/***/ 97075: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const Comparator = __webpack_require__(93904) +const { ANY } = Comparator +const Range = __webpack_require__(78311) +const satisfies = __webpack_require__(97638) +const gt = __webpack_require__(35580) +const lt = __webpack_require__(7059) +const lte = __webpack_require__(25200) +const gte = __webpack_require__(54089) + +const outside = (version, range, 
hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside + + +/***/ }), + +/***/ 72525: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. 
+const satisfies = __webpack_require__(97638) +const compare = __webpack_require__(50560) +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? 
simplified : range +} + + +/***/ }), + +/***/ 75032: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const Range = __webpack_require__(78311) +const Comparator = __webpack_require__(93904) +const { ANY } = Comparator +const satisfies = __webpack_require__(97638) +const compare = __webpack_require__(50560) + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has 
a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. + if (sawNonNull) { + return false + } + } + return true +} + +const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] +const minimumVersion = [new Comparator('>=0.0.0')] + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease + } else { + sub = minimumVersion + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = minimumVersion + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt 
&& !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false 
+ } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. + // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? b + : a +} + +module.exports = subset + + +/***/ }), + +/***/ 77631: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const Range = __webpack_require__(78311) + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators + + +/***/ }), + +/***/ 13874: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const Range = __webpack_require__(78311) +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange + + /***/ }), /***/ 96897: @@ -105296,6 +128667,744 @@ Stream.prototype.pipe = function(dest, options) { }; +/***/ }), + +/***/ 11568: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +var ClientRequest = __webpack_require__(55537) +var response = __webpack_require__(6917) +var extend = __webpack_require__(57510) +var statusCodes = __webpack_require__(86866) +var url = __webpack_require__(59817) + +var http = exports + +http.request = function (opts, cb) { + if (typeof opts === 'string') + opts = url.parse(opts) + else + opts = extend(opts) + + // Normally, the page is loaded from http or https, so not specifying a protocol + // will result in a (valid) protocol-relative url. However, this won't work if + // the protocol is something else, like 'file:' + var defaultProtocol = __webpack_require__.g.location.protocol.search(/^https?:$/) === -1 ? 'http:' : '' + + var protocol = opts.protocol || defaultProtocol + var host = opts.hostname || opts.host + var port = opts.port + var path = opts.path || '/' + + // Necessary for IPv6 addresses + if (host && host.indexOf(':') !== -1) + host = '[' + host + ']' + + // This may be a relative url. The browser should always be able to interpret it correctly. + opts.url = (host ? (protocol + '//' + host) : '') + (port ? 
':' + port : '') + path + opts.method = (opts.method || 'GET').toUpperCase() + opts.headers = opts.headers || {} + + // Also valid opts.auth, opts.mode + + var req = new ClientRequest(opts) + if (cb) + req.on('response', cb) + return req +} + +http.get = function get (opts, cb) { + var req = http.request(opts, cb) + req.end() + return req +} + +http.ClientRequest = ClientRequest +http.IncomingMessage = response.IncomingMessage + +http.Agent = function () {} +http.Agent.defaultMaxSockets = 4 + +http.globalAgent = new http.Agent() + +http.STATUS_CODES = statusCodes + +http.METHODS = [ + 'CHECKOUT', + 'CONNECT', + 'COPY', + 'DELETE', + 'GET', + 'HEAD', + 'LOCK', + 'M-SEARCH', + 'MERGE', + 'MKACTIVITY', + 'MKCOL', + 'MOVE', + 'NOTIFY', + 'OPTIONS', + 'PATCH', + 'POST', + 'PROPFIND', + 'PROPPATCH', + 'PURGE', + 'PUT', + 'REPORT', + 'SEARCH', + 'SUBSCRIBE', + 'TRACE', + 'UNLOCK', + 'UNSUBSCRIBE' +] + +/***/ }), + +/***/ 6688: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +exports.fetch = isFunction(__webpack_require__.g.fetch) && isFunction(__webpack_require__.g.ReadableStream) + +exports.writableStream = isFunction(__webpack_require__.g.WritableStream) + +exports.abortController = isFunction(__webpack_require__.g.AbortController) + +// The xhr request to example.com may violate some restrictive CSP configurations, +// so if we're running in a browser that supports `fetch`, avoid calling getXHR() +// and assume support for certain features below. +var xhr +function getXHR () { + // Cache the xhr value + if (xhr !== undefined) return xhr + + if (__webpack_require__.g.XMLHttpRequest) { + xhr = new __webpack_require__.g.XMLHttpRequest() + // If XDomainRequest is available (ie only, where xhr might not work + // cross domain), use the page location. Otherwise use example.com + // Note: this doesn't actually make an http request. + try { + xhr.open('GET', __webpack_require__.g.XDomainRequest ? 
'/' : 'https://example.com') + } catch(e) { + xhr = null + } + } else { + // Service workers don't have XHR + xhr = null + } + return xhr +} + +function checkTypeSupport (type) { + var xhr = getXHR() + if (!xhr) return false + try { + xhr.responseType = type + return xhr.responseType === type + } catch (e) {} + return false +} + +// If fetch is supported, then arraybuffer will be supported too. Skip calling +// checkTypeSupport(), since that calls getXHR(). +exports.arraybuffer = exports.fetch || checkTypeSupport('arraybuffer') + +// These next two tests unavoidably show warnings in Chrome. Since fetch will always +// be used if it's available, just return false for these to avoid the warnings. +exports.msstream = !exports.fetch && checkTypeSupport('ms-stream') +exports.mozchunkedarraybuffer = !exports.fetch && checkTypeSupport('moz-chunked-arraybuffer') + +// If fetch is supported, then overrideMimeType will be supported too. Skip calling +// getXHR(). +exports.overrideMimeType = exports.fetch || (getXHR() ? 
isFunction(getXHR().overrideMimeType) : false) + +function isFunction (value) { + return typeof value === 'function' +} + +xhr = null // Help gc + + +/***/ }), + +/***/ 55537: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +/* provided dependency */ var process = __webpack_require__(65606); +var capability = __webpack_require__(6688) +var inherits = __webpack_require__(56698) +var response = __webpack_require__(6917) +var stream = __webpack_require__(28399) + +var IncomingMessage = response.IncomingMessage +var rStates = response.readyStates + +function decideMode (preferBinary, useFetch) { + if (capability.fetch && useFetch) { + return 'fetch' + } else if (capability.mozchunkedarraybuffer) { + return 'moz-chunked-arraybuffer' + } else if (capability.msstream) { + return 'ms-stream' + } else if (capability.arraybuffer && preferBinary) { + return 'arraybuffer' + } else { + return 'text' + } +} + +var ClientRequest = module.exports = function (opts) { + var self = this + stream.Writable.call(self) + + self._opts = opts + self._body = [] + self._headers = {} + if (opts.auth) + self.setHeader('Authorization', 'Basic ' + Buffer.from(opts.auth).toString('base64')) + Object.keys(opts.headers).forEach(function (name) { + self.setHeader(name, opts.headers[name]) + }) + + var preferBinary + var useFetch = true + if (opts.mode === 'disable-fetch' || ('requestTimeout' in opts && !capability.abortController)) { + // If the use of XHR should be preferred. Not typically needed. 
+ useFetch = false + preferBinary = true + } else if (opts.mode === 'prefer-streaming') { + // If streaming is a high priority but binary compatibility and + // the accuracy of the 'content-type' header aren't + preferBinary = false + } else if (opts.mode === 'allow-wrong-content-type') { + // If streaming is more important than preserving the 'content-type' header + preferBinary = !capability.overrideMimeType + } else if (!opts.mode || opts.mode === 'default' || opts.mode === 'prefer-fast') { + // Use binary if text streaming may corrupt data or the content-type header, or for speed + preferBinary = true + } else { + throw new Error('Invalid value for opts.mode') + } + self._mode = decideMode(preferBinary, useFetch) + self._fetchTimer = null + self._socketTimeout = null + self._socketTimer = null + + self.on('finish', function () { + self._onFinish() + }) +} + +inherits(ClientRequest, stream.Writable) + +ClientRequest.prototype.setHeader = function (name, value) { + var self = this + var lowerName = name.toLowerCase() + // This check is not necessary, but it prevents warnings from browsers about setting unsafe + // headers. To be honest I'm not entirely sure hiding these warnings is a good thing, but + // http-browserify did it, so I will too. 
+ if (unsafeHeaders.indexOf(lowerName) !== -1) + return + + self._headers[lowerName] = { + name: name, + value: value + } +} + +ClientRequest.prototype.getHeader = function (name) { + var header = this._headers[name.toLowerCase()] + if (header) + return header.value + return null +} + +ClientRequest.prototype.removeHeader = function (name) { + var self = this + delete self._headers[name.toLowerCase()] +} + +ClientRequest.prototype._onFinish = function () { + var self = this + + if (self._destroyed) + return + var opts = self._opts + + if ('timeout' in opts && opts.timeout !== 0) { + self.setTimeout(opts.timeout) + } + + var headersObj = self._headers + var body = null + if (opts.method !== 'GET' && opts.method !== 'HEAD') { + body = new Blob(self._body, { + type: (headersObj['content-type'] || {}).value || '' + }); + } + + // create flattened list of headers + var headersList = [] + Object.keys(headersObj).forEach(function (keyName) { + var name = headersObj[keyName].name + var value = headersObj[keyName].value + if (Array.isArray(value)) { + value.forEach(function (v) { + headersList.push([name, v]) + }) + } else { + headersList.push([name, value]) + } + }) + + if (self._mode === 'fetch') { + var signal = null + if (capability.abortController) { + var controller = new AbortController() + signal = controller.signal + self._fetchAbortController = controller + + if ('requestTimeout' in opts && opts.requestTimeout !== 0) { + self._fetchTimer = __webpack_require__.g.setTimeout(function () { + self.emit('requestTimeout') + if (self._fetchAbortController) + self._fetchAbortController.abort() + }, opts.requestTimeout) + } + } + + __webpack_require__.g.fetch(self._opts.url, { + method: self._opts.method, + headers: headersList, + body: body || undefined, + mode: 'cors', + credentials: opts.withCredentials ? 
'include' : 'same-origin', + signal: signal + }).then(function (response) { + self._fetchResponse = response + self._resetTimers(false) + self._connect() + }, function (reason) { + self._resetTimers(true) + if (!self._destroyed) + self.emit('error', reason) + }) + } else { + var xhr = self._xhr = new __webpack_require__.g.XMLHttpRequest() + try { + xhr.open(self._opts.method, self._opts.url, true) + } catch (err) { + process.nextTick(function () { + self.emit('error', err) + }) + return + } + + // Can't set responseType on really old browsers + if ('responseType' in xhr) + xhr.responseType = self._mode + + if ('withCredentials' in xhr) + xhr.withCredentials = !!opts.withCredentials + + if (self._mode === 'text' && 'overrideMimeType' in xhr) + xhr.overrideMimeType('text/plain; charset=x-user-defined') + + if ('requestTimeout' in opts) { + xhr.timeout = opts.requestTimeout + xhr.ontimeout = function () { + self.emit('requestTimeout') + } + } + + headersList.forEach(function (header) { + xhr.setRequestHeader(header[0], header[1]) + }) + + self._response = null + xhr.onreadystatechange = function () { + switch (xhr.readyState) { + case rStates.LOADING: + case rStates.DONE: + self._onXHRProgress() + break + } + } + // Necessary for streaming in Firefox, since xhr.response is ONLY defined + // in onprogress, not in onreadystatechange with xhr.readyState = 3 + if (self._mode === 'moz-chunked-arraybuffer') { + xhr.onprogress = function () { + self._onXHRProgress() + } + } + + xhr.onerror = function () { + if (self._destroyed) + return + self._resetTimers(true) + self.emit('error', new Error('XHR error')) + } + + try { + xhr.send(body) + } catch (err) { + process.nextTick(function () { + self.emit('error', err) + }) + return + } + } +} + +/** + * Checks if xhr.status is readable and non-zero, indicating no error. 
+ * Even though the spec says it should be available in readyState 3, + * accessing it throws an exception in IE8 + */ +function statusValid (xhr) { + try { + var status = xhr.status + return (status !== null && status !== 0) + } catch (e) { + return false + } +} + +ClientRequest.prototype._onXHRProgress = function () { + var self = this + + self._resetTimers(false) + + if (!statusValid(self._xhr) || self._destroyed) + return + + if (!self._response) + self._connect() + + self._response._onXHRProgress(self._resetTimers.bind(self)) +} + +ClientRequest.prototype._connect = function () { + var self = this + + if (self._destroyed) + return + + self._response = new IncomingMessage(self._xhr, self._fetchResponse, self._mode, self._resetTimers.bind(self)) + self._response.on('error', function(err) { + self.emit('error', err) + }) + + self.emit('response', self._response) +} + +ClientRequest.prototype._write = function (chunk, encoding, cb) { + var self = this + + self._body.push(chunk) + cb() +} + +ClientRequest.prototype._resetTimers = function (done) { + var self = this + + __webpack_require__.g.clearTimeout(self._socketTimer) + self._socketTimer = null + + if (done) { + __webpack_require__.g.clearTimeout(self._fetchTimer) + self._fetchTimer = null + } else if (self._socketTimeout) { + self._socketTimer = __webpack_require__.g.setTimeout(function () { + self.emit('timeout') + }, self._socketTimeout) + } +} + +ClientRequest.prototype.abort = ClientRequest.prototype.destroy = function (err) { + var self = this + self._destroyed = true + self._resetTimers(true) + if (self._response) + self._response._destroyed = true + if (self._xhr) + self._xhr.abort() + else if (self._fetchAbortController) + self._fetchAbortController.abort() + + if (err) + self.emit('error', err) +} + +ClientRequest.prototype.end = function (data, encoding, cb) { + var self = this + if (typeof data === 'function') { + cb = data + data = undefined + } + + stream.Writable.prototype.end.call(self, data, 
encoding, cb) +} + +ClientRequest.prototype.setTimeout = function (timeout, cb) { + var self = this + + if (cb) + self.once('timeout', cb) + + self._socketTimeout = timeout + self._resetTimers(false) +} + +ClientRequest.prototype.flushHeaders = function () {} +ClientRequest.prototype.setNoDelay = function () {} +ClientRequest.prototype.setSocketKeepAlive = function () {} + +// Taken from http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader%28%29-method +var unsafeHeaders = [ + 'accept-charset', + 'accept-encoding', + 'access-control-request-headers', + 'access-control-request-method', + 'connection', + 'content-length', + 'cookie', + 'cookie2', + 'date', + 'dnt', + 'expect', + 'host', + 'keep-alive', + 'origin', + 'referer', + 'te', + 'trailer', + 'transfer-encoding', + 'upgrade', + 'via' +] + + +/***/ }), + +/***/ 6917: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +/* provided dependency */ var process = __webpack_require__(65606); +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +var capability = __webpack_require__(6688) +var inherits = __webpack_require__(56698) +var stream = __webpack_require__(28399) + +var rStates = exports.readyStates = { + UNSENT: 0, + OPENED: 1, + HEADERS_RECEIVED: 2, + LOADING: 3, + DONE: 4 +} + +var IncomingMessage = exports.IncomingMessage = function (xhr, response, mode, resetTimers) { + var self = this + stream.Readable.call(self) + + self._mode = mode + self.headers = {} + self.rawHeaders = [] + self.trailers = {} + self.rawTrailers = [] + + // Fake the 'close' event, but only once 'end' fires + self.on('end', function () { + // The nextTick is necessary to prevent the 'request' module from causing an infinite loop + process.nextTick(function () { + self.emit('close') + }) + }) + + if (mode === 'fetch') { + self._fetchResponse = response + + self.url = response.url + self.statusCode = response.status + self.statusMessage = response.statusText + + 
response.headers.forEach(function (header, key){ + self.headers[key.toLowerCase()] = header + self.rawHeaders.push(key, header) + }) + + if (capability.writableStream) { + var writable = new WritableStream({ + write: function (chunk) { + resetTimers(false) + return new Promise(function (resolve, reject) { + if (self._destroyed) { + reject() + } else if(self.push(Buffer.from(chunk))) { + resolve() + } else { + self._resumeFetch = resolve + } + }) + }, + close: function () { + resetTimers(true) + if (!self._destroyed) + self.push(null) + }, + abort: function (err) { + resetTimers(true) + if (!self._destroyed) + self.emit('error', err) + } + }) + + try { + response.body.pipeTo(writable).catch(function (err) { + resetTimers(true) + if (!self._destroyed) + self.emit('error', err) + }) + return + } catch (e) {} // pipeTo method isn't defined. Can't find a better way to feature test this + } + // fallback for when writableStream or pipeTo aren't available + var reader = response.body.getReader() + function read () { + reader.read().then(function (result) { + if (self._destroyed) + return + resetTimers(result.done) + if (result.done) { + self.push(null) + return + } + self.push(Buffer.from(result.value)) + read() + }).catch(function (err) { + resetTimers(true) + if (!self._destroyed) + self.emit('error', err) + }) + } + read() + } else { + self._xhr = xhr + self._pos = 0 + + self.url = xhr.responseURL + self.statusCode = xhr.status + self.statusMessage = xhr.statusText + var headers = xhr.getAllResponseHeaders().split(/\r?\n/) + headers.forEach(function (header) { + var matches = header.match(/^([^:]+):\s*(.*)/) + if (matches) { + var key = matches[1].toLowerCase() + if (key === 'set-cookie') { + if (self.headers[key] === undefined) { + self.headers[key] = [] + } + self.headers[key].push(matches[2]) + } else if (self.headers[key] !== undefined) { + self.headers[key] += ', ' + matches[2] + } else { + self.headers[key] = matches[2] + } + self.rawHeaders.push(matches[1], 
matches[2]) + } + }) + + self._charset = 'x-user-defined' + if (!capability.overrideMimeType) { + var mimeType = self.rawHeaders['mime-type'] + if (mimeType) { + var charsetMatch = mimeType.match(/;\s*charset=([^;])(;|$)/) + if (charsetMatch) { + self._charset = charsetMatch[1].toLowerCase() + } + } + if (!self._charset) + self._charset = 'utf-8' // best guess + } + } +} + +inherits(IncomingMessage, stream.Readable) + +IncomingMessage.prototype._read = function () { + var self = this + + var resolve = self._resumeFetch + if (resolve) { + self._resumeFetch = null + resolve() + } +} + +IncomingMessage.prototype._onXHRProgress = function (resetTimers) { + var self = this + + var xhr = self._xhr + + var response = null + switch (self._mode) { + case 'text': + response = xhr.responseText + if (response.length > self._pos) { + var newData = response.substr(self._pos) + if (self._charset === 'x-user-defined') { + var buffer = Buffer.alloc(newData.length) + for (var i = 0; i < newData.length; i++) + buffer[i] = newData.charCodeAt(i) & 0xff + + self.push(buffer) + } else { + self.push(newData, self._charset) + } + self._pos = response.length + } + break + case 'arraybuffer': + if (xhr.readyState !== rStates.DONE || !xhr.response) + break + response = xhr.response + self.push(Buffer.from(new Uint8Array(response))) + break + case 'moz-chunked-arraybuffer': // take whole + response = xhr.response + if (xhr.readyState !== rStates.LOADING || !response) + break + self.push(Buffer.from(new Uint8Array(response))) + break + case 'ms-stream': + response = xhr.response + if (xhr.readyState !== rStates.LOADING) + break + var reader = new __webpack_require__.g.MSStreamReader() + reader.onprogress = function () { + if (reader.result.byteLength > self._pos) { + self.push(Buffer.from(new Uint8Array(reader.result.slice(self._pos)))) + self._pos = reader.result.byteLength + } + } + reader.onload = function () { + resetTimers(true) + self.push(null) + } + // reader.onerror = ??? 
// TODO: this + reader.readAsArrayBuffer(response) + break + } + + // The ms-stream case handles end separately in reader.onload() + if (self._xhr.readyState === rStates.DONE && self._mode !== 'ms-stream') { + resetTimers(true) + self.push(null) + } +} + + /***/ }), /***/ 83141: @@ -105599,6 +129708,2493 @@ function simpleEnd(buf) { return buf && buf.length ? this.write(buf) : ''; } +/***/ }), + +/***/ 76386: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +// Written in 2014-2016 by Dmitry Chestnykh and Devi Mandiri. +// Public domain. +(function(root, f) { + 'use strict'; + if ( true && module.exports) module.exports = f(); + else if (root.nacl) root.nacl.util = f(); + else { + root.nacl = {}; + root.nacl.util = f(); + } +}(this, function() { + 'use strict'; + + var util = {}; + + function validateBase64(s) { + if (!(/^(?:[A-Za-z0-9+\/]{2}[A-Za-z0-9+\/]{2})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/.test(s))) { + throw new TypeError('invalid encoding'); + } + } + + util.decodeUTF8 = function(s) { + if (typeof s !== 'string') throw new TypeError('expected string'); + var i, d = unescape(encodeURIComponent(s)), b = new Uint8Array(d.length); + for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i); + return b; + }; + + util.encodeUTF8 = function(arr) { + var i, s = []; + for (i = 0; i < arr.length; i++) s.push(String.fromCharCode(arr[i])); + return decodeURIComponent(escape(s.join(''))); + }; + + if (typeof atob === 'undefined') { + // Node.js + + if (typeof Buffer.from !== 'undefined') { + // Node v6 and later + util.encodeBase64 = function (arr) { // v6 and later + return Buffer.from(arr).toString('base64'); + }; + + util.decodeBase64 = function (s) { + validateBase64(s); + return new Uint8Array(Array.prototype.slice.call(Buffer.from(s, 'base64'), 0)); + }; + + } else { + // Node earlier than v6 + util.encodeBase64 = function (arr) { // v6 and later + return 
(new Buffer(arr)).toString('base64'); + }; + + util.decodeBase64 = function(s) { + validateBase64(s); + return new Uint8Array(Array.prototype.slice.call(new Buffer(s, 'base64'), 0)); + }; + } + + } else { + // Browsers + + util.encodeBase64 = function(arr) { + var i, s = [], len = arr.length; + for (i = 0; i < len; i++) s.push(String.fromCharCode(arr[i])); + return btoa(s.join('')); + }; + + util.decodeBase64 = function(s) { + validateBase64(s); + var i, d = atob(s), b = new Uint8Array(d.length); + for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i); + return b; + }; + + } + + return util; + +})); + + +/***/ }), + +/***/ 88947: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +(function(nacl) { +'use strict'; + +// Ported in 2014 by Dmitry Chestnykh and Devi Mandiri. +// Public domain. +// +// Implementation derived from TweetNaCl version 20140427. +// See for details: http://tweetnacl.cr.yp.to/ + +var gf = function(init) { + var i, r = new Float64Array(16); + if (init) for (i = 0; i < init.length; i++) r[i] = init[i]; + return r; +}; + +// Pluggable, initialized in high-level API below. 
+var randombytes = function(/* x, n */) { throw new Error('no PRNG'); }; + +var _0 = new Uint8Array(16); +var _9 = new Uint8Array(32); _9[0] = 9; + +var gf0 = gf(), + gf1 = gf([1]), + _121665 = gf([0xdb41, 1]), + D = gf([0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]), + D2 = gf([0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]), + X = gf([0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]), + Y = gf([0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]), + I = gf([0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]); + +function ts64(x, i, h, l) { + x[i] = (h >> 24) & 0xff; + x[i+1] = (h >> 16) & 0xff; + x[i+2] = (h >> 8) & 0xff; + x[i+3] = h & 0xff; + x[i+4] = (l >> 24) & 0xff; + x[i+5] = (l >> 16) & 0xff; + x[i+6] = (l >> 8) & 0xff; + x[i+7] = l & 0xff; +} + +function vn(x, xi, y, yi, n) { + var i,d = 0; + for (i = 0; i < n; i++) d |= x[xi+i]^y[yi+i]; + return (1 & ((d - 1) >>> 8)) - 1; +} + +function crypto_verify_16(x, xi, y, yi) { + return vn(x,xi,y,yi,16); +} + +function crypto_verify_32(x, xi, y, yi) { + return vn(x,xi,y,yi,32); +} + +function core_salsa20(o, p, k, c) { + var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24, + j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24, + j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24, + j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24, + j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24, + j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24, + j6 = p[ 0] & 
0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24, + j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24, + j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24, + j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24, + j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24, + j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24, + j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24, + j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24, + j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24, + j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24; + + var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7, + x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14, + x15 = j15, u; + + for (var i = 0; i < 20; i += 2) { + u = x0 + x12 | 0; + x4 ^= u<<7 | u>>>(32-7); + u = x4 + x0 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x4 | 0; + x12 ^= u<<13 | u>>>(32-13); + u = x12 + x8 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x1 | 0; + x9 ^= u<<7 | u>>>(32-7); + u = x9 + x5 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x9 | 0; + x1 ^= u<<13 | u>>>(32-13); + u = x1 + x13 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x6 | 0; + x14 ^= u<<7 | u>>>(32-7); + u = x14 + x10 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x14 | 0; + x6 ^= u<<13 | u>>>(32-13); + u = x6 + x2 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x11 | 0; + x3 ^= u<<7 | u>>>(32-7); + u = x3 + x15 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x3 | 0; + x11 ^= u<<13 | u>>>(32-13); + u = x11 + x7 | 0; + x15 ^= u<<18 | u>>>(32-18); + + u = x0 + x3 | 0; + x1 ^= u<<7 | u>>>(32-7); + u = x1 + x0 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x1 | 0; + x3 ^= u<<13 | u>>>(32-13); + u = x3 + x2 | 0; + x0 ^= u<<18 | u>>>(32-18); + + 
u = x5 + x4 | 0; + x6 ^= u<<7 | u>>>(32-7); + u = x6 + x5 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x6 | 0; + x4 ^= u<<13 | u>>>(32-13); + u = x4 + x7 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x9 | 0; + x11 ^= u<<7 | u>>>(32-7); + u = x11 + x10 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x11 | 0; + x9 ^= u<<13 | u>>>(32-13); + u = x9 + x8 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x14 | 0; + x12 ^= u<<7 | u>>>(32-7); + u = x12 + x15 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x12 | 0; + x14 ^= u<<13 | u>>>(32-13); + u = x14 + x13 | 0; + x15 ^= u<<18 | u>>>(32-18); + } + x0 = x0 + j0 | 0; + x1 = x1 + j1 | 0; + x2 = x2 + j2 | 0; + x3 = x3 + j3 | 0; + x4 = x4 + j4 | 0; + x5 = x5 + j5 | 0; + x6 = x6 + j6 | 0; + x7 = x7 + j7 | 0; + x8 = x8 + j8 | 0; + x9 = x9 + j9 | 0; + x10 = x10 + j10 | 0; + x11 = x11 + j11 | 0; + x12 = x12 + j12 | 0; + x13 = x13 + j13 | 0; + x14 = x14 + j14 | 0; + x15 = x15 + j15 | 0; + + o[ 0] = x0 >>> 0 & 0xff; + o[ 1] = x0 >>> 8 & 0xff; + o[ 2] = x0 >>> 16 & 0xff; + o[ 3] = x0 >>> 24 & 0xff; + + o[ 4] = x1 >>> 0 & 0xff; + o[ 5] = x1 >>> 8 & 0xff; + o[ 6] = x1 >>> 16 & 0xff; + o[ 7] = x1 >>> 24 & 0xff; + + o[ 8] = x2 >>> 0 & 0xff; + o[ 9] = x2 >>> 8 & 0xff; + o[10] = x2 >>> 16 & 0xff; + o[11] = x2 >>> 24 & 0xff; + + o[12] = x3 >>> 0 & 0xff; + o[13] = x3 >>> 8 & 0xff; + o[14] = x3 >>> 16 & 0xff; + o[15] = x3 >>> 24 & 0xff; + + o[16] = x4 >>> 0 & 0xff; + o[17] = x4 >>> 8 & 0xff; + o[18] = x4 >>> 16 & 0xff; + o[19] = x4 >>> 24 & 0xff; + + o[20] = x5 >>> 0 & 0xff; + o[21] = x5 >>> 8 & 0xff; + o[22] = x5 >>> 16 & 0xff; + o[23] = x5 >>> 24 & 0xff; + + o[24] = x6 >>> 0 & 0xff; + o[25] = x6 >>> 8 & 0xff; + o[26] = x6 >>> 16 & 0xff; + o[27] = x6 >>> 24 & 0xff; + + o[28] = x7 >>> 0 & 0xff; + o[29] = x7 >>> 8 & 0xff; + o[30] = x7 >>> 16 & 0xff; + o[31] = x7 >>> 24 & 0xff; + + o[32] = x8 >>> 0 & 0xff; + o[33] = x8 >>> 8 & 0xff; + o[34] = x8 >>> 16 & 0xff; + o[35] = x8 >>> 24 & 0xff; + + o[36] = x9 >>> 0 & 0xff; + o[37] = x9 >>> 8 & 0xff; + o[38] = 
x9 >>> 16 & 0xff; + o[39] = x9 >>> 24 & 0xff; + + o[40] = x10 >>> 0 & 0xff; + o[41] = x10 >>> 8 & 0xff; + o[42] = x10 >>> 16 & 0xff; + o[43] = x10 >>> 24 & 0xff; + + o[44] = x11 >>> 0 & 0xff; + o[45] = x11 >>> 8 & 0xff; + o[46] = x11 >>> 16 & 0xff; + o[47] = x11 >>> 24 & 0xff; + + o[48] = x12 >>> 0 & 0xff; + o[49] = x12 >>> 8 & 0xff; + o[50] = x12 >>> 16 & 0xff; + o[51] = x12 >>> 24 & 0xff; + + o[52] = x13 >>> 0 & 0xff; + o[53] = x13 >>> 8 & 0xff; + o[54] = x13 >>> 16 & 0xff; + o[55] = x13 >>> 24 & 0xff; + + o[56] = x14 >>> 0 & 0xff; + o[57] = x14 >>> 8 & 0xff; + o[58] = x14 >>> 16 & 0xff; + o[59] = x14 >>> 24 & 0xff; + + o[60] = x15 >>> 0 & 0xff; + o[61] = x15 >>> 8 & 0xff; + o[62] = x15 >>> 16 & 0xff; + o[63] = x15 >>> 24 & 0xff; +} + +function core_hsalsa20(o,p,k,c) { + var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24, + j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24, + j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24, + j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24, + j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24, + j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24, + j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24, + j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24, + j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24, + j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24, + j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24, + j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24, + j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24, + j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24, + j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | 
(k[30] & 0xff)<<16 | (k[31] & 0xff)<<24, + j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24; + + var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7, + x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14, + x15 = j15, u; + + for (var i = 0; i < 20; i += 2) { + u = x0 + x12 | 0; + x4 ^= u<<7 | u>>>(32-7); + u = x4 + x0 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x4 | 0; + x12 ^= u<<13 | u>>>(32-13); + u = x12 + x8 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x1 | 0; + x9 ^= u<<7 | u>>>(32-7); + u = x9 + x5 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x9 | 0; + x1 ^= u<<13 | u>>>(32-13); + u = x1 + x13 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x6 | 0; + x14 ^= u<<7 | u>>>(32-7); + u = x14 + x10 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x14 | 0; + x6 ^= u<<13 | u>>>(32-13); + u = x6 + x2 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x11 | 0; + x3 ^= u<<7 | u>>>(32-7); + u = x3 + x15 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x3 | 0; + x11 ^= u<<13 | u>>>(32-13); + u = x11 + x7 | 0; + x15 ^= u<<18 | u>>>(32-18); + + u = x0 + x3 | 0; + x1 ^= u<<7 | u>>>(32-7); + u = x1 + x0 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x1 | 0; + x3 ^= u<<13 | u>>>(32-13); + u = x3 + x2 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x4 | 0; + x6 ^= u<<7 | u>>>(32-7); + u = x6 + x5 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x6 | 0; + x4 ^= u<<13 | u>>>(32-13); + u = x4 + x7 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x9 | 0; + x11 ^= u<<7 | u>>>(32-7); + u = x11 + x10 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x11 | 0; + x9 ^= u<<13 | u>>>(32-13); + u = x9 + x8 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x14 | 0; + x12 ^= u<<7 | u>>>(32-7); + u = x12 + x15 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x12 | 0; + x14 ^= u<<13 | u>>>(32-13); + u = x14 + x13 | 0; + x15 ^= u<<18 | u>>>(32-18); + } + + o[ 0] = x0 >>> 0 & 0xff; + o[ 1] = x0 >>> 8 & 0xff; + o[ 2] = x0 >>> 16 & 0xff; + o[ 3] = x0 >>> 24 & 
0xff; + + o[ 4] = x5 >>> 0 & 0xff; + o[ 5] = x5 >>> 8 & 0xff; + o[ 6] = x5 >>> 16 & 0xff; + o[ 7] = x5 >>> 24 & 0xff; + + o[ 8] = x10 >>> 0 & 0xff; + o[ 9] = x10 >>> 8 & 0xff; + o[10] = x10 >>> 16 & 0xff; + o[11] = x10 >>> 24 & 0xff; + + o[12] = x15 >>> 0 & 0xff; + o[13] = x15 >>> 8 & 0xff; + o[14] = x15 >>> 16 & 0xff; + o[15] = x15 >>> 24 & 0xff; + + o[16] = x6 >>> 0 & 0xff; + o[17] = x6 >>> 8 & 0xff; + o[18] = x6 >>> 16 & 0xff; + o[19] = x6 >>> 24 & 0xff; + + o[20] = x7 >>> 0 & 0xff; + o[21] = x7 >>> 8 & 0xff; + o[22] = x7 >>> 16 & 0xff; + o[23] = x7 >>> 24 & 0xff; + + o[24] = x8 >>> 0 & 0xff; + o[25] = x8 >>> 8 & 0xff; + o[26] = x8 >>> 16 & 0xff; + o[27] = x8 >>> 24 & 0xff; + + o[28] = x9 >>> 0 & 0xff; + o[29] = x9 >>> 8 & 0xff; + o[30] = x9 >>> 16 & 0xff; + o[31] = x9 >>> 24 & 0xff; +} + +function crypto_core_salsa20(out,inp,k,c) { + core_salsa20(out,inp,k,c); +} + +function crypto_core_hsalsa20(out,inp,k,c) { + core_hsalsa20(out,inp,k,c); +} + +var sigma = new Uint8Array([101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107]); + // "expand 32-byte k" + +function crypto_stream_salsa20_xor(c,cpos,m,mpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = m[mpos+i] ^ x[i]; + u = 1; + for (i = 8; i < 16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + mpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = m[mpos+i] ^ x[i]; + } + return 0; +} + +function crypto_stream_salsa20(c,cpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = x[i]; + u = 1; + for (i = 8; i < 
16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = x[i]; + } + return 0; +} + +function crypto_stream(c,cpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + var sn = new Uint8Array(8); + for (var i = 0; i < 8; i++) sn[i] = n[i+16]; + return crypto_stream_salsa20(c,cpos,d,sn,s); +} + +function crypto_stream_xor(c,cpos,m,mpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + var sn = new Uint8Array(8); + for (var i = 0; i < 8; i++) sn[i] = n[i+16]; + return crypto_stream_salsa20_xor(c,cpos,m,mpos,d,sn,s); +} + +/* +* Port of Andrew Moon's Poly1305-donna-16. Public domain. +* https://github.com/floodyberry/poly1305-donna +*/ + +var poly1305 = function(key) { + this.buffer = new Uint8Array(16); + this.r = new Uint16Array(10); + this.h = new Uint16Array(10); + this.pad = new Uint16Array(8); + this.leftover = 0; + this.fin = 0; + + var t0, t1, t2, t3, t4, t5, t6, t7; + + t0 = key[ 0] & 0xff | (key[ 1] & 0xff) << 8; this.r[0] = ( t0 ) & 0x1fff; + t1 = key[ 2] & 0xff | (key[ 3] & 0xff) << 8; this.r[1] = ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + t2 = key[ 4] & 0xff | (key[ 5] & 0xff) << 8; this.r[2] = ((t1 >>> 10) | (t2 << 6)) & 0x1f03; + t3 = key[ 6] & 0xff | (key[ 7] & 0xff) << 8; this.r[3] = ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + t4 = key[ 8] & 0xff | (key[ 9] & 0xff) << 8; this.r[4] = ((t3 >>> 4) | (t4 << 12)) & 0x00ff; + this.r[5] = ((t4 >>> 1)) & 0x1ffe; + t5 = key[10] & 0xff | (key[11] & 0xff) << 8; this.r[6] = ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + t6 = key[12] & 0xff | (key[13] & 0xff) << 8; this.r[7] = ((t5 >>> 11) | (t6 << 5)) & 0x1f81; + t7 = key[14] & 0xff | (key[15] & 0xff) << 8; this.r[8] = ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + this.r[9] = ((t7 >>> 5)) & 0x007f; + + this.pad[0] = key[16] & 0xff | (key[17] & 0xff) << 8; + this.pad[1] = key[18] & 0xff | (key[19] & 0xff) << 
8; + this.pad[2] = key[20] & 0xff | (key[21] & 0xff) << 8; + this.pad[3] = key[22] & 0xff | (key[23] & 0xff) << 8; + this.pad[4] = key[24] & 0xff | (key[25] & 0xff) << 8; + this.pad[5] = key[26] & 0xff | (key[27] & 0xff) << 8; + this.pad[6] = key[28] & 0xff | (key[29] & 0xff) << 8; + this.pad[7] = key[30] & 0xff | (key[31] & 0xff) << 8; +}; + +poly1305.prototype.blocks = function(m, mpos, bytes) { + var hibit = this.fin ? 0 : (1 << 11); + var t0, t1, t2, t3, t4, t5, t6, t7, c; + var d0, d1, d2, d3, d4, d5, d6, d7, d8, d9; + + var h0 = this.h[0], + h1 = this.h[1], + h2 = this.h[2], + h3 = this.h[3], + h4 = this.h[4], + h5 = this.h[5], + h6 = this.h[6], + h7 = this.h[7], + h8 = this.h[8], + h9 = this.h[9]; + + var r0 = this.r[0], + r1 = this.r[1], + r2 = this.r[2], + r3 = this.r[3], + r4 = this.r[4], + r5 = this.r[5], + r6 = this.r[6], + r7 = this.r[7], + r8 = this.r[8], + r9 = this.r[9]; + + while (bytes >= 16) { + t0 = m[mpos+ 0] & 0xff | (m[mpos+ 1] & 0xff) << 8; h0 += ( t0 ) & 0x1fff; + t1 = m[mpos+ 2] & 0xff | (m[mpos+ 3] & 0xff) << 8; h1 += ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + t2 = m[mpos+ 4] & 0xff | (m[mpos+ 5] & 0xff) << 8; h2 += ((t1 >>> 10) | (t2 << 6)) & 0x1fff; + t3 = m[mpos+ 6] & 0xff | (m[mpos+ 7] & 0xff) << 8; h3 += ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + t4 = m[mpos+ 8] & 0xff | (m[mpos+ 9] & 0xff) << 8; h4 += ((t3 >>> 4) | (t4 << 12)) & 0x1fff; + h5 += ((t4 >>> 1)) & 0x1fff; + t5 = m[mpos+10] & 0xff | (m[mpos+11] & 0xff) << 8; h6 += ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + t6 = m[mpos+12] & 0xff | (m[mpos+13] & 0xff) << 8; h7 += ((t5 >>> 11) | (t6 << 5)) & 0x1fff; + t7 = m[mpos+14] & 0xff | (m[mpos+15] & 0xff) << 8; h8 += ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + h9 += ((t7 >>> 5)) | hibit; + + c = 0; + + d0 = c; + d0 += h0 * r0; + d0 += h1 * (5 * r9); + d0 += h2 * (5 * r8); + d0 += h3 * (5 * r7); + d0 += h4 * (5 * r6); + c = (d0 >>> 13); d0 &= 0x1fff; + d0 += h5 * (5 * r5); + d0 += h6 * (5 * r4); + d0 += h7 * (5 * r3); + d0 += h8 * (5 * r2); + d0 += h9 * (5 
* r1); + c += (d0 >>> 13); d0 &= 0x1fff; + + d1 = c; + d1 += h0 * r1; + d1 += h1 * r0; + d1 += h2 * (5 * r9); + d1 += h3 * (5 * r8); + d1 += h4 * (5 * r7); + c = (d1 >>> 13); d1 &= 0x1fff; + d1 += h5 * (5 * r6); + d1 += h6 * (5 * r5); + d1 += h7 * (5 * r4); + d1 += h8 * (5 * r3); + d1 += h9 * (5 * r2); + c += (d1 >>> 13); d1 &= 0x1fff; + + d2 = c; + d2 += h0 * r2; + d2 += h1 * r1; + d2 += h2 * r0; + d2 += h3 * (5 * r9); + d2 += h4 * (5 * r8); + c = (d2 >>> 13); d2 &= 0x1fff; + d2 += h5 * (5 * r7); + d2 += h6 * (5 * r6); + d2 += h7 * (5 * r5); + d2 += h8 * (5 * r4); + d2 += h9 * (5 * r3); + c += (d2 >>> 13); d2 &= 0x1fff; + + d3 = c; + d3 += h0 * r3; + d3 += h1 * r2; + d3 += h2 * r1; + d3 += h3 * r0; + d3 += h4 * (5 * r9); + c = (d3 >>> 13); d3 &= 0x1fff; + d3 += h5 * (5 * r8); + d3 += h6 * (5 * r7); + d3 += h7 * (5 * r6); + d3 += h8 * (5 * r5); + d3 += h9 * (5 * r4); + c += (d3 >>> 13); d3 &= 0x1fff; + + d4 = c; + d4 += h0 * r4; + d4 += h1 * r3; + d4 += h2 * r2; + d4 += h3 * r1; + d4 += h4 * r0; + c = (d4 >>> 13); d4 &= 0x1fff; + d4 += h5 * (5 * r9); + d4 += h6 * (5 * r8); + d4 += h7 * (5 * r7); + d4 += h8 * (5 * r6); + d4 += h9 * (5 * r5); + c += (d4 >>> 13); d4 &= 0x1fff; + + d5 = c; + d5 += h0 * r5; + d5 += h1 * r4; + d5 += h2 * r3; + d5 += h3 * r2; + d5 += h4 * r1; + c = (d5 >>> 13); d5 &= 0x1fff; + d5 += h5 * r0; + d5 += h6 * (5 * r9); + d5 += h7 * (5 * r8); + d5 += h8 * (5 * r7); + d5 += h9 * (5 * r6); + c += (d5 >>> 13); d5 &= 0x1fff; + + d6 = c; + d6 += h0 * r6; + d6 += h1 * r5; + d6 += h2 * r4; + d6 += h3 * r3; + d6 += h4 * r2; + c = (d6 >>> 13); d6 &= 0x1fff; + d6 += h5 * r1; + d6 += h6 * r0; + d6 += h7 * (5 * r9); + d6 += h8 * (5 * r8); + d6 += h9 * (5 * r7); + c += (d6 >>> 13); d6 &= 0x1fff; + + d7 = c; + d7 += h0 * r7; + d7 += h1 * r6; + d7 += h2 * r5; + d7 += h3 * r4; + d7 += h4 * r3; + c = (d7 >>> 13); d7 &= 0x1fff; + d7 += h5 * r2; + d7 += h6 * r1; + d7 += h7 * r0; + d7 += h8 * (5 * r9); + d7 += h9 * (5 * r8); + c += (d7 >>> 13); d7 &= 0x1fff; + + 
d8 = c; + d8 += h0 * r8; + d8 += h1 * r7; + d8 += h2 * r6; + d8 += h3 * r5; + d8 += h4 * r4; + c = (d8 >>> 13); d8 &= 0x1fff; + d8 += h5 * r3; + d8 += h6 * r2; + d8 += h7 * r1; + d8 += h8 * r0; + d8 += h9 * (5 * r9); + c += (d8 >>> 13); d8 &= 0x1fff; + + d9 = c; + d9 += h0 * r9; + d9 += h1 * r8; + d9 += h2 * r7; + d9 += h3 * r6; + d9 += h4 * r5; + c = (d9 >>> 13); d9 &= 0x1fff; + d9 += h5 * r4; + d9 += h6 * r3; + d9 += h7 * r2; + d9 += h8 * r1; + d9 += h9 * r0; + c += (d9 >>> 13); d9 &= 0x1fff; + + c = (((c << 2) + c)) | 0; + c = (c + d0) | 0; + d0 = c & 0x1fff; + c = (c >>> 13); + d1 += c; + + h0 = d0; + h1 = d1; + h2 = d2; + h3 = d3; + h4 = d4; + h5 = d5; + h6 = d6; + h7 = d7; + h8 = d8; + h9 = d9; + + mpos += 16; + bytes -= 16; + } + this.h[0] = h0; + this.h[1] = h1; + this.h[2] = h2; + this.h[3] = h3; + this.h[4] = h4; + this.h[5] = h5; + this.h[6] = h6; + this.h[7] = h7; + this.h[8] = h8; + this.h[9] = h9; +}; + +poly1305.prototype.finish = function(mac, macpos) { + var g = new Uint16Array(10); + var c, mask, f, i; + + if (this.leftover) { + i = this.leftover; + this.buffer[i++] = 1; + for (; i < 16; i++) this.buffer[i] = 0; + this.fin = 1; + this.blocks(this.buffer, 0, 16); + } + + c = this.h[1] >>> 13; + this.h[1] &= 0x1fff; + for (i = 2; i < 10; i++) { + this.h[i] += c; + c = this.h[i] >>> 13; + this.h[i] &= 0x1fff; + } + this.h[0] += (c * 5); + c = this.h[0] >>> 13; + this.h[0] &= 0x1fff; + this.h[1] += c; + c = this.h[1] >>> 13; + this.h[1] &= 0x1fff; + this.h[2] += c; + + g[0] = this.h[0] + 5; + c = g[0] >>> 13; + g[0] &= 0x1fff; + for (i = 1; i < 10; i++) { + g[i] = this.h[i] + c; + c = g[i] >>> 13; + g[i] &= 0x1fff; + } + g[9] -= (1 << 13); + + mask = (c ^ 1) - 1; + for (i = 0; i < 10; i++) g[i] &= mask; + mask = ~mask; + for (i = 0; i < 10; i++) this.h[i] = (this.h[i] & mask) | g[i]; + + this.h[0] = ((this.h[0] ) | (this.h[1] << 13) ) & 0xffff; + this.h[1] = ((this.h[1] >>> 3) | (this.h[2] << 10) ) & 0xffff; + this.h[2] = ((this.h[2] >>> 6) | 
(this.h[3] << 7) ) & 0xffff; + this.h[3] = ((this.h[3] >>> 9) | (this.h[4] << 4) ) & 0xffff; + this.h[4] = ((this.h[4] >>> 12) | (this.h[5] << 1) | (this.h[6] << 14)) & 0xffff; + this.h[5] = ((this.h[6] >>> 2) | (this.h[7] << 11) ) & 0xffff; + this.h[6] = ((this.h[7] >>> 5) | (this.h[8] << 8) ) & 0xffff; + this.h[7] = ((this.h[8] >>> 8) | (this.h[9] << 5) ) & 0xffff; + + f = this.h[0] + this.pad[0]; + this.h[0] = f & 0xffff; + for (i = 1; i < 8; i++) { + f = (((this.h[i] + this.pad[i]) | 0) + (f >>> 16)) | 0; + this.h[i] = f & 0xffff; + } + + mac[macpos+ 0] = (this.h[0] >>> 0) & 0xff; + mac[macpos+ 1] = (this.h[0] >>> 8) & 0xff; + mac[macpos+ 2] = (this.h[1] >>> 0) & 0xff; + mac[macpos+ 3] = (this.h[1] >>> 8) & 0xff; + mac[macpos+ 4] = (this.h[2] >>> 0) & 0xff; + mac[macpos+ 5] = (this.h[2] >>> 8) & 0xff; + mac[macpos+ 6] = (this.h[3] >>> 0) & 0xff; + mac[macpos+ 7] = (this.h[3] >>> 8) & 0xff; + mac[macpos+ 8] = (this.h[4] >>> 0) & 0xff; + mac[macpos+ 9] = (this.h[4] >>> 8) & 0xff; + mac[macpos+10] = (this.h[5] >>> 0) & 0xff; + mac[macpos+11] = (this.h[5] >>> 8) & 0xff; + mac[macpos+12] = (this.h[6] >>> 0) & 0xff; + mac[macpos+13] = (this.h[6] >>> 8) & 0xff; + mac[macpos+14] = (this.h[7] >>> 0) & 0xff; + mac[macpos+15] = (this.h[7] >>> 8) & 0xff; +}; + +poly1305.prototype.update = function(m, mpos, bytes) { + var i, want; + + if (this.leftover) { + want = (16 - this.leftover); + if (want > bytes) + want = bytes; + for (i = 0; i < want; i++) + this.buffer[this.leftover + i] = m[mpos+i]; + bytes -= want; + mpos += want; + this.leftover += want; + if (this.leftover < 16) + return; + this.blocks(this.buffer, 0, 16); + this.leftover = 0; + } + + if (bytes >= 16) { + want = bytes - (bytes % 16); + this.blocks(m, mpos, want); + mpos += want; + bytes -= want; + } + + if (bytes) { + for (i = 0; i < bytes; i++) + this.buffer[this.leftover + i] = m[mpos+i]; + this.leftover += bytes; + } +}; + +function crypto_onetimeauth(out, outpos, m, mpos, n, k) { + var s = new 
poly1305(k); + s.update(m, mpos, n); + s.finish(out, outpos); + return 0; +} + +function crypto_onetimeauth_verify(h, hpos, m, mpos, n, k) { + var x = new Uint8Array(16); + crypto_onetimeauth(x,0,m,mpos,n,k); + return crypto_verify_16(h,hpos,x,0); +} + +function crypto_secretbox(c,m,d,n,k) { + var i; + if (d < 32) return -1; + crypto_stream_xor(c,0,m,0,d,n,k); + crypto_onetimeauth(c, 16, c, 32, d - 32, c); + for (i = 0; i < 16; i++) c[i] = 0; + return 0; +} + +function crypto_secretbox_open(m,c,d,n,k) { + var i; + var x = new Uint8Array(32); + if (d < 32) return -1; + crypto_stream(x,0,32,n,k); + if (crypto_onetimeauth_verify(c, 16,c, 32,d - 32,x) !== 0) return -1; + crypto_stream_xor(m,0,c,0,d,n,k); + for (i = 0; i < 32; i++) m[i] = 0; + return 0; +} + +function set25519(r, a) { + var i; + for (i = 0; i < 16; i++) r[i] = a[i]|0; +} + +function car25519(o) { + var i, v, c = 1; + for (i = 0; i < 16; i++) { + v = o[i] + c + 65535; + c = Math.floor(v / 65536); + o[i] = v - c * 65536; + } + o[0] += c-1 + 37 * (c-1); +} + +function sel25519(p, q, b) { + var t, c = ~(b-1); + for (var i = 0; i < 16; i++) { + t = c & (p[i] ^ q[i]); + p[i] ^= t; + q[i] ^= t; + } +} + +function pack25519(o, n) { + var i, j, b; + var m = gf(), t = gf(); + for (i = 0; i < 16; i++) t[i] = n[i]; + car25519(t); + car25519(t); + car25519(t); + for (j = 0; j < 2; j++) { + m[0] = t[0] - 0xffed; + for (i = 1; i < 15; i++) { + m[i] = t[i] - 0xffff - ((m[i-1]>>16) & 1); + m[i-1] &= 0xffff; + } + m[15] = t[15] - 0x7fff - ((m[14]>>16) & 1); + b = (m[15]>>16) & 1; + m[14] &= 0xffff; + sel25519(t, m, 1-b); + } + for (i = 0; i < 16; i++) { + o[2*i] = t[i] & 0xff; + o[2*i+1] = t[i]>>8; + } +} + +function neq25519(a, b) { + var c = new Uint8Array(32), d = new Uint8Array(32); + pack25519(c, a); + pack25519(d, b); + return crypto_verify_32(c, 0, d, 0); +} + +function par25519(a) { + var d = new Uint8Array(32); + pack25519(d, a); + return d[0] & 1; +} + +function unpack25519(o, n) { + var i; + for (i = 0; i < 
16; i++) o[i] = n[2*i] + (n[2*i+1] << 8); + o[15] &= 0x7fff; +} + +function A(o, a, b) { + for (var i = 0; i < 16; i++) o[i] = a[i] + b[i]; +} + +function Z(o, a, b) { + for (var i = 0; i < 16; i++) o[i] = a[i] - b[i]; +} + +function M(o, a, b) { + var v, c, + t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0, + t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0, + t16 = 0, t17 = 0, t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0, + t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0, + b0 = b[0], + b1 = b[1], + b2 = b[2], + b3 = b[3], + b4 = b[4], + b5 = b[5], + b6 = b[6], + b7 = b[7], + b8 = b[8], + b9 = b[9], + b10 = b[10], + b11 = b[11], + b12 = b[12], + b13 = b[13], + b14 = b[14], + b15 = b[15]; + + v = a[0]; + t0 += v * b0; + t1 += v * b1; + t2 += v * b2; + t3 += v * b3; + t4 += v * b4; + t5 += v * b5; + t6 += v * b6; + t7 += v * b7; + t8 += v * b8; + t9 += v * b9; + t10 += v * b10; + t11 += v * b11; + t12 += v * b12; + t13 += v * b13; + t14 += v * b14; + t15 += v * b15; + v = a[1]; + t1 += v * b0; + t2 += v * b1; + t3 += v * b2; + t4 += v * b3; + t5 += v * b4; + t6 += v * b5; + t7 += v * b6; + t8 += v * b7; + t9 += v * b8; + t10 += v * b9; + t11 += v * b10; + t12 += v * b11; + t13 += v * b12; + t14 += v * b13; + t15 += v * b14; + t16 += v * b15; + v = a[2]; + t2 += v * b0; + t3 += v * b1; + t4 += v * b2; + t5 += v * b3; + t6 += v * b4; + t7 += v * b5; + t8 += v * b6; + t9 += v * b7; + t10 += v * b8; + t11 += v * b9; + t12 += v * b10; + t13 += v * b11; + t14 += v * b12; + t15 += v * b13; + t16 += v * b14; + t17 += v * b15; + v = a[3]; + t3 += v * b0; + t4 += v * b1; + t5 += v * b2; + t6 += v * b3; + t7 += v * b4; + t8 += v * b5; + t9 += v * b6; + t10 += v * b7; + t11 += v * b8; + t12 += v * b9; + t13 += v * b10; + t14 += v * b11; + t15 += v * b12; + t16 += v * b13; + t17 += v * b14; + t18 += v * b15; + v = a[4]; + t4 += v * b0; + t5 += v * b1; + t6 += v * b2; + t7 += v * b3; + t8 += v * b4; + t9 += v * b5; + t10 
+= v * b6; + t11 += v * b7; + t12 += v * b8; + t13 += v * b9; + t14 += v * b10; + t15 += v * b11; + t16 += v * b12; + t17 += v * b13; + t18 += v * b14; + t19 += v * b15; + v = a[5]; + t5 += v * b0; + t6 += v * b1; + t7 += v * b2; + t8 += v * b3; + t9 += v * b4; + t10 += v * b5; + t11 += v * b6; + t12 += v * b7; + t13 += v * b8; + t14 += v * b9; + t15 += v * b10; + t16 += v * b11; + t17 += v * b12; + t18 += v * b13; + t19 += v * b14; + t20 += v * b15; + v = a[6]; + t6 += v * b0; + t7 += v * b1; + t8 += v * b2; + t9 += v * b3; + t10 += v * b4; + t11 += v * b5; + t12 += v * b6; + t13 += v * b7; + t14 += v * b8; + t15 += v * b9; + t16 += v * b10; + t17 += v * b11; + t18 += v * b12; + t19 += v * b13; + t20 += v * b14; + t21 += v * b15; + v = a[7]; + t7 += v * b0; + t8 += v * b1; + t9 += v * b2; + t10 += v * b3; + t11 += v * b4; + t12 += v * b5; + t13 += v * b6; + t14 += v * b7; + t15 += v * b8; + t16 += v * b9; + t17 += v * b10; + t18 += v * b11; + t19 += v * b12; + t20 += v * b13; + t21 += v * b14; + t22 += v * b15; + v = a[8]; + t8 += v * b0; + t9 += v * b1; + t10 += v * b2; + t11 += v * b3; + t12 += v * b4; + t13 += v * b5; + t14 += v * b6; + t15 += v * b7; + t16 += v * b8; + t17 += v * b9; + t18 += v * b10; + t19 += v * b11; + t20 += v * b12; + t21 += v * b13; + t22 += v * b14; + t23 += v * b15; + v = a[9]; + t9 += v * b0; + t10 += v * b1; + t11 += v * b2; + t12 += v * b3; + t13 += v * b4; + t14 += v * b5; + t15 += v * b6; + t16 += v * b7; + t17 += v * b8; + t18 += v * b9; + t19 += v * b10; + t20 += v * b11; + t21 += v * b12; + t22 += v * b13; + t23 += v * b14; + t24 += v * b15; + v = a[10]; + t10 += v * b0; + t11 += v * b1; + t12 += v * b2; + t13 += v * b3; + t14 += v * b4; + t15 += v * b5; + t16 += v * b6; + t17 += v * b7; + t18 += v * b8; + t19 += v * b9; + t20 += v * b10; + t21 += v * b11; + t22 += v * b12; + t23 += v * b13; + t24 += v * b14; + t25 += v * b15; + v = a[11]; + t11 += v * b0; + t12 += v * b1; + t13 += v * b2; + t14 += v * b3; + t15 += v * b4; + t16 
+= v * b5; + t17 += v * b6; + t18 += v * b7; + t19 += v * b8; + t20 += v * b9; + t21 += v * b10; + t22 += v * b11; + t23 += v * b12; + t24 += v * b13; + t25 += v * b14; + t26 += v * b15; + v = a[12]; + t12 += v * b0; + t13 += v * b1; + t14 += v * b2; + t15 += v * b3; + t16 += v * b4; + t17 += v * b5; + t18 += v * b6; + t19 += v * b7; + t20 += v * b8; + t21 += v * b9; + t22 += v * b10; + t23 += v * b11; + t24 += v * b12; + t25 += v * b13; + t26 += v * b14; + t27 += v * b15; + v = a[13]; + t13 += v * b0; + t14 += v * b1; + t15 += v * b2; + t16 += v * b3; + t17 += v * b4; + t18 += v * b5; + t19 += v * b6; + t20 += v * b7; + t21 += v * b8; + t22 += v * b9; + t23 += v * b10; + t24 += v * b11; + t25 += v * b12; + t26 += v * b13; + t27 += v * b14; + t28 += v * b15; + v = a[14]; + t14 += v * b0; + t15 += v * b1; + t16 += v * b2; + t17 += v * b3; + t18 += v * b4; + t19 += v * b5; + t20 += v * b6; + t21 += v * b7; + t22 += v * b8; + t23 += v * b9; + t24 += v * b10; + t25 += v * b11; + t26 += v * b12; + t27 += v * b13; + t28 += v * b14; + t29 += v * b15; + v = a[15]; + t15 += v * b0; + t16 += v * b1; + t17 += v * b2; + t18 += v * b3; + t19 += v * b4; + t20 += v * b5; + t21 += v * b6; + t22 += v * b7; + t23 += v * b8; + t24 += v * b9; + t25 += v * b10; + t26 += v * b11; + t27 += v * b12; + t28 += v * b13; + t29 += v * b14; + t30 += v * b15; + + t0 += 38 * t16; + t1 += 38 * t17; + t2 += 38 * t18; + t3 += 38 * t19; + t4 += 38 * t20; + t5 += 38 * t21; + t6 += 38 * t22; + t7 += 38 * t23; + t8 += 38 * t24; + t9 += 38 * t25; + t10 += 38 * t26; + t11 += 38 * t27; + t12 += 38 * t28; + t13 += 38 * t29; + t14 += 38 * t30; + // t15 left as is + + // first car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); 
t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + // second car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + o[ 0] = 
t0; + o[ 1] = t1; + o[ 2] = t2; + o[ 3] = t3; + o[ 4] = t4; + o[ 5] = t5; + o[ 6] = t6; + o[ 7] = t7; + o[ 8] = t8; + o[ 9] = t9; + o[10] = t10; + o[11] = t11; + o[12] = t12; + o[13] = t13; + o[14] = t14; + o[15] = t15; +} + +function S(o, a) { + M(o, a, a); +} + +function inv25519(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 253; a >= 0; a--) { + S(c, c); + if(a !== 2 && a !== 4) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function pow2523(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 250; a >= 0; a--) { + S(c, c); + if(a !== 1) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function crypto_scalarmult(q, n, p) { + var z = new Uint8Array(32); + var x = new Float64Array(80), r, i; + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(); + for (i = 0; i < 31; i++) z[i] = n[i]; + z[31]=(n[31]&127)|64; + z[0]&=248; + unpack25519(x,p); + for (i = 0; i < 16; i++) { + b[i]=x[i]; + d[i]=a[i]=c[i]=0; + } + a[0]=d[0]=1; + for (i=254; i>=0; --i) { + r=(z[i>>>3]>>>(i&7))&1; + sel25519(a,b,r); + sel25519(c,d,r); + A(e,a,c); + Z(a,a,c); + A(c,b,d); + Z(b,b,d); + S(d,e); + S(f,a); + M(a,c,a); + M(c,b,e); + A(e,a,c); + Z(a,a,c); + S(b,a); + Z(c,d,f); + M(a,c,_121665); + A(a,a,d); + M(c,c,a); + M(a,d,f); + M(d,b,x); + S(b,e); + sel25519(a,b,r); + sel25519(c,d,r); + } + for (i = 0; i < 16; i++) { + x[i+16]=a[i]; + x[i+32]=c[i]; + x[i+48]=b[i]; + x[i+64]=d[i]; + } + var x32 = x.subarray(32); + var x16 = x.subarray(16); + inv25519(x32,x32); + M(x16,x16,x32); + pack25519(q,x16); + return 0; +} + +function crypto_scalarmult_base(q, n) { + return crypto_scalarmult(q, n, _9); +} + +function crypto_box_keypair(y, x) { + randombytes(x, 32); + return crypto_scalarmult_base(y, x); +} + +function crypto_box_beforenm(k, y, x) { + var s = new Uint8Array(32); + crypto_scalarmult(s, x, y); + return crypto_core_hsalsa20(k, _0, s, sigma); +} + +var crypto_box_afternm = 
crypto_secretbox; +var crypto_box_open_afternm = crypto_secretbox_open; + +function crypto_box(c, m, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_afternm(c, m, d, n, k); +} + +function crypto_box_open(m, c, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_open_afternm(m, c, d, n, k); +} + +var K = [ + 0x428a2f98, 0xd728ae22, 0x71374491, 0x23ef65cd, + 0xb5c0fbcf, 0xec4d3b2f, 0xe9b5dba5, 0x8189dbbc, + 0x3956c25b, 0xf348b538, 0x59f111f1, 0xb605d019, + 0x923f82a4, 0xaf194f9b, 0xab1c5ed5, 0xda6d8118, + 0xd807aa98, 0xa3030242, 0x12835b01, 0x45706fbe, + 0x243185be, 0x4ee4b28c, 0x550c7dc3, 0xd5ffb4e2, + 0x72be5d74, 0xf27b896f, 0x80deb1fe, 0x3b1696b1, + 0x9bdc06a7, 0x25c71235, 0xc19bf174, 0xcf692694, + 0xe49b69c1, 0x9ef14ad2, 0xefbe4786, 0x384f25e3, + 0x0fc19dc6, 0x8b8cd5b5, 0x240ca1cc, 0x77ac9c65, + 0x2de92c6f, 0x592b0275, 0x4a7484aa, 0x6ea6e483, + 0x5cb0a9dc, 0xbd41fbd4, 0x76f988da, 0x831153b5, + 0x983e5152, 0xee66dfab, 0xa831c66d, 0x2db43210, + 0xb00327c8, 0x98fb213f, 0xbf597fc7, 0xbeef0ee4, + 0xc6e00bf3, 0x3da88fc2, 0xd5a79147, 0x930aa725, + 0x06ca6351, 0xe003826f, 0x14292967, 0x0a0e6e70, + 0x27b70a85, 0x46d22ffc, 0x2e1b2138, 0x5c26c926, + 0x4d2c6dfc, 0x5ac42aed, 0x53380d13, 0x9d95b3df, + 0x650a7354, 0x8baf63de, 0x766a0abb, 0x3c77b2a8, + 0x81c2c92e, 0x47edaee6, 0x92722c85, 0x1482353b, + 0xa2bfe8a1, 0x4cf10364, 0xa81a664b, 0xbc423001, + 0xc24b8b70, 0xd0f89791, 0xc76c51a3, 0x0654be30, + 0xd192e819, 0xd6ef5218, 0xd6990624, 0x5565a910, + 0xf40e3585, 0x5771202a, 0x106aa070, 0x32bbd1b8, + 0x19a4c116, 0xb8d2d0c8, 0x1e376c08, 0x5141ab53, + 0x2748774c, 0xdf8eeb99, 0x34b0bcb5, 0xe19b48a8, + 0x391c0cb3, 0xc5c95a63, 0x4ed8aa4a, 0xe3418acb, + 0x5b9cca4f, 0x7763e373, 0x682e6ff3, 0xd6b2b8a3, + 0x748f82ee, 0x5defb2fc, 0x78a5636f, 0x43172f60, + 0x84c87814, 0xa1f0ab72, 0x8cc70208, 0x1a6439ec, + 0x90befffa, 0x23631e28, 0xa4506ceb, 0xde82bde9, + 0xbef9a3f7, 0xb2c67915, 0xc67178f2, 0xe372532b, 
+ 0xca273ece, 0xea26619c, 0xd186b8c7, 0x21c0c207, + 0xeada7dd6, 0xcde0eb1e, 0xf57d4f7f, 0xee6ed178, + 0x06f067aa, 0x72176fba, 0x0a637dc5, 0xa2c898a6, + 0x113f9804, 0xbef90dae, 0x1b710b35, 0x131c471b, + 0x28db77f5, 0x23047d84, 0x32caab7b, 0x40c72493, + 0x3c9ebe0a, 0x15c9bebc, 0x431d67c4, 0x9c100d4c, + 0x4cc5d4be, 0xcb3e42b6, 0x597f299c, 0xfc657e2a, + 0x5fcb6fab, 0x3ad6faec, 0x6c44198c, 0x4a475817 +]; + +function crypto_hashblocks_hl(hh, hl, m, n) { + var wh = new Int32Array(16), wl = new Int32Array(16), + bh0, bh1, bh2, bh3, bh4, bh5, bh6, bh7, + bl0, bl1, bl2, bl3, bl4, bl5, bl6, bl7, + th, tl, i, j, h, l, a, b, c, d; + + var ah0 = hh[0], + ah1 = hh[1], + ah2 = hh[2], + ah3 = hh[3], + ah4 = hh[4], + ah5 = hh[5], + ah6 = hh[6], + ah7 = hh[7], + + al0 = hl[0], + al1 = hl[1], + al2 = hl[2], + al3 = hl[3], + al4 = hl[4], + al5 = hl[5], + al6 = hl[6], + al7 = hl[7]; + + var pos = 0; + while (n >= 128) { + for (i = 0; i < 16; i++) { + j = 8 * i + pos; + wh[i] = (m[j+0] << 24) | (m[j+1] << 16) | (m[j+2] << 8) | m[j+3]; + wl[i] = (m[j+4] << 24) | (m[j+5] << 16) | (m[j+6] << 8) | m[j+7]; + } + for (i = 0; i < 80; i++) { + bh0 = ah0; + bh1 = ah1; + bh2 = ah2; + bh3 = ah3; + bh4 = ah4; + bh5 = ah5; + bh6 = ah6; + bh7 = ah7; + + bl0 = al0; + bl1 = al1; + bl2 = al2; + bl3 = al3; + bl4 = al4; + bl5 = al5; + bl6 = al6; + bl7 = al7; + + // add + h = ah7; + l = al7; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + // Sigma1 + h = ((ah4 >>> 14) | (al4 << (32-14))) ^ ((ah4 >>> 18) | (al4 << (32-18))) ^ ((al4 >>> (41-32)) | (ah4 << (32-(41-32)))); + l = ((al4 >>> 14) | (ah4 << (32-14))) ^ ((al4 >>> 18) | (ah4 << (32-18))) ^ ((ah4 >>> (41-32)) | (al4 << (32-(41-32)))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // Ch + h = (ah4 & ah5) ^ (~ah4 & ah6); + l = (al4 & al5) ^ (~al4 & al6); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // K + h = K[i*2]; + l = K[i*2+1]; + + a += l & 0xffff; b += l >>> 16; + c += h 
& 0xffff; d += h >>> 16; + + // w + h = wh[i%16]; + l = wl[i%16]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + th = c & 0xffff | d << 16; + tl = a & 0xffff | b << 16; + + // add + h = th; + l = tl; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + // Sigma0 + h = ((ah0 >>> 28) | (al0 << (32-28))) ^ ((al0 >>> (34-32)) | (ah0 << (32-(34-32)))) ^ ((al0 >>> (39-32)) | (ah0 << (32-(39-32)))); + l = ((al0 >>> 28) | (ah0 << (32-28))) ^ ((ah0 >>> (34-32)) | (al0 << (32-(34-32)))) ^ ((ah0 >>> (39-32)) | (al0 << (32-(39-32)))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // Maj + h = (ah0 & ah1) ^ (ah0 & ah2) ^ (ah1 & ah2); + l = (al0 & al1) ^ (al0 & al2) ^ (al1 & al2); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + bh7 = (c & 0xffff) | (d << 16); + bl7 = (a & 0xffff) | (b << 16); + + // add + h = bh3; + l = bl3; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = th; + l = tl; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + bh3 = (c & 0xffff) | (d << 16); + bl3 = (a & 0xffff) | (b << 16); + + ah1 = bh0; + ah2 = bh1; + ah3 = bh2; + ah4 = bh3; + ah5 = bh4; + ah6 = bh5; + ah7 = bh6; + ah0 = bh7; + + al1 = bl0; + al2 = bl1; + al3 = bl2; + al4 = bl3; + al5 = bl4; + al6 = bl5; + al7 = bl6; + al0 = bl7; + + if (i%16 === 15) { + for (j = 0; j < 16; j++) { + // add + h = wh[j]; + l = wl[j]; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = wh[(j+9)%16]; + l = wl[(j+9)%16]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // sigma0 + th = wh[(j+1)%16]; + tl = wl[(j+1)%16]; + h = ((th >>> 1) | (tl << (32-1))) ^ ((th >>> 8) | (tl << (32-8))) ^ (th >>> 7); + l = ((tl >>> 1) | (th << (32-1))) ^ ((tl >>> 8) | (th << (32-8))) ^ ((tl 
>>> 7) | (th << (32-7))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // sigma1 + th = wh[(j+14)%16]; + tl = wl[(j+14)%16]; + h = ((th >>> 19) | (tl << (32-19))) ^ ((tl >>> (61-32)) | (th << (32-(61-32)))) ^ (th >>> 6); + l = ((tl >>> 19) | (th << (32-19))) ^ ((th >>> (61-32)) | (tl << (32-(61-32)))) ^ ((tl >>> 6) | (th << (32-6))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + wh[j] = (c & 0xffff) | (d << 16); + wl[j] = (a & 0xffff) | (b << 16); + } + } + } + + // add + h = ah0; + l = al0; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[0]; + l = hl[0]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[0] = ah0 = (c & 0xffff) | (d << 16); + hl[0] = al0 = (a & 0xffff) | (b << 16); + + h = ah1; + l = al1; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[1]; + l = hl[1]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[1] = ah1 = (c & 0xffff) | (d << 16); + hl[1] = al1 = (a & 0xffff) | (b << 16); + + h = ah2; + l = al2; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[2]; + l = hl[2]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[2] = ah2 = (c & 0xffff) | (d << 16); + hl[2] = al2 = (a & 0xffff) | (b << 16); + + h = ah3; + l = al3; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[3]; + l = hl[3]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[3] = ah3 = (c & 0xffff) | (d << 16); + hl[3] = al3 = (a & 0xffff) | (b << 16); + + h = ah4; + l = al4; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[4]; + l 
= hl[4]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[4] = ah4 = (c & 0xffff) | (d << 16); + hl[4] = al4 = (a & 0xffff) | (b << 16); + + h = ah5; + l = al5; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[5]; + l = hl[5]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[5] = ah5 = (c & 0xffff) | (d << 16); + hl[5] = al5 = (a & 0xffff) | (b << 16); + + h = ah6; + l = al6; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[6]; + l = hl[6]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[6] = ah6 = (c & 0xffff) | (d << 16); + hl[6] = al6 = (a & 0xffff) | (b << 16); + + h = ah7; + l = al7; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[7]; + l = hl[7]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[7] = ah7 = (c & 0xffff) | (d << 16); + hl[7] = al7 = (a & 0xffff) | (b << 16); + + pos += 128; + n -= 128; + } + + return n; +} + +function crypto_hash(out, m, n) { + var hh = new Int32Array(8), + hl = new Int32Array(8), + x = new Uint8Array(256), + i, b = n; + + hh[0] = 0x6a09e667; + hh[1] = 0xbb67ae85; + hh[2] = 0x3c6ef372; + hh[3] = 0xa54ff53a; + hh[4] = 0x510e527f; + hh[5] = 0x9b05688c; + hh[6] = 0x1f83d9ab; + hh[7] = 0x5be0cd19; + + hl[0] = 0xf3bcc908; + hl[1] = 0x84caa73b; + hl[2] = 0xfe94f82b; + hl[3] = 0x5f1d36f1; + hl[4] = 0xade682d1; + hl[5] = 0x2b3e6c1f; + hl[6] = 0xfb41bd6b; + hl[7] = 0x137e2179; + + crypto_hashblocks_hl(hh, hl, m, n); + n %= 128; + + for (i = 0; i < n; i++) x[i] = m[b-n+i]; + x[n] = 128; + + n = 256-128*(n<112?1:0); + x[n-9] = 0; + ts64(x, n-8, (b / 0x20000000) | 0, b << 3); + crypto_hashblocks_hl(hh, hl, x, n); + + for (i = 0; i < 8; 
i++) ts64(out, 8*i, hh[i], hl[i]); + + return 0; +} + +function add(p, q) { + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(), + g = gf(), h = gf(), t = gf(); + + Z(a, p[1], p[0]); + Z(t, q[1], q[0]); + M(a, a, t); + A(b, p[0], p[1]); + A(t, q[0], q[1]); + M(b, b, t); + M(c, p[3], q[3]); + M(c, c, D2); + M(d, p[2], q[2]); + A(d, d, d); + Z(e, b, a); + Z(f, d, c); + A(g, d, c); + A(h, b, a); + + M(p[0], e, f); + M(p[1], h, g); + M(p[2], g, f); + M(p[3], e, h); +} + +function cswap(p, q, b) { + var i; + for (i = 0; i < 4; i++) { + sel25519(p[i], q[i], b); + } +} + +function pack(r, p) { + var tx = gf(), ty = gf(), zi = gf(); + inv25519(zi, p[2]); + M(tx, p[0], zi); + M(ty, p[1], zi); + pack25519(r, ty); + r[31] ^= par25519(tx) << 7; +} + +function scalarmult(p, q, s) { + var b, i; + set25519(p[0], gf0); + set25519(p[1], gf1); + set25519(p[2], gf1); + set25519(p[3], gf0); + for (i = 255; i >= 0; --i) { + b = (s[(i/8)|0] >> (i&7)) & 1; + cswap(p, q, b); + add(q, p); + add(p, p); + cswap(p, q, b); + } +} + +function scalarbase(p, s) { + var q = [gf(), gf(), gf(), gf()]; + set25519(q[0], X); + set25519(q[1], Y); + set25519(q[2], gf1); + M(q[3], X, Y); + scalarmult(p, q, s); +} + +function crypto_sign_keypair(pk, sk, seeded) { + var d = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()]; + var i; + + if (!seeded) randombytes(sk, 32); + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + scalarbase(p, d); + pack(pk, p); + + for (i = 0; i < 32; i++) sk[i+32] = pk[i]; + return 0; +} + +var L = new Float64Array([0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]); + +function modL(r, x) { + var carry, i, j, k; + for (i = 63; i >= 32; --i) { + carry = 0; + for (j = i - 32, k = i - 12; j < k; ++j) { + x[j] += carry - 16 * x[i] * L[j - (i - 32)]; + carry = Math.floor((x[j] + 128) / 256); + x[j] -= carry * 256; + } + x[j] += carry; + x[i] = 0; 
+ } + carry = 0; + for (j = 0; j < 32; j++) { + x[j] += carry - (x[31] >> 4) * L[j]; + carry = x[j] >> 8; + x[j] &= 255; + } + for (j = 0; j < 32; j++) x[j] -= carry * L[j]; + for (i = 0; i < 32; i++) { + x[i+1] += x[i] >> 8; + r[i] = x[i] & 255; + } +} + +function reduce(r) { + var x = new Float64Array(64), i; + for (i = 0; i < 64; i++) x[i] = r[i]; + for (i = 0; i < 64; i++) r[i] = 0; + modL(r, x); +} + +// Note: difference from C - smlen returned, not passed as argument. +function crypto_sign(sm, m, n, sk) { + var d = new Uint8Array(64), h = new Uint8Array(64), r = new Uint8Array(64); + var i, j, x = new Float64Array(64); + var p = [gf(), gf(), gf(), gf()]; + + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + var smlen = n + 64; + for (i = 0; i < n; i++) sm[64 + i] = m[i]; + for (i = 0; i < 32; i++) sm[32 + i] = d[32 + i]; + + crypto_hash(r, sm.subarray(32), n+32); + reduce(r); + scalarbase(p, r); + pack(sm, p); + + for (i = 32; i < 64; i++) sm[i] = sk[i]; + crypto_hash(h, sm, n + 64); + reduce(h); + + for (i = 0; i < 64; i++) x[i] = 0; + for (i = 0; i < 32; i++) x[i] = r[i]; + for (i = 0; i < 32; i++) { + for (j = 0; j < 32; j++) { + x[i+j] += h[i] * d[j]; + } + } + + modL(sm.subarray(32), x); + return smlen; +} + +function unpackneg(r, p) { + var t = gf(), chk = gf(), num = gf(), + den = gf(), den2 = gf(), den4 = gf(), + den6 = gf(); + + set25519(r[2], gf1); + unpack25519(r[1], p); + S(num, r[1]); + M(den, num, D); + Z(num, num, r[2]); + A(den, r[2], den); + + S(den2, den); + S(den4, den2); + M(den6, den4, den2); + M(t, den6, num); + M(t, t, den); + + pow2523(t, t); + M(t, t, num); + M(t, t, den); + M(t, t, den); + M(r[0], t, den); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) M(r[0], r[0], I); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) return -1; + + if (par25519(r[0]) === (p[31]>>7)) Z(r[0], gf0, r[0]); + + M(r[3], r[0], r[1]); + return 0; +} + +function crypto_sign_open(m, sm, n, pk) { + var i; 
+ var t = new Uint8Array(32), h = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()], + q = [gf(), gf(), gf(), gf()]; + + if (n < 64) return -1; + + if (unpackneg(q, pk)) return -1; + + for (i = 0; i < n; i++) m[i] = sm[i]; + for (i = 0; i < 32; i++) m[i+32] = pk[i]; + crypto_hash(h, m, n); + reduce(h); + scalarmult(p, q, h); + + scalarbase(q, sm.subarray(32)); + add(p, q); + pack(t, p); + + n -= 64; + if (crypto_verify_32(sm, 0, t, 0)) { + for (i = 0; i < n; i++) m[i] = 0; + return -1; + } + + for (i = 0; i < n; i++) m[i] = sm[i + 64]; + return n; +} + +var crypto_secretbox_KEYBYTES = 32, + crypto_secretbox_NONCEBYTES = 24, + crypto_secretbox_ZEROBYTES = 32, + crypto_secretbox_BOXZEROBYTES = 16, + crypto_scalarmult_BYTES = 32, + crypto_scalarmult_SCALARBYTES = 32, + crypto_box_PUBLICKEYBYTES = 32, + crypto_box_SECRETKEYBYTES = 32, + crypto_box_BEFORENMBYTES = 32, + crypto_box_NONCEBYTES = crypto_secretbox_NONCEBYTES, + crypto_box_ZEROBYTES = crypto_secretbox_ZEROBYTES, + crypto_box_BOXZEROBYTES = crypto_secretbox_BOXZEROBYTES, + crypto_sign_BYTES = 64, + crypto_sign_PUBLICKEYBYTES = 32, + crypto_sign_SECRETKEYBYTES = 64, + crypto_sign_SEEDBYTES = 32, + crypto_hash_BYTES = 64; + +nacl.lowlevel = { + crypto_core_hsalsa20: crypto_core_hsalsa20, + crypto_stream_xor: crypto_stream_xor, + crypto_stream: crypto_stream, + crypto_stream_salsa20_xor: crypto_stream_salsa20_xor, + crypto_stream_salsa20: crypto_stream_salsa20, + crypto_onetimeauth: crypto_onetimeauth, + crypto_onetimeauth_verify: crypto_onetimeauth_verify, + crypto_verify_16: crypto_verify_16, + crypto_verify_32: crypto_verify_32, + crypto_secretbox: crypto_secretbox, + crypto_secretbox_open: crypto_secretbox_open, + crypto_scalarmult: crypto_scalarmult, + crypto_scalarmult_base: crypto_scalarmult_base, + crypto_box_beforenm: crypto_box_beforenm, + crypto_box_afternm: crypto_box_afternm, + crypto_box: crypto_box, + crypto_box_open: crypto_box_open, + crypto_box_keypair: crypto_box_keypair, + crypto_hash: 
crypto_hash, + crypto_sign: crypto_sign, + crypto_sign_keypair: crypto_sign_keypair, + crypto_sign_open: crypto_sign_open, + + crypto_secretbox_KEYBYTES: crypto_secretbox_KEYBYTES, + crypto_secretbox_NONCEBYTES: crypto_secretbox_NONCEBYTES, + crypto_secretbox_ZEROBYTES: crypto_secretbox_ZEROBYTES, + crypto_secretbox_BOXZEROBYTES: crypto_secretbox_BOXZEROBYTES, + crypto_scalarmult_BYTES: crypto_scalarmult_BYTES, + crypto_scalarmult_SCALARBYTES: crypto_scalarmult_SCALARBYTES, + crypto_box_PUBLICKEYBYTES: crypto_box_PUBLICKEYBYTES, + crypto_box_SECRETKEYBYTES: crypto_box_SECRETKEYBYTES, + crypto_box_BEFORENMBYTES: crypto_box_BEFORENMBYTES, + crypto_box_NONCEBYTES: crypto_box_NONCEBYTES, + crypto_box_ZEROBYTES: crypto_box_ZEROBYTES, + crypto_box_BOXZEROBYTES: crypto_box_BOXZEROBYTES, + crypto_sign_BYTES: crypto_sign_BYTES, + crypto_sign_PUBLICKEYBYTES: crypto_sign_PUBLICKEYBYTES, + crypto_sign_SECRETKEYBYTES: crypto_sign_SECRETKEYBYTES, + crypto_sign_SEEDBYTES: crypto_sign_SEEDBYTES, + crypto_hash_BYTES: crypto_hash_BYTES, + + gf: gf, + D: D, + L: L, + pack25519: pack25519, + unpack25519: unpack25519, + M: M, + A: A, + S: S, + Z: Z, + pow2523: pow2523, + add: add, + set25519: set25519, + modL: modL, + scalarmult: scalarmult, + scalarbase: scalarbase, +}; + +/* High-level API */ + +function checkLengths(k, n) { + if (k.length !== crypto_secretbox_KEYBYTES) throw new Error('bad key size'); + if (n.length !== crypto_secretbox_NONCEBYTES) throw new Error('bad nonce size'); +} + +function checkBoxLengths(pk, sk) { + if (pk.length !== crypto_box_PUBLICKEYBYTES) throw new Error('bad public key size'); + if (sk.length !== crypto_box_SECRETKEYBYTES) throw new Error('bad secret key size'); +} + +function checkArrayTypes() { + for (var i = 0; i < arguments.length; i++) { + if (!(arguments[i] instanceof Uint8Array)) + throw new TypeError('unexpected type, use Uint8Array'); + } +} + +function cleanup(arr) { + for (var i = 0; i < arr.length; i++) arr[i] = 0; +} + +nacl.randomBytes = 
function(n) { + var b = new Uint8Array(n); + randombytes(b, n); + return b; +}; + +nacl.secretbox = function(msg, nonce, key) { + checkArrayTypes(msg, nonce, key); + checkLengths(key, nonce); + var m = new Uint8Array(crypto_secretbox_ZEROBYTES + msg.length); + var c = new Uint8Array(m.length); + for (var i = 0; i < msg.length; i++) m[i+crypto_secretbox_ZEROBYTES] = msg[i]; + crypto_secretbox(c, m, m.length, nonce, key); + return c.subarray(crypto_secretbox_BOXZEROBYTES); +}; + +nacl.secretbox.open = function(box, nonce, key) { + checkArrayTypes(box, nonce, key); + checkLengths(key, nonce); + var c = new Uint8Array(crypto_secretbox_BOXZEROBYTES + box.length); + var m = new Uint8Array(c.length); + for (var i = 0; i < box.length; i++) c[i+crypto_secretbox_BOXZEROBYTES] = box[i]; + if (c.length < 32) return null; + if (crypto_secretbox_open(m, c, c.length, nonce, key) !== 0) return null; + return m.subarray(crypto_secretbox_ZEROBYTES); +}; + +nacl.secretbox.keyLength = crypto_secretbox_KEYBYTES; +nacl.secretbox.nonceLength = crypto_secretbox_NONCEBYTES; +nacl.secretbox.overheadLength = crypto_secretbox_BOXZEROBYTES; + +nacl.scalarMult = function(n, p) { + checkArrayTypes(n, p); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + if (p.length !== crypto_scalarmult_BYTES) throw new Error('bad p size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult(q, n, p); + return q; +}; + +nacl.scalarMult.base = function(n) { + checkArrayTypes(n); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult_base(q, n); + return q; +}; + +nacl.scalarMult.scalarLength = crypto_scalarmult_SCALARBYTES; +nacl.scalarMult.groupElementLength = crypto_scalarmult_BYTES; + +nacl.box = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox(msg, nonce, k); +}; + +nacl.box.before = 
function(publicKey, secretKey) { + checkArrayTypes(publicKey, secretKey); + checkBoxLengths(publicKey, secretKey); + var k = new Uint8Array(crypto_box_BEFORENMBYTES); + crypto_box_beforenm(k, publicKey, secretKey); + return k; +}; + +nacl.box.after = nacl.secretbox; + +nacl.box.open = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox.open(msg, nonce, k); +}; + +nacl.box.open.after = nacl.secretbox.open; + +nacl.box.keyPair = function() { + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_box_SECRETKEYBYTES); + crypto_box_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.box.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_box_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + crypto_scalarmult_base(pk, secretKey); + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.box.publicKeyLength = crypto_box_PUBLICKEYBYTES; +nacl.box.secretKeyLength = crypto_box_SECRETKEYBYTES; +nacl.box.sharedKeyLength = crypto_box_BEFORENMBYTES; +nacl.box.nonceLength = crypto_box_NONCEBYTES; +nacl.box.overheadLength = nacl.secretbox.overheadLength; + +nacl.sign = function(msg, secretKey) { + checkArrayTypes(msg, secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var signedMsg = new Uint8Array(crypto_sign_BYTES+msg.length); + crypto_sign(signedMsg, msg, msg.length, secretKey); + return signedMsg; +}; + +nacl.sign.open = function(signedMsg, publicKey) { + checkArrayTypes(signedMsg, publicKey); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var tmp = new Uint8Array(signedMsg.length); + var mlen = crypto_sign_open(tmp, signedMsg, signedMsg.length, publicKey); + if (mlen < 0) return null; + var m = new 
Uint8Array(mlen); + for (var i = 0; i < m.length; i++) m[i] = tmp[i]; + return m; +}; + +nacl.sign.detached = function(msg, secretKey) { + var signedMsg = nacl.sign(msg, secretKey); + var sig = new Uint8Array(crypto_sign_BYTES); + for (var i = 0; i < sig.length; i++) sig[i] = signedMsg[i]; + return sig; +}; + +nacl.sign.detached.verify = function(msg, sig, publicKey) { + checkArrayTypes(msg, sig, publicKey); + if (sig.length !== crypto_sign_BYTES) + throw new Error('bad signature size'); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var sm = new Uint8Array(crypto_sign_BYTES + msg.length); + var m = new Uint8Array(crypto_sign_BYTES + msg.length); + var i; + for (i = 0; i < crypto_sign_BYTES; i++) sm[i] = sig[i]; + for (i = 0; i < msg.length; i++) sm[i+crypto_sign_BYTES] = msg[i]; + return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0); +}; + +nacl.sign.keyPair = function() { + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + crypto_sign_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + for (var i = 0; i < pk.length; i++) pk[i] = secretKey[32+i]; + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.sign.keyPair.fromSeed = function(seed) { + checkArrayTypes(seed); + if (seed.length !== crypto_sign_SEEDBYTES) + throw new Error('bad seed size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + for (var i = 0; i < 32; i++) sk[i] = seed[i]; + crypto_sign_keypair(pk, sk, true); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.publicKeyLength = crypto_sign_PUBLICKEYBYTES; +nacl.sign.secretKeyLength = 
crypto_sign_SECRETKEYBYTES; +nacl.sign.seedLength = crypto_sign_SEEDBYTES; +nacl.sign.signatureLength = crypto_sign_BYTES; + +nacl.hash = function(msg) { + checkArrayTypes(msg); + var h = new Uint8Array(crypto_hash_BYTES); + crypto_hash(h, msg, msg.length); + return h; +}; + +nacl.hash.hashLength = crypto_hash_BYTES; + +nacl.verify = function(x, y) { + checkArrayTypes(x, y); + // Zero length arguments are considered not equal. + if (x.length === 0 || y.length === 0) return false; + if (x.length !== y.length) return false; + return (vn(x, 0, y, 0, x.length) === 0) ? true : false; +}; + +nacl.setPRNG = function(fn) { + randombytes = fn; +}; + +(function() { + // Initialize PRNG if environment provides CSPRNG. + // If not, methods calling randombytes will throw. + var crypto = typeof self !== 'undefined' ? (self.crypto || self.msCrypto) : null; + if (crypto && crypto.getRandomValues) { + // Browsers. + var QUOTA = 65536; + nacl.setPRNG(function(x, n) { + var i, v = new Uint8Array(n); + for (i = 0; i < n; i += QUOTA) { + crypto.getRandomValues(v.subarray(i, i + Math.min(n - i, QUOTA))); + } + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } else if (true) { + // Node.js. + crypto = __webpack_require__(71281); + if (crypto && crypto.randomBytes) { + nacl.setPRNG(function(x, n) { + var i, v = crypto.randomBytes(n); + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } + } +})(); + +})( true && module.exports ? 
module.exports : (self.nacl = self.nacl || {})); + + /***/ }), /***/ 46579: @@ -121748,6 +148344,482 @@ module.exports = function whichTypedArray(value) { }; +/***/ }), + +/***/ 57510: +/***/ ((module) => { + +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend() { + var target = {} + + for (var i = 0; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} + + +/***/ }), + +/***/ 40259: +/***/ ((module) => { + +"use strict"; + +module.exports = function (Yallist) { + Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } + } +} + + +/***/ }), + +/***/ 28799: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + +module.exports = Yallist + +Yallist.Node = Node +Yallist.create = Yallist + +function Yallist (list) { + var self = this + if (!(self instanceof Yallist)) { + self = new Yallist() + } + + self.tail = null + self.head = null + self.length = 0 + + if (list && typeof list.forEach === 'function') { + list.forEach(function (item) { + self.push(item) + }) + } else if (arguments.length > 0) { + for (var i = 0, l = arguments.length; i < l; i++) { + self.push(arguments[i]) + } + } + + return self +} + +Yallist.prototype.removeNode = function (node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list') + } + + var next = node.next + var prev = node.prev + + if (next) { + next.prev = prev + } + + if (prev) { + prev.next = next + } + + if (node === this.head) { + this.head = next + } + if (node === this.tail) { + this.tail = prev + } + + node.list.length-- + node.next = null + node.prev = null + node.list = null + + return next +} + +Yallist.prototype.unshiftNode = function (node) { + if (node === this.head) { + return + } + + 
if (node.list) { + node.list.removeNode(node) + } + + var head = this.head + node.list = this + node.next = head + if (head) { + head.prev = node + } + + this.head = node + if (!this.tail) { + this.tail = node + } + this.length++ +} + +Yallist.prototype.pushNode = function (node) { + if (node === this.tail) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var tail = this.tail + node.list = this + node.prev = tail + if (tail) { + tail.next = node + } + + this.tail = node + if (!this.head) { + this.head = node + } + this.length++ +} + +Yallist.prototype.push = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + push(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.unshift = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + unshift(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.pop = function () { + if (!this.tail) { + return undefined + } + + var res = this.tail.value + this.tail = this.tail.prev + if (this.tail) { + this.tail.next = null + } else { + this.head = null + } + this.length-- + return res +} + +Yallist.prototype.shift = function () { + if (!this.head) { + return undefined + } + + var res = this.head.value + this.head = this.head.next + if (this.head) { + this.head.prev = null + } else { + this.tail = null + } + this.length-- + return res +} + +Yallist.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.head, i = 0; walker !== null; i++) { + fn.call(thisp, walker.value, i, this) + walker = walker.next + } +} + +Yallist.prototype.forEachReverse = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { + fn.call(thisp, walker.value, i, this) + walker = walker.prev + } +} + +Yallist.prototype.get = function (n) { + for (var i = 0, walker = this.head; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = 
walker.next + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.getReverse = function (n) { + for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.prev + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.map = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.head; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.next + } + return res +} + +Yallist.prototype.mapReverse = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.tail; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.prev + } + return res +} + +Yallist.prototype.reduce = function (fn, initial) { + var acc + var walker = this.head + if (arguments.length > 1) { + acc = initial + } else if (this.head) { + walker = this.head.next + acc = this.head.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = 0; walker !== null; i++) { + acc = fn(acc, walker.value, i) + walker = walker.next + } + + return acc +} + +Yallist.prototype.reduceReverse = function (fn, initial) { + var acc + var walker = this.tail + if (arguments.length > 1) { + acc = initial + } else if (this.tail) { + walker = this.tail.prev + acc = this.tail.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = this.length - 1; walker !== null; i--) { + acc = fn(acc, walker.value, i) + walker = walker.prev + } + + return acc +} + +Yallist.prototype.toArray = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.head; walker !== null; i++) { + arr[i] = walker.value + walker = walker.next + } + return arr +} + +Yallist.prototype.toArrayReverse = function () { + var arr = new Array(this.length) + for (var i = 
0, walker = this.tail; walker !== null; i++) { + arr[i] = walker.value + walker = walker.prev + } + return arr +} + +Yallist.prototype.slice = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = 0, walker = this.head; walker !== null && i < from; i++) { + walker = walker.next + } + for (; walker !== null && i < to; i++, walker = walker.next) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.sliceReverse = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { + walker = walker.prev + } + for (; walker !== null && i > from; i--, walker = walker.prev) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.splice = function (start, deleteCount, ...nodes) { + if (start > this.length) { + start = this.length - 1 + } + if (start < 0) { + start = this.length + start; + } + + for (var i = 0, walker = this.head; walker !== null && i < start; i++) { + walker = walker.next + } + + var ret = [] + for (var i = 0; walker && i < deleteCount; i++) { + ret.push(walker.value) + walker = this.removeNode(walker) + } + if (walker === null) { + walker = this.tail + } + + if (walker !== this.head && walker !== this.tail) { + walker = walker.prev + } + + for (var i = 0; i < nodes.length; i++) { + walker = insert(this, walker, nodes[i]) + } + return ret; +} + +Yallist.prototype.reverse = function () { + var head = this.head + var tail = this.tail + for (var walker = head; 
walker !== null; walker = walker.prev) { + var p = walker.prev + walker.prev = walker.next + walker.next = p + } + this.head = tail + this.tail = head + return this +} + +function insert (self, node, value) { + var inserted = node === self.head ? + new Node(value, null, node, self) : + new Node(value, node, node.next, self) + + if (inserted.next === null) { + self.tail = inserted + } + if (inserted.prev === null) { + self.head = inserted + } + + self.length++ + + return inserted +} + +function push (self, item) { + self.tail = new Node(item, self.tail, null, self) + if (!self.head) { + self.head = self.tail + } + self.length++ +} + +function unshift (self, item) { + self.head = new Node(item, null, self.head, self) + if (!self.tail) { + self.tail = self.head + } + self.length++ +} + +function Node (value, prev, next, list) { + if (!(this instanceof Node)) { + return new Node(value, prev, next, list) + } + + this.list = list + this.value = value + + if (prev) { + prev.next = this + this.prev = prev + } else { + this.prev = null + } + + if (next) { + next.prev = this + this.next = next + } else { + this.next = null + } +} + +try { + // add if support for Symbol.iterator is present + __webpack_require__(40259)(Yallist) +} catch (er) {} + + /***/ }), /***/ 5183: @@ -121813,6 +148885,20 @@ module.exports = function whichTypedArray(value) { /***/ }), +/***/ 23276: +/***/ (() => { + +/* (ignored) */ + +/***/ }), + +/***/ 59676: +/***/ (() => { + +/* (ignored) */ + +/***/ }), + /***/ 64688: /***/ (() => { @@ -121841,6 +148927,20 @@ module.exports = function whichTypedArray(value) { /***/ }), +/***/ 59817: +/***/ (() => { + +/* (ignored) */ + +/***/ }), + +/***/ 71281: +/***/ (() => { + +/* (ignored) */ + +/***/ }), + /***/ 20933: /***/ (() => { @@ -121890,13 +148990,6 @@ module.exports = function whichTypedArray(value) { /***/ }), -/***/ 40262: -/***/ (() => { - -/* (ignored) */ - -/***/ }), - /***/ 40607: /***/ (() => { @@ -121927,773 +149020,6 @@ module.exports = 
function availableTypedArrays() { }; -/***/ }), - -/***/ 89190: -/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { - -"use strict"; -/* harmony export */ __webpack_require__.d(__webpack_exports__, { -/* harmony export */ CG: () => (/* binding */ output), -/* harmony export */ ai: () => (/* binding */ number), -/* harmony export */ ee: () => (/* binding */ bytes), -/* harmony export */ t2: () => (/* binding */ exists), -/* harmony export */ tW: () => (/* binding */ hash) -/* harmony export */ }); -/* unused harmony export bool */ -function number(n) { - if (!Number.isSafeInteger(n) || n < 0) - throw new Error(`Wrong positive integer: ${n}`); -} -function bool(b) { - if (typeof b !== 'boolean') - throw new Error(`Expected boolean, not ${b}`); -} -function bytes(b, ...lengths) { - if (!(b instanceof Uint8Array)) - throw new Error('Expected Uint8Array'); - if (lengths.length > 0 && !lengths.includes(b.length)) - throw new Error(`Expected Uint8Array of length ${lengths}, not of length=${b.length}`); -} -function hash(hash) { - if (typeof hash !== 'function' || typeof hash.create !== 'function') - throw new Error('Hash should be wrapped by utils.wrapConstructor'); - number(hash.outputLen); - number(hash.blockLen); -} -function exists(instance, checkFinished = true) { - if (instance.destroyed) - throw new Error('Hash instance has been destroyed'); - if (checkFinished && instance.finished) - throw new Error('Hash#digest() has already been called'); -} -function output(out, instance) { - bytes(out); - const min = instance.outputLen; - if (out.length < min) { - throw new Error(`digestInto() expects output buffer of length at least ${min}`); - } -} - -const assert = { number, bool, bytes, hash, exists, output }; -/* unused harmony default export */ var __WEBPACK_DEFAULT_EXPORT__ = ((/* unused pure expression or super */ null && (assert))); -//# sourceMappingURL=_assert.js.map - -/***/ }), - -/***/ 6800: -/***/ 
((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { - -"use strict"; -/* harmony export */ __webpack_require__.d(__webpack_exports__, { -/* harmony export */ D: () => (/* binding */ SHA2) -/* harmony export */ }); -/* harmony import */ var _assert_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(89190); -/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(32531); - - -// Polyfill for Safari 14 -function setBigUint64(view, byteOffset, value, isLE) { - if (typeof view.setBigUint64 === 'function') - return view.setBigUint64(byteOffset, value, isLE); - const _32n = BigInt(32); - const _u32_max = BigInt(0xffffffff); - const wh = Number((value >> _32n) & _u32_max); - const wl = Number(value & _u32_max); - const h = isLE ? 4 : 0; - const l = isLE ? 0 : 4; - view.setUint32(byteOffset + h, wh, isLE); - view.setUint32(byteOffset + l, wl, isLE); -} -// Base SHA2 class (RFC 6234) -class SHA2 extends _utils_js__WEBPACK_IMPORTED_MODULE_0__/* .Hash */ .Vw { - constructor(blockLen, outputLen, padOffset, isLE) { - super(); - this.blockLen = blockLen; - this.outputLen = outputLen; - this.padOffset = padOffset; - this.isLE = isLE; - this.finished = false; - this.length = 0; - this.pos = 0; - this.destroyed = false; - this.buffer = new Uint8Array(blockLen); - this.view = (0,_utils_js__WEBPACK_IMPORTED_MODULE_0__/* .createView */ .O8)(this.buffer); - } - update(data) { - (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .exists */ .t2)(this); - const { view, buffer, blockLen } = this; - data = (0,_utils_js__WEBPACK_IMPORTED_MODULE_0__/* .toBytes */ .ZJ)(data); - const len = data.length; - for (let pos = 0; pos < len;) { - const take = Math.min(blockLen - this.pos, len - pos); - // Fast path: we have at least one block in input, cast it to view and process - if (take === blockLen) { - const dataView = (0,_utils_js__WEBPACK_IMPORTED_MODULE_0__/* .createView */ .O8)(data); - for (; blockLen <= len - pos; pos += blockLen) 
- this.process(dataView, pos); - continue; - } - buffer.set(data.subarray(pos, pos + take), this.pos); - this.pos += take; - pos += take; - if (this.pos === blockLen) { - this.process(view, 0); - this.pos = 0; - } - } - this.length += data.length; - this.roundClean(); - return this; - } - digestInto(out) { - (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .exists */ .t2)(this); - (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .output */ .CG)(out, this); - this.finished = true; - // Padding - // We can avoid allocation of buffer for padding completely if it - // was previously not allocated here. But it won't change performance. - const { buffer, view, blockLen, isLE } = this; - let { pos } = this; - // append the bit '1' to the message - buffer[pos++] = 0b10000000; - this.buffer.subarray(pos).fill(0); - // we have less than padOffset left in buffer, so we cannot put length in current block, need process it and pad again - if (this.padOffset > blockLen - pos) { - this.process(view, 0); - pos = 0; - } - // Pad until full block byte with zeros - for (let i = pos; i < blockLen; i++) - buffer[i] = 0; - // Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that - // You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen. - // So we just write lowest 64 bits of that value. 
- setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE); - this.process(view, 0); - const oview = (0,_utils_js__WEBPACK_IMPORTED_MODULE_0__/* .createView */ .O8)(out); - const len = this.outputLen; - // NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT - if (len % 4) - throw new Error('_sha2: outputLen should be aligned to 32bit'); - const outLen = len / 4; - const state = this.get(); - if (outLen > state.length) - throw new Error('_sha2: outputLen bigger than state'); - for (let i = 0; i < outLen; i++) - oview.setUint32(4 * i, state[i], isLE); - } - digest() { - const { buffer, outputLen } = this; - this.digestInto(buffer); - const res = buffer.slice(0, outputLen); - this.destroy(); - return res; - } - _cloneInto(to) { - to || (to = new this.constructor()); - to.set(...this.get()); - const { blockLen, buffer, length, finished, destroyed, pos } = this; - to.length = length; - to.pos = pos; - to.finished = finished; - to.destroyed = destroyed; - if (length % blockLen) - to.buffer.set(buffer); - return to; - } -} -//# sourceMappingURL=_sha2.js.map - -/***/ }), - -/***/ 79271: -/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { - -"use strict"; -/* harmony export */ __webpack_require__.d(__webpack_exports__, { -/* harmony export */ Ay: () => (__WEBPACK_DEFAULT_EXPORT__), -/* harmony export */ B4: () => (/* binding */ rotlSL), -/* harmony export */ P5: () => (/* binding */ rotlSH), -/* harmony export */ WM: () => (/* binding */ rotlBH), -/* harmony export */ im: () => (/* binding */ rotlBL), -/* harmony export */ lD: () => (/* binding */ split) -/* harmony export */ }); -/* unused harmony exports fromBig, toBig, shrSH, shrSL, rotrSH, rotrSL, rotrBH, rotrBL, rotr32H, rotr32L, add, add3L, add3H, add4L, add4H, add5H, add5L */ -const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); -const _32n = /* @__PURE__ */ BigInt(32); -// We are not using BigUint64Array, because they are extremely 
slow as per 2022 -function fromBig(n, le = false) { - if (le) - return { h: Number(n & U32_MASK64), l: Number((n >> _32n) & U32_MASK64) }; - return { h: Number((n >> _32n) & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 }; -} -function split(lst, le = false) { - let Ah = new Uint32Array(lst.length); - let Al = new Uint32Array(lst.length); - for (let i = 0; i < lst.length; i++) { - const { h, l } = fromBig(lst[i], le); - [Ah[i], Al[i]] = [h, l]; - } - return [Ah, Al]; -} -const toBig = (h, l) => (BigInt(h >>> 0) << _32n) | BigInt(l >>> 0); -// for Shift in [0, 32) -const shrSH = (h, _l, s) => h >>> s; -const shrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); -// Right rotate for Shift in [1, 32) -const rotrSH = (h, l, s) => (h >>> s) | (l << (32 - s)); -const rotrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); -// Right rotate for Shift in (32, 64), NOTE: 32 is special case. -const rotrBH = (h, l, s) => (h << (64 - s)) | (l >>> (s - 32)); -const rotrBL = (h, l, s) => (h >>> (s - 32)) | (l << (64 - s)); -// Right rotate for shift===32 (just swaps l&h) -const rotr32H = (_h, l) => l; -const rotr32L = (h, _l) => h; -// Left rotate for Shift in [1, 32) -const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s)); -const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s)); -// Left rotate for Shift in (32, 64), NOTE: 32 is special case. -const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); -const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); -// JS uses 32-bit signed integers for bitwise operations which means we cannot -// simple take carry out of low bit sum by shift, we need to use division. 
-function add(Ah, Al, Bh, Bl) { - const l = (Al >>> 0) + (Bl >>> 0); - return { h: (Ah + Bh + ((l / 2 ** 32) | 0)) | 0, l: l | 0 }; -} -// Addition with more than 2 elements -const add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0); -const add3H = (low, Ah, Bh, Ch) => (Ah + Bh + Ch + ((low / 2 ** 32) | 0)) | 0; -const add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0); -const add4H = (low, Ah, Bh, Ch, Dh) => (Ah + Bh + Ch + Dh + ((low / 2 ** 32) | 0)) | 0; -const add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0); -const add5H = (low, Ah, Bh, Ch, Dh, Eh) => (Ah + Bh + Ch + Dh + Eh + ((low / 2 ** 32) | 0)) | 0; -// prettier-ignore - -// prettier-ignore -const u64 = { - fromBig, split, toBig, - shrSH, shrSL, - rotrSH, rotrSL, rotrBH, rotrBL, - rotr32H, rotr32L, - rotlSH, rotlSL, rotlBH, rotlBL, - add, add3L, add3H, add4L, add4H, add5H, add5L, -}; -/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (u64); -//# sourceMappingURL=_u64.js.map - -/***/ }), - -/***/ 21454: -/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { - -"use strict"; -/* harmony export */ __webpack_require__.d(__webpack_exports__, { -/* harmony export */ w: () => (/* binding */ hmac) -/* harmony export */ }); -/* unused harmony export HMAC */ -/* harmony import */ var _assert_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(89190); -/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(32531); - - -// HMAC (RFC 2104) -class HMAC extends _utils_js__WEBPACK_IMPORTED_MODULE_0__/* .Hash */ .Vw { - constructor(hash, _key) { - super(); - this.finished = false; - this.destroyed = false; - (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .hash */ .tW)(hash); - const key = (0,_utils_js__WEBPACK_IMPORTED_MODULE_0__/* .toBytes */ .ZJ)(_key); - this.iHash = hash.create(); - if (typeof this.iHash.update !== 'function') - throw new Error('Expected 
instance of class which extends utils.Hash'); - this.blockLen = this.iHash.blockLen; - this.outputLen = this.iHash.outputLen; - const blockLen = this.blockLen; - const pad = new Uint8Array(blockLen); - // blockLen can be bigger than outputLen - pad.set(key.length > blockLen ? hash.create().update(key).digest() : key); - for (let i = 0; i < pad.length; i++) - pad[i] ^= 0x36; - this.iHash.update(pad); - // By doing update (processing of first block) of outer hash here we can re-use it between multiple calls via clone - this.oHash = hash.create(); - // Undo internal XOR && apply outer XOR - for (let i = 0; i < pad.length; i++) - pad[i] ^= 0x36 ^ 0x5c; - this.oHash.update(pad); - pad.fill(0); - } - update(buf) { - (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .exists */ .t2)(this); - this.iHash.update(buf); - return this; - } - digestInto(out) { - (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .exists */ .t2)(this); - (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .bytes */ .ee)(out, this.outputLen); - this.finished = true; - this.iHash.digestInto(out); - this.oHash.update(out); - this.oHash.digestInto(out); - this.destroy(); - } - digest() { - const out = new Uint8Array(this.oHash.outputLen); - this.digestInto(out); - return out; - } - _cloneInto(to) { - // Create new instance without calling constructor since key already in state and we don't know it. - to || (to = Object.create(Object.getPrototypeOf(this), {})); - const { oHash, iHash, finished, destroyed, blockLen, outputLen } = this; - to = to; - to.finished = finished; - to.destroyed = destroyed; - to.blockLen = blockLen; - to.outputLen = outputLen; - to.oHash = oHash._cloneInto(to.oHash); - to.iHash = iHash._cloneInto(to.iHash); - return to; - } - destroy() { - this.destroyed = true; - this.oHash.destroy(); - this.iHash.destroy(); - } -} -/** - * HMAC: RFC2104 message authentication code. - * @param hash - function that would be used e.g. 
sha256 - * @param key - message key - * @param message - message data - */ -const hmac = (hash, key, message) => new HMAC(hash, key).update(message).digest(); -hmac.create = (hash, key) => new HMAC(hash, key); -//# sourceMappingURL=hmac.js.map - -/***/ }), - -/***/ 2200: -/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { - -"use strict"; -/* harmony export */ __webpack_require__.d(__webpack_exports__, { -/* harmony export */ A: () => (/* binding */ pbkdf2) -/* harmony export */ }); -/* unused harmony export pbkdf2Async */ -/* harmony import */ var _assert_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(89190); -/* harmony import */ var _hmac_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(21454); -/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(32531); - - - -// Common prologue and epilogue for sync/async functions -function pbkdf2Init(hash, _password, _salt, _opts) { - (0,_assert_js__WEBPACK_IMPORTED_MODULE_0__/* .hash */ .tW)(hash); - const opts = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .checkOpts */ .tY)({ dkLen: 32, asyncTick: 10 }, _opts); - const { c, dkLen, asyncTick } = opts; - (0,_assert_js__WEBPACK_IMPORTED_MODULE_0__/* .number */ .ai)(c); - (0,_assert_js__WEBPACK_IMPORTED_MODULE_0__/* .number */ .ai)(dkLen); - (0,_assert_js__WEBPACK_IMPORTED_MODULE_0__/* .number */ .ai)(asyncTick); - if (c < 1) - throw new Error('PBKDF2: iterations (c) should be >= 1'); - const password = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .toBytes */ .ZJ)(_password); - const salt = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .toBytes */ .ZJ)(_salt); - // DK = PBKDF2(PRF, Password, Salt, c, dkLen); - const DK = new Uint8Array(dkLen); - // U1 = PRF(Password, Salt + INT_32_BE(i)) - const PRF = _hmac_js__WEBPACK_IMPORTED_MODULE_2__/* .hmac */ .w.create(hash, password); - const PRFSalt = PRF._cloneInto().update(salt); - return { c, dkLen, asyncTick, DK, PRF, PRFSalt }; -} -function 
pbkdf2Output(PRF, PRFSalt, DK, prfW, u) { - PRF.destroy(); - PRFSalt.destroy(); - if (prfW) - prfW.destroy(); - u.fill(0); - return DK; -} -/** - * PBKDF2-HMAC: RFC 2898 key derivation function - * @param hash - hash function that would be used e.g. sha256 - * @param password - password from which a derived key is generated - * @param salt - cryptographic salt - * @param opts - {c, dkLen} where c is work factor and dkLen is output message size - */ -function pbkdf2(hash, password, salt, opts) { - const { c, dkLen, DK, PRF, PRFSalt } = pbkdf2Init(hash, password, salt, opts); - let prfW; // Working copy - const arr = new Uint8Array(4); - const view = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .createView */ .O8)(arr); - const u = new Uint8Array(PRF.outputLen); - // DK = T1 + T2 + ⋯ + Tdklen/hlen - for (let ti = 1, pos = 0; pos < dkLen; ti++, pos += PRF.outputLen) { - // Ti = F(Password, Salt, c, i) - const Ti = DK.subarray(pos, pos + PRF.outputLen); - view.setInt32(0, ti, false); - // F(Password, Salt, c, i) = U1 ^ U2 ^ ⋯ ^ Uc - // U1 = PRF(Password, Salt + INT_32_BE(i)) - (prfW = PRFSalt._cloneInto(prfW)).update(arr).digestInto(u); - Ti.set(u.subarray(0, Ti.length)); - for (let ui = 1; ui < c; ui++) { - // Uc = PRF(Password, Uc−1) - PRF._cloneInto(prfW).update(u).digestInto(u); - for (let i = 0; i < Ti.length; i++) - Ti[i] ^= u[i]; - } - } - return pbkdf2Output(PRF, PRFSalt, DK, prfW, u); -} -async function pbkdf2Async(hash, password, salt, opts) { - const { c, dkLen, asyncTick, DK, PRF, PRFSalt } = pbkdf2Init(hash, password, salt, opts); - let prfW; // Working copy - const arr = new Uint8Array(4); - const view = createView(arr); - const u = new Uint8Array(PRF.outputLen); - // DK = T1 + T2 + ⋯ + Tdklen/hlen - for (let ti = 1, pos = 0; pos < dkLen; ti++, pos += PRF.outputLen) { - // Ti = F(Password, Salt, c, i) - const Ti = DK.subarray(pos, pos + PRF.outputLen); - view.setInt32(0, ti, false); - // F(Password, Salt, c, i) = U1 ^ U2 ^ ⋯ ^ Uc - // U1 = PRF(Password, 
Salt + INT_32_BE(i)) - (prfW = PRFSalt._cloneInto(prfW)).update(arr).digestInto(u); - Ti.set(u.subarray(0, Ti.length)); - await asyncLoop(c - 1, asyncTick, () => { - // Uc = PRF(Password, Uc−1) - PRF._cloneInto(prfW).update(u).digestInto(u); - for (let i = 0; i < Ti.length; i++) - Ti[i] ^= u[i]; - }); - } - return pbkdf2Output(PRF, PRFSalt, DK, prfW, u); -} -//# sourceMappingURL=pbkdf2.js.map - -/***/ }), - -/***/ 78226: -/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { - -"use strict"; -/* harmony export */ __webpack_require__.d(__webpack_exports__, { -/* harmony export */ s: () => (/* binding */ sha256) -/* harmony export */ }); -/* unused harmony export sha224 */ -/* harmony import */ var _sha2_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(6800); -/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(32531); - - -// SHA2-256 need to try 2^128 hashes to execute birthday attack. -// BTC network is doing 2^67 hashes/sec as per early 2023. -// Choice: a ? 
b : c -const Chi = (a, b, c) => (a & b) ^ (~a & c); -// Majority function, true if any two inpust is true -const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c); -// Round constants: -// first 32 bits of the fractional parts of the cube roots of the first 64 primes 2..311) -// prettier-ignore -const SHA256_K = /* @__PURE__ */ new Uint32Array([ - 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, - 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, - 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, - 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, - 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, - 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, - 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, - 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 -]); -// Initial state (first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19): -// prettier-ignore -const IV = /* @__PURE__ */ new Uint32Array([ - 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 -]); -// Temporary buffer, not used to store anything between runs -// Named this way because it matches specification. -const SHA256_W = /* @__PURE__ */ new Uint32Array(64); -class SHA256 extends _sha2_js__WEBPACK_IMPORTED_MODULE_0__/* .SHA2 */ .D { - constructor() { - super(64, 32, 8, false); - // We cannot use array here since array allows indexing by variable - // which means optimizer/compiler cannot use registers. 
- this.A = IV[0] | 0; - this.B = IV[1] | 0; - this.C = IV[2] | 0; - this.D = IV[3] | 0; - this.E = IV[4] | 0; - this.F = IV[5] | 0; - this.G = IV[6] | 0; - this.H = IV[7] | 0; - } - get() { - const { A, B, C, D, E, F, G, H } = this; - return [A, B, C, D, E, F, G, H]; - } - // prettier-ignore - set(A, B, C, D, E, F, G, H) { - this.A = A | 0; - this.B = B | 0; - this.C = C | 0; - this.D = D | 0; - this.E = E | 0; - this.F = F | 0; - this.G = G | 0; - this.H = H | 0; - } - process(view, offset) { - // Extend the first 16 words into the remaining 48 words w[16..63] of the message schedule array - for (let i = 0; i < 16; i++, offset += 4) - SHA256_W[i] = view.getUint32(offset, false); - for (let i = 16; i < 64; i++) { - const W15 = SHA256_W[i - 15]; - const W2 = SHA256_W[i - 2]; - const s0 = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(W15, 7) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(W15, 18) ^ (W15 >>> 3); - const s1 = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(W2, 17) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(W2, 19) ^ (W2 >>> 10); - SHA256_W[i] = (s1 + SHA256_W[i - 7] + s0 + SHA256_W[i - 16]) | 0; - } - // Compression function main loop, 64 rounds - let { A, B, C, D, E, F, G, H } = this; - for (let i = 0; i < 64; i++) { - const sigma1 = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(E, 6) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(E, 11) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(E, 25); - const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0; - const sigma0 = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(A, 2) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(A, 13) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(A, 22); - const T2 = (sigma0 + Maj(A, B, C)) | 0; - H = G; - G = F; - F = E; - E = (D + T1) | 0; - D = C; - C = B; - B = A; - A = (T1 + T2) | 0; - } - // Add the compressed chunk to the current hash 
value - A = (A + this.A) | 0; - B = (B + this.B) | 0; - C = (C + this.C) | 0; - D = (D + this.D) | 0; - E = (E + this.E) | 0; - F = (F + this.F) | 0; - G = (G + this.G) | 0; - H = (H + this.H) | 0; - this.set(A, B, C, D, E, F, G, H); - } - roundClean() { - SHA256_W.fill(0); - } - destroy() { - this.set(0, 0, 0, 0, 0, 0, 0, 0); - this.buffer.fill(0); - } -} -// Constants from https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf -class SHA224 extends SHA256 { - constructor() { - super(); - this.A = 0xc1059ed8 | 0; - this.B = 0x367cd507 | 0; - this.C = 0x3070dd17 | 0; - this.D = 0xf70e5939 | 0; - this.E = 0xffc00b31 | 0; - this.F = 0x68581511 | 0; - this.G = 0x64f98fa7 | 0; - this.H = 0xbefa4fa4 | 0; - this.outputLen = 28; - } -} -/** - * SHA2-256 hash function - * @param message - data that would be hashed - */ -const sha256 = /* @__PURE__ */ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .wrapConstructor */ .ld)(() => new SHA256()); -const sha224 = /* @__PURE__ */ (/* unused pure expression or super */ null && (wrapConstructor(() => new SHA224()))); -//# sourceMappingURL=sha256.js.map - -/***/ }), - -/***/ 32531: -/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { - -"use strict"; - -// EXPORTS -__webpack_require__.d(__webpack_exports__, { - Vw: () => (/* binding */ Hash), - $h: () => (/* binding */ asyncLoop), - tY: () => (/* binding */ checkOpts), - Id: () => (/* binding */ concatBytes), - O8: () => (/* binding */ createView), - po: () => (/* binding */ randomBytes), - Ow: () => (/* binding */ rotr), - ZJ: () => (/* binding */ toBytes), - DH: () => (/* binding */ u32), - ld: () => (/* binding */ wrapConstructor) -}); - -// UNUSED EXPORTS: bytesToHex, hexToBytes, isLE, nextTick, u8, utf8ToBytes, wrapConstructorWithOpts, wrapXOFConstructorWithOpts - -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/crypto.js -const crypto_crypto = typeof globalThis === 'object' && 'crypto' in globalThis ? 
globalThis.crypto : undefined; -//# sourceMappingURL=crypto.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/utils.js -/*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ -// We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. -// node.js versions earlier than v19 don't declare it in global scope. -// For node.js, package.json#exports field mapping rewrites import -// from `crypto` to `cryptoNode`, which imports native module. -// Makes the utils un-importable in browsers without a bundler. -// Once node.js 18 is deprecated, we can just drop the import. - -const u8a = (a) => a instanceof Uint8Array; -// Cast array to different type -const u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); -const u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); -// Cast array to view -const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); -// The rotate right (circular right shift) operation for uint32 -const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift); -// big-endian hardware is rare. Just in case someone still decides to run hashes: -// early-throw an error because we don't support BE yet. 
-const isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44; -if (!isLE) - throw new Error('Non little-endian hardware is not supported'); -const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); -/** - * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' - */ -function bytesToHex(bytes) { - if (!u8a(bytes)) - throw new Error('Uint8Array expected'); - // pre-caching improves the speed 6x - let hex = ''; - for (let i = 0; i < bytes.length; i++) { - hex += hexes[bytes[i]]; - } - return hex; -} -/** - * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) - */ -function hexToBytes(hex) { - if (typeof hex !== 'string') - throw new Error('hex string expected, got ' + typeof hex); - const len = hex.length; - if (len % 2) - throw new Error('padded hex string expected, got unpadded hex of length ' + len); - const array = new Uint8Array(len / 2); - for (let i = 0; i < array.length; i++) { - const j = i * 2; - const hexByte = hex.slice(j, j + 2); - const byte = Number.parseInt(hexByte, 16); - if (Number.isNaN(byte) || byte < 0) - throw new Error('Invalid byte sequence'); - array[i] = byte; - } - return array; -} -// There is no setImmediate in browser and setTimeout is slow. -// call of async fn will return Promise, which will be fullfiled only on -// next scheduler queue processing step and this is exactly what we need. 
-const nextTick = async () => { }; -// Returns control to thread each 'tick' ms to avoid blocking -async function asyncLoop(iters, tick, cb) { - let ts = Date.now(); - for (let i = 0; i < iters; i++) { - cb(i); - // Date.now() is not monotonic, so in case if clock goes backwards we return return control too - const diff = Date.now() - ts; - if (diff >= 0 && diff < tick) - continue; - await nextTick(); - ts += diff; - } -} -/** - * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) - */ -function utf8ToBytes(str) { - if (typeof str !== 'string') - throw new Error(`utf8ToBytes expected string, got ${typeof str}`); - return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 -} -/** - * Normalizes (non-hex) string or Uint8Array to Uint8Array. - * Warning: when Uint8Array is passed, it would NOT get copied. - * Keep in mind for future mutable operations. - */ -function toBytes(data) { - if (typeof data === 'string') - data = utf8ToBytes(data); - if (!u8a(data)) - throw new Error(`expected Uint8Array, got ${typeof data}`); - return data; -} -/** - * Copies several Uint8Arrays into one. 
- */ -function concatBytes(...arrays) { - const r = new Uint8Array(arrays.reduce((sum, a) => sum + a.length, 0)); - let pad = 0; // walk through each item, ensure they have proper type - arrays.forEach((a) => { - if (!u8a(a)) - throw new Error('Uint8Array expected'); - r.set(a, pad); - pad += a.length; - }); - return r; -} -// For runtime check if class implements interface -class Hash { - // Safe version that clones internal state - clone() { - return this._cloneInto(); - } -} -const toStr = {}.toString; -function checkOpts(defaults, opts) { - if (opts !== undefined && toStr.call(opts) !== '[object Object]') - throw new Error('Options should be object or undefined'); - const merged = Object.assign(defaults, opts); - return merged; -} -function wrapConstructor(hashCons) { - const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); - const tmp = hashCons(); - hashC.outputLen = tmp.outputLen; - hashC.blockLen = tmp.blockLen; - hashC.create = () => hashCons(); - return hashC; -} -function wrapConstructorWithOpts(hashCons) { - const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); - const tmp = hashCons({}); - hashC.outputLen = tmp.outputLen; - hashC.blockLen = tmp.blockLen; - hashC.create = (opts) => hashCons(opts); - return hashC; -} -function wrapXOFConstructorWithOpts(hashCons) { - const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); - const tmp = hashCons({}); - hashC.outputLen = tmp.outputLen; - hashC.blockLen = tmp.blockLen; - hashC.create = (opts) => hashCons(opts); - return hashC; -} -/** - * Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS. 
- */ -function randomBytes(bytesLength = 32) { - if (crypto_crypto && typeof crypto_crypto.getRandomValues === 'function') { - return crypto_crypto.getRandomValues(new Uint8Array(bytesLength)); - } - throw new Error('crypto.getRandomValues must be defined'); -} -//# sourceMappingURL=utils.js.map - /***/ }), /***/ 89082: @@ -160483,7 +186809,7 @@ class StructFragment extends NamedFragment { /* harmony export */ KA: () => (/* binding */ Interface) /* harmony export */ }); /* unused harmony exports LogDescription, TransactionDescription, ErrorDescription, Indexed */ -/* harmony import */ var _crypto_index_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(2011); +/* harmony import */ var _crypto_index_js__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(15539); /* harmony import */ var _hash_index_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(38264); /* harmony import */ var _utils_index_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(88081); /* harmony import */ var _utils_index_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(57339); @@ -162216,7 +188542,7 @@ class Typed { /* harmony export */ b: () => (/* binding */ getAddress) /* harmony export */ }); /* unused harmony export getIcapAddress */ -/* harmony import */ var _crypto_index_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(2011); +/* harmony import */ var _crypto_index_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(15539); /* harmony import */ var _utils_index_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(36212); /* harmony import */ var _utils_index_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(57339); @@ -163688,7 +190014,7 @@ class Contract extends _ContractBase() { /***/ }), -/***/ 68682: +/***/ 8180: /***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { "use strict"; @@ -163701,19 +190027,19 @@ __webpack_require__.d(__webpack_exports__, { po: () => (/* binding */ randomBytes) }); -// EXTERNAL MODULE: 
./node_modules/@noble/hashes/esm/hmac.js -var hmac = __webpack_require__(21454); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/pbkdf2.js -var pbkdf2 = __webpack_require__(2200); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/sha256.js -var sha256 = __webpack_require__(78226); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/_sha2.js -var _sha2 = __webpack_require__(6800); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/_u64.js -var _u64 = __webpack_require__(79271); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/utils.js + 1 modules -var utils = __webpack_require__(32531); -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/sha512.js +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/hmac.js +var hmac = __webpack_require__(4655); +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/pbkdf2.js +var pbkdf2 = __webpack_require__(84877); +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/sha256.js +var sha256 = __webpack_require__(3439); +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/_sha2.js +var _sha2 = __webpack_require__(37171); +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/_u64.js +var _u64 = __webpack_require__(86558); +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/utils.js + 1 modules +var utils = __webpack_require__(10750); +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/sha512.js @@ -163999,7 +190325,7 @@ function randomBytes(length) { /***/ }), -/***/ 2011: +/***/ 15539: /***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { "use strict"; @@ -164009,13 +190335,13 @@ __webpack_require__.d(__webpack_exports__, { S: () => (/* binding */ keccak256) }); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/_assert.js -var _assert = __webpack_require__(89190); -// EXTERNAL MODULE: 
./node_modules/@noble/hashes/esm/_u64.js -var _u64 = __webpack_require__(79271); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/utils.js + 1 modules -var utils = __webpack_require__(32531); -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/sha3.js +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/_assert.js +var _assert = __webpack_require__(27125); +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/_u64.js +var _u64 = __webpack_require__(86558); +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/utils.js + 1 modules +var utils = __webpack_require__(10750); +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/sha3.js @@ -164285,7 +190611,7 @@ Object.freeze(keccak256); /* harmony export */ __webpack_require__.d(__webpack_exports__, { /* harmony export */ A: () => (/* binding */ pbkdf2) /* harmony export */ }); -/* harmony import */ var _crypto_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(68682); +/* harmony import */ var _crypto_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(8180); /* harmony import */ var _utils_index_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(36212); /** * A **Password-Based Key-Derivation Function** is designed to create @@ -164347,7 +190673,7 @@ Object.freeze(pbkdf2); /* harmony export */ s: () => (/* binding */ sha256) /* harmony export */ }); /* unused harmony export sha512 */ -/* harmony import */ var _crypto_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(68682); +/* harmony import */ var _crypto_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(8180); /* harmony import */ var _utils_index_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(36212); @@ -164752,7 +191078,7 @@ class Signature { /***/ }), -/***/ 72588: +/***/ 15496: /***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { "use strict"; @@ -164762,7 +191088,7 @@ 
__webpack_require__.d(__webpack_exports__, { h: () => (/* binding */ SigningKey) }); -// NAMESPACE OBJECT: ./node_modules/@noble/curves/esm/abstract/utils.js +// NAMESPACE OBJECT: ./node_modules/ethers/node_modules/@noble/curves/esm/abstract/utils.js var utils_namespaceObject = {}; __webpack_require__.r(utils_namespaceObject); __webpack_require__.d(utils_namespaceObject, { @@ -164779,9 +191105,9 @@ __webpack_require__.d(utils_namespaceObject, { Q5: () => (validateObject) }); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/sha256.js -var esm_sha256 = __webpack_require__(78226); -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/abstract/utils.js +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/sha256.js +var esm_sha256 = __webpack_require__(3439); +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/abstract/utils.js /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // 100 lines of code in the file are duplicated from noble-hashes (utils). // This is OK: `abstract` directory does not use noble-hashes. @@ -165048,7 +191374,7 @@ function validateObject(object, validators, optValidators = {}) { // const z3 = validateObject(o, { test: 'boolean', z: 'bug' }); // const z4 = validateObject(o, { a: 'boolean', z: 'bug' }); //# sourceMappingURL=utils.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/abstract/modular.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/abstract/modular.js /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // Utilities for modular arithmetics and finite fields @@ -165465,11 +191791,11 @@ function mapHashToField(key, fieldOrder, isLE = false) { return isLE ? 
numberToBytesLE(reduced, fieldLen) : utils_numberToBytesBE(reduced, fieldLen); } //# sourceMappingURL=modular.js.map -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/hmac.js -var hmac = __webpack_require__(21454); -// EXTERNAL MODULE: ./node_modules/@noble/hashes/esm/utils.js + 1 modules -var utils = __webpack_require__(32531); -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/abstract/curve.js +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/hmac.js +var hmac = __webpack_require__(4655); +// EXTERNAL MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/utils.js + 1 modules +var utils = __webpack_require__(10750); +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/abstract/curve.js /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // Abelian group utilities @@ -165626,7 +191952,7 @@ function validateBasic(curve) { }); } //# sourceMappingURL=curve.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/abstract/weierstrass.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/abstract/weierstrass.js /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // Short Weierstrass curve. The formula is: y² = x³ + ax + b @@ -166683,7 +193009,7 @@ function weierstrass_mapToCurveSimpleSWU(Fp, opts) { }; } //# sourceMappingURL=weierstrass.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/_shortw_utils.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/_shortw_utils.js /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ @@ -166701,7 +193027,7 @@ function createCurve(curveDef, defHash) { return Object.freeze({ ...create(defHash), create }); } //# sourceMappingURL=_shortw_utils.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/secp256k1.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/secp256k1.js /*! 
noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ @@ -167141,7 +193467,7 @@ class SigningKey { /* harmony export */ __webpack_require__.d(__webpack_exports__, { /* harmony export */ id: () => (/* binding */ id) /* harmony export */ }); -/* harmony import */ var _crypto_index_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(2011); +/* harmony import */ var _crypto_index_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(15539); /* harmony import */ var _utils_index_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(87303); @@ -167177,7 +193503,7 @@ __webpack_require__.d(__webpack_exports__, { // UNUSED EXPORTS: ensNormalize, isValidName // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/crypto/keccak.js + 1 modules -var keccak = __webpack_require__(2011); +var keccak = __webpack_require__(15539); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/utils/errors.js var errors = __webpack_require__(57339); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/utils/utf8.js @@ -169897,8 +196223,8 @@ function accessListify(value) { /* harmony export */ x: () => (/* binding */ recoverAddress) /* harmony export */ }); /* harmony import */ var _address_index_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(30031); -/* harmony import */ var _crypto_index_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(72588); -/* harmony import */ var _crypto_index_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(2011); +/* harmony import */ var _crypto_index_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(15496); +/* harmony import */ var _crypto_index_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(15539); /** @@ -169946,9 +196272,9 @@ var sha2 = __webpack_require__(68650); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/crypto/signature.js + 1 modules var crypto_signature = __webpack_require__(20260); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/crypto/keccak.js + 1 modules -var keccak = __webpack_require__(2011); +var keccak = 
__webpack_require__(15539); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/crypto/signing-key.js + 6 modules -var signing_key = __webpack_require__(72588); +var signing_key = __webpack_require__(15496); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/utils/errors.js var errors = __webpack_require__(57339); // EXTERNAL MODULE: ./node_modules/ethers/lib.esm/utils/maths.js @@ -173142,6 +199468,1839 @@ class LangEn extends WordlistOwl { } //# sourceMappingURL=lang-en.js.map +/***/ }), + +/***/ 27125: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { + +"use strict"; +/* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ CG: () => (/* binding */ output), +/* harmony export */ ai: () => (/* binding */ number), +/* harmony export */ ee: () => (/* binding */ bytes), +/* harmony export */ t2: () => (/* binding */ exists), +/* harmony export */ tW: () => (/* binding */ hash) +/* harmony export */ }); +/* unused harmony export bool */ +function number(n) { + if (!Number.isSafeInteger(n) || n < 0) + throw new Error(`Wrong positive integer: ${n}`); +} +function bool(b) { + if (typeof b !== 'boolean') + throw new Error(`Expected boolean, not ${b}`); +} +function bytes(b, ...lengths) { + if (!(b instanceof Uint8Array)) + throw new Error('Expected Uint8Array'); + if (lengths.length > 0 && !lengths.includes(b.length)) + throw new Error(`Expected Uint8Array of length ${lengths}, not of length=${b.length}`); +} +function hash(hash) { + if (typeof hash !== 'function' || typeof hash.create !== 'function') + throw new Error('Hash should be wrapped by utils.wrapConstructor'); + number(hash.outputLen); + number(hash.blockLen); +} +function exists(instance, checkFinished = true) { + if (instance.destroyed) + throw new Error('Hash instance has been destroyed'); + if (checkFinished && instance.finished) + throw new Error('Hash#digest() has already been called'); +} +function output(out, instance) { + 
bytes(out); + const min = instance.outputLen; + if (out.length < min) { + throw new Error(`digestInto() expects output buffer of length at least ${min}`); + } +} + +const assert = { number, bool, bytes, hash, exists, output }; +/* unused harmony default export */ var __WEBPACK_DEFAULT_EXPORT__ = ((/* unused pure expression or super */ null && (assert))); +//# sourceMappingURL=_assert.js.map + +/***/ }), + +/***/ 37171: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { + +"use strict"; +/* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ D: () => (/* binding */ SHA2) +/* harmony export */ }); +/* harmony import */ var _assert_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(27125); +/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(10750); + + +// Polyfill for Safari 14 +function setBigUint64(view, byteOffset, value, isLE) { + if (typeof view.setBigUint64 === 'function') + return view.setBigUint64(byteOffset, value, isLE); + const _32n = BigInt(32); + const _u32_max = BigInt(0xffffffff); + const wh = Number((value >> _32n) & _u32_max); + const wl = Number(value & _u32_max); + const h = isLE ? 4 : 0; + const l = isLE ? 
0 : 4; + view.setUint32(byteOffset + h, wh, isLE); + view.setUint32(byteOffset + l, wl, isLE); +} +// Base SHA2 class (RFC 6234) +class SHA2 extends _utils_js__WEBPACK_IMPORTED_MODULE_0__/* .Hash */ .Vw { + constructor(blockLen, outputLen, padOffset, isLE) { + super(); + this.blockLen = blockLen; + this.outputLen = outputLen; + this.padOffset = padOffset; + this.isLE = isLE; + this.finished = false; + this.length = 0; + this.pos = 0; + this.destroyed = false; + this.buffer = new Uint8Array(blockLen); + this.view = (0,_utils_js__WEBPACK_IMPORTED_MODULE_0__/* .createView */ .O8)(this.buffer); + } + update(data) { + (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .exists */ .t2)(this); + const { view, buffer, blockLen } = this; + data = (0,_utils_js__WEBPACK_IMPORTED_MODULE_0__/* .toBytes */ .ZJ)(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + // Fast path: we have at least one block in input, cast it to view and process + if (take === blockLen) { + const dataView = (0,_utils_js__WEBPACK_IMPORTED_MODULE_0__/* .createView */ .O8)(data); + for (; blockLen <= len - pos; pos += blockLen) + this.process(dataView, pos); + continue; + } + buffer.set(data.subarray(pos, pos + take), this.pos); + this.pos += take; + pos += take; + if (this.pos === blockLen) { + this.process(view, 0); + this.pos = 0; + } + } + this.length += data.length; + this.roundClean(); + return this; + } + digestInto(out) { + (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .exists */ .t2)(this); + (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .output */ .CG)(out, this); + this.finished = true; + // Padding + // We can avoid allocation of buffer for padding completely if it + // was previously not allocated here. But it won't change performance. 
+ const { buffer, view, blockLen, isLE } = this; + let { pos } = this; + // append the bit '1' to the message + buffer[pos++] = 0b10000000; + this.buffer.subarray(pos).fill(0); + // we have less than padOffset left in buffer, so we cannot put length in current block, need process it and pad again + if (this.padOffset > blockLen - pos) { + this.process(view, 0); + pos = 0; + } + // Pad until full block byte with zeros + for (let i = pos; i < blockLen; i++) + buffer[i] = 0; + // Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that + // You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen. + // So we just write lowest 64 bits of that value. + setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE); + this.process(view, 0); + const oview = (0,_utils_js__WEBPACK_IMPORTED_MODULE_0__/* .createView */ .O8)(out); + const len = this.outputLen; + // NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT + if (len % 4) + throw new Error('_sha2: outputLen should be aligned to 32bit'); + const outLen = len / 4; + const state = this.get(); + if (outLen > state.length) + throw new Error('_sha2: outputLen bigger than state'); + for (let i = 0; i < outLen; i++) + oview.setUint32(4 * i, state[i], isLE); + } + digest() { + const { buffer, outputLen } = this; + this.digestInto(buffer); + const res = buffer.slice(0, outputLen); + this.destroy(); + return res; + } + _cloneInto(to) { + to || (to = new this.constructor()); + to.set(...this.get()); + const { blockLen, buffer, length, finished, destroyed, pos } = this; + to.length = length; + to.pos = pos; + to.finished = finished; + to.destroyed = destroyed; + if (length % blockLen) + to.buffer.set(buffer); + return to; + } +} +//# sourceMappingURL=_sha2.js.map + +/***/ }), + +/***/ 86558: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { + +"use strict"; +/* harmony export */ 
__webpack_require__.d(__webpack_exports__, { +/* harmony export */ Ay: () => (__WEBPACK_DEFAULT_EXPORT__), +/* harmony export */ B4: () => (/* binding */ rotlSL), +/* harmony export */ P5: () => (/* binding */ rotlSH), +/* harmony export */ WM: () => (/* binding */ rotlBH), +/* harmony export */ im: () => (/* binding */ rotlBL), +/* harmony export */ lD: () => (/* binding */ split) +/* harmony export */ }); +/* unused harmony exports fromBig, toBig, shrSH, shrSL, rotrSH, rotrSL, rotrBH, rotrBL, rotr32H, rotr32L, add, add3L, add3H, add4L, add4H, add5H, add5L */ +const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); +const _32n = /* @__PURE__ */ BigInt(32); +// We are not using BigUint64Array, because they are extremely slow as per 2022 +function fromBig(n, le = false) { + if (le) + return { h: Number(n & U32_MASK64), l: Number((n >> _32n) & U32_MASK64) }; + return { h: Number((n >> _32n) & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 }; +} +function split(lst, le = false) { + let Ah = new Uint32Array(lst.length); + let Al = new Uint32Array(lst.length); + for (let i = 0; i < lst.length; i++) { + const { h, l } = fromBig(lst[i], le); + [Ah[i], Al[i]] = [h, l]; + } + return [Ah, Al]; +} +const toBig = (h, l) => (BigInt(h >>> 0) << _32n) | BigInt(l >>> 0); +// for Shift in [0, 32) +const shrSH = (h, _l, s) => h >>> s; +const shrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +// Right rotate for Shift in [1, 32) +const rotrSH = (h, l, s) => (h >>> s) | (l << (32 - s)); +const rotrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +// Right rotate for Shift in (32, 64), NOTE: 32 is special case. 
+const rotrBH = (h, l, s) => (h << (64 - s)) | (l >>> (s - 32)); +const rotrBL = (h, l, s) => (h >>> (s - 32)) | (l << (64 - s)); +// Right rotate for shift===32 (just swaps l&h) +const rotr32H = (_h, l) => l; +const rotr32L = (h, _l) => h; +// Left rotate for Shift in [1, 32) +const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s)); +const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s)); +// Left rotate for Shift in (32, 64), NOTE: 32 is special case. +const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); +const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); +// JS uses 32-bit signed integers for bitwise operations which means we cannot +// simple take carry out of low bit sum by shift, we need to use division. +function add(Ah, Al, Bh, Bl) { + const l = (Al >>> 0) + (Bl >>> 0); + return { h: (Ah + Bh + ((l / 2 ** 32) | 0)) | 0, l: l | 0 }; +} +// Addition with more than 2 elements +const add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0); +const add3H = (low, Ah, Bh, Ch) => (Ah + Bh + Ch + ((low / 2 ** 32) | 0)) | 0; +const add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0); +const add4H = (low, Ah, Bh, Ch, Dh) => (Ah + Bh + Ch + Dh + ((low / 2 ** 32) | 0)) | 0; +const add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0); +const add5H = (low, Ah, Bh, Ch, Dh, Eh) => (Ah + Bh + Ch + Dh + Eh + ((low / 2 ** 32) | 0)) | 0; +// prettier-ignore + +// prettier-ignore +const u64 = { + fromBig, split, toBig, + shrSH, shrSL, + rotrSH, rotrSL, rotrBH, rotrBL, + rotr32H, rotr32L, + rotlSH, rotlSL, rotlBH, rotlBL, + add, add3L, add3H, add4L, add4H, add5H, add5L, +}; +/* harmony default export */ const __WEBPACK_DEFAULT_EXPORT__ = (u64); +//# sourceMappingURL=_u64.js.map + +/***/ }), + +/***/ 4655: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { + +"use strict"; +/* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* 
harmony export */ w: () => (/* binding */ hmac) +/* harmony export */ }); +/* unused harmony export HMAC */ +/* harmony import */ var _assert_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(27125); +/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(10750); + + +// HMAC (RFC 2104) +class HMAC extends _utils_js__WEBPACK_IMPORTED_MODULE_0__/* .Hash */ .Vw { + constructor(hash, _key) { + super(); + this.finished = false; + this.destroyed = false; + (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .hash */ .tW)(hash); + const key = (0,_utils_js__WEBPACK_IMPORTED_MODULE_0__/* .toBytes */ .ZJ)(_key); + this.iHash = hash.create(); + if (typeof this.iHash.update !== 'function') + throw new Error('Expected instance of class which extends utils.Hash'); + this.blockLen = this.iHash.blockLen; + this.outputLen = this.iHash.outputLen; + const blockLen = this.blockLen; + const pad = new Uint8Array(blockLen); + // blockLen can be bigger than outputLen + pad.set(key.length > blockLen ? 
hash.create().update(key).digest() : key); + for (let i = 0; i < pad.length; i++) + pad[i] ^= 0x36; + this.iHash.update(pad); + // By doing update (processing of first block) of outer hash here we can re-use it between multiple calls via clone + this.oHash = hash.create(); + // Undo internal XOR && apply outer XOR + for (let i = 0; i < pad.length; i++) + pad[i] ^= 0x36 ^ 0x5c; + this.oHash.update(pad); + pad.fill(0); + } + update(buf) { + (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .exists */ .t2)(this); + this.iHash.update(buf); + return this; + } + digestInto(out) { + (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .exists */ .t2)(this); + (0,_assert_js__WEBPACK_IMPORTED_MODULE_1__/* .bytes */ .ee)(out, this.outputLen); + this.finished = true; + this.iHash.digestInto(out); + this.oHash.update(out); + this.oHash.digestInto(out); + this.destroy(); + } + digest() { + const out = new Uint8Array(this.oHash.outputLen); + this.digestInto(out); + return out; + } + _cloneInto(to) { + // Create new instance without calling constructor since key already in state and we don't know it. + to || (to = Object.create(Object.getPrototypeOf(this), {})); + const { oHash, iHash, finished, destroyed, blockLen, outputLen } = this; + to = to; + to.finished = finished; + to.destroyed = destroyed; + to.blockLen = blockLen; + to.outputLen = outputLen; + to.oHash = oHash._cloneInto(to.oHash); + to.iHash = iHash._cloneInto(to.iHash); + return to; + } + destroy() { + this.destroyed = true; + this.oHash.destroy(); + this.iHash.destroy(); + } +} +/** + * HMAC: RFC2104 message authentication code. + * @param hash - function that would be used e.g. 
sha256 + * @param key - message key + * @param message - message data + */ +const hmac = (hash, key, message) => new HMAC(hash, key).update(message).digest(); +hmac.create = (hash, key) => new HMAC(hash, key); +//# sourceMappingURL=hmac.js.map + +/***/ }), + +/***/ 84877: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { + +"use strict"; +/* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ A: () => (/* binding */ pbkdf2) +/* harmony export */ }); +/* unused harmony export pbkdf2Async */ +/* harmony import */ var _assert_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(27125); +/* harmony import */ var _hmac_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(4655); +/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(10750); + + + +// Common prologue and epilogue for sync/async functions +function pbkdf2Init(hash, _password, _salt, _opts) { + (0,_assert_js__WEBPACK_IMPORTED_MODULE_0__/* .hash */ .tW)(hash); + const opts = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .checkOpts */ .tY)({ dkLen: 32, asyncTick: 10 }, _opts); + const { c, dkLen, asyncTick } = opts; + (0,_assert_js__WEBPACK_IMPORTED_MODULE_0__/* .number */ .ai)(c); + (0,_assert_js__WEBPACK_IMPORTED_MODULE_0__/* .number */ .ai)(dkLen); + (0,_assert_js__WEBPACK_IMPORTED_MODULE_0__/* .number */ .ai)(asyncTick); + if (c < 1) + throw new Error('PBKDF2: iterations (c) should be >= 1'); + const password = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .toBytes */ .ZJ)(_password); + const salt = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .toBytes */ .ZJ)(_salt); + // DK = PBKDF2(PRF, Password, Salt, c, dkLen); + const DK = new Uint8Array(dkLen); + // U1 = PRF(Password, Salt + INT_32_BE(i)) + const PRF = _hmac_js__WEBPACK_IMPORTED_MODULE_2__/* .hmac */ .w.create(hash, password); + const PRFSalt = PRF._cloneInto().update(salt); + return { c, dkLen, asyncTick, DK, PRF, PRFSalt }; +} +function 
pbkdf2Output(PRF, PRFSalt, DK, prfW, u) { + PRF.destroy(); + PRFSalt.destroy(); + if (prfW) + prfW.destroy(); + u.fill(0); + return DK; +} +/** + * PBKDF2-HMAC: RFC 2898 key derivation function + * @param hash - hash function that would be used e.g. sha256 + * @param password - password from which a derived key is generated + * @param salt - cryptographic salt + * @param opts - {c, dkLen} where c is work factor and dkLen is output message size + */ +function pbkdf2(hash, password, salt, opts) { + const { c, dkLen, DK, PRF, PRFSalt } = pbkdf2Init(hash, password, salt, opts); + let prfW; // Working copy + const arr = new Uint8Array(4); + const view = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .createView */ .O8)(arr); + const u = new Uint8Array(PRF.outputLen); + // DK = T1 + T2 + ⋯ + Tdklen/hlen + for (let ti = 1, pos = 0; pos < dkLen; ti++, pos += PRF.outputLen) { + // Ti = F(Password, Salt, c, i) + const Ti = DK.subarray(pos, pos + PRF.outputLen); + view.setInt32(0, ti, false); + // F(Password, Salt, c, i) = U1 ^ U2 ^ ⋯ ^ Uc + // U1 = PRF(Password, Salt + INT_32_BE(i)) + (prfW = PRFSalt._cloneInto(prfW)).update(arr).digestInto(u); + Ti.set(u.subarray(0, Ti.length)); + for (let ui = 1; ui < c; ui++) { + // Uc = PRF(Password, Uc−1) + PRF._cloneInto(prfW).update(u).digestInto(u); + for (let i = 0; i < Ti.length; i++) + Ti[i] ^= u[i]; + } + } + return pbkdf2Output(PRF, PRFSalt, DK, prfW, u); +} +async function pbkdf2Async(hash, password, salt, opts) { + const { c, dkLen, asyncTick, DK, PRF, PRFSalt } = pbkdf2Init(hash, password, salt, opts); + let prfW; // Working copy + const arr = new Uint8Array(4); + const view = createView(arr); + const u = new Uint8Array(PRF.outputLen); + // DK = T1 + T2 + ⋯ + Tdklen/hlen + for (let ti = 1, pos = 0; pos < dkLen; ti++, pos += PRF.outputLen) { + // Ti = F(Password, Salt, c, i) + const Ti = DK.subarray(pos, pos + PRF.outputLen); + view.setInt32(0, ti, false); + // F(Password, Salt, c, i) = U1 ^ U2 ^ ⋯ ^ Uc + // U1 = PRF(Password, 
Salt + INT_32_BE(i)) + (prfW = PRFSalt._cloneInto(prfW)).update(arr).digestInto(u); + Ti.set(u.subarray(0, Ti.length)); + await asyncLoop(c - 1, asyncTick, () => { + // Uc = PRF(Password, Uc−1) + PRF._cloneInto(prfW).update(u).digestInto(u); + for (let i = 0; i < Ti.length; i++) + Ti[i] ^= u[i]; + }); + } + return pbkdf2Output(PRF, PRFSalt, DK, prfW, u); +} +//# sourceMappingURL=pbkdf2.js.map + +/***/ }), + +/***/ 3439: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { + +"use strict"; +/* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ s: () => (/* binding */ sha256) +/* harmony export */ }); +/* unused harmony export sha224 */ +/* harmony import */ var _sha2_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(37171); +/* harmony import */ var _utils_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(10750); + + +// SHA2-256 need to try 2^128 hashes to execute birthday attack. +// BTC network is doing 2^67 hashes/sec as per early 2023. +// Choice: a ? 
b : c +const Chi = (a, b, c) => (a & b) ^ (~a & c); +// Majority function, true if any two inpust is true +const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c); +// Round constants: +// first 32 bits of the fractional parts of the cube roots of the first 64 primes 2..311) +// prettier-ignore +const SHA256_K = /* @__PURE__ */ new Uint32Array([ + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, + 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, + 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, + 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, + 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, + 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, + 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 +]); +// Initial state (first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19): +// prettier-ignore +const IV = /* @__PURE__ */ new Uint32Array([ + 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 +]); +// Temporary buffer, not used to store anything between runs +// Named this way because it matches specification. +const SHA256_W = /* @__PURE__ */ new Uint32Array(64); +class SHA256 extends _sha2_js__WEBPACK_IMPORTED_MODULE_0__/* .SHA2 */ .D { + constructor() { + super(64, 32, 8, false); + // We cannot use array here since array allows indexing by variable + // which means optimizer/compiler cannot use registers. 
+ this.A = IV[0] | 0; + this.B = IV[1] | 0; + this.C = IV[2] | 0; + this.D = IV[3] | 0; + this.E = IV[4] | 0; + this.F = IV[5] | 0; + this.G = IV[6] | 0; + this.H = IV[7] | 0; + } + get() { + const { A, B, C, D, E, F, G, H } = this; + return [A, B, C, D, E, F, G, H]; + } + // prettier-ignore + set(A, B, C, D, E, F, G, H) { + this.A = A | 0; + this.B = B | 0; + this.C = C | 0; + this.D = D | 0; + this.E = E | 0; + this.F = F | 0; + this.G = G | 0; + this.H = H | 0; + } + process(view, offset) { + // Extend the first 16 words into the remaining 48 words w[16..63] of the message schedule array + for (let i = 0; i < 16; i++, offset += 4) + SHA256_W[i] = view.getUint32(offset, false); + for (let i = 16; i < 64; i++) { + const W15 = SHA256_W[i - 15]; + const W2 = SHA256_W[i - 2]; + const s0 = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(W15, 7) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(W15, 18) ^ (W15 >>> 3); + const s1 = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(W2, 17) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(W2, 19) ^ (W2 >>> 10); + SHA256_W[i] = (s1 + SHA256_W[i - 7] + s0 + SHA256_W[i - 16]) | 0; + } + // Compression function main loop, 64 rounds + let { A, B, C, D, E, F, G, H } = this; + for (let i = 0; i < 64; i++) { + const sigma1 = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(E, 6) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(E, 11) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(E, 25); + const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0; + const sigma0 = (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(A, 2) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(A, 13) ^ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .rotr */ .Ow)(A, 22); + const T2 = (sigma0 + Maj(A, B, C)) | 0; + H = G; + G = F; + F = E; + E = (D + T1) | 0; + D = C; + C = B; + B = A; + A = (T1 + T2) | 0; + } + // Add the compressed chunk to the current hash 
value + A = (A + this.A) | 0; + B = (B + this.B) | 0; + C = (C + this.C) | 0; + D = (D + this.D) | 0; + E = (E + this.E) | 0; + F = (F + this.F) | 0; + G = (G + this.G) | 0; + H = (H + this.H) | 0; + this.set(A, B, C, D, E, F, G, H); + } + roundClean() { + SHA256_W.fill(0); + } + destroy() { + this.set(0, 0, 0, 0, 0, 0, 0, 0); + this.buffer.fill(0); + } +} +// Constants from https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf +class SHA224 extends SHA256 { + constructor() { + super(); + this.A = 0xc1059ed8 | 0; + this.B = 0x367cd507 | 0; + this.C = 0x3070dd17 | 0; + this.D = 0xf70e5939 | 0; + this.E = 0xffc00b31 | 0; + this.F = 0x68581511 | 0; + this.G = 0x64f98fa7 | 0; + this.H = 0xbefa4fa4 | 0; + this.outputLen = 28; + } +} +/** + * SHA2-256 hash function + * @param message - data that would be hashed + */ +const sha256 = /* @__PURE__ */ (0,_utils_js__WEBPACK_IMPORTED_MODULE_1__/* .wrapConstructor */ .ld)(() => new SHA256()); +const sha224 = /* @__PURE__ */ (/* unused pure expression or super */ null && (wrapConstructor(() => new SHA224()))); +//# sourceMappingURL=sha256.js.map + +/***/ }), + +/***/ 10750: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { + +"use strict"; + +// EXPORTS +__webpack_require__.d(__webpack_exports__, { + Vw: () => (/* binding */ Hash), + $h: () => (/* binding */ asyncLoop), + tY: () => (/* binding */ checkOpts), + Id: () => (/* binding */ concatBytes), + O8: () => (/* binding */ createView), + po: () => (/* binding */ randomBytes), + Ow: () => (/* binding */ rotr), + ZJ: () => (/* binding */ toBytes), + DH: () => (/* binding */ u32), + ld: () => (/* binding */ wrapConstructor) +}); + +// UNUSED EXPORTS: bytesToHex, hexToBytes, isLE, nextTick, u8, utf8ToBytes, wrapConstructorWithOpts, wrapXOFConstructorWithOpts + +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/crypto.js +const crypto_crypto = typeof globalThis === 'object' && 'crypto' in globalThis ? 
globalThis.crypto : undefined; +//# sourceMappingURL=crypto.js.map +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/utils.js +/*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. +// node.js versions earlier than v19 don't declare it in global scope. +// For node.js, package.json#exports field mapping rewrites import +// from `crypto` to `cryptoNode`, which imports native module. +// Makes the utils un-importable in browsers without a bundler. +// Once node.js 18 is deprecated, we can just drop the import. + +const u8a = (a) => a instanceof Uint8Array; +// Cast array to different type +const u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); +const u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); +// Cast array to view +const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); +// The rotate right (circular right shift) operation for uint32 +const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift); +// big-endian hardware is rare. Just in case someone still decides to run hashes: +// early-throw an error because we don't support BE yet. 
+const isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44; +if (!isLE) + throw new Error('Non little-endian hardware is not supported'); +const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); +/** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ +function bytesToHex(bytes) { + if (!u8a(bytes)) + throw new Error('Uint8Array expected'); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; +} +/** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ +function hexToBytes(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const len = hex.length; + if (len % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + len); + const array = new Uint8Array(len / 2); + for (let i = 0; i < array.length; i++) { + const j = i * 2; + const hexByte = hex.slice(j, j + 2); + const byte = Number.parseInt(hexByte, 16); + if (Number.isNaN(byte) || byte < 0) + throw new Error('Invalid byte sequence'); + array[i] = byte; + } + return array; +} +// There is no setImmediate in browser and setTimeout is slow. +// call of async fn will return Promise, which will be fullfiled only on +// next scheduler queue processing step and this is exactly what we need. 
+const nextTick = async () => { }; +// Returns control to thread each 'tick' ms to avoid blocking +async function asyncLoop(iters, tick, cb) { + let ts = Date.now(); + for (let i = 0; i < iters; i++) { + cb(i); + // Date.now() is not monotonic, so in case if clock goes backwards we return return control too + const diff = Date.now() - ts; + if (diff >= 0 && diff < tick) + continue; + await nextTick(); + ts += diff; + } +} +/** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ +function utf8ToBytes(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 +} +/** + * Normalizes (non-hex) string or Uint8Array to Uint8Array. + * Warning: when Uint8Array is passed, it would NOT get copied. + * Keep in mind for future mutable operations. + */ +function toBytes(data) { + if (typeof data === 'string') + data = utf8ToBytes(data); + if (!u8a(data)) + throw new Error(`expected Uint8Array, got ${typeof data}`); + return data; +} +/** + * Copies several Uint8Arrays into one. 
+ */ +function concatBytes(...arrays) { + const r = new Uint8Array(arrays.reduce((sum, a) => sum + a.length, 0)); + let pad = 0; // walk through each item, ensure they have proper type + arrays.forEach((a) => { + if (!u8a(a)) + throw new Error('Uint8Array expected'); + r.set(a, pad); + pad += a.length; + }); + return r; +} +// For runtime check if class implements interface +class Hash { + // Safe version that clones internal state + clone() { + return this._cloneInto(); + } +} +const toStr = {}.toString; +function checkOpts(defaults, opts) { + if (opts !== undefined && toStr.call(opts) !== '[object Object]') + throw new Error('Options should be object or undefined'); + const merged = Object.assign(defaults, opts); + return merged; +} +function wrapConstructor(hashCons) { + const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); + const tmp = hashCons(); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = () => hashCons(); + return hashC; +} +function wrapConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; +} +function wrapXOFConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; +} +/** + * Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS. 
+ */ +function randomBytes(bytesLength = 32) { + if (crypto_crypto && typeof crypto_crypto.getRandomValues === 'function') { + return crypto_crypto.getRandomValues(new Uint8Array(bytesLength)); + } + throw new Error('crypto.getRandomValues must be defined'); +} +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 2150: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ Struct: () => (/* binding */ Struct), +/* harmony export */ StructError: () => (/* binding */ StructError), +/* harmony export */ any: () => (/* binding */ any), +/* harmony export */ array: () => (/* binding */ array), +/* harmony export */ assert: () => (/* binding */ assert), +/* harmony export */ assign: () => (/* binding */ assign), +/* harmony export */ bigint: () => (/* binding */ bigint), +/* harmony export */ boolean: () => (/* binding */ boolean), +/* harmony export */ coerce: () => (/* binding */ coerce), +/* harmony export */ create: () => (/* binding */ create), +/* harmony export */ date: () => (/* binding */ date), +/* harmony export */ defaulted: () => (/* binding */ defaulted), +/* harmony export */ define: () => (/* binding */ define), +/* harmony export */ deprecated: () => (/* binding */ deprecated), +/* harmony export */ dynamic: () => (/* binding */ dynamic), +/* harmony export */ empty: () => (/* binding */ empty), +/* harmony export */ enums: () => (/* binding */ enums), +/* harmony export */ func: () => (/* binding */ func), +/* harmony export */ instance: () => (/* binding */ instance), +/* harmony export */ integer: () => (/* binding */ integer), +/* harmony export */ intersection: () => (/* binding */ intersection), +/* harmony export */ is: () => (/* binding */ is), +/* harmony export */ lazy: () => (/* binding */ lazy), +/* harmony export */ literal: () => (/* binding */ 
literal), +/* harmony export */ map: () => (/* binding */ map), +/* harmony export */ mask: () => (/* binding */ mask), +/* harmony export */ max: () => (/* binding */ max), +/* harmony export */ min: () => (/* binding */ min), +/* harmony export */ never: () => (/* binding */ never), +/* harmony export */ nonempty: () => (/* binding */ nonempty), +/* harmony export */ nullable: () => (/* binding */ nullable), +/* harmony export */ number: () => (/* binding */ number), +/* harmony export */ object: () => (/* binding */ object), +/* harmony export */ omit: () => (/* binding */ omit), +/* harmony export */ optional: () => (/* binding */ optional), +/* harmony export */ partial: () => (/* binding */ partial), +/* harmony export */ pattern: () => (/* binding */ pattern), +/* harmony export */ pick: () => (/* binding */ pick), +/* harmony export */ record: () => (/* binding */ record), +/* harmony export */ refine: () => (/* binding */ refine), +/* harmony export */ regexp: () => (/* binding */ regexp), +/* harmony export */ set: () => (/* binding */ set), +/* harmony export */ size: () => (/* binding */ size), +/* harmony export */ string: () => (/* binding */ string), +/* harmony export */ struct: () => (/* binding */ struct), +/* harmony export */ trimmed: () => (/* binding */ trimmed), +/* harmony export */ tuple: () => (/* binding */ tuple), +/* harmony export */ type: () => (/* binding */ type), +/* harmony export */ union: () => (/* binding */ union), +/* harmony export */ unknown: () => (/* binding */ unknown), +/* harmony export */ validate: () => (/* binding */ validate) +/* harmony export */ }); +/* provided dependency */ var console = __webpack_require__(96763); +/** + * A `StructFailure` represents a single specific failure in validation. + */ +/** + * `StructError` objects are thrown (or returned) when validation fails. + * + * Validation logic is design to exit early for maximum performance. 
The error + * represents the first error encountered during validation. For more detail, + * the `error.failures` property is a generator function that can be run to + * continue validation and receive all the failures in the data. + */ +class StructError extends TypeError { + constructor(failure, failures) { + let cached; + const { message, explanation, ...rest } = failure; + const { path } = failure; + const msg = path.length === 0 ? message : `At path: ${path.join('.')} -- ${message}`; + super(explanation ?? msg); + if (explanation != null) + this.cause = msg; + Object.assign(this, rest); + this.name = this.constructor.name; + this.failures = () => { + return (cached ?? (cached = [failure, ...failures()])); + }; + } +} + +/** + * Check if a value is an iterator. + */ +function isIterable(x) { + return isObject(x) && typeof x[Symbol.iterator] === 'function'; +} +/** + * Check if a value is a plain object. + */ +function isObject(x) { + return typeof x === 'object' && x != null; +} +/** + * Check if a value is a plain object. + */ +function isPlainObject(x) { + if (Object.prototype.toString.call(x) !== '[object Object]') { + return false; + } + const prototype = Object.getPrototypeOf(x); + return prototype === null || prototype === Object.prototype; +} +/** + * Return a value as a printable string. + */ +function print(value) { + if (typeof value === 'symbol') { + return value.toString(); + } + return typeof value === 'string' ? JSON.stringify(value) : `${value}`; +} +/** + * Shifts (removes and returns) the first value from the `input` iterator. + * Like `Array.prototype.shift()` but for an `Iterator`. + */ +function shiftIterator(input) { + const { done, value } = input.next(); + return done ? undefined : value; +} +/** + * Convert a single validation result to a failure. 
+ */ +function toFailure(result, context, struct, value) { + if (result === true) { + return; + } + else if (result === false) { + result = {}; + } + else if (typeof result === 'string') { + result = { message: result }; + } + const { path, branch } = context; + const { type } = struct; + const { refinement, message = `Expected a value of type \`${type}\`${refinement ? ` with refinement \`${refinement}\`` : ''}, but received: \`${print(value)}\``, } = result; + return { + value, + type, + refinement, + key: path[path.length - 1], + path, + branch, + ...result, + message, + }; +} +/** + * Convert a validation result to an iterable of failures. + */ +function* toFailures(result, context, struct, value) { + if (!isIterable(result)) { + result = [result]; + } + for (const r of result) { + const failure = toFailure(r, context, struct, value); + if (failure) { + yield failure; + } + } +} +/** + * Check a value against a struct, traversing deeply into nested values, and + * returning an iterator of failures or success. + */ +function* run(value, struct, options = {}) { + const { path = [], branch = [value], coerce = false, mask = false } = options; + const ctx = { path, branch }; + if (coerce) { + value = struct.coercer(value, ctx); + if (mask && + struct.type !== 'type' && + isObject(struct.schema) && + isObject(value) && + !Array.isArray(value)) { + for (const key in value) { + if (struct.schema[key] === undefined) { + delete value[key]; + } + } + } + } + let status = 'valid'; + for (const failure of struct.validator(value, ctx)) { + failure.explanation = options.message; + status = 'not_valid'; + yield [failure, undefined]; + } + for (let [k, v, s] of struct.entries(value, ctx)) { + const ts = run(v, s, { + path: k === undefined ? path : [...path, k], + branch: k === undefined ? branch : [...branch, v], + coerce, + mask, + message: options.message, + }); + for (const t of ts) { + if (t[0]) { + status = t[0].refinement != null ? 
'not_refined' : 'not_valid'; + yield [t[0], undefined]; + } + else if (coerce) { + v = t[1]; + if (k === undefined) { + value = v; + } + else if (value instanceof Map) { + value.set(k, v); + } + else if (value instanceof Set) { + value.add(v); + } + else if (isObject(value)) { + if (v !== undefined || k in value) + value[k] = v; + } + } + } + } + if (status !== 'not_valid') { + for (const failure of struct.refiner(value, ctx)) { + failure.explanation = options.message; + status = 'not_refined'; + yield [failure, undefined]; + } + } + if (status === 'valid') { + yield [undefined, value]; + } +} + +/** + * `Struct` objects encapsulate the validation logic for a specific type of + * values. Once constructed, you use the `assert`, `is` or `validate` helpers to + * validate unknown input data against the struct. + */ +class Struct { + constructor(props) { + const { type, schema, validator, refiner, coercer = (value) => value, entries = function* () { }, } = props; + this.type = type; + this.schema = schema; + this.entries = entries; + this.coercer = coercer; + if (validator) { + this.validator = (value, context) => { + const result = validator(value, context); + return toFailures(result, context, this, value); + }; + } + else { + this.validator = () => []; + } + if (refiner) { + this.refiner = (value, context) => { + const result = refiner(value, context); + return toFailures(result, context, this, value); + }; + } + else { + this.refiner = () => []; + } + } + /** + * Assert that a value passes the struct's validation, throwing if it doesn't. + */ + assert(value, message) { + return assert(value, this, message); + } + /** + * Create a value with the struct's coercion logic, then validate it. + */ + create(value, message) { + return create(value, this, message); + } + /** + * Check if a value passes the struct's validation. 
+ */ + is(value) { + return is(value, this); + } + /** + * Mask a value, coercing and validating it, but returning only the subset of + * properties defined by the struct's schema. + */ + mask(value, message) { + return mask(value, this, message); + } + /** + * Validate a value with the struct's validation logic, returning a tuple + * representing the result. + * + * You may optionally pass `true` for the `withCoercion` argument to coerce + * the value before attempting to validate it. If you do, the result will + * contain the coerced result when successful. + */ + validate(value, options = {}) { + return validate(value, this, options); + } +} +/** + * Assert that a value passes a struct, throwing if it doesn't. + */ +function assert(value, struct, message) { + const result = validate(value, struct, { message }); + if (result[0]) { + throw result[0]; + } +} +/** + * Create a value with the coercion logic of struct and validate it. + */ +function create(value, struct, message) { + const result = validate(value, struct, { coerce: true, message }); + if (result[0]) { + throw result[0]; + } + else { + return result[1]; + } +} +/** + * Mask a value, returning only the subset of properties defined by a struct. + */ +function mask(value, struct, message) { + const result = validate(value, struct, { coerce: true, mask: true, message }); + if (result[0]) { + throw result[0]; + } + else { + return result[1]; + } +} +/** + * Check if a value passes a struct. + */ +function is(value, struct) { + const result = validate(value, struct); + return !result[0]; +} +/** + * Validate a value against a struct, returning an error if invalid, or the + * value (with potential coercion) if valid. 
+ */ +function validate(value, struct, options = {}) { + const tuples = run(value, struct, options); + const tuple = shiftIterator(tuples); + if (tuple[0]) { + const error = new StructError(tuple[0], function* () { + for (const t of tuples) { + if (t[0]) { + yield t[0]; + } + } + }); + return [error, undefined]; + } + else { + const v = tuple[1]; + return [undefined, v]; + } +} + +function assign(...Structs) { + const isType = Structs[0].type === 'type'; + const schemas = Structs.map((s) => s.schema); + const schema = Object.assign({}, ...schemas); + return isType ? type(schema) : object(schema); +} +/** + * Define a new struct type with a custom validation function. + */ +function define(name, validator) { + return new Struct({ type: name, schema: null, validator }); +} +/** + * Create a new struct based on an existing struct, but the value is allowed to + * be `undefined`. `log` will be called if the value is not `undefined`. + */ +function deprecated(struct, log) { + return new Struct({ + ...struct, + refiner: (value, ctx) => value === undefined || struct.refiner(value, ctx), + validator(value, ctx) { + if (value === undefined) { + return true; + } + else { + log(value, ctx); + return struct.validator(value, ctx); + } + }, + }); +} +/** + * Create a struct with dynamic validation logic. + * + * The callback will receive the value currently being validated, and must + * return a struct object to validate it with. This can be useful to model + * validation logic that changes based on its input. 
+ */ +function dynamic(fn) { + return new Struct({ + type: 'dynamic', + schema: null, + *entries(value, ctx) { + const struct = fn(value, ctx); + yield* struct.entries(value, ctx); + }, + validator(value, ctx) { + const struct = fn(value, ctx); + return struct.validator(value, ctx); + }, + coercer(value, ctx) { + const struct = fn(value, ctx); + return struct.coercer(value, ctx); + }, + refiner(value, ctx) { + const struct = fn(value, ctx); + return struct.refiner(value, ctx); + }, + }); +} +/** + * Create a struct with lazily evaluated validation logic. + * + * The first time validation is run with the struct, the callback will be called + * and must return a struct object to use. This is useful for cases where you + * want to have self-referential structs for nested data structures to avoid a + * circular definition problem. + */ +function lazy(fn) { + let struct; + return new Struct({ + type: 'lazy', + schema: null, + *entries(value, ctx) { + struct ?? (struct = fn()); + yield* struct.entries(value, ctx); + }, + validator(value, ctx) { + struct ?? (struct = fn()); + return struct.validator(value, ctx); + }, + coercer(value, ctx) { + struct ?? (struct = fn()); + return struct.coercer(value, ctx); + }, + refiner(value, ctx) { + struct ?? (struct = fn()); + return struct.refiner(value, ctx); + }, + }); +} +/** + * Create a new struct based on an existing object struct, but excluding + * specific properties. + * + * Like TypeScript's `Omit` utility. + */ +function omit(struct, keys) { + const { schema } = struct; + const subschema = { ...schema }; + for (const key of keys) { + delete subschema[key]; + } + switch (struct.type) { + case 'type': + return type(subschema); + default: + return object(subschema); + } +} +/** + * Create a new struct based on an existing object struct, but with all of its + * properties allowed to be `undefined`. + * + * Like TypeScript's `Partial` utility. 
+ */ +function partial(struct) { + const isStruct = struct instanceof Struct; + const schema = isStruct ? { ...struct.schema } : { ...struct }; + for (const key in schema) { + schema[key] = optional(schema[key]); + } + if (isStruct && struct.type === 'type') { + return type(schema); + } + return object(schema); +} +/** + * Create a new struct based on an existing object struct, but only including + * specific properties. + * + * Like TypeScript's `Pick` utility. + */ +function pick(struct, keys) { + const { schema } = struct; + const subschema = {}; + for (const key of keys) { + subschema[key] = schema[key]; + } + switch (struct.type) { + case 'type': + return type(subschema); + default: + return object(subschema); + } +} +/** + * Define a new struct type with a custom validation function. + * + * @deprecated This function has been renamed to `define`. + */ +function struct(name, validator) { + console.warn('superstruct@0.11 - The `struct` helper has been renamed to `define`.'); + return define(name, validator); +} + +/** + * Ensure that any value passes validation. + */ +function any() { + return define('any', () => true); +} +function array(Element) { + return new Struct({ + type: 'array', + schema: Element, + *entries(value) { + if (Element && Array.isArray(value)) { + for (const [i, v] of value.entries()) { + yield [i, v, Element]; + } + } + }, + coercer(value) { + return Array.isArray(value) ? value.slice() : value; + }, + validator(value) { + return (Array.isArray(value) || + `Expected an array value, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a bigint. + */ +function bigint() { + return define('bigint', (value) => { + return typeof value === 'bigint'; + }); +} +/** + * Ensure that a value is a boolean. + */ +function boolean() { + return define('boolean', (value) => { + return typeof value === 'boolean'; + }); +} +/** + * Ensure that a value is a valid `Date`. 
+ * + * Note: this also ensures that the value is *not* an invalid `Date` object, + * which can occur when parsing a date fails but still returns a `Date`. + */ +function date() { + return define('date', (value) => { + return ((value instanceof Date && !isNaN(value.getTime())) || + `Expected a valid \`Date\` object, but received: ${print(value)}`); + }); +} +function enums(values) { + const schema = {}; + const description = values.map((v) => print(v)).join(); + for (const key of values) { + schema[key] = key; + } + return new Struct({ + type: 'enums', + schema, + validator(value) { + return (values.includes(value) || + `Expected one of \`${description}\`, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a function. + */ +function func() { + return define('func', (value) => { + return (typeof value === 'function' || + `Expected a function, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value is an instance of a specific class. + */ +function instance(Class) { + return define('instance', (value) => { + return (value instanceof Class || + `Expected a \`${Class.name}\` instance, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value is an integer. + */ +function integer() { + return define('integer', (value) => { + return ((typeof value === 'number' && !isNaN(value) && Number.isInteger(value)) || + `Expected an integer, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value matches all of a set of types. 
+ */ +function intersection(Structs) { + return new Struct({ + type: 'intersection', + schema: null, + *entries(value, ctx) { + for (const S of Structs) { + yield* S.entries(value, ctx); + } + }, + *validator(value, ctx) { + for (const S of Structs) { + yield* S.validator(value, ctx); + } + }, + *refiner(value, ctx) { + for (const S of Structs) { + yield* S.refiner(value, ctx); + } + }, + }); +} +function literal(constant) { + const description = print(constant); + const t = typeof constant; + return new Struct({ + type: 'literal', + schema: t === 'string' || t === 'number' || t === 'boolean' ? constant : null, + validator(value) { + return (value === constant || + `Expected the literal \`${description}\`, but received: ${print(value)}`); + }, + }); +} +function map(Key, Value) { + return new Struct({ + type: 'map', + schema: null, + *entries(value) { + if (Key && Value && value instanceof Map) { + for (const [k, v] of value.entries()) { + yield [k, k, Key]; + yield [k, v, Value]; + } + } + }, + coercer(value) { + return value instanceof Map ? new Map(value) : value; + }, + validator(value) { + return (value instanceof Map || + `Expected a \`Map\` object, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that no value ever passes validation. + */ +function never() { + return define('never', () => false); +} +/** + * Augment an existing struct to allow `null` values. + */ +function nullable(struct) { + return new Struct({ + ...struct, + validator: (value, ctx) => value === null || struct.validator(value, ctx), + refiner: (value, ctx) => value === null || struct.refiner(value, ctx), + }); +} +/** + * Ensure that a value is a number. + */ +function number() { + return define('number', (value) => { + return ((typeof value === 'number' && !isNaN(value)) || + `Expected a number, but received: ${print(value)}`); + }); +} +function object(schema) { + const knowns = schema ? 
Object.keys(schema) : []; + const Never = never(); + return new Struct({ + type: 'object', + schema: schema ? schema : null, + *entries(value) { + if (schema && isObject(value)) { + const unknowns = new Set(Object.keys(value)); + for (const key of knowns) { + unknowns.delete(key); + yield [key, value[key], schema[key]]; + } + for (const key of unknowns) { + yield [key, value[key], Never]; + } + } + }, + validator(value) { + return (isObject(value) || `Expected an object, but received: ${print(value)}`); + }, + coercer(value) { + return isObject(value) ? { ...value } : value; + }, + }); +} +/** + * Augment a struct to allow `undefined` values. + */ +function optional(struct) { + return new Struct({ + ...struct, + validator: (value, ctx) => value === undefined || struct.validator(value, ctx), + refiner: (value, ctx) => value === undefined || struct.refiner(value, ctx), + }); +} +/** + * Ensure that a value is an object with keys and values of specific types, but + * without ensuring any specific shape of properties. + * + * Like TypeScript's `Record` utility. + */ +function record(Key, Value) { + return new Struct({ + type: 'record', + schema: null, + *entries(value) { + if (isObject(value)) { + for (const k in value) { + const v = value[k]; + yield [k, k, Key]; + yield [k, v, Value]; + } + } + }, + validator(value) { + return (isObject(value) || `Expected an object, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a `RegExp`. + * + * Note: this does not test the value against the regular expression! For that + * you need to use the `pattern()` refinement. + */ +function regexp() { + return define('regexp', (value) => { + return value instanceof RegExp; + }); +} +function set(Element) { + return new Struct({ + type: 'set', + schema: null, + *entries(value) { + if (Element && value instanceof Set) { + for (const v of value) { + yield [v, v, Element]; + } + } + }, + coercer(value) { + return value instanceof Set ? 
new Set(value) : value; + }, + validator(value) { + return (value instanceof Set || + `Expected a \`Set\` object, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a string. + */ +function string() { + return define('string', (value) => { + return (typeof value === 'string' || + `Expected a string, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value is a tuple of a specific length, and that each of its + * elements is of a specific type. + */ +function tuple(Structs) { + const Never = never(); + return new Struct({ + type: 'tuple', + schema: null, + *entries(value) { + if (Array.isArray(value)) { + const length = Math.max(Structs.length, value.length); + for (let i = 0; i < length; i++) { + yield [i, value[i], Structs[i] || Never]; + } + } + }, + validator(value) { + return (Array.isArray(value) || + `Expected an array, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value has a set of known properties of specific types. + * + * Note: Unrecognized properties are allowed and untouched. This is similar to + * how TypeScript's structural typing works. + */ +function type(schema) { + const keys = Object.keys(schema); + return new Struct({ + type: 'type', + schema, + *entries(value) { + if (isObject(value)) { + for (const k of keys) { + yield [k, value[k], schema[k]]; + } + } + }, + validator(value) { + return (isObject(value) || `Expected an object, but received: ${print(value)}`); + }, + coercer(value) { + return isObject(value) ? { ...value } : value; + }, + }); +} +/** + * Ensure that a value matches one of a set of types. 
+ */ +function union(Structs) { + const description = Structs.map((s) => s.type).join(' | '); + return new Struct({ + type: 'union', + schema: null, + coercer(value) { + for (const S of Structs) { + const [error, coerced] = S.validate(value, { coerce: true }); + if (!error) { + return coerced; + } + } + return value; + }, + validator(value, ctx) { + const failures = []; + for (const S of Structs) { + const [...tuples] = run(value, S, ctx); + const [first] = tuples; + if (!first[0]) { + return []; + } + else { + for (const [failure] of tuples) { + if (failure) { + failures.push(failure); + } + } + } + } + return [ + `Expected the value to satisfy a union of \`${description}\`, but received: ${print(value)}`, + ...failures, + ]; + }, + }); +} +/** + * Ensure that any value passes validation, without widening its type to `any`. + */ +function unknown() { + return define('unknown', () => true); +} + +/** + * Augment a `Struct` to add an additional coercion step to its input. + * + * This allows you to transform input data before validating it, to increase the + * likelihood that it passes validation—for example for default values, parsing + * different formats, etc. + * + * Note: You must use `create(value, Struct)` on the value to have the coercion + * take effect! Using simply `assert()` or `is()` will not use coercion. + */ +function coerce(struct, condition, coercer) { + return new Struct({ + ...struct, + coercer: (value, ctx) => { + return is(value, condition) + ? struct.coercer(coercer(value, ctx), ctx) + : struct.coercer(value, ctx); + }, + }); +} +/** + * Augment a struct to replace `undefined` values with a default. + * + * Note: You must use `create(value, Struct)` on the value to have the coercion + * take effect! Using simply `assert()` or `is()` will not use coercion. + */ +function defaulted(struct, fallback, options = {}) { + return coerce(struct, unknown(), (x) => { + const f = typeof fallback === 'function' ? 
fallback() : fallback; + if (x === undefined) { + return f; + } + if (!options.strict && isPlainObject(x) && isPlainObject(f)) { + const ret = { ...x }; + let changed = false; + for (const key in f) { + if (ret[key] === undefined) { + ret[key] = f[key]; + changed = true; + } + } + if (changed) { + return ret; + } + } + return x; + }); +} +/** + * Augment a struct to trim string inputs. + * + * Note: You must use `create(value, Struct)` on the value to have the coercion + * take effect! Using simply `assert()` or `is()` will not use coercion. + */ +function trimmed(struct) { + return coerce(struct, string(), (x) => x.trim()); +} + +/** + * Ensure that a string, array, map, or set is empty. + */ +function empty(struct) { + return refine(struct, 'empty', (value) => { + const size = getSize(value); + return (size === 0 || + `Expected an empty ${struct.type} but received one with a size of \`${size}\``); + }); +} +function getSize(value) { + if (value instanceof Map || value instanceof Set) { + return value.size; + } + else { + return value.length; + } +} +/** + * Ensure that a number or date is below a threshold. + */ +function max(struct, threshold, options = {}) { + const { exclusive } = options; + return refine(struct, 'max', (value) => { + return exclusive + ? value < threshold + : value <= threshold || + `Expected a ${struct.type} less than ${exclusive ? '' : 'or equal to '}${threshold} but received \`${value}\``; + }); +} +/** + * Ensure that a number or date is above a threshold. + */ +function min(struct, threshold, options = {}) { + const { exclusive } = options; + return refine(struct, 'min', (value) => { + return exclusive + ? value > threshold + : value >= threshold || + `Expected a ${struct.type} greater than ${exclusive ? '' : 'or equal to '}${threshold} but received \`${value}\``; + }); +} +/** + * Ensure that a string, array, map or set is not empty. 
+ */ +function nonempty(struct) { + return refine(struct, 'nonempty', (value) => { + const size = getSize(value); + return (size > 0 || `Expected a nonempty ${struct.type} but received an empty one`); + }); +} +/** + * Ensure that a string matches a regular expression. + */ +function pattern(struct, regexp) { + return refine(struct, 'pattern', (value) => { + return (regexp.test(value) || + `Expected a ${struct.type} matching \`/${regexp.source}/\` but received "${value}"`); + }); +} +/** + * Ensure that a string, array, number, date, map, or set has a size (or length, or time) between `min` and `max`. + */ +function size(struct, min, max = min) { + const expected = `Expected a ${struct.type}`; + const of = min === max ? `of \`${min}\`` : `between \`${min}\` and \`${max}\``; + return refine(struct, 'size', (value) => { + if (typeof value === 'number' || value instanceof Date) { + return ((min <= value && value <= max) || + `${expected} ${of} but received \`${value}\``); + } + else if (value instanceof Map || value instanceof Set) { + const { size } = value; + return ((min <= size && size <= max) || + `${expected} with a size ${of} but received one with a size of \`${size}\``); + } + else { + const { length } = value; + return ((min <= length && length <= max) || + `${expected} with a length ${of} but received one with a length of \`${length}\``); + } + }); +} +/** + * Augment a `Struct` to add an additional refinement to the validation. + * + * The refiner function is guaranteed to receive a value of the struct's type, + * because the struct's existing validation will already have passed. This + * allows you to layer additional validation on top of existing structs. 
+ */ +function refine(struct, name, refiner) { + return new Struct({ + ...struct, + *refiner(value, ctx) { + yield* struct.refiner(value, ctx); + const result = refiner(value, ctx); + const failures = toFailures(result, ctx, struct, value); + for (const failure of failures) { + yield { ...failure, refinement: name }; + } + }, + }); +} + + +//# sourceMappingURL=index.mjs.map + + /***/ }), /***/ 63837: diff --git a/dist/program.d.ts b/dist/program.d.ts index 9f141e4..dd5788b 100644 --- a/dist/program.d.ts +++ b/dist/program.d.ts @@ -1,13 +1,14 @@ import 'dotenv/config'; import { Command } from 'commander'; -import { JsonRpcProvider, Provider, TransactionLike, Wallet, VoidSigner, BigNumberish } from 'ethers'; -import { getProviderOptions, TornadoWallet, TornadoVoidSigner, Relayer, RelayerInfo, RelayerError, RelayerClient, fetchDataOptions, Config } from './services'; +import { JsonRpcProvider, Provider, TransactionLike, Wallet, VoidSigner } from 'ethers'; +import { getProviderOptions, TornadoWallet, TornadoVoidSigner, Relayer, RelayerInfo, RelayerError, RelayerClient, fetchDataOptions, NetIdType, Config } from './services'; export type commonProgramOptions = { rpc?: string; ethRpc?: string; graph?: string; ethGraph?: string; disableGraph?: boolean; + accountKey?: string; relayer?: string; walletWithdrawal?: boolean; torPort?: number; @@ -29,7 +30,7 @@ export declare function getProgramOptions(options: commonProgramOptions): Promis fetchDataOptions: fetchDataOptions; }>; export declare function getProgramGraphAPI(options: commonProgramOptions, config: Config): string; -export declare function getProgramProvider(netId: BigNumberish, rpcUrl: string | undefined, config: Config, providerOptions?: getProviderOptions): JsonRpcProvider; +export declare function getProgramProvider(netId: NetIdType, rpcUrl: string | undefined, config: Config, providerOptions?: getProviderOptions): JsonRpcProvider; export declare function getProgramSigner({ options, provider, }: { options: 
commonProgramOptions; provider: Provider; @@ -37,7 +38,7 @@ export declare function getProgramSigner({ options, provider, }: { export declare function getProgramRelayer({ options, fetchDataOptions, netId, }: { options: commonProgramOptions; fetchDataOptions?: fetchDataOptions; - netId: number | string; + netId: NetIdType; }): Promise<{ validRelayers?: RelayerInfo[] | Relayer[]; invalidRelayers?: RelayerError[]; diff --git a/dist/services/data.d.ts b/dist/services/data.d.ts index 3f83108..9a46b52 100644 --- a/dist/services/data.d.ts +++ b/dist/services/data.d.ts @@ -3,10 +3,10 @@ import { BaseEvents, MinimalEvents } from './events'; export declare function existsAsync(fileOrDir: string): Promise; export declare function zipAsync(file: AsyncZippable): Promise; export declare function unzipAsync(data: Uint8Array): Promise; -export declare function saveEvents({ name, userDirectory, events, }: { - name: string; +export declare function saveUserFile({ fileName, userDirectory, dataString, }: { + fileName: string; userDirectory: string; - events: T[]; + dataString: string; }): Promise; export declare function loadSavedEvents({ name, userDirectory, deployedBlock, }: { name: string; diff --git a/dist/services/deposits.d.ts b/dist/services/deposits.d.ts index ce399c8..ae2ca21 100644 --- a/dist/services/deposits.d.ts +++ b/dist/services/deposits.d.ts @@ -1,7 +1,8 @@ +import type { NetIdType } from './networkConfig'; export type DepositType = { currency: string; amount: string; - netId: string | number; + netId: NetIdType; }; export type createDepositParams = { nullifier: bigint; @@ -34,7 +35,7 @@ export declare function createDeposit({ nullifier, secret }: createDepositParams export interface DepositConstructor { currency: string; amount: string; - netId: number; + netId: NetIdType; nullifier: bigint; secret: bigint; note: string; @@ -46,7 +47,7 @@ export interface DepositConstructor { export declare class Deposit { currency: string; amount: string; - netId: number; + netId: 
NetIdType; nullifier: bigint; secret: bigint; note: string; @@ -65,7 +66,7 @@ export type parsedInvoiceExec = DepositType & { export declare class Invoice { currency: string; amount: string; - netId: number; + netId: NetIdType; commitment: string; invoice: string; constructor(invoiceString: string); diff --git a/dist/services/encryptedNotes.d.ts b/dist/services/encryptedNotes.d.ts new file mode 100644 index 0000000..4e20fe0 --- /dev/null +++ b/dist/services/encryptedNotes.d.ts @@ -0,0 +1,48 @@ +import { EthEncryptedData } from '@metamask/eth-sig-util'; +import { Echoer } from '@tornado/contracts'; +import { Wallet } from 'ethers'; +import { EchoEvents, EncryptedNotesEvents } from './events'; +import type { NetIdType } from './networkConfig'; +export interface NoteToEncrypt { + address: string; + noteHex: string; +} +export interface DecryptedNotes { + blockNumber: number; + address: string; + noteHex: string; +} +export declare function packEncryptedMessage({ nonce, ephemPublicKey, ciphertext }: EthEncryptedData): string; +export declare function unpackEncryptedMessage(encryptedMessage: string): EthEncryptedData & { + messageBuff: string; +}; +export interface NoteAccountConstructor { + netId: NetIdType; + blockNumber?: number; + recoveryKey?: string; + Echoer: Echoer; +} +export declare class NoteAccount { + netId: NetIdType; + blockNumber?: number; + recoveryKey: string; + recoveryAddress: string; + recoveryPublicKey: string; + Echoer: Echoer; + constructor({ netId, blockNumber, recoveryKey, Echoer }: NoteAccountConstructor); + /** + * Intends to mock eth_getEncryptionPublicKey behavior from MetaMask + * In order to make the recoveryKey retrival from Echoer possible from the bare private key + */ + static getWalletPublicKey(wallet: Wallet): string; + getEncryptedAccount(walletPublicKey: string): { + encryptedData: EthEncryptedData; + data: string; + }; + /** + * Decrypt Echoer backuped note encryption account with private keys + */ + 
decryptAccountsWithWallet(wallet: Wallet, events: EchoEvents[]): NoteAccount[]; + decryptNotes(events: EncryptedNotesEvents[]): DecryptedNotes[]; + encryptNote({ address, noteHex }: NoteToEncrypt): string; +} diff --git a/dist/services/events/base.d.ts b/dist/services/events/base.d.ts index 7ad9138..910f907 100644 --- a/dist/services/events/base.d.ts +++ b/dist/services/events/base.d.ts @@ -1,12 +1,13 @@ import { BaseContract, Provider, EventLog, ContractEventName } from 'ethers'; -import type { Tornado, TornadoRouter, TornadoProxyLight, Governance, RelayerRegistry } from '@tornado/contracts'; +import type { Tornado, TornadoRouter, TornadoProxyLight, Governance, RelayerRegistry, Echoer } from '@tornado/contracts'; import { BatchEventsService, BatchBlockService, BatchTransactionService, BatchEventOnProgress, BatchBlockOnProgress } from '../batch'; import { fetchDataOptions } from '../providers'; -import type { BaseEvents, MinimalEvents, DepositsEvents, WithdrawalsEvents, EncryptedNotesEvents, GovernanceProposalCreatedEvents, GovernanceVotedEvents, GovernanceDelegatedEvents, GovernanceUndelegatedEvents, RegistersEvents } from './types'; +import type { NetIdType } from '../networkConfig'; +import type { BaseEvents, MinimalEvents, DepositsEvents, WithdrawalsEvents, EncryptedNotesEvents, AllGovernanceEvents, RegistersEvents, EchoEvents } from './types'; export declare const DEPOSIT = "deposit"; export declare const WITHDRAWAL = "withdrawal"; export type BaseEventsServiceConstructor = { - netId: number | string; + netId: NetIdType; provider: Provider; graphApi?: string; subgraphName?: string; @@ -28,7 +29,7 @@ export type BaseGraphParams = { onProgress?: BatchGraphOnProgress; }; export declare class BaseEventsService { - netId: number | string; + netId: NetIdType; provider: Provider; graphApi?: string; subgraphName?: string; @@ -81,7 +82,7 @@ export declare class BaseEventsService { }>; } export type BaseDepositsServiceConstructor = { - netId: number | string; + netId: 
NetIdType; provider: Provider; graphApi?: string; subgraphName?: string; @@ -110,8 +111,27 @@ export declare class BaseDepositsService extends BaseEventsService { + constructor({ netId, provider, graphApi, subgraphName, Echoer, deployedBlock, fetchDataOptions, }: BaseEchoServiceConstructor); + getInstanceName(): string; + getType(): string; + getGraphMethod(): string; + formatEvents(events: EventLog[]): Promise; + getEventsFromGraph({ fromBlock }: { + fromBlock: number; + }): Promise>; +} export type BaseEncryptedNotesServiceConstructor = { - netId: number | string; + netId: NetIdType; provider: Provider; graphApi?: string; subgraphName?: string; @@ -126,9 +146,8 @@ export declare class BaseEncryptedNotesService extends BaseEventsService; } -export type BaseGovernanceEventTypes = GovernanceProposalCreatedEvents | GovernanceVotedEvents | GovernanceDelegatedEvents | GovernanceUndelegatedEvents; export type BaseGovernanceServiceConstructor = { - netId: number | string; + netId: NetIdType; provider: Provider; graphApi?: string; subgraphName?: string; @@ -136,19 +155,19 @@ export type BaseGovernanceServiceConstructor = { deployedBlock?: number; fetchDataOptions?: fetchDataOptions; }; -export declare class BaseGovernanceService extends BaseEventsService { +export declare class BaseGovernanceService extends BaseEventsService { batchTransactionService: BatchTransactionService; constructor({ netId, provider, graphApi, subgraphName, Governance, deployedBlock, fetchDataOptions, }: BaseGovernanceServiceConstructor); getInstanceName(): string; getType(): string; getGraphMethod(): string; - formatEvents(events: EventLog[]): Promise; + formatEvents(events: EventLog[]): Promise; getEventsFromGraph({ fromBlock }: { fromBlock: number; - }): Promise>; + }): Promise>; } export type BaseRegistryServiceConstructor = { - netId: number | string; + netId: NetIdType; provider: Provider; graphApi?: string; subgraphName?: string; diff --git a/dist/services/events/node.d.ts 
b/dist/services/events/node.d.ts index f30de86..331204e 100644 --- a/dist/services/events/node.d.ts +++ b/dist/services/events/node.d.ts @@ -1,6 +1,6 @@ import { BatchBlockOnProgress, BatchEventOnProgress } from '../batch'; -import { BaseDepositsService, BaseEncryptedNotesService, BaseGovernanceService, BaseRegistryService, BaseDepositsServiceConstructor, BaseEncryptedNotesServiceConstructor, BaseGovernanceServiceConstructor, BaseRegistryServiceConstructor, BaseGovernanceEventTypes } from './base'; -import type { BaseEvents, DepositsEvents, WithdrawalsEvents, EncryptedNotesEvents, RegistersEvents } from './types'; +import { BaseDepositsService, BaseEncryptedNotesService, BaseGovernanceService, BaseRegistryService, BaseDepositsServiceConstructor, BaseEncryptedNotesServiceConstructor, BaseGovernanceServiceConstructor, BaseRegistryServiceConstructor, BaseEchoServiceConstructor, BaseEchoService } from './base'; +import type { BaseEvents, DepositsEvents, WithdrawalsEvents, EncryptedNotesEvents, RegistersEvents, AllGovernanceEvents, EchoEvents } from './types'; export type NodeDepositsServiceConstructor = BaseDepositsServiceConstructor & { cacheDirectory?: string; userDirectory?: string; @@ -17,6 +17,20 @@ export declare class NodeDepositsService extends BaseDepositsService { getEventsFromCache(): Promise>; saveEvents({ events, lastBlock }: BaseEvents): Promise; } +export type NodeEchoServiceConstructor = BaseEchoServiceConstructor & { + cacheDirectory?: string; + userDirectory?: string; +}; +export declare class NodeEchoService extends BaseEchoService { + cacheDirectory?: string; + userDirectory?: string; + constructor({ netId, provider, graphApi, subgraphName, Echoer, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeEchoServiceConstructor); + updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void; + updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void; + getEventsFromDB(): Promise>; + 
getEventsFromCache(): Promise>; + saveEvents({ events, lastBlock }: BaseEvents): Promise; +} export type NodeEncryptedNotesServiceConstructor = BaseEncryptedNotesServiceConstructor & { cacheDirectory?: string; userDirectory?: string; @@ -42,9 +56,9 @@ export declare class NodeGovernanceService extends BaseGovernanceService { updateEventProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void; updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters[0]): void; updateTransactionProgress({ currentIndex, totalIndex }: Parameters[0]): void; - getEventsFromDB(): Promise>; - getEventsFromCache(): Promise>; - saveEvents({ events, lastBlock }: BaseEvents): Promise; + getEventsFromDB(): Promise>; + getEventsFromCache(): Promise>; + saveEvents({ events, lastBlock }: BaseEvents): Promise; } export type NodeRegistryServiceConstructor = BaseRegistryServiceConstructor & { cacheDirectory?: string; diff --git a/dist/services/events/types.d.ts b/dist/services/events/types.d.ts index b49179a..51cadee 100644 --- a/dist/services/events/types.d.ts +++ b/dist/services/events/types.d.ts @@ -39,6 +39,7 @@ export type GovernanceUndelegatedEvents = GovernanceEvents & { account: string; delegateFrom: string; }; +export type AllGovernanceEvents = GovernanceProposalCreatedEvents | GovernanceVotedEvents | GovernanceDelegatedEvents | GovernanceUndelegatedEvents; export type RegistersEvents = MinimalEvents & RelayerParams; export type DepositsEvents = MinimalEvents & { commitment: string; @@ -52,6 +53,10 @@ export type WithdrawalsEvents = MinimalEvents & { fee: string; timestamp: number; }; +export type EchoEvents = MinimalEvents & { + address: string; + encryptedAccount: string; +}; export type EncryptedNotesEvents = MinimalEvents & { encryptedNote: string; }; diff --git a/dist/services/graphql/index.d.ts b/dist/services/graphql/index.d.ts index 38cbb8b..c9832ae 100644 --- a/dist/services/graphql/index.d.ts +++ b/dist/services/graphql/index.d.ts @@ -1,5 +1,5 @@ import { 
fetchDataOptions } from '../providers'; -import type { BaseGraphEvents, RegistersEvents, DepositsEvents, WithdrawalsEvents, EncryptedNotesEvents, BatchGraphOnProgress } from '../events'; +import type { BaseGraphEvents, RegistersEvents, DepositsEvents, WithdrawalsEvents, EncryptedNotesEvents, BatchGraphOnProgress, EchoEvents, AllGovernanceEvents } from '../events'; export * from './queries'; export type queryGraphParams = { graphApi: string; @@ -165,6 +165,29 @@ export interface getNoteAccountsReturns { lastSyncBlock: null | number; } export declare function getNoteAccounts({ graphApi, subgraphName, address, fetchDataOptions, }: getNoteAccountsParams): Promise; +export interface GraphEchoEvents { + noteAccounts: { + id: string; + blockNumber: string; + address: string; + encryptedAccount: string; + }[]; + _meta: { + block: { + number: number; + }; + hasIndexingErrors: boolean; + }; +} +export interface getGraphEchoEventsParams { + graphApi: string; + subgraphName: string; + fromBlock: number; + fetchDataOptions?: fetchDataOptions; + onProgress?: BatchGraphOnProgress; +} +export declare function getGraphEchoEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions, }: getGraphEchoEventsParams): Promise; +export declare function getAllGraphEchoEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions, onProgress, }: getGraphEchoEventsParams): Promise>; export interface GraphEncryptedNotes { encryptedNotes: { blockNumber: string; @@ -188,3 +211,56 @@ export interface getEncryptedNotesParams { } export declare function getEncryptedNotes({ graphApi, subgraphName, fromBlock, fetchDataOptions, }: getEncryptedNotesParams): Promise; export declare function getAllEncryptedNotes({ graphApi, subgraphName, fromBlock, fetchDataOptions, onProgress, }: getEncryptedNotesParams): Promise>; +export interface GraphGovernanceEvents { + proposals: { + blockNumber: number; + logIndex: number; + transactionHash: string; + proposalId: number; + proposer: string; + target: string; + 
startTime: number; + endTime: number; + description: string; + }[]; + votes: { + blockNumber: number; + logIndex: number; + transactionHash: string; + proposalId: number; + voter: string; + support: boolean; + votes: string; + from: string; + input: string; + }[]; + delegates: { + blockNumber: number; + logIndex: number; + transactionHash: string; + account: string; + delegateTo: string; + }[]; + undelegates: { + blockNumber: number; + logIndex: number; + transactionHash: string; + account: string; + delegateFrom: string; + }[]; + _meta: { + block: { + number: number; + }; + hasIndexingErrors: boolean; + }; +} +export interface getGovernanceEventsParams { + graphApi: string; + subgraphName: string; + fromBlock: number; + fetchDataOptions?: fetchDataOptions; + onProgress?: BatchGraphOnProgress; +} +export declare function getGovernanceEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions, }: getGovernanceEventsParams): Promise; +export declare function getAllGovernanceEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions, onProgress, }: getGovernanceEventsParams): Promise>; diff --git a/dist/services/graphql/queries.d.ts b/dist/services/graphql/queries.d.ts index 3193fc1..26f8777 100644 --- a/dist/services/graphql/queries.d.ts +++ b/dist/services/graphql/queries.d.ts @@ -4,4 +4,7 @@ export declare const GET_REGISTERED = "\n query getRegistered($first: Int, $fro export declare const GET_DEPOSITS = "\n query getDeposits($currency: String!, $amount: String!, $first: Int, $fromBlock: Int) {\n deposits(first: $first, orderBy: index, orderDirection: asc, where: { \n amount: $amount,\n currency: $currency,\n blockNumber_gte: $fromBlock\n }) {\n id\n blockNumber\n commitment\n index\n timestamp\n from\n }\n _meta {\n block {\n number\n }\n hasIndexingErrors\n }\n }\n"; export declare const GET_WITHDRAWALS = "\n query getWithdrawals($currency: String!, $amount: String!, $first: Int, $fromBlock: Int!) 
{\n withdrawals(first: $first, orderBy: blockNumber, orderDirection: asc, where: { \n currency: $currency,\n amount: $amount,\n blockNumber_gte: $fromBlock\n }) {\n id\n blockNumber\n nullifier\n to\n fee\n timestamp\n }\n _meta {\n block {\n number\n }\n hasIndexingErrors\n }\n }\n"; export declare const GET_NOTE_ACCOUNTS = "\n query getNoteAccount($address: String!) {\n noteAccounts(where: { address: $address }) {\n id\n index\n address\n encryptedAccount\n }\n _meta {\n block {\n number\n }\n hasIndexingErrors\n }\n }\n"; +export declare const GET_ECHO_EVENTS = "\n query getNoteAccounts($first: Int, $fromBlock: Int) {\n noteAccounts(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) {\n id\n blockNumber\n address\n encryptedAccount\n }\n _meta {\n block {\n number\n }\n hasIndexingErrors\n }\n }\n"; export declare const GET_ENCRYPTED_NOTES = "\n query getEncryptedNotes($first: Int, $fromBlock: Int) {\n encryptedNotes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) {\n blockNumber\n index\n transactionHash\n encryptedNote\n }\n _meta {\n block {\n number\n }\n hasIndexingErrors\n }\n }\n"; +export declare const GET_GOVERNANCE_EVENTS = "\n query getGovernanceEvents($first: Int, $fromBlock: Int) {\n proposals(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) {\n blockNumber\n logIndex\n transactionHash\n proposalId\n proposer\n target\n startTime\n endTime\n description\n }\n votes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) {\n blockNumber\n logIndex\n transactionHash\n proposalId\n voter\n support\n votes\n from\n input\n }\n delegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) {\n blockNumber\n logIndex\n transactionHash\n account\n delegateTo\n }\n undelegates(first: $first, orderBy: blockNumber, orderDirection: 
asc, where: { blockNumber_gte: $fromBlock }) {\n blockNumber\n logIndex\n transactionHash\n account\n delegateFrom\n }\n _meta {\n block {\n number\n }\n hasIndexingErrors\n }\n }\n"; +export declare const GET_GOVERNANCE_APY = "\n stakeDailyBurns(first: 30, orderBy: date, orderDirection: desc) {\n id\n date\n dailyAmountBurned\n }\n"; diff --git a/dist/services/index.d.ts b/dist/services/index.d.ts index 764eed9..aa959c0 100644 --- a/dist/services/index.d.ts +++ b/dist/services/index.d.ts @@ -4,6 +4,7 @@ export * from './schemas'; export * from './batch'; export * from './data'; export * from './deposits'; +export * from './encryptedNotes'; export * from './fees'; export * from './merkleTree'; export * from './mimc'; @@ -15,5 +16,6 @@ export * from './prices'; export * from './providers'; export * from './relayerClient'; export * from './tokens'; +export * from './treeCache'; export * from './utils'; export * from './websnark'; diff --git a/dist/services/merkleTree.d.ts b/dist/services/merkleTree.d.ts index be26649..663ba99 100644 --- a/dist/services/merkleTree.d.ts +++ b/dist/services/merkleTree.d.ts @@ -1,10 +1,11 @@ -import { MerkleTree, Element } from '@tornado/fixed-merkle-tree'; +import { MerkleTree, PartialMerkleTree, Element, TreeEdge } from '@tornado/fixed-merkle-tree'; import type { Tornado } from '@tornado/contracts'; import type { DepositType } from './deposits'; import type { DepositsEvents } from './events'; +import type { NetIdType } from './networkConfig'; export type MerkleTreeConstructor = DepositType & { Tornado: Tornado; - commitment?: string; + commitmentHex?: string; merkleTreeHeight?: number; emptyElement?: string; merkleWorkerPath?: string; @@ -12,18 +13,18 @@ export type MerkleTreeConstructor = DepositType & { export declare class MerkleTreeService { currency: string; amount: string; - netId: number; + netId: NetIdType; Tornado: Tornado; - commitment?: string; + commitmentHex?: string; instanceName: string; merkleTreeHeight: number; 
emptyElement: string; merkleWorkerPath?: string; - constructor({ netId, amount, currency, Tornado, commitment, merkleTreeHeight, emptyElement, merkleWorkerPath, }: MerkleTreeConstructor); - createTree({ events }: { - events: Element[]; - }): Promise; - verifyTree({ events }: { - events: DepositsEvents[]; - }): Promise; + constructor({ netId, amount, currency, Tornado, commitmentHex, merkleTreeHeight, emptyElement, merkleWorkerPath, }: MerkleTreeConstructor); + createTree(events: Element[]): Promise; + createPartialTree({ edge, elements }: { + edge: TreeEdge; + elements: Element[]; + }): Promise; + verifyTree(events: DepositsEvents[]): Promise; } diff --git a/dist/services/networkConfig.d.ts b/dist/services/networkConfig.d.ts index 28c1626..1789ac2 100644 --- a/dist/services/networkConfig.d.ts +++ b/dist/services/networkConfig.d.ts @@ -1,3 +1,17 @@ +/** + * Type of default supported networks + */ +export declare enum NetId { + MAINNET = 1, + BSC = 56, + POLYGON = 137, + OPTIMISM = 10, + ARBITRUM = 42161, + GNOSIS = 100, + AVALANCHE = 43114, + SEPOLIA = 11155111 +} +export type NetIdType = NetId | number; export interface RpcUrl { name: string; url: string; @@ -37,20 +51,20 @@ export type Config = { }; nativeCurrency: string; currencyName: string; - explorerUrl: { - tx: string; - address: string; - block: string; - }; + explorerUrl: string; merkleTreeHeight: number; emptyElement: string; networkName: string; deployedBlock: number; rpcUrls: RpcUrls; - multicall: string; + multicallContract: string; routerContract: string; - registryContract?: string; echoContract: string; + offchainOracleContract?: string; + tornContract?: string; + governanceContract?: string; + stakingRewardsContract?: string; + registryContract?: string; aggregatorContract?: string; reverseRecordsContract?: string; gasPriceOracleContract?: string; @@ -58,6 +72,7 @@ export type Config = { ovmGasPriceOracleContract?: string; tornadoSubgraph: string; registrySubgraph?: string; + governanceSubgraph?: 
string; subgraphs: SubgraphUrls; tokens: TokenInstances; optionalTokens?: string[]; @@ -70,17 +85,32 @@ export type Config = { REGISTRY_BLOCK?: number; MINING_BLOCK_TIME?: number; }; - 'torn.contract.tornadocash.eth'?: string; - 'governance.contract.tornadocash.eth'?: string; - 'staking-rewards.contract.tornadocash.eth'?: string; - 'tornado-router.contract.tornadocash.eth'?: string; - 'tornado-proxy-light.contract.tornadocash.eth'?: string; }; export type networkConfig = { - [key in string]: Config; + [key in NetIdType]: Config; }; -export declare const blockSyncInterval = 10000; -export declare const enabledChains: string[]; -export declare const networkConfig: networkConfig; -export declare const subdomains: string[]; -export default networkConfig; +export declare const defaultConfig: networkConfig; +export declare const enabledChains: number[]; +/** + * Custom config object to extend default config + * + * Inspired by getUrlFunc from ethers.js + * https://github.com/ethers-io/ethers.js/blob/v6/src.ts/utils/fetch.ts#L59 + */ +export declare let customConfig: networkConfig; +/** + * Add or override existing network config object + * + * Could be also called on the UI hook so that the UI could allow people to use custom privacy pools + */ +export declare function addNetwork(newConfig: networkConfig): void; +export declare function getNetworkConfig(): networkConfig; +export declare function getConfig(netId: NetIdType): Config; +export declare function getInstanceByAddress({ netId, address }: { + netId: NetIdType; + address: string; +}): { + amount: string; + currency: string; +} | undefined; +export declare function getSubdomains(): string[]; diff --git a/dist/services/parser.d.ts b/dist/services/parser.d.ts index 1f191a7..35da7f2 100644 --- a/dist/services/parser.d.ts +++ b/dist/services/parser.d.ts @@ -4,3 +4,7 @@ export declare function parseRelayer(value?: string): string; export declare function parseAddress(value?: string): string; export declare function 
parseMnemonic(value?: string): string; export declare function parseKey(value?: string): string; +/** + * Recovery key shouldn't have a 0x prefix (Also this is how the UI generates) + */ +export declare function parseRecoveryKey(value?: string): string; diff --git a/dist/services/providers.d.ts b/dist/services/providers.d.ts index 3396d13..d85ddd1 100644 --- a/dist/services/providers.d.ts +++ b/dist/services/providers.d.ts @@ -3,9 +3,9 @@ /// import type { EventEmitter } from 'stream'; import type { RequestOptions } from 'http'; -import { JsonRpcApiProvider, JsonRpcProvider, Wallet, FetchGetUrlFunc, Provider, SigningKey, TransactionRequest, JsonRpcSigner, BrowserProvider, Networkish, Eip1193Provider, VoidSigner, FetchUrlFeeDataNetworkPlugin, BigNumberish } from 'ethers'; +import { JsonRpcApiProvider, JsonRpcProvider, Wallet, FetchGetUrlFunc, Provider, SigningKey, TransactionRequest, JsonRpcSigner, BrowserProvider, Networkish, Eip1193Provider, VoidSigner, FetchUrlFeeDataNetworkPlugin } from 'ethers'; import type { RequestInfo, RequestInit, Response, HeadersInit } from 'node-fetch'; -import type { Config } from './networkConfig'; +import type { Config, NetIdType } from './networkConfig'; declare global { interface Window { ethereum?: Eip1193Provider & EventEmitter; @@ -41,7 +41,7 @@ export type getProviderOptions = fetchDataOptions & { }; export declare function getGasOraclePlugin(networkKey: string, fetchOptions?: getProviderOptions): FetchUrlFeeDataNetworkPlugin; export declare function getProvider(rpcUrl: string, fetchOptions?: getProviderOptions): Promise; -export declare function getProviderWithNetId(netId: BigNumberish, rpcUrl: string, config: Config, fetchOptions?: getProviderOptions): JsonRpcProvider; +export declare function getProviderWithNetId(netId: NetIdType, rpcUrl: string, config: Config, fetchOptions?: getProviderOptions): JsonRpcProvider; export declare const populateTransaction: (signer: TornadoWallet | TornadoVoidSigner | TornadoRpcSigner, tx: 
TransactionRequest) => Promise; export type TornadoWalletOptions = { gasPriceBump?: number; @@ -80,7 +80,7 @@ export declare class TornadoRpcSigner extends JsonRpcSigner { export type connectWalletFunc = (...args: any[]) => Promise; export type handleWalletFunc = (...args: any[]) => void; export type TornadoBrowserProviderOptions = TornadoWalletOptions & { - webChainId?: BigNumberish; + webChainId?: NetIdType; connectWallet?: connectWalletFunc; handleNetworkChanges?: handleWalletFunc; handleAccountChanges?: handleWalletFunc; diff --git a/dist/services/relayerClient.d.ts b/dist/services/relayerClient.d.ts index a663fa8..351ff41 100644 --- a/dist/services/relayerClient.d.ts +++ b/dist/services/relayerClient.d.ts @@ -1,6 +1,6 @@ import type { Aggregator } from '@tornado/contracts'; import type { RelayerStructOutput } from '@tornado/contracts/dist/contracts/Governance/Aggregator/Aggregator'; -import type { Config } from './networkConfig'; +import { NetIdType, Config } from './networkConfig'; import { fetchDataOptions } from './providers'; import type { snarkProofs } from './websnark'; export declare const MIN_STAKE_BALANCE: bigint; @@ -9,20 +9,22 @@ export interface RelayerParams { relayerAddress?: string; } export interface Relayer { - netId: number; + netId: NetIdType; url: string; + hostname: string; rewardAccount: string; + instances: string[]; + gasPrice?: number; + ethPrices?: { + [key in string]: string; + }; currentQueue: number; tornadoServiceFee: number; } export type RelayerInfo = Relayer & { - hostname: string; ensName: string; stakeBalance: bigint; relayerAddress: string; - ethPrices?: { - [key in string]: string; - }; }; export type RelayerError = { hostname: string; @@ -46,7 +48,7 @@ export interface RelayerStatus { fast: number; additionalProperties?: number; }; - netId: number; + netId: NetIdType; ethPrices?: { [key in string]: string; }; @@ -84,12 +86,20 @@ export interface semanticVersion { buildmetadata?: string; } export declare function 
parseSemanticVersion(version: string): semanticVersion; -export declare function isRelayerUpdated(relayerVersion: string, netId: number | string): boolean; +export declare function isRelayerUpdated(relayerVersion: string, netId: NetIdType): boolean; export declare function calculateScore({ stakeBalance, tornadoServiceFee }: RelayerInfo, minFee?: number, maxFee?: number): bigint; export declare function getWeightRandom(weightsScores: bigint[], random: bigint): number; -export declare function pickWeightedRandomRelayer(relayers: RelayerInfo[], netId: string | number): RelayerInfo; +export type RelayerInstanceList = { + [key in string]: { + instanceAddress: { + [key in string]: string; + }; + }; +}; +export declare function getSupportedInstances(instanceList: RelayerInstanceList): string[]; +export declare function pickWeightedRandomRelayer(relayers: RelayerInfo[], netId: NetIdType): RelayerInfo; export interface RelayerClientConstructor { - netId: number | string; + netId: NetIdType; config: Config; Aggregator: Aggregator; fetchDataOptions?: fetchDataOptions; @@ -98,7 +108,7 @@ export type RelayerClientWithdraw = snarkProofs & { contract: string; }; export declare class RelayerClient { - netId: number; + netId: NetIdType; config: Config; Aggregator: Aggregator; selectedRelayer?: Relayer; diff --git a/dist/services/schemas/status.d.ts b/dist/services/schemas/status.d.ts index 83912ee..f5e3b4e 100644 --- a/dist/services/schemas/status.d.ts +++ b/dist/services/schemas/status.d.ts @@ -1,4 +1,4 @@ -import type { Config } from '../networkConfig'; +import { Config, NetIdType } from '../networkConfig'; export type statusInstanceType = { type: string; properties: { @@ -88,5 +88,5 @@ declare const bnType: { type: string; BN: boolean; }; -export declare function getStatusSchema(netId: number | string, config: Config): statusSchema; +export declare function getStatusSchema(netId: NetIdType, config: Config): statusSchema; export {}; diff --git a/dist/services/treeCache.d.ts 
b/dist/services/treeCache.d.ts new file mode 100644 index 0000000..926c37a --- /dev/null +++ b/dist/services/treeCache.d.ts @@ -0,0 +1,35 @@ +/** + * Create tree cache file from node.js + * + * Only works for node.js, modified from https://github.com/tornadocash/tornado-classic-ui/blob/master/scripts/updateTree.js + */ +import { MerkleTree } from '@tornado/fixed-merkle-tree'; +import { DepositsEvents } from './events'; +import type { NetIdType } from './networkConfig'; +export interface TreeCacheConstructor { + netId: NetIdType; + amount: string; + currency: string; + userDirectory: string; + PARTS_COUNT?: number; + LEAVES?: number; + zeroElement?: string; +} +export interface treeMetadata { + blockNumber: number; + logIndex: number; + transactionHash: string; + timestamp: number; + from: string; + leafIndex: number; +} +export declare class TreeCache { + netId: NetIdType; + amount: string; + currency: string; + userDirectory: string; + PARTS_COUNT: number; + constructor({ netId, amount, currency, userDirectory, PARTS_COUNT }: TreeCacheConstructor); + getInstanceName(): string; + createTree(events: DepositsEvents[], tree: MerkleTree): Promise; +} diff --git a/dist/services/utils.d.ts b/dist/services/utils.d.ts index 6795036..8ffe743 100644 --- a/dist/services/utils.d.ts +++ b/dist/services/utils.d.ts @@ -1,15 +1,20 @@ /// +/// +import { webcrypto } from 'crypto'; import BN from 'bn.js'; import type { BigNumberish } from 'ethers'; type bnInput = number | string | number[] | Uint8Array | Buffer | BN; export declare const isNode: boolean; +export declare const crypto: webcrypto.Crypto; export declare const chunk: (arr: T[], size: number) => T[][]; export declare function sleep(ms: number): Promise; export declare function validateUrl(url: string, protocols?: string[]): boolean; +export declare function concatBytes(...arrays: Uint8Array[]): Uint8Array; export declare function bufferToBytes(b: Buffer): Uint8Array; export declare function bytesToBase64(bytes: 
Uint8Array): string; export declare function base64ToBytes(base64: string): Uint8Array; export declare function bytesToHex(bytes: Uint8Array): string; +export declare function hexToBytes(hexString: string): Uint8Array; export declare function bytesToBN(bytes: Uint8Array): bigint; export declare function bnToBytes(bigint: bigint | string): Uint8Array; export declare function leBuff2Int(bytes: Uint8Array): BN; diff --git a/package.json b/package.json index 4905228..1ec5d19 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,7 @@ "lint": "eslint src/**/*.ts --ext .ts --ignore-pattern src/typechain", "build:node": "ts-node scripts/fflate.ts && rollup -c", "build:web": "webpack", - "build": "yarn build:node && yarn build:web", + "build": "yarn types && yarn build:node && yarn build:web", "start": "ts-node src/cli.ts", "startHelp": "ts-node src/cli.ts help", "createDeposit": "ts-node src/cli.ts create", diff --git a/static/merkleTreeWorker.js b/static/merkleTreeWorker.js index c64d6b6..fd4df55 100644 --- a/static/merkleTreeWorker.js +++ b/static/merkleTreeWorker.js @@ -19824,5 +19824,5 @@ if (isNode && threads) { postMessage(merkleTree.toString()); })); } else { - throw new Error("This browser / environment doesn't support workers!"); + throw new Error("This browser / environment does not support workers!"); } diff --git a/static/merkleTreeWorker.umd.js b/static/merkleTreeWorker.umd.js index a407b99..b496862 100644 --- a/static/merkleTreeWorker.umd.js +++ b/static/merkleTreeWorker.umd.js @@ -11,6 +11,11175 @@ return /******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ +/***/ 66289: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.RLP = exports.utils = exports.decode = exports.encode = void 0; +/** + * RLP Encoding based on https://ethereum.org/en/developers/docs/data-structures-and-encoding/rlp/ + * This function takes in data, converts it to 
Uint8Array if not, + * and adds a length for recursion. + * @param input Will be converted to Uint8Array + * @returns Uint8Array of encoded data + **/ +function encode(input) { + if (Array.isArray(input)) { + const output = []; + let outputLength = 0; + for (let i = 0; i < input.length; i++) { + const encoded = encode(input[i]); + output.push(encoded); + outputLength += encoded.length; + } + return concatBytes(encodeLength(outputLength, 192), ...output); + } + const inputBuf = toBytes(input); + if (inputBuf.length === 1 && inputBuf[0] < 128) { + return inputBuf; + } + return concatBytes(encodeLength(inputBuf.length, 128), inputBuf); +} +exports.encode = encode; +/** + * Slices a Uint8Array, throws if the slice goes out-of-bounds of the Uint8Array. + * E.g. `safeSlice(hexToBytes('aa'), 1, 2)` will throw. + * @param input + * @param start + * @param end + */ +function safeSlice(input, start, end) { + if (end > input.length) { + throw new Error('invalid RLP (safeSlice): end slice of Uint8Array out-of-bounds'); + } + return input.slice(start, end); +} +/** + * Parse integers. 
Check if there is no leading zeros + * @param v The value to parse + */ +function decodeLength(v) { + if (v[0] === 0) { + throw new Error('invalid RLP: extra zeros'); + } + return parseHexByte(bytesToHex(v)); +} +function encodeLength(len, offset) { + if (len < 56) { + return Uint8Array.from([len + offset]); + } + const hexLength = numberToHex(len); + const lLength = hexLength.length / 2; + const firstByte = numberToHex(offset + 55 + lLength); + return Uint8Array.from(hexToBytes(firstByte + hexLength)); +} +function decode(input, stream = false) { + if (typeof input === 'undefined' || input === null || input.length === 0) { + return Uint8Array.from([]); + } + const inputBytes = toBytes(input); + const decoded = _decode(inputBytes); + if (stream) { + return decoded; + } + if (decoded.remainder.length !== 0) { + throw new Error('invalid RLP: remainder must be zero'); + } + return decoded.data; +} +exports.decode = decode; +/** Decode an input with RLP */ +function _decode(input) { + let length, llength, data, innerRemainder, d; + const decoded = []; + const firstByte = input[0]; + if (firstByte <= 0x7f) { + // a single byte whose value is in the [0x00, 0x7f] range, that byte is its own RLP encoding. + return { + data: input.slice(0, 1), + remainder: input.slice(1), + }; + } + else if (firstByte <= 0xb7) { + // string is 0-55 bytes long. A single byte with value 0x80 plus the length of the string followed by the string + // The range of the first byte is [0x80, 0xb7] + length = firstByte - 0x7f; + // set 0x80 null to 0 + if (firstByte === 0x80) { + data = Uint8Array.from([]); + } + else { + data = safeSlice(input, 1, length); + } + if (length === 2 && data[0] < 0x80) { + throw new Error('invalid RLP encoding: invalid prefix, single byte < 0x80 are not prefixed'); + } + return { + data, + remainder: input.slice(length), + }; + } + else if (firstByte <= 0xbf) { + // string is greater than 55 bytes long. 
A single byte with the value (0xb7 plus the length of the length), + // followed by the length, followed by the string + llength = firstByte - 0xb6; + if (input.length - 1 < llength) { + throw new Error('invalid RLP: not enough bytes for string length'); + } + length = decodeLength(safeSlice(input, 1, llength)); + if (length <= 55) { + throw new Error('invalid RLP: expected string length to be greater than 55'); + } + data = safeSlice(input, llength, length + llength); + return { + data, + remainder: input.slice(length + llength), + }; + } + else if (firstByte <= 0xf7) { + // a list between 0-55 bytes long + length = firstByte - 0xbf; + innerRemainder = safeSlice(input, 1, length); + while (innerRemainder.length) { + d = _decode(innerRemainder); + decoded.push(d.data); + innerRemainder = d.remainder; + } + return { + data: decoded, + remainder: input.slice(length), + }; + } + else { + // a list over 55 bytes long + llength = firstByte - 0xf6; + length = decodeLength(safeSlice(input, 1, llength)); + if (length < 56) { + throw new Error('invalid RLP: encoded list too short'); + } + const totalLength = llength + length; + if (totalLength > input.length) { + throw new Error('invalid RLP: total length is larger than the data'); + } + innerRemainder = safeSlice(input, llength, totalLength); + while (innerRemainder.length) { + d = _decode(innerRemainder); + decoded.push(d.data); + innerRemainder = d.remainder; + } + return { + data: decoded, + remainder: input.slice(totalLength), + }; + } +} +const cachedHexes = Array.from({ length: 256 }, (_v, i) => i.toString(16).padStart(2, '0')); +function bytesToHex(uint8a) { + // Pre-caching chars with `cachedHexes` speeds this up 6x + let hex = ''; + for (let i = 0; i < uint8a.length; i++) { + hex += cachedHexes[uint8a[i]]; + } + return hex; +} +function parseHexByte(hexByte) { + const byte = Number.parseInt(hexByte, 16); + if (Number.isNaN(byte)) + throw new Error('Invalid byte sequence'); + return byte; +} +// Caching slows it 
down 2-3x +function hexToBytes(hex) { + if (typeof hex !== 'string') { + throw new TypeError('hexToBytes: expected string, got ' + typeof hex); + } + if (hex.length % 2) + throw new Error('hexToBytes: received invalid unpadded hex'); + const array = new Uint8Array(hex.length / 2); + for (let i = 0; i < array.length; i++) { + const j = i * 2; + array[i] = parseHexByte(hex.slice(j, j + 2)); + } + return array; +} +/** Concatenates two Uint8Arrays into one. */ +function concatBytes(...arrays) { + if (arrays.length === 1) + return arrays[0]; + const length = arrays.reduce((a, arr) => a + arr.length, 0); + const result = new Uint8Array(length); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const arr = arrays[i]; + result.set(arr, pad); + pad += arr.length; + } + return result; +} +function utf8ToBytes(utf) { + return new TextEncoder().encode(utf); +} +/** Transform an integer into its hexadecimal value */ +function numberToHex(integer) { + if (integer < 0) { + throw new Error('Invalid integer as argument, must be unsigned!'); + } + const hex = integer.toString(16); + return hex.length % 2 ? `0${hex}` : hex; +} +/** Pad a string to be even */ +function padToEven(a) { + return a.length % 2 ? `0${a}` : a; +} +/** Check if a string is prefixed by 0x */ +function isHexPrefixed(str) { + return str.length >= 2 && str[0] === '0' && str[1] === 'x'; +} +/** Removes 0x from a given String */ +function stripHexPrefix(str) { + if (typeof str !== 'string') { + return str; + } + return isHexPrefixed(str) ? 
str.slice(2) : str; +} +/** Transform anything into a Uint8Array */ +function toBytes(v) { + if (v instanceof Uint8Array) { + return v; + } + if (typeof v === 'string') { + if (isHexPrefixed(v)) { + return hexToBytes(padToEven(stripHexPrefix(v))); + } + return utf8ToBytes(v); + } + if (typeof v === 'number' || typeof v === 'bigint') { + if (!v) { + return Uint8Array.from([]); + } + return hexToBytes(numberToHex(v)); + } + if (v === null || v === undefined) { + return Uint8Array.from([]); + } + throw new Error('toBytes: received unsupported type ' + typeof v); +} +exports.utils = { + bytesToHex, + concatBytes, + hexToBytes, + utf8ToBytes, +}; +exports.RLP = { encode, decode }; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 16284: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.accountBodyToRLP = exports.accountBodyToSlim = exports.accountBodyFromSlim = exports.isZeroAddress = exports.zeroAddress = exports.importPublic = exports.privateToAddress = exports.privateToPublic = exports.publicToAddress = exports.pubToAddress = exports.isValidPublic = exports.isValidPrivate = exports.generateAddress2 = exports.generateAddress = exports.isValidChecksumAddress = exports.toChecksumAddress = exports.isValidAddress = exports.Account = void 0; +const rlp_1 = __webpack_require__(66289); +const keccak_1 = __webpack_require__(32019); +const secp256k1_1 = __webpack_require__(26513); +const utils_1 = __webpack_require__(82672); +const bytes_1 = __webpack_require__(77312); +const constants_1 = __webpack_require__(89838); +const helpers_1 = __webpack_require__(35546); +const internal_1 = __webpack_require__(59498); +const _0n = BigInt(0); +class Account { + /** + * This constructor assigns and validates the values. 
+ * Use the static factory methods to assist in creating an Account from varying data types. + */ + constructor(nonce = _0n, balance = _0n, storageRoot = constants_1.KECCAK256_RLP, codeHash = constants_1.KECCAK256_NULL) { + this.nonce = nonce; + this.balance = balance; + this.storageRoot = storageRoot; + this.codeHash = codeHash; + this._validate(); + } + static fromAccountData(accountData) { + const { nonce, balance, storageRoot, codeHash } = accountData; + return new Account(nonce !== undefined ? (0, bytes_1.bufferToBigInt)((0, bytes_1.toBuffer)(nonce)) : undefined, balance !== undefined ? (0, bytes_1.bufferToBigInt)((0, bytes_1.toBuffer)(balance)) : undefined, storageRoot !== undefined ? (0, bytes_1.toBuffer)(storageRoot) : undefined, codeHash !== undefined ? (0, bytes_1.toBuffer)(codeHash) : undefined); + } + static fromRlpSerializedAccount(serialized) { + const values = (0, bytes_1.arrToBufArr)(rlp_1.RLP.decode(Uint8Array.from(serialized))); + if (!Array.isArray(values)) { + throw new Error('Invalid serialized account input. Must be array'); + } + return this.fromValuesArray(values); + } + static fromValuesArray(values) { + const [nonce, balance, storageRoot, codeHash] = values; + return new Account((0, bytes_1.bufferToBigInt)(nonce), (0, bytes_1.bufferToBigInt)(balance), storageRoot, codeHash); + } + _validate() { + if (this.nonce < _0n) { + throw new Error('nonce must be greater than zero'); + } + if (this.balance < _0n) { + throw new Error('balance must be greater than zero'); + } + if (this.storageRoot.length !== 32) { + throw new Error('storageRoot must have a length of 32'); + } + if (this.codeHash.length !== 32) { + throw new Error('codeHash must have a length of 32'); + } + } + /** + * Returns a Buffer Array of the raw Buffers for the account, in order. 
+ */ + raw() { + return [ + (0, bytes_1.bigIntToUnpaddedBuffer)(this.nonce), + (0, bytes_1.bigIntToUnpaddedBuffer)(this.balance), + this.storageRoot, + this.codeHash, + ]; + } + /** + * Returns the RLP serialization of the account as a `Buffer`. + */ + serialize() { + return Buffer.from(rlp_1.RLP.encode((0, bytes_1.bufArrToArr)(this.raw()))); + } + /** + * Returns a `Boolean` determining if the account is a contract. + */ + isContract() { + return !this.codeHash.equals(constants_1.KECCAK256_NULL); + } + /** + * Returns a `Boolean` determining if the account is empty complying to the definition of + * account emptiness in [EIP-161](https://eips.ethereum.org/EIPS/eip-161): + * "An account is considered empty when it has no code and zero nonce and zero balance." + */ + isEmpty() { + return this.balance === _0n && this.nonce === _0n && this.codeHash.equals(constants_1.KECCAK256_NULL); + } +} +exports.Account = Account; +/** + * Checks if the address is a valid. Accepts checksummed addresses too. + */ +const isValidAddress = function (hexAddress) { + try { + (0, helpers_1.assertIsString)(hexAddress); + } + catch (e) { + return false; + } + return /^0x[0-9a-fA-F]{40}$/.test(hexAddress); +}; +exports.isValidAddress = isValidAddress; +/** + * Returns a checksummed address. + * + * If an eip1191ChainId is provided, the chainId will be included in the checksum calculation. This + * has the effect of checksummed addresses for one chain having invalid checksums for others. + * For more details see [EIP-1191](https://eips.ethereum.org/EIPS/eip-1191). + * + * WARNING: Checksums with and without the chainId will differ and the EIP-1191 checksum is not + * backwards compatible to the original widely adopted checksum format standard introduced in + * [EIP-55](https://eips.ethereum.org/EIPS/eip-55), so this will break in existing applications. + * Usage of this EIP is therefore discouraged unless you have a very targeted use case. 
+ */ +const toChecksumAddress = function (hexAddress, eip1191ChainId) { + (0, helpers_1.assertIsHexString)(hexAddress); + const address = (0, internal_1.stripHexPrefix)(hexAddress).toLowerCase(); + let prefix = ''; + if (eip1191ChainId !== undefined) { + const chainId = (0, bytes_1.bufferToBigInt)((0, bytes_1.toBuffer)(eip1191ChainId)); + prefix = chainId.toString() + '0x'; + } + const buf = Buffer.from(prefix + address, 'utf8'); + const hash = (0, utils_1.bytesToHex)((0, keccak_1.keccak256)(buf)); + let ret = '0x'; + for (let i = 0; i < address.length; i++) { + if (parseInt(hash[i], 16) >= 8) { + ret += address[i].toUpperCase(); + } + else { + ret += address[i]; + } + } + return ret; +}; +exports.toChecksumAddress = toChecksumAddress; +/** + * Checks if the address is a valid checksummed address. + * + * See toChecksumAddress' documentation for details about the eip1191ChainId parameter. + */ +const isValidChecksumAddress = function (hexAddress, eip1191ChainId) { + return (0, exports.isValidAddress)(hexAddress) && (0, exports.toChecksumAddress)(hexAddress, eip1191ChainId) === hexAddress; +}; +exports.isValidChecksumAddress = isValidChecksumAddress; +/** + * Generates an address of a newly created contract. 
+ * @param from The address which is creating this new address + * @param nonce The nonce of the from account + */ +const generateAddress = function (from, nonce) { + (0, helpers_1.assertIsBuffer)(from); + (0, helpers_1.assertIsBuffer)(nonce); + if ((0, bytes_1.bufferToBigInt)(nonce) === BigInt(0)) { + // in RLP we want to encode null in the case of zero nonce + // read the RLP documentation for an answer if you dare + return Buffer.from((0, keccak_1.keccak256)(rlp_1.RLP.encode((0, bytes_1.bufArrToArr)([from, null])))).slice(-20); + } + // Only take the lower 160bits of the hash + return Buffer.from((0, keccak_1.keccak256)(rlp_1.RLP.encode((0, bytes_1.bufArrToArr)([from, nonce])))).slice(-20); +}; +exports.generateAddress = generateAddress; +/** + * Generates an address for a contract created using CREATE2. + * @param from The address which is creating this new address + * @param salt A salt + * @param initCode The init code of the contract being created + */ +const generateAddress2 = function (from, salt, initCode) { + (0, helpers_1.assertIsBuffer)(from); + (0, helpers_1.assertIsBuffer)(salt); + (0, helpers_1.assertIsBuffer)(initCode); + if (from.length !== 20) { + throw new Error('Expected from to be of length 20'); + } + if (salt.length !== 32) { + throw new Error('Expected salt to be of length 32'); + } + const address = (0, keccak_1.keccak256)(Buffer.concat([Buffer.from('ff', 'hex'), from, salt, (0, keccak_1.keccak256)(initCode)])); + return (0, bytes_1.toBuffer)(address).slice(-20); +}; +exports.generateAddress2 = generateAddress2; +/** + * Checks if the private key satisfies the rules of the curve secp256k1. + */ +const isValidPrivate = function (privateKey) { + return secp256k1_1.secp256k1.utils.isValidPrivateKey(privateKey); +}; +exports.isValidPrivate = isValidPrivate; +/** + * Checks if the public key satisfies the rules of the curve secp256k1 + * and the requirements of Ethereum. 
+ * @param publicKey The two points of an uncompressed key, unless sanitize is enabled + * @param sanitize Accept public keys in other formats + */ +const isValidPublic = function (publicKey, sanitize = false) { + (0, helpers_1.assertIsBuffer)(publicKey); + if (publicKey.length === 64) { + // Convert to SEC1 for secp256k1 + // Automatically checks whether point is on curve + try { + secp256k1_1.secp256k1.ProjectivePoint.fromHex(Buffer.concat([Buffer.from([4]), publicKey])); + return true; + } + catch (e) { + return false; + } + } + if (!sanitize) { + return false; + } + try { + secp256k1_1.secp256k1.ProjectivePoint.fromHex(publicKey); + return true; + } + catch (e) { + return false; + } +}; +exports.isValidPublic = isValidPublic; +/** + * Returns the ethereum address of a given public key. + * Accepts "Ethereum public keys" and SEC1 encoded keys. + * @param pubKey The two points of an uncompressed key, unless sanitize is enabled + * @param sanitize Accept public keys in other formats + */ +const pubToAddress = function (pubKey, sanitize = false) { + (0, helpers_1.assertIsBuffer)(pubKey); + if (sanitize && pubKey.length !== 64) { + pubKey = Buffer.from(secp256k1_1.secp256k1.ProjectivePoint.fromHex(pubKey).toRawBytes(false).slice(1)); + } + if (pubKey.length !== 64) { + throw new Error('Expected pubKey to be of length 64'); + } + // Only take the lower 160bits of the hash + return Buffer.from((0, keccak_1.keccak256)(pubKey)).slice(-20); +}; +exports.pubToAddress = pubToAddress; +exports.publicToAddress = exports.pubToAddress; +/** + * Returns the ethereum public key of a given private key. 
+ * @param privateKey A private key must be 256 bits wide + */ +const privateToPublic = function (privateKey) { + (0, helpers_1.assertIsBuffer)(privateKey); + // skip the type flag and use the X, Y points + return Buffer.from(secp256k1_1.secp256k1.ProjectivePoint.fromPrivateKey(privateKey).toRawBytes(false).slice(1)); +}; +exports.privateToPublic = privateToPublic; +/** + * Returns the ethereum address of a given private key. + * @param privateKey A private key must be 256 bits wide + */ +const privateToAddress = function (privateKey) { + return (0, exports.publicToAddress)((0, exports.privateToPublic)(privateKey)); +}; +exports.privateToAddress = privateToAddress; +/** + * Converts a public key to the Ethereum format. + */ +const importPublic = function (publicKey) { + (0, helpers_1.assertIsBuffer)(publicKey); + if (publicKey.length !== 64) { + publicKey = Buffer.from(secp256k1_1.secp256k1.ProjectivePoint.fromHex(publicKey).toRawBytes(false).slice(1)); + } + return publicKey; +}; +exports.importPublic = importPublic; +/** + * Returns the zero address. + */ +const zeroAddress = function () { + const addressLength = 20; + const addr = (0, bytes_1.zeros)(addressLength); + return (0, bytes_1.bufferToHex)(addr); +}; +exports.zeroAddress = zeroAddress; +/** + * Checks if a given address is the zero address. + */ +const isZeroAddress = function (hexAddress) { + try { + (0, helpers_1.assertIsString)(hexAddress); + } + catch (e) { + return false; + } + const zeroAddr = (0, exports.zeroAddress)(); + return zeroAddr === hexAddress; +}; +exports.isZeroAddress = isZeroAddress; +function accountBodyFromSlim(body) { + const [nonce, balance, storageRoot, codeHash] = body; + return [ + nonce, + balance, + (0, bytes_1.arrToBufArr)(storageRoot).length === 0 ? constants_1.KECCAK256_RLP : storageRoot, + (0, bytes_1.arrToBufArr)(codeHash).length === 0 ? 
constants_1.KECCAK256_NULL : codeHash, + ]; +} +exports.accountBodyFromSlim = accountBodyFromSlim; +const emptyUint8Arr = new Uint8Array(0); +function accountBodyToSlim(body) { + const [nonce, balance, storageRoot, codeHash] = body; + return [ + nonce, + balance, + (0, bytes_1.arrToBufArr)(storageRoot).equals(constants_1.KECCAK256_RLP) ? emptyUint8Arr : storageRoot, + (0, bytes_1.arrToBufArr)(codeHash).equals(constants_1.KECCAK256_NULL) ? emptyUint8Arr : codeHash, + ]; +} +exports.accountBodyToSlim = accountBodyToSlim; +/** + * Converts a slim account (per snap protocol spec) to the RLP encoded version of the account + * @param body Array of 4 Buffer-like items to represent the account + * @returns RLP encoded version of the account + */ +function accountBodyToRLP(body, couldBeSlim = true) { + const accountBody = couldBeSlim ? accountBodyFromSlim(body) : body; + return (0, bytes_1.arrToBufArr)(rlp_1.RLP.encode(accountBody)); +} +exports.accountBodyToRLP = accountBodyToRLP; +//# sourceMappingURL=account.js.map + +/***/ }), + +/***/ 86727: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Address = void 0; +const account_1 = __webpack_require__(16284); +const bytes_1 = __webpack_require__(77312); +/** + * Handling and generating Ethereum addresses + */ +class Address { + constructor(buf) { + if (buf.length !== 20) { + throw new Error('Invalid address length'); + } + this.buf = buf; + } + /** + * Returns the zero address. + */ + static zero() { + return new Address((0, bytes_1.zeros)(20)); + } + /** + * Returns an Address object from a hex-encoded string. 
+ * @param str - Hex-encoded address + */ + static fromString(str) { + if (!(0, account_1.isValidAddress)(str)) { + throw new Error('Invalid address'); + } + return new Address((0, bytes_1.toBuffer)(str)); + } + /** + * Returns an address for a given public key. + * @param pubKey The two points of an uncompressed key + */ + static fromPublicKey(pubKey) { + if (!Buffer.isBuffer(pubKey)) { + throw new Error('Public key should be Buffer'); + } + const buf = (0, account_1.pubToAddress)(pubKey); + return new Address(buf); + } + /** + * Returns an address for a given private key. + * @param privateKey A private key must be 256 bits wide + */ + static fromPrivateKey(privateKey) { + if (!Buffer.isBuffer(privateKey)) { + throw new Error('Private key should be Buffer'); + } + const buf = (0, account_1.privateToAddress)(privateKey); + return new Address(buf); + } + /** + * Generates an address for a newly created contract. + * @param from The address which is creating this new address + * @param nonce The nonce of the from account + */ + static generate(from, nonce) { + if (typeof nonce !== 'bigint') { + throw new Error('Expected nonce to be a bigint'); + } + return new Address((0, account_1.generateAddress)(from.buf, (0, bytes_1.bigIntToBuffer)(nonce))); + } + /** + * Generates an address for a contract created using CREATE2. + * @param from The address which is creating this new address + * @param salt A salt + * @param initCode The init code of the contract being created + */ + static generate2(from, salt, initCode) { + if (!Buffer.isBuffer(salt)) { + throw new Error('Expected salt to be a Buffer'); + } + if (!Buffer.isBuffer(initCode)) { + throw new Error('Expected initCode to be a Buffer'); + } + return new Address((0, account_1.generateAddress2)(from.buf, salt, initCode)); + } + /** + * Is address equal to another. + */ + equals(address) { + return this.buf.equals(address.buf); + } + /** + * Is address zero. 
+ */ + isZero() { + return this.equals(Address.zero()); + } + /** + * True if address is in the address range defined + * by EIP-1352 + */ + isPrecompileOrSystemAddress() { + const address = (0, bytes_1.bufferToBigInt)(this.buf); + const rangeMin = BigInt(0); + const rangeMax = BigInt('0xffff'); + return address >= rangeMin && address <= rangeMax; + } + /** + * Returns hex encoding of address. + */ + toString() { + return '0x' + this.buf.toString('hex'); + } + /** + * Returns Buffer representation of address. + */ + toBuffer() { + return Buffer.from(this.buf); + } +} +exports.Address = Address; +//# sourceMappingURL=address.js.map + +/***/ }), + +/***/ 98421: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +/** + * Ported to Typescript from original implementation below: + * https://github.com/ahultgren/async-eventemitter -- MIT licensed + * + * Type Definitions based on work by: patarapolw -- MIT licensed + * that was contributed to Definitely Typed below: + * https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/async-eventemitter + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AsyncEventEmitter = void 0; +const events_1 = __webpack_require__(37007); +async function runInSeries(context, tasks, data) { + let error; + for await (const task of tasks) { + try { + if (task.length < 2) { + //sync + task.call(context, data); + } + else { + await new Promise((resolve, reject) => { + task.call(context, data, (error) => { + if (error) { + reject(error); + } + else { + resolve(); + } + }); + }); + } + } + catch (e) { + error = e; + } + } + if (error) { + throw error; + } +} +class AsyncEventEmitter extends events_1.EventEmitter { + emit(event, ...args) { + let [data, callback] = args; + const self = this; + let listeners = self._events[event] ?? 
[]; + // Optional data argument + if (callback === undefined && typeof data === 'function') { + callback = data; + data = undefined; + } + // Special treatment of internal newListener and removeListener events + if (event === 'newListener' || event === 'removeListener') { + data = { + event: data, + fn: callback, + }; + callback = undefined; + } + // A single listener is just a function not an array... + listeners = Array.isArray(listeners) ? listeners : [listeners]; + runInSeries(self, listeners.slice(), data).then(callback).catch(callback); + return self.listenerCount(event) > 0; + } + once(event, listener) { + const self = this; + let g; + if (typeof listener !== 'function') { + throw new TypeError('listener must be a function'); + } + // Hack to support set arity + if (listener.length >= 2) { + g = function (e, next) { + self.removeListener(event, g); + void listener(e, next); + }; + } + else { + g = function (e) { + self.removeListener(event, g); + void listener(e, g); + }; + } + self.on(event, g); + return self; + } + first(event, listener) { + let listeners = this._events[event] ?? []; + // Contract + if (typeof listener !== 'function') { + throw new TypeError('listener must be a function'); + } + // Listeners are not always an array + if (!Array.isArray(listeners)) { + ; + this._events[event] = listeners = [listeners]; + } + listeners.unshift(listener); + return this; + } + before(event, target, listener) { + return this.beforeOrAfter(event, target, listener); + } + after(event, target, listener) { + return this.beforeOrAfter(event, target, listener, 'after'); + } + beforeOrAfter(event, target, listener, beforeOrAfter) { + let listeners = this._events[event] ?? []; + let i; + let index; + const add = beforeOrAfter === 'after' ? 
1 : 0; + // Contract + if (typeof listener !== 'function') { + throw new TypeError('listener must be a function'); + } + if (typeof target !== 'function') { + throw new TypeError('target must be a function'); + } + // Listeners are not always an array + if (!Array.isArray(listeners)) { + ; + this._events[event] = listeners = [listeners]; + } + index = listeners.length; + for (i = listeners.length; i--;) { + if (listeners[i] === target) { + index = i + add; + break; + } + } + listeners.splice(index, 0, listener); + return this; + } + on(event, listener) { + return super.on(event, listener); + } + addListener(event, listener) { + return super.addListener(event, listener); + } + prependListener(event, listener) { + return super.prependListener(event, listener); + } + prependOnceListener(event, listener) { + return super.prependOnceListener(event, listener); + } + removeAllListeners(event) { + return super.removeAllListeners(event); + } + removeListener(event, listener) { + return super.removeListener(event, listener); + } + eventNames() { + return super.eventNames(); + } + listeners(event) { + return super.listeners(event); + } + listenerCount(event) { + return super.listenerCount(event); + } + getMaxListeners() { + return super.getMaxListeners(); + } + setMaxListeners(maxListeners) { + return super.setMaxListeners(maxListeners); + } +} +exports.AsyncEventEmitter = AsyncEventEmitter; +//# sourceMappingURL=asyncEventEmitter.js.map + +/***/ }), + +/***/ 77312: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.intToUnpaddedBuffer = exports.bigIntToUnpaddedBuffer = exports.bigIntToHex = exports.bufArrToArr = exports.arrToBufArr = exports.validateNoLeadingZeroes = exports.baToJSON = exports.toUtf8 = exports.short = exports.addHexPrefix = exports.toUnsigned = exports.fromSigned = 
exports.bufferToInt = exports.bigIntToBuffer = exports.bufferToBigInt = exports.bufferToHex = exports.toBuffer = exports.unpadHexString = exports.unpadArray = exports.unpadBuffer = exports.setLengthRight = exports.setLengthLeft = exports.zeros = exports.intToBuffer = exports.intToHex = void 0; +const helpers_1 = __webpack_require__(35546); +const internal_1 = __webpack_require__(59498); +/** + * Converts a `Number` into a hex `String` + * @param {Number} i + * @return {String} + */ +const intToHex = function (i) { + if (!Number.isSafeInteger(i) || i < 0) { + throw new Error(`Received an invalid integer type: ${i}`); + } + return `0x${i.toString(16)}`; +}; +exports.intToHex = intToHex; +/** + * Converts an `Number` to a `Buffer` + * @param {Number} i + * @return {Buffer} + */ +const intToBuffer = function (i) { + const hex = (0, exports.intToHex)(i); + return Buffer.from((0, internal_1.padToEven)(hex.slice(2)), 'hex'); +}; +exports.intToBuffer = intToBuffer; +/** + * Returns a buffer filled with 0s. + * @param bytes the number of bytes the buffer should be + */ +const zeros = function (bytes) { + return Buffer.allocUnsafe(bytes).fill(0); +}; +exports.zeros = zeros; +/** + * Pads a `Buffer` with zeros till it has `length` bytes. + * Truncates the beginning or end of input if its length exceeds `length`. + * @param msg the value to pad (Buffer) + * @param length the number of bytes the output should be + * @param right whether to start padding form the left or right + * @return (Buffer) + */ +const setLength = function (msg, length, right) { + const buf = (0, exports.zeros)(length); + if (right) { + if (msg.length < length) { + msg.copy(buf); + return buf; + } + return msg.slice(0, length); + } + else { + if (msg.length < length) { + msg.copy(buf, length - msg.length); + return buf; + } + return msg.slice(-length); + } +}; +/** + * Left Pads a `Buffer` with leading zeros till it has `length` bytes. + * Or it truncates the beginning if it exceeds. 
+ * @param msg the value to pad (Buffer) + * @param length the number of bytes the output should be + * @return (Buffer) + */ +const setLengthLeft = function (msg, length) { + (0, helpers_1.assertIsBuffer)(msg); + return setLength(msg, length, false); +}; +exports.setLengthLeft = setLengthLeft; +/** + * Right Pads a `Buffer` with trailing zeros till it has `length` bytes. + * it truncates the end if it exceeds. + * @param msg the value to pad (Buffer) + * @param length the number of bytes the output should be + * @return (Buffer) + */ +const setLengthRight = function (msg, length) { + (0, helpers_1.assertIsBuffer)(msg); + return setLength(msg, length, true); +}; +exports.setLengthRight = setLengthRight; +/** + * Trims leading zeros from a `Buffer`, `String` or `Number[]`. + * @param a (Buffer|Array|String) + * @return (Buffer|Array|String) + */ +const stripZeros = function (a) { + let first = a[0]; + while (a.length > 0 && first.toString() === '0') { + a = a.slice(1); + first = a[0]; + } + return a; +}; +/** + * Trims leading zeros from a `Buffer`. + * @param a (Buffer) + * @return (Buffer) + */ +const unpadBuffer = function (a) { + (0, helpers_1.assertIsBuffer)(a); + return stripZeros(a); +}; +exports.unpadBuffer = unpadBuffer; +/** + * Trims leading zeros from an `Array` (of numbers). + * @param a (number[]) + * @return (number[]) + */ +const unpadArray = function (a) { + (0, helpers_1.assertIsArray)(a); + return stripZeros(a); +}; +exports.unpadArray = unpadArray; +/** + * Trims leading zeros from a hex-prefixed `String`. + * @param a (String) + * @return (String) + */ +const unpadHexString = function (a) { + (0, helpers_1.assertIsHexString)(a); + a = (0, internal_1.stripHexPrefix)(a); + return ('0x' + stripZeros(a)); +}; +exports.unpadHexString = unpadHexString; +/** + * Attempts to turn a value into a `Buffer`. 
+ * Inputs supported: `Buffer`, `String` (hex-prefixed), `Number`, null/undefined, `BigInt` and other objects + * with a `toArray()` or `toBuffer()` method. + * @param v the value + */ +const toBuffer = function (v) { + if (v === null || v === undefined) { + return Buffer.allocUnsafe(0); + } + if (Buffer.isBuffer(v)) { + return Buffer.from(v); + } + if (Array.isArray(v) || v instanceof Uint8Array) { + return Buffer.from(v); + } + if (typeof v === 'string') { + if (!(0, internal_1.isHexString)(v)) { + throw new Error(`Cannot convert string to buffer. toBuffer only supports 0x-prefixed hex strings and this string was given: ${v}`); + } + return Buffer.from((0, internal_1.padToEven)((0, internal_1.stripHexPrefix)(v)), 'hex'); + } + if (typeof v === 'number') { + return (0, exports.intToBuffer)(v); + } + if (typeof v === 'bigint') { + if (v < BigInt(0)) { + throw new Error(`Cannot convert negative bigint to buffer. Given: ${v}`); + } + let n = v.toString(16); + if (n.length % 2) + n = '0' + n; + return Buffer.from(n, 'hex'); + } + if (v.toArray) { + // converts a BN to a Buffer + return Buffer.from(v.toArray()); + } + if (v.toBuffer) { + return Buffer.from(v.toBuffer()); + } + throw new Error('invalid type'); +}; +exports.toBuffer = toBuffer; +/** + * Converts a `Buffer` into a `0x`-prefixed hex `String`. 
+ * @param buf `Buffer` object to convert + */ +const bufferToHex = function (buf) { + buf = (0, exports.toBuffer)(buf); + return '0x' + buf.toString('hex'); +}; +exports.bufferToHex = bufferToHex; +/** + * Converts a {@link Buffer} to a {@link bigint} + */ +function bufferToBigInt(buf) { + const hex = (0, exports.bufferToHex)(buf); + if (hex === '0x') { + return BigInt(0); + } + return BigInt(hex); +} +exports.bufferToBigInt = bufferToBigInt; +/** + * Converts a {@link bigint} to a {@link Buffer} + */ +function bigIntToBuffer(num) { + return (0, exports.toBuffer)('0x' + num.toString(16)); +} +exports.bigIntToBuffer = bigIntToBuffer; +/** + * Converts a `Buffer` to a `Number`. + * @param buf `Buffer` object to convert + * @throws If the input number exceeds 53 bits. + */ +const bufferToInt = function (buf) { + const res = Number(bufferToBigInt(buf)); + if (!Number.isSafeInteger(res)) + throw new Error('Number exceeds 53 bits'); + return res; +}; +exports.bufferToInt = bufferToInt; +/** + * Interprets a `Buffer` as a signed integer and returns a `BigInt`. Assumes 256-bit numbers. + * @param num Signed integer value + */ +const fromSigned = function (num) { + return BigInt.asIntN(256, bufferToBigInt(num)); +}; +exports.fromSigned = fromSigned; +/** + * Converts a `BigInt` to an unsigned integer and returns it as a `Buffer`. Assumes 256-bit numbers. + * @param num + */ +const toUnsigned = function (num) { + return bigIntToBuffer(BigInt.asUintN(256, num)); +}; +exports.toUnsigned = toUnsigned; +/** + * Adds "0x" to a given `String` if it does not already start with "0x". + */ +const addHexPrefix = function (str) { + if (typeof str !== 'string') { + return str; + } + return (0, internal_1.isHexPrefixed)(str) ? str : '0x' + str; +}; +exports.addHexPrefix = addHexPrefix; +/** + * Shortens a string or buffer's hex string representation to maxLength (default 50). 
+ * + * Examples: + * + * Input: '657468657265756d000000000000000000000000000000000000000000000000' + * Output: '657468657265756d0000000000000000000000000000000000…' + */ +function short(buffer, maxLength = 50) { + const bufferStr = Buffer.isBuffer(buffer) ? buffer.toString('hex') : buffer; + if (bufferStr.length <= maxLength) { + return bufferStr; + } + return bufferStr.slice(0, maxLength) + '…'; +} +exports.short = short; +/** + * Returns the utf8 string representation from a hex string. + * + * Examples: + * + * Input 1: '657468657265756d000000000000000000000000000000000000000000000000' + * Input 2: '657468657265756d' + * Input 3: '000000000000000000000000000000000000000000000000657468657265756d' + * + * Output (all 3 input variants): 'ethereum' + * + * Note that this method is not intended to be used with hex strings + * representing quantities in both big endian or little endian notation. + * + * @param string Hex string, should be `0x` prefixed + * @return Utf8 string + */ +const toUtf8 = function (hex) { + const zerosRegexp = /^(00)+|(00)+$/g; + hex = (0, internal_1.stripHexPrefix)(hex); + if (hex.length % 2 !== 0) { + throw new Error('Invalid non-even hex string input for toUtf8() provided'); + } + const bufferVal = Buffer.from(hex.replace(zerosRegexp, ''), 'hex'); + return bufferVal.toString('utf8'); +}; +exports.toUtf8 = toUtf8; +/** + * Converts a `Buffer` or `Array` to JSON. + * @param ba (Buffer|Array) + * @return (Array|String|null) + */ +const baToJSON = function (ba) { + if (Buffer.isBuffer(ba)) { + return `0x${ba.toString('hex')}`; + } + else if (ba instanceof Array) { + const array = []; + for (let i = 0; i < ba.length; i++) { + array.push((0, exports.baToJSON)(ba[i])); + } + return array; + } +}; +exports.baToJSON = baToJSON; +/** + * Checks provided Buffers for leading zeroes and throws if found. 
+ * + * Examples: + * + * Valid values: 0x1, 0x, 0x01, 0x1234 + * Invalid values: 0x0, 0x00, 0x001, 0x0001 + * + * Note: This method is useful for validating that RLP encoded integers comply with the rule that all + * integer values encoded to RLP must be in the most compact form and contain no leading zero bytes + * @param values An object containing string keys and Buffer values + * @throws if any provided value is found to have leading zero bytes + */ +const validateNoLeadingZeroes = function (values) { + for (const [k, v] of Object.entries(values)) { + if (v !== undefined && v.length > 0 && v[0] === 0) { + throw new Error(`${k} cannot have leading zeroes, received: ${v.toString('hex')}`); + } + } +}; +exports.validateNoLeadingZeroes = validateNoLeadingZeroes; +function arrToBufArr(arr) { + if (!Array.isArray(arr)) { + return Buffer.from(arr); + } + return arr.map((a) => arrToBufArr(a)); +} +exports.arrToBufArr = arrToBufArr; +function bufArrToArr(arr) { + if (!Array.isArray(arr)) { + return Uint8Array.from(arr ?? 
[]); + } + return arr.map((a) => bufArrToArr(a)); +} +exports.bufArrToArr = bufArrToArr; +/** + * Converts a {@link bigint} to a `0x` prefixed hex string + */ +const bigIntToHex = (num) => { + return '0x' + num.toString(16); +}; +exports.bigIntToHex = bigIntToHex; +/** + * Convert value from bigint to an unpadded Buffer + * (useful for RLP transport) + * @param value value to convert + */ +function bigIntToUnpaddedBuffer(value) { + return (0, exports.unpadBuffer)(bigIntToBuffer(value)); +} +exports.bigIntToUnpaddedBuffer = bigIntToUnpaddedBuffer; +function intToUnpaddedBuffer(value) { + return (0, exports.unpadBuffer)((0, exports.intToBuffer)(value)); +} +exports.intToUnpaddedBuffer = intToUnpaddedBuffer; +//# sourceMappingURL=bytes.js.map + +/***/ }), + +/***/ 89838: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.MAX_WITHDRAWALS_PER_PAYLOAD = exports.RLP_EMPTY_STRING = exports.KECCAK256_RLP = exports.KECCAK256_RLP_S = exports.KECCAK256_RLP_ARRAY = exports.KECCAK256_RLP_ARRAY_S = exports.KECCAK256_NULL = exports.KECCAK256_NULL_S = exports.TWO_POW256 = exports.SECP256K1_ORDER_DIV_2 = exports.SECP256K1_ORDER = exports.MAX_INTEGER_BIGINT = exports.MAX_INTEGER = exports.MAX_UINT64 = void 0; +const buffer_1 = __webpack_require__(48287); +const secp256k1_1 = __webpack_require__(26513); +/** + * 2^64-1 + */ +exports.MAX_UINT64 = BigInt('0xffffffffffffffff'); +/** + * The max integer that the evm can handle (2^256-1) + */ +exports.MAX_INTEGER = BigInt('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'); +/** + * The max integer that the evm can handle (2^256-1) as a bigint + * 2^256-1 equals to 340282366920938463463374607431768211455 + * We use literal value instead of calculated value for compatibility issue. 
+ */ +exports.MAX_INTEGER_BIGINT = BigInt('115792089237316195423570985008687907853269984665640564039457584007913129639935'); +exports.SECP256K1_ORDER = secp256k1_1.secp256k1.CURVE.n; +exports.SECP256K1_ORDER_DIV_2 = secp256k1_1.secp256k1.CURVE.n / BigInt(2); +/** + * 2^256 + */ +exports.TWO_POW256 = BigInt('0x10000000000000000000000000000000000000000000000000000000000000000'); +/** + * Keccak-256 hash of null + */ +exports.KECCAK256_NULL_S = 'c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470'; +/** + * Keccak-256 hash of null + */ +exports.KECCAK256_NULL = buffer_1.Buffer.from(exports.KECCAK256_NULL_S, 'hex'); +/** + * Keccak-256 of an RLP of an empty array + */ +exports.KECCAK256_RLP_ARRAY_S = '1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347'; +/** + * Keccak-256 of an RLP of an empty array + */ +exports.KECCAK256_RLP_ARRAY = buffer_1.Buffer.from(exports.KECCAK256_RLP_ARRAY_S, 'hex'); +/** + * Keccak-256 hash of the RLP of null + */ +exports.KECCAK256_RLP_S = '56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421'; +/** + * Keccak-256 hash of the RLP of null + */ +exports.KECCAK256_RLP = buffer_1.Buffer.from(exports.KECCAK256_RLP_S, 'hex'); +/** + * RLP encoded empty string + */ +exports.RLP_EMPTY_STRING = buffer_1.Buffer.from([0x80]); +exports.MAX_WITHDRAWALS_PER_PAYLOAD = 16; +//# sourceMappingURL=constants.js.map + +/***/ }), + +/***/ 45062: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.compactBytesToNibbles = exports.bytesToNibbles = exports.nibblesToCompactBytes = exports.nibblesToBytes = exports.hasTerminator = void 0; +// Reference: https://ethereum.org/en/developers/docs/data-structures-and-encoding/patricia-merkle-trie/ +/** + * + * @param s byte sequence + * @returns boolean indicating if input hex nibble sequence has terminator indicating leaf-node + * terminator is represented with 16 because a nibble ranges from 0 
- 15(f) + */ +const hasTerminator = (nibbles) => { + return nibbles.length > 0 && nibbles[nibbles.length - 1] === 16; +}; +exports.hasTerminator = hasTerminator; +const nibblesToBytes = (nibbles, bytes) => { + for (let bi = 0, ni = 0; ni < nibbles.length; bi += 1, ni += 2) { + bytes[bi] = (nibbles[ni] << 4) | nibbles[ni + 1]; + } +}; +exports.nibblesToBytes = nibblesToBytes; +const nibblesToCompactBytes = (nibbles) => { + let terminator = 0; + if ((0, exports.hasTerminator)(nibbles)) { + terminator = 1; + // Remove the terminator from the sequence + nibbles = nibbles.subarray(0, nibbles.length - 1); + } + const buf = new Uint8Array(nibbles.length / 2 + 1); + // Shift the terminator info into the first nibble of buf[0] + buf[0] = terminator << 5; + // If odd length, then add that flag into the first nibble and put the odd nibble to + // second part of buf[0] which otherwise will be left padded with a 0 + if ((nibbles.length & 1) === 1) { + buf[0] |= 1 << 4; + buf[0] |= nibbles[0]; + nibbles = nibbles.subarray(1); + } + // create bytes out of the rest even nibbles + (0, exports.nibblesToBytes)(nibbles, buf.subarray(1)); + return buf; +}; +exports.nibblesToCompactBytes = nibblesToCompactBytes; +const bytesToNibbles = (str) => { + const l = str.length * 2 + 1; + const nibbles = new Uint8Array(l); + for (let i = 0; i < str.length; i++) { + const b = str[i]; + nibbles[i * 2] = b / 16; + nibbles[i * 2 + 1] = b % 16; + } + // This will get removed from calling function if the first nibble + // indicates that terminator is not present + nibbles[l - 1] = 16; + return nibbles; +}; +exports.bytesToNibbles = bytesToNibbles; +const compactBytesToNibbles = (compact) => { + if (compact.length === 0) { + return compact; + } + let base = (0, exports.bytesToNibbles)(compact); + // delete terminator flag if terminator flag was not in first nibble + if (base[0] < 2) { + base = base.subarray(0, base.length - 1); + } + // chop the terminator nibble and the even padding (if there is one) 
+ // i.e. chop 2 left nibbles when even else 1 when odd + const chop = 2 - (base[0] & 1); + return base.subarray(chop); +}; +exports.compactBytesToNibbles = compactBytesToNibbles; +/** + * A test helper to generates compact path for a subset of key bytes + * + * TODO: Commenting the code for now as this seems to be helper function + * (from geth codebase ) + * + */ +// +// +// export const getPathTo = (tillBytes: number, key: Buffer) => { +// const hexNibbles = bytesToNibbles(key).subarray(0, tillBytes) +// // Remove the terminator if its there, although it would be there only if tillBytes >= key.length +// // This seems to be a test helper to generate paths so correctness of this isn't necessary +// hexNibbles[hexNibbles.length - 1] = 0 +// const compactBytes = nibblesToCompactBytes(hexNibbles) +// return [Buffer.from(compactBytes)] +// } +//# sourceMappingURL=encoding.js.map + +/***/ }), + +/***/ 35546: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.assertIsString = exports.assertIsArray = exports.assertIsBuffer = exports.assertIsHexString = void 0; +const internal_1 = __webpack_require__(59498); +/** + * Throws if a string is not hex prefixed + * @param {string} input string to check hex prefix of + */ +const assertIsHexString = function (input) { + if (!(0, internal_1.isHexString)(input)) { + const msg = `This method only supports 0x-prefixed hex strings but input was: ${input}`; + throw new Error(msg); + } +}; +exports.assertIsHexString = assertIsHexString; +/** + * Throws if input is not a buffer + * @param {Buffer} input value to check + */ +const assertIsBuffer = function (input) { + if (!Buffer.isBuffer(input)) { + const msg = `This method only supports Buffer but input was: ${input}`; + throw new Error(msg); + } +}; +exports.assertIsBuffer = assertIsBuffer; +/** + 
* Throws if input is not an array + * @param {number[]} input value to check + */ +const assertIsArray = function (input) { + if (!Array.isArray(input)) { + const msg = `This method only supports number arrays but input was: ${input}`; + throw new Error(msg); + } +}; +exports.assertIsArray = assertIsArray; +/** + * Throws if input is not a string + * @param {string} input value to check + */ +const assertIsString = function (input) { + if (typeof input !== 'string') { + const msg = `This method only supports strings but input was: ${input}`; + throw new Error(msg); + } +}; +exports.assertIsString = assertIsString; +//# sourceMappingURL=helpers.js.map + +/***/ }), + +/***/ 68683: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toAscii = exports.stripHexPrefix = exports.padToEven = exports.isHexString = exports.isHexPrefixed = exports.getKeys = exports.getBinarySize = exports.fromUtf8 = exports.fromAscii = exports.arrayContainsArray = void 0; +/** + * Constants + */ +__exportStar(__webpack_require__(89838), exports); +/** + * Units helpers + */ +__exportStar(__webpack_require__(52652), exports); +/** + * Account class and helper functions + */ +__exportStar(__webpack_require__(16284), exports); 
+/** + * Address type + */ +__exportStar(__webpack_require__(86727), exports); +/** + * Withdrawal type + */ +__exportStar(__webpack_require__(37380), exports); +/** + * ECDSA signature + */ +__exportStar(__webpack_require__(92133), exports); +/** + * Utilities for manipulating Buffers, byte arrays, etc. + */ +__exportStar(__webpack_require__(77312), exports); +/** + * Helpful TypeScript types + */ +__exportStar(__webpack_require__(42666), exports); +/** + * Helper function for working with compact encoding + */ +__exportStar(__webpack_require__(45062), exports); +/** + * Export ethjs-util methods + */ +__exportStar(__webpack_require__(98421), exports); +var internal_1 = __webpack_require__(59498); +Object.defineProperty(exports, "arrayContainsArray", ({ enumerable: true, get: function () { return internal_1.arrayContainsArray; } })); +Object.defineProperty(exports, "fromAscii", ({ enumerable: true, get: function () { return internal_1.fromAscii; } })); +Object.defineProperty(exports, "fromUtf8", ({ enumerable: true, get: function () { return internal_1.fromUtf8; } })); +Object.defineProperty(exports, "getBinarySize", ({ enumerable: true, get: function () { return internal_1.getBinarySize; } })); +Object.defineProperty(exports, "getKeys", ({ enumerable: true, get: function () { return internal_1.getKeys; } })); +Object.defineProperty(exports, "isHexPrefixed", ({ enumerable: true, get: function () { return internal_1.isHexPrefixed; } })); +Object.defineProperty(exports, "isHexString", ({ enumerable: true, get: function () { return internal_1.isHexString; } })); +Object.defineProperty(exports, "padToEven", ({ enumerable: true, get: function () { return internal_1.padToEven; } })); +Object.defineProperty(exports, "stripHexPrefix", ({ enumerable: true, get: function () { return internal_1.stripHexPrefix; } })); +Object.defineProperty(exports, "toAscii", ({ enumerable: true, get: function () { return internal_1.toAscii; } })); +__exportStar(__webpack_require__(31708), 
exports); +__exportStar(__webpack_require__(81862), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 59498: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +/* +The MIT License + +Copyright (c) 2016 Nick Dodson. nickdodson.com + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isHexString = exports.getKeys = exports.fromAscii = exports.fromUtf8 = exports.toAscii = exports.arrayContainsArray = exports.getBinarySize = exports.padToEven = exports.stripHexPrefix = exports.isHexPrefixed = void 0; +/** + * Returns a `Boolean` on whether or not the a `String` starts with '0x' + * @param str the string input value + * @return a boolean if it is or is not hex prefixed + * @throws if the str input is not a string + */ +function isHexPrefixed(str) { + if (typeof str !== 'string') { + throw new Error(`[isHexPrefixed] input must be type 'string', received type ${typeof str}`); + } + return str[0] === '0' && str[1] === 'x'; +} +exports.isHexPrefixed = isHexPrefixed; +/** + * Removes '0x' from a given `String` if present + * @param str the string value + * @returns the string without 0x prefix + */ +const stripHexPrefix = (str) => { + if (typeof str !== 'string') + throw new Error(`[stripHexPrefix] input must be type 'string', received ${typeof str}`); + return isHexPrefixed(str) ? 
str.slice(2) : str; +}; +exports.stripHexPrefix = stripHexPrefix; +/** + * Pads a `String` to have an even length + * @param value + * @return output + */ +function padToEven(value) { + let a = value; + if (typeof a !== 'string') { + throw new Error(`[padToEven] value must be type 'string', received ${typeof a}`); + } + if (a.length % 2) + a = `0${a}`; + return a; +} +exports.padToEven = padToEven; +/** + * Get the binary size of a string + * @param str + * @returns the number of bytes contained within the string + */ +function getBinarySize(str) { + if (typeof str !== 'string') { + throw new Error(`[getBinarySize] method requires input type 'string', received ${typeof str}`); + } + return Buffer.byteLength(str, 'utf8'); +} +exports.getBinarySize = getBinarySize; +/** + * Returns TRUE if the first specified array contains all elements + * from the second one. FALSE otherwise. + * + * @param superset + * @param subset + * + */ +function arrayContainsArray(superset, subset, some) { + if (Array.isArray(superset) !== true) { + throw new Error(`[arrayContainsArray] method requires input 'superset' to be an array, got type '${typeof superset}'`); + } + if (Array.isArray(subset) !== true) { + throw new Error(`[arrayContainsArray] method requires input 'subset' to be an array, got type '${typeof subset}'`); + } + return subset[some === true ? 
'some' : 'every']((value) => superset.indexOf(value) >= 0); +} +exports.arrayContainsArray = arrayContainsArray; +/** + * Should be called to get ascii from its hex representation + * + * @param string in hex + * @returns ascii string representation of hex value + */ +function toAscii(hex) { + let str = ''; + let i = 0; + const l = hex.length; + if (hex.substring(0, 2) === '0x') + i = 2; + for (; i < l; i += 2) { + const code = parseInt(hex.substr(i, 2), 16); + str += String.fromCharCode(code); + } + return str; +} +exports.toAscii = toAscii; +/** + * Should be called to get hex representation (prefixed by 0x) of utf8 string + * + * @param string + * @param optional padding + * @returns hex representation of input string + */ +function fromUtf8(stringValue) { + const str = Buffer.from(stringValue, 'utf8'); + return `0x${padToEven(str.toString('hex')).replace(/^0+|0+$/g, '')}`; +} +exports.fromUtf8 = fromUtf8; +/** + * Should be called to get hex representation (prefixed by 0x) of ascii string + * + * @param string + * @param optional padding + * @returns hex representation of input string + */ +function fromAscii(stringValue) { + let hex = ''; + for (let i = 0; i < stringValue.length; i++) { + const code = stringValue.charCodeAt(i); + const n = code.toString(16); + hex += n.length < 2 ? `0${n}` : n; + } + return `0x${hex}`; +} +exports.fromAscii = fromAscii; +/** + * Returns the keys from an array of objects. 
+ * @example + * ```js + * getKeys([{a: '1', b: '2'}, {a: '3', b: '4'}], 'a') => ['1', '3'] + *```` + * @param params + * @param key + * @param allowEmpty + * @returns output just a simple array of output keys + */ +function getKeys(params, key, allowEmpty) { + if (!Array.isArray(params)) { + throw new Error(`[getKeys] method expects input 'params' to be an array, got ${typeof params}`); + } + if (typeof key !== 'string') { + throw new Error(`[getKeys] method expects input 'key' to be type 'string', got ${typeof params}`); + } + const result = []; + for (let i = 0; i < params.length; i++) { + let value = params[i][key]; + if (allowEmpty === true && !value) { + value = ''; + } + else if (typeof value !== 'string') { + throw new Error(`invalid abi - expected type 'string', received ${typeof value}`); + } + result.push(value); + } + return result; +} +exports.getKeys = getKeys; +/** + * Is the string a hex string. + * + * @param value + * @param length + * @returns output the string is a hex string + */ +function isHexString(value, length) { + if (typeof value !== 'string' || !value.match(/^0x[0-9A-Fa-f]*$/)) + return false; + if (typeof length !== 'undefined' && length > 0 && value.length !== 2 + 2 * length) + return false; + return true; +} +exports.isHexString = isHexString; +//# sourceMappingURL=internal.js.map + +/***/ }), + +/***/ 31708: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var console = __webpack_require__(96763); + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Lock = void 0; +// Based on https://github.com/jsoendermann/semaphore-async-await/blob/master/src/Semaphore.ts +class Lock { + constructor() { + this.permits = 1; + this.promiseResolverQueue = []; + } + /** + * Returns a promise used to wait for a permit to become available. This method should be awaited on. + * @returns A promise that gets resolved when execution is allowed to proceed. 
+ */ + async acquire() { + if (this.permits > 0) { + this.permits -= 1; + return Promise.resolve(true); + } + // If there is no permit available, we return a promise that resolves once the semaphore gets + // signaled enough times that permits is equal to one. + return new Promise((resolver) => this.promiseResolverQueue.push(resolver)); + } + /** + * Increases the number of permits by one. If there are other functions waiting, one of them will + * continue to execute in a future iteration of the event loop. + */ + release() { + this.permits += 1; + if (this.permits > 1 && this.promiseResolverQueue.length > 0) { + // eslint-disable-next-line no-console + console.warn('Lock.permits should never be > 0 when there is someone waiting.'); + } + else if (this.permits === 1 && this.promiseResolverQueue.length > 0) { + // If there is someone else waiting, immediately consume the permit that was released + // at the beginning of this function and let the waiting function resume. + this.permits -= 1; + const nextResolver = this.promiseResolverQueue.shift(); + if (nextResolver) { + nextResolver(true); + } + } + } +} +exports.Lock = Lock; +//# sourceMappingURL=lock.js.map + +/***/ }), + +/***/ 81862: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getProvider = exports.fetchFromProvider = void 0; +const micro_ftch_1 = __webpack_require__(6215); +const fetchFromProvider = async (url, params) => { + const res = await (0, micro_ftch_1.default)(url, { + headers: { + 'content-type': 'application/json', + }, + type: 'json', + data: { + method: params.method, + params: params.params, + jsonrpc: '2.0', + id: 1, + }, + }); + return res.result; +}; +exports.fetchFromProvider = fetchFromProvider; +const getProvider = (provider) => { + if (typeof provider === 'string') { + return provider; + } + else if (provider?.connection?.url !== undefined) { + return 
provider.connection.url; + } + else { + throw new Error('Must provide valid provider URL or Web3Provider'); + } +}; +exports.getProvider = getProvider; +//# sourceMappingURL=provider.js.map + +/***/ }), + +/***/ 92133: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hashPersonalMessage = exports.isValidSignature = exports.fromRpcSig = exports.toCompactSig = exports.toRpcSig = exports.ecrecover = exports.ecsign = void 0; +const keccak_1 = __webpack_require__(32019); +const secp256k1_1 = __webpack_require__(26513); +const bytes_1 = __webpack_require__(77312); +const constants_1 = __webpack_require__(89838); +const helpers_1 = __webpack_require__(35546); +/** + * Returns the ECDSA signature of a message hash. + * + * If `chainId` is provided assume an EIP-155-style signature and calculate the `v` value + * accordingly, otherwise return a "static" `v` just derived from the `recovery` bit + */ +function ecsign(msgHash, privateKey, chainId) { + const sig = secp256k1_1.secp256k1.sign(msgHash, privateKey); + const buf = sig.toCompactRawBytes(); + const r = Buffer.from(buf.slice(0, 32)); + const s = Buffer.from(buf.slice(32, 64)); + const v = chainId === undefined + ? BigInt(sig.recovery + 27) + : BigInt(sig.recovery + 35) + BigInt(chainId) * BigInt(2); + return { r, s, v }; +} +exports.ecsign = ecsign; +function calculateSigRecovery(v, chainId) { + if (v === BigInt(0) || v === BigInt(1)) + return v; + if (chainId === undefined) { + return v - BigInt(27); + } + return v - (chainId * BigInt(2) + BigInt(35)); +} +function isValidSigRecovery(recovery) { + return recovery === BigInt(0) || recovery === BigInt(1); +} +/** + * ECDSA public key recovery from signature. 
+ * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @returns Recovered public key + */ +const ecrecover = function (msgHash, v, r, s, chainId) { + const signature = Buffer.concat([(0, bytes_1.setLengthLeft)(r, 32), (0, bytes_1.setLengthLeft)(s, 32)], 64); + const recovery = calculateSigRecovery(v, chainId); + if (!isValidSigRecovery(recovery)) { + throw new Error('Invalid signature v value'); + } + const sig = secp256k1_1.secp256k1.Signature.fromCompact(signature).addRecoveryBit(Number(recovery)); + const senderPubKey = sig.recoverPublicKey(msgHash); + return Buffer.from(senderPubKey.toRawBytes(false).slice(1)); +}; +exports.ecrecover = ecrecover; +/** + * Convert signature parameters into the format of `eth_sign` RPC method. + * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @returns Signature + */ +const toRpcSig = function (v, r, s, chainId) { + const recovery = calculateSigRecovery(v, chainId); + if (!isValidSigRecovery(recovery)) { + throw new Error('Invalid signature v value'); + } + // geth (and the RPC eth_sign method) uses the 65 byte format used by Bitcoin + return (0, bytes_1.bufferToHex)(Buffer.concat([(0, bytes_1.setLengthLeft)(r, 32), (0, bytes_1.setLengthLeft)(s, 32), (0, bytes_1.toBuffer)(v)])); +}; +exports.toRpcSig = toRpcSig; +/** + * Convert signature parameters into the format of Compact Signature Representation (EIP-2098). 
+ * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @returns Signature + */ +const toCompactSig = function (v, r, s, chainId) { + const recovery = calculateSigRecovery(v, chainId); + if (!isValidSigRecovery(recovery)) { + throw new Error('Invalid signature v value'); + } + let ss = s; + if ((v > BigInt(28) && v % BigInt(2) === BigInt(1)) || v === BigInt(1) || v === BigInt(28)) { + ss = Buffer.from(s); + ss[0] |= 0x80; + } + return (0, bytes_1.bufferToHex)(Buffer.concat([(0, bytes_1.setLengthLeft)(r, 32), (0, bytes_1.setLengthLeft)(ss, 32)])); +}; +exports.toCompactSig = toCompactSig; +/** + * Convert signature format of the `eth_sign` RPC method to signature parameters + * + * NOTE: For an extracted `v` value < 27 (see Geth bug https://github.com/ethereum/go-ethereum/issues/2053) + * `v + 27` is returned for the `v` value + * NOTE: After EIP1559, `v` could be `0` or `1` but this function assumes + * it's a signed message (EIP-191 or EIP-712) adding `27` at the end. Remove if needed. + */ +const fromRpcSig = function (sig) { + const buf = (0, bytes_1.toBuffer)(sig); + let r; + let s; + let v; + if (buf.length >= 65) { + r = buf.slice(0, 32); + s = buf.slice(32, 64); + v = (0, bytes_1.bufferToBigInt)(buf.slice(64)); + } + else if (buf.length === 64) { + // Compact Signature Representation (https://eips.ethereum.org/EIPS/eip-2098) + r = buf.slice(0, 32); + s = buf.slice(32, 64); + v = BigInt((0, bytes_1.bufferToInt)(buf.slice(32, 33)) >> 7); + s[0] &= 0x7f; + } + else { + throw new Error('Invalid signature length'); + } + // support both versions of `eth_sign` responses + if (v < 27) { + v = v + BigInt(27); + } + return { + v, + r, + s, + }; +}; +exports.fromRpcSig = fromRpcSig; +/** + * Validate a ECDSA signature. 
+ * NOTE: Accepts `v === 0 | v === 1` for EIP1559 transactions + * @param homesteadOrLater Indicates whether this is being used on either the homestead hardfork or a later one + */ +const isValidSignature = function (v, r, s, homesteadOrLater = true, chainId) { + if (r.length !== 32 || s.length !== 32) { + return false; + } + if (!isValidSigRecovery(calculateSigRecovery(v, chainId))) { + return false; + } + const rBigInt = (0, bytes_1.bufferToBigInt)(r); + const sBigInt = (0, bytes_1.bufferToBigInt)(s); + if (rBigInt === BigInt(0) || + rBigInt >= constants_1.SECP256K1_ORDER || + sBigInt === BigInt(0) || + sBigInt >= constants_1.SECP256K1_ORDER) { + return false; + } + if (homesteadOrLater && sBigInt >= constants_1.SECP256K1_ORDER_DIV_2) { + return false; + } + return true; +}; +exports.isValidSignature = isValidSignature; +/** + * Returns the keccak-256 hash of `message`, prefixed with the header used by the `eth_sign` RPC call. + * The output of this function can be fed into `ecsign` to produce the same signature as the `eth_sign` + * call for a given `message`, or fed to `ecrecover` along with a signature to recover the public key + * used to produce the signature. 
+ */ +const hashPersonalMessage = function (message) { + (0, helpers_1.assertIsBuffer)(message); + const prefix = Buffer.from(`\u0019Ethereum Signed Message:\n${message.length}`, 'utf-8'); + return Buffer.from((0, keccak_1.keccak256)(Buffer.concat([prefix, message]))); +}; +exports.hashPersonalMessage = hashPersonalMessage; +//# sourceMappingURL=signature.js.map + +/***/ }), + +/***/ 42666: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toType = exports.TypeOutput = void 0; +const bytes_1 = __webpack_require__(77312); +const internal_1 = __webpack_require__(59498); +/** + * Type output options + */ +var TypeOutput; +(function (TypeOutput) { + TypeOutput[TypeOutput["Number"] = 0] = "Number"; + TypeOutput[TypeOutput["BigInt"] = 1] = "BigInt"; + TypeOutput[TypeOutput["Buffer"] = 2] = "Buffer"; + TypeOutput[TypeOutput["PrefixedHexString"] = 3] = "PrefixedHexString"; +})(TypeOutput = exports.TypeOutput || (exports.TypeOutput = {})); +function toType(input, outputType) { + if (input === null) { + return null; + } + if (input === undefined) { + return undefined; + } + if (typeof input === 'string' && !(0, internal_1.isHexString)(input)) { + throw new Error(`A string must be provided with a 0x-prefix, given: ${input}`); + } + else if (typeof input === 'number' && !Number.isSafeInteger(input)) { + throw new Error('The provided number is greater than MAX_SAFE_INTEGER (please use an alternative input type)'); + } + const output = (0, bytes_1.toBuffer)(input); + switch (outputType) { + case TypeOutput.Buffer: + return output; + case TypeOutput.BigInt: + return (0, bytes_1.bufferToBigInt)(output); + case TypeOutput.Number: { + const bigInt = (0, bytes_1.bufferToBigInt)(output); + if (bigInt > BigInt(Number.MAX_SAFE_INTEGER)) { + throw new Error('The provided number is greater than MAX_SAFE_INTEGER (please use an alternative output type)'); + } + return 
Number(bigInt); + } + case TypeOutput.PrefixedHexString: + return (0, bytes_1.bufferToHex)(output); + default: + throw new Error('unknown outputType'); + } +} +exports.toType = toType; +//# sourceMappingURL=types.js.map + +/***/ }), + +/***/ 52652: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GWEI_TO_WEI = void 0; +/** Easy conversion from Gwei to wei */ +exports.GWEI_TO_WEI = BigInt(1000000000); +//# sourceMappingURL=units.js.map + +/***/ }), + +/***/ 37380: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Withdrawal = void 0; +const address_1 = __webpack_require__(86727); +const bytes_1 = __webpack_require__(77312); +const types_1 = __webpack_require__(42666); +/** + * Representation of EIP-4895 withdrawal data + */ +class Withdrawal { + /** + * This constructor assigns and validates the values. + * Use the static factory methods to assist in creating a Withdrawal object from varying data types. 
+ * Its amount is in Gwei to match CL representation and for eventual ssz withdrawalsRoot + */ + constructor(index, validatorIndex, address, + /** + * withdrawal amount in Gwei to match the CL repesentation and eventually ssz withdrawalsRoot + */ + amount) { + this.index = index; + this.validatorIndex = validatorIndex; + this.address = address; + this.amount = amount; + } + static fromWithdrawalData(withdrawalData) { + const { index: indexData, validatorIndex: validatorIndexData, address: addressData, amount: amountData, } = withdrawalData; + const index = (0, types_1.toType)(indexData, types_1.TypeOutput.BigInt); + const validatorIndex = (0, types_1.toType)(validatorIndexData, types_1.TypeOutput.BigInt); + const address = new address_1.Address((0, types_1.toType)(addressData, types_1.TypeOutput.Buffer)); + const amount = (0, types_1.toType)(amountData, types_1.TypeOutput.BigInt); + return new Withdrawal(index, validatorIndex, address, amount); + } + static fromValuesArray(withdrawalArray) { + if (withdrawalArray.length !== 4) { + throw Error(`Invalid withdrawalArray length expected=4 actual=${withdrawalArray.length}`); + } + const [index, validatorIndex, address, amount] = withdrawalArray; + return Withdrawal.fromWithdrawalData({ index, validatorIndex, address, amount }); + } + /** + * Convert a withdrawal to a buffer array + * @param withdrawal the withdrawal to convert + * @returns buffer array of the withdrawal + */ + static toBufferArray(withdrawal) { + const { index, validatorIndex, address, amount } = withdrawal; + const indexBuffer = (0, types_1.toType)(index, types_1.TypeOutput.BigInt) === BigInt(0) + ? Buffer.alloc(0) + : (0, types_1.toType)(index, types_1.TypeOutput.Buffer); + const validatorIndexBuffer = (0, types_1.toType)(validatorIndex, types_1.TypeOutput.BigInt) === BigInt(0) + ? 
Buffer.alloc(0) + : (0, types_1.toType)(validatorIndex, types_1.TypeOutput.Buffer); + let addressBuffer; + if (address instanceof address_1.Address) { + addressBuffer = address.buf; + } + else { + addressBuffer = (0, types_1.toType)(address, types_1.TypeOutput.Buffer); + } + const amountBuffer = (0, types_1.toType)(amount, types_1.TypeOutput.BigInt) === BigInt(0) + ? Buffer.alloc(0) + : (0, types_1.toType)(amount, types_1.TypeOutput.Buffer); + return [indexBuffer, validatorIndexBuffer, addressBuffer, amountBuffer]; + } + raw() { + return Withdrawal.toBufferArray(this); + } + toValue() { + return { + index: this.index, + validatorIndex: this.validatorIndex, + address: this.address.buf, + amount: this.amount, + }; + } + toJSON() { + return { + index: (0, bytes_1.bigIntToHex)(this.index), + validatorIndex: (0, bytes_1.bigIntToHex)(this.validatorIndex), + address: '0x' + this.address.buf.toString('hex'), + amount: (0, bytes_1.bigIntToHex)(this.amount), + }; + } +} +exports.Withdrawal = Withdrawal; +//# sourceMappingURL=withdrawal.js.map + +/***/ }), + +/***/ 56498: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +// ESLint gets confused by the nested list and tables in the docs, so we disable +// the rule for this file. +/* eslint-disable jsdoc/check-indentation, jsdoc/match-description */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.decodeSingle = exports.decode = exports.encodePacked = exports.encodeSingle = exports.encode = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const packer_1 = __webpack_require__(37700); +/** + * Encode the data with the provided types. The types must be valid Solidity + * ABI types. + * + * This will attempt to parse the values into the correct types. For example, + * if you pass in a hex string for a `uint256`, it will be parsed into a + * `bigint`. Regular strings are interpreted as UTF-8 strings. 
If you want to + * pass in a hex string, you must pass it in as a `Uint8Array`, or use the + * "0x"-prefix. + * + * It will also attempt to infer the types of the values. For example, if you + * pass in a string for a `uint256`, it will result in a TypeScript compile-time + * error. This does not work for all types, however. For example, if you use + * nested arrays or tuples, the type will be inferred as `unknown`. + * + * The following types are supported: + * + * - `address`: A 20-byte Ethereum address. + * - As a 40-character-long hexadecimal string, starting with "0x". + * - As a 20-byte-long byte array, i.e., `Uint8Array`. + * - `bool`: A boolean value. + * - As a boolean literal, i.e., `true` or `false`. + * - As the strings "true" or "false". + * - `bytes(n)`: A dynamic byte array. + * - As a hexadecimal string, starting with "0x". + * - As a byte array, i.e., `Uint8Array`. + * - As a regular string, which will be interpreted as UTF-8. + * - `function`: A Solidity function. + * - As a 48-character-long hexadecimal string, starting with "0x". + * - As a 24-byte-long byte array, i.e., `Uint8Array`. + * - As a {@link SolidityFunction} object. + * - `int(n)`: A signed integer. + * - As a number. + * - As a `bigint`. + * - As a hexadecimal string, starting with "0x". + * - `string`: A dynamic UTF-8 string. + * - As a regular string. + * - As a hexadecimal string, starting with "0x". + * - As a byte array, i.e., `Uint8Array`. + * - `tuple`: A tuple of values. + * - As an array of values. + * - `uint(n)`: An unsigned integer. + * - As a number. + * - As a `bigint`. + * - As a hexadecimal string, starting with "0x". 
+ * + * @example + * ```typescript + * import { encode, decode } from '@metamask/abi-utils'; + * + * const types = ['uint256', 'string']; + * const encoded = encode(types, [42, 'Hello, world!']); + * const decoded = decode(types, encoded); + * + * console.log(decoded); // [42n, 'Hello, world!'] + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html + * @param types - The types to encode. + * @param values - The values to encode. This array must have the same length as + * the types array. + * @param packed - Whether to use the non-standard packed mode. Defaults to + * `false`. + * @param tight - Whether to pack the values tightly. When enabled, the values + * will be packed without any padding. This matches the behaviour of + * `ethereumjs-abi`. Defaults to `false`. + * @returns The ABI encoded bytes. + */ +const encode = (types, values, packed, tight) => { + try { + return (0, packer_1.pack)({ types, values, packed, tight }); + } + catch (error) { + if (error instanceof errors_1.ParserError) { + throw new errors_1.ParserError(`Unable to encode value: ${error.message}`, error); + } + throw new errors_1.ParserError(`An unexpected error occurred: ${(0, errors_1.getErrorMessage)(error)}`, error); + } +}; +exports.encode = encode; +/** + * Encode the data with the provided type. The type must be a valid Solidity + * ABI type. + * + * See {@link encode} for more information on how values are parsed. + * + * @example + * ```typescript + * import { encodeSingle, decodeSingle } from '@metamask/abi-utils'; + * + * const encoded = encodeSingle('uint256', 42); + * const decoded = decodeSingle('uint256', encoded); + * + * console.log(decoded); // 42n + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @param type - The type to encode. + * @param value - The value to encode. + * @returns The ABI encoded bytes. 
+ */ +const encodeSingle = (type, value) => { + return (0, exports.encode)([type], [value]); +}; +exports.encodeSingle = encodeSingle; +/** + * Encode the data with the provided types. The types must be valid Solidity + * ABI types. This is similar to {@link encode}, but the values are encoded in + * the non-standard packed mode. This differs from the standard encoding in the + * following ways: + * + * - Most values are packed tightly, without alignment padding. + * - The exception is array values, which are padded to 32 bytes. + * - Values are still padded to their full size, i.e., `uint16` values are still + * padded to 2 bytes, regardless of the length of the value. + * - The encoding of dynamic types (`bytes`, `string`) is different. The length + * of the dynamic type is not included in the encoding, and the dynamic type is + * not padded to a multiple of 32 bytes. + * - All values are encoded in-place, without any offsets. + * + * The encoding of this is ambiguous as soon as there is more than one dynamic + * type. That means that these values cannot be decoded with {@link decode} or + * Solidity's `abi.decode` function. + * + * See {@link encode} for more information on how values are parsed. + * + * @example + * ```typescript + * import { encodePacked } from '@metamask/abi-utils'; + * + * const encoded = encodePacked(['uint8'], [42]); + * + * console.log(encoded); // `Uint8Array [ 42 ]` + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#non-standard-packed-mode + * @param types - The types to encode. + * @param values - The values to encode. + * @param tight - Whether to pack the values tightly. When enabled, `bytesN` + * values in arrays will be packed without any padding. This matches the + * behaviour of `ethereumjs-abi`. Defaults to `false`. + * @returns The ABI encoded bytes. 
+ */ +const encodePacked = (types, values, tight) => { + return (0, exports.encode)(types, values, true, tight); +}; +exports.encodePacked = encodePacked; +/** + * Decode an ABI encoded buffer with the specified types. The types must be + * valid Solidity ABI types. + * + * This will attempt to infer the output types from the input types. For + * example, if you use `uint256` as an input type, the output type will be + * `bigint`. This does not work for all types, however. For example, if you use + * nested array types or tuple types, the output type will be `unknown`. + * + * The resulting types of the values will be as follows: + * + * | Contract ABI Type | Resulting JavaScript Type | + * | ----------------- | ------------------------- | + * | `address` | `string` | + * | `bool` | `boolean` | + * | `bytes(n)` | `Uint8Array` | + * | `function` | {@link SolidityFunction} | + * | `int(n)` | `bigint` | + * | `string` | `string` | + * | `tuple` | `Array` | + * | `array` | `Array` | + * | `uint(n)` | `bigint` | + * + * @example + * ```typescript + * import { encode, decode } from '@metamask/abi-utils'; + * + * const types = ['uint256', 'string']; + * const encoded = encode(types, [42, 'Hello, world!']); + * const decoded = decode(types, encoded); + * + * console.log(decoded); // [42n, 'Hello, world!'] + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @param types - The types to decode the bytes with. + * @param value - The bytes-like value to decode. + * @returns The decoded values as array. 
+ */ +const decode = (types, value) => { + const bytes = (0, utils_1.createBytes)(value); + try { + return (0, packer_1.unpack)(types, bytes); + } + catch (error) { + if (error instanceof errors_1.ParserError) { + throw new errors_1.ParserError(`Unable to decode value: ${error.message}`, error); + } + throw new errors_1.ParserError(`An unexpected error occurred: ${(0, errors_1.getErrorMessage)(error)}`, error); + } +}; +exports.decode = decode; +/** + * Decode the data with the provided type. The type must be a valid Solidity + * ABI type. + * + * See {@link decode} for more information on how values are parsed. + * + * @example + * ```typescript + * import { encodeSingle, decodeSingle } from '@metamask/abi-utils'; + * + * const encoded = encodeSingle('uint256', 42); + * const decoded = decodeSingle('uint256', encoded); + * + * console.log(decoded); // 42n + * ``` + * @see https://docs.soliditylang.org/en/v0.8.17/abi-spec.html#types + * @param type - The type to decode. + * @param value - The bytes-like value to decode. + * @returns The decoded value. + */ +const decodeSingle = (type, value) => { + const result = (0, exports.decode)([type], value); + (0, utils_1.assert)(result.length === 1, new errors_1.ParserError('Decoded value array has unexpected length.')); + return result[0]; +}; +exports.decodeSingle = decodeSingle; +//# sourceMappingURL=abi.js.map + +/***/ }), + +/***/ 5961: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ParserError = exports.getErrorStack = exports.getErrorMessage = void 0; +const utils_1 = __webpack_require__(22049); +/** + * Attempt to get an error message from a value. + * + * - If the value is an error, the error's message is returned. + * - If the value is an object with a `message` property, the value of that + * property is returned. + * - If the value is a string, the value is returned. 
+ * - Otherwise, "Unknown error." is returned. + * + * @param error - The value to get an error message from. + * @returns The error message. + * @internal + */ +const getErrorMessage = (error) => { + if (typeof error === 'string') { + return error; + } + if (error instanceof Error) { + return error.message; + } + if ((0, utils_1.isObject)(error) && + (0, utils_1.hasProperty)(error, 'message') && + typeof error.message === 'string') { + return error.message; + } + return 'Unknown error.'; +}; +exports.getErrorMessage = getErrorMessage; +/** + * Get the error stack from a value. If the value is an error, the error's stack + * is returned. Otherwise, it returns `undefined`. + * + * @param error - The value to get an error stack from. + * @returns The error stack, or `undefined` if the value is not an error. + * @internal + */ +const getErrorStack = (error) => { + if (error instanceof Error) { + return error.stack; + } + return undefined; +}; +exports.getErrorStack = getErrorStack; +/** + * An error that is thrown when the ABI encoder or decoder encounters an + * issue. + */ +class ParserError extends Error { + constructor(message, originalError) { + super(message); + this.name = 'ParserError'; + const originalStack = (0, exports.getErrorStack)(originalError); + if (originalStack) { + this.stack = originalStack; + } + } +} +exports.ParserError = ParserError; +//# sourceMappingURL=errors.js.map + +/***/ }), + +/***/ 93256: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__webpack_require__(56498), exports); +__exportStar(__webpack_require__(5961), exports); +__exportStar(__webpack_require__(11126), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 57924: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.iterate = void 0; +const utils_1 = __webpack_require__(22049); +/** + * Iterate over a buffer with the specified size. This will yield a part of the + * buffer starting at an increment of the specified size, until the end of the + * buffer is reached. + * + * Calling the `skip` function will make it skip the specified number of bytes. + * + * @param buffer - The buffer to iterate over. + * @param size - The number of bytes to iterate with. + * @returns An iterator that yields the parts of the byte array. + * @yields The parts of the byte array. 
+ */ +const iterate = function* (buffer, size = 32) { + for (let pointer = 0; pointer < buffer.length; pointer += size) { + const skip = (length) => { + (0, utils_1.assert)(length >= 0, 'Cannot skip a negative number of bytes.'); + (0, utils_1.assert)(length % size === 0, 'Length must be a multiple of the size.'); + pointer += length; + }; + const value = buffer.subarray(pointer); + yield { skip, value }; + } + return { + skip: () => undefined, + value: new Uint8Array(), + }; +}; +exports.iterate = iterate; +//# sourceMappingURL=iterator.js.map + +/***/ }), + +/***/ 37700: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.unpack = exports.pack = exports.isDynamicParser = exports.getParser = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const iterator_1 = __webpack_require__(57924); +const parsers_1 = __webpack_require__(46207); +const utils_2 = __webpack_require__(26365); +/** + * Get the parser for the specified type. + * + * @param type - The type to get a parser for. + * @returns The parser. + * @throws If there is no parser for the specified type. + */ +const getParser = (type) => { + const parsers = { + address: parsers_1.address, + array: parsers_1.array, + bool: parsers_1.bool, + bytes: parsers_1.bytes, + fixedBytes: parsers_1.fixedBytes, + function: parsers_1.fn, + number: parsers_1.number, + string: parsers_1.string, + tuple: parsers_1.tuple, + }; + const staticParser = parsers[type]; + if (staticParser) { + return staticParser; + } + const parser = Object.values(parsers).find((value) => value.isType(type)); + if (parser) { + return parser; + } + throw new errors_1.ParserError(`The type "${type}" is not supported.`); +}; +exports.getParser = getParser; +/** + * Check if the specified parser is dynamic, for the provided types. 
This is + * primarily used for parsing tuples, where a tuple can be dynamic based on the + * types. For other parsers, it will simply use the set `isDynamic` value. + * + * @param parser - The parser to check. + * @param type - The type to check the parser with. + * @returns Whether the parser is dynamic. + */ +const isDynamicParser = (parser, type) => { + const { isDynamic } = parser; + if (typeof isDynamic === 'function') { + return isDynamic(type); + } + return isDynamic; +}; +exports.isDynamicParser = isDynamicParser; +/** + * Pack the provided values in a buffer, encoded with the specified types. If a + * buffer is specified, the resulting value will be concatenated with the + * buffer. + * + * @param args - The arguments object. + * @param args.types - The types of the values to pack. + * @param args.values - The values to pack. + * @param args.packed - Whether to use the non-standard packed mode. Defaults to + * `false`. + * @param args.arrayPacked - Whether to use the non-standard packed mode for + * arrays. Defaults to `false`. + * @param args.byteArray - The byte array to encode the values into. Defaults to + * an empty array. + * @param args.tight - Whether to use tight packing mode. Only applicable when + * `packed` is true. When true, the packed mode will not add any padding bytes. + * This matches the packing behaviour of `ethereumjs-abi`, but is not standard. + * @returns The resulting encoded buffer. 
+ */ +const pack = ({ types, values, packed = false, tight = false, arrayPacked = false, byteArray = new Uint8Array(), }) => { + (0, utils_1.assert)(types.length === values.length, new errors_1.ParserError(`The number of types (${types.length}) does not match the number of values (${values.length}).`)); + const { staticBuffer, dynamicBuffer, pointers } = types.reduce( + // eslint-disable-next-line @typescript-eslint/no-shadow + ({ staticBuffer, dynamicBuffer, pointers }, type, index) => { + const parser = (0, exports.getParser)(type); + const value = values[index]; + // If packed mode is enabled, we can skip the dynamic check, as all + // values are encoded in the static buffer. + if (packed || arrayPacked || !(0, exports.isDynamicParser)(parser, type)) { + return { + staticBuffer: parser.encode({ + buffer: staticBuffer, + value, + type, + packed, + tight, + }), + dynamicBuffer, + pointers, + }; + } + const newStaticBuffer = (0, utils_1.concatBytes)([staticBuffer, new Uint8Array(32)]); + const newDynamicBuffer = parser.encode({ + buffer: dynamicBuffer, + value, + type, + packed, + tight, + }); + return { + staticBuffer: newStaticBuffer, + dynamicBuffer: newDynamicBuffer, + pointers: [ + ...pointers, + { position: staticBuffer.length, pointer: dynamicBuffer.length }, + ], + }; + }, { + staticBuffer: new Uint8Array(), + dynamicBuffer: new Uint8Array(), + pointers: [], + }); + // If packed mode is enabled, there shouldn't be any dynamic values. 
+ (0, utils_1.assert)((!packed && !arrayPacked) || dynamicBuffer.length === 0, new errors_1.ParserError('Invalid pack state.')); + const dynamicStart = staticBuffer.length; + const updatedBuffer = pointers.reduce((target, { pointer, position }) => { + const offset = (0, utils_2.padStart)((0, utils_1.numberToBytes)(dynamicStart + pointer)); + return (0, utils_2.set)(target, offset, position); + }, staticBuffer); + return (0, utils_1.concatBytes)([byteArray, updatedBuffer, dynamicBuffer]); +}; +exports.pack = pack; +const unpack = (types, buffer) => { + const iterator = (0, iterator_1.iterate)(buffer); + return types.map((type) => { + const { value: { value, skip }, done, } = iterator.next(); + (0, utils_1.assert)(!done, new errors_1.ParserError(`The encoded value is invalid for the provided types. Reached end of buffer while attempting to parse "${type}".`)); + const parser = (0, exports.getParser)(type); + const isDynamic = (0, exports.isDynamicParser)(parser, type); + if (isDynamic) { + const pointer = (0, utils_1.bytesToNumber)(value.subarray(0, 32)); + const target = buffer.subarray(pointer); + return parser.decode({ type, value: target, skip }); + } + return parser.decode({ type, value, skip }); + }); +}; +exports.unpack = unpack; +//# sourceMappingURL=packer.js.map + +/***/ }), + +/***/ 91563: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.address = exports.getAddress = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const utils_2 = __webpack_require__(26365); +/** + * Normalize an address value. This accepts the address as: + * + * - A hex string starting with the `0x` prefix. + * - A byte array (`Uint8Array` or `Buffer`). + * + * It checks that the address is 20 bytes long. + * + * @param value - The value to normalize. + * @returns The normalized address as `Uint8Array`. 
+ */ +const getAddress = (value) => { + const bytesValue = (0, utils_1.createBytes)(value); + (0, utils_1.assert)(bytesValue.length <= 20, new errors_1.ParserError(`Invalid address value. Expected address to be 20 bytes long, but received ${bytesValue.length} bytes.`)); + return (0, utils_2.padStart)(bytesValue, 20); +}; +exports.getAddress = getAddress; +exports.address = { + isDynamic: false, + /** + * Get if the given value is a valid address type. Since `address` is a simple + * type, this is just a check that the value is "address". + * + * @param type - The type to check. + * @returns Whether the type is a valid address type. + */ + isType: (type) => type === 'address', + /** + * Get the byte length of an encoded address. Since `address` is a simple + * type, this always returns 32. + * + * Note that actual addresses are only 20 bytes long, but the encoding of + * the `address` type is always 32 bytes long. + * + * @returns The byte length of an encoded address. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given address to a 32-byte-long byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The address to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded address added to it. + */ + encode({ buffer, value, packed }) { + const addressValue = (0, exports.getAddress)(value); + // If we're using packed encoding, we can just add the address bytes to the + // byte array, without adding any padding. + if (packed) { + return (0, utils_1.concatBytes)([buffer, addressValue]); + } + const addressBuffer = (0, utils_2.padStart)(addressValue); + return (0, utils_1.concatBytes)([buffer, addressBuffer]); + }, + /** + * Decode the given byte array to an address. + * + * @param args - The decoding arguments. + * @param args.value - The byte array to decode. 
+ * @returns The decoded address as a hexadecimal string, starting with the + * "0x"-prefix. + */ + decode({ value }) { + return (0, utils_1.add0x)((0, utils_1.bytesToHex)(value.slice(12, 32))); + }, +}; +//# sourceMappingURL=address.js.map + +/***/ }), + +/***/ 186: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.array = exports.getTupleType = exports.getArrayType = exports.isArrayType = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const packer_1 = __webpack_require__(37700); +const utils_2 = __webpack_require__(26365); +const fixed_bytes_1 = __webpack_require__(83415); +const tuple_1 = __webpack_require__(30717); +const ARRAY_REGEX = /^(?.*)\[(?\d*?)\]$/u; +const isArrayType = (type) => ARRAY_REGEX.test(type); +exports.isArrayType = isArrayType; +/** + * Get the type of the array. + * + * @param type - The type to get the array type for. + * @returns The array type. + */ +const getArrayType = (type) => { + const match = type.match(ARRAY_REGEX); + (0, utils_1.assert)(match?.groups?.type, new errors_1.ParserError(`Invalid array type. Expected an array type, but received "${type}".`)); + return [ + match.groups.type, + match.groups.length ? parseInt(match.groups.length, 10) : undefined, + ]; +}; +exports.getArrayType = getArrayType; +/** + * Get the type of the array as a tuple type. This is used for encoding fixed + * length arrays, which are encoded as tuples. + * + * @param innerType - The type of the array. + * @param length - The length of the array. + * @returns The tuple type. + */ +const getTupleType = (innerType, length) => { + return `(${new Array(length).fill(innerType).join(',')})`; +}; +exports.getTupleType = getTupleType; +exports.array = { + /** + * Check if the array is dynamic. Arrays are dynamic if the array does not + * have a fixed length, or if the array type is dynamic. 
+ * + * @param type - The type to check. + * @returns Whether the array is dynamic. + */ + isDynamic(type) { + const [innerType, length] = (0, exports.getArrayType)(type); + return ( + // `T[]` is dynamic for any `T`. `T[k]` is dynamic for any dynamic `T` and + // any `k >= 0`. + length === undefined || (0, packer_1.isDynamicParser)((0, packer_1.getParser)(innerType), innerType)); + }, + /** + * Check if a type is an array type. + * + * @param type - The type to check. + * @returns Whether the type is an array type. + */ + isType(type) { + return (0, exports.isArrayType)(type); + }, + /** + * Get the byte length of an encoded array. If the array is dynamic, this + * returns 32, i.e., the length of the pointer to the array. If the array is + * static, this returns the byte length of the resulting tuple type. + * + * @param type - The type to get the byte length for. + * @returns The byte length of an encoded array. + */ + getByteLength(type) { + (0, utils_1.assert)((0, exports.isArrayType)(type), new errors_1.ParserError(`Expected an array type, but received "${type}".`)); + const [innerType, length] = (0, exports.getArrayType)(type); + if (!(0, packer_1.isDynamicParser)(this, type) && length !== undefined) { + return tuple_1.tuple.getByteLength((0, exports.getTupleType)(innerType, length)); + } + return 32; + }, + /** + * Encode the given array to a byte array. If the array is static, this uses + * the tuple encoder. + * + * @param args - The encoding arguments. + * @param args.type - The type of the array. + * @param args.buffer - The byte array to add to. + * @param args.value - The array to encode. + * @param args.packed - Whether to use non-standard packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded array added to it. 
+ */ + encode({ type, buffer, value, packed, tight }) { + const [arrayType, fixedLength] = (0, exports.getArrayType)(type); + // Packed encoding does not support nested arrays. + (0, utils_1.assert)(!packed || !(0, exports.isArrayType)(arrayType), new errors_1.ParserError(`Cannot pack nested arrays.`)); + // Tightly pack `T[]` where `T` is a dynamic type. This is not supported in + // Solidity, but is commonly used in the Ethereum ecosystem. + if (packed && (0, packer_1.isDynamicParser)((0, packer_1.getParser)(arrayType), arrayType)) { + return (0, packer_1.pack)({ + types: new Array(value.length).fill(arrayType), + values: value, + byteArray: buffer, + packed, + arrayPacked: true, + tight, + }); + } + if (fixedLength) { + (0, utils_1.assert)(fixedLength === value.length, new errors_1.ParserError(`Array length does not match type length. Expected a length of ${fixedLength}, but received ${value.length}.`)); + // `T[k]` for any `T` and `k` is encoded as `(T[0], ..., T[k - 1])`. + return tuple_1.tuple.encode({ + type: (0, exports.getTupleType)(arrayType, fixedLength), + buffer, + value, + // In "tight" mode, we don't pad the values to 32 bytes if the value is + // of type `bytesN`. This is an edge case in `ethereumjs-abi` that we + // support to provide compatibility with it. + packed: fixed_bytes_1.fixedBytes.isType(arrayType) && tight, + tight, + }); + } + // For packed encoding, we don't need to encode the length of the array, + // so we can just encode the values. + if (packed) { + return (0, packer_1.pack)({ + types: new Array(value.length).fill(arrayType), + values: value, + byteArray: buffer, + // In "tight" mode, we don't pad the values to 32 bytes if the value is + // of type `bytesN`. This is an edge case in `ethereumjs-abi` that we + // support to provide compatibility with it. + packed: fixed_bytes_1.fixedBytes.isType(arrayType) && tight, + arrayPacked: true, + tight, + }); + } + // `T[]` with `k` elements is encoded as `k (T[0], ..., T[k - 1])`. 
That + // means that we just need to encode the length of the array, and then the + // array itself. The pointer is encoded by the {@link pack} function. + const arrayLength = (0, utils_2.padStart)((0, utils_1.numberToBytes)(value.length)); + return (0, packer_1.pack)({ + types: new Array(value.length).fill(arrayType), + values: value, + byteArray: (0, utils_1.concatBytes)([buffer, arrayLength]), + packed, + tight, + }); + }, + /** + * Decode an array from the given byte array. + * + * @param args - The decoding arguments. + * @param args.type - The type of the array. + * @param args.value - The byte array to decode. + * @returns The decoded array. + */ + decode({ type, value, ...rest }) { + const [arrayType, fixedLength] = (0, exports.getArrayType)(type); + if (fixedLength) { + const result = tuple_1.tuple.decode({ + type: (0, exports.getTupleType)(arrayType, fixedLength), + value, + ...rest, + }); + (0, utils_1.assert)(result.length === fixedLength, new errors_1.ParserError(`Array length does not match type length. Expected a length of ${fixedLength}, but received ${result.length}.`)); + return result; + } + const arrayLength = (0, utils_1.bytesToNumber)(value.subarray(0, 32)); + return (0, packer_1.unpack)(new Array(arrayLength).fill(arrayType), value.subarray(32)); + }, +}; +//# sourceMappingURL=array.js.map + +/***/ }), + +/***/ 47435: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.bool = exports.getBooleanValue = void 0; +const utils_1 = __webpack_require__(22049); +const superstruct_1 = __webpack_require__(2150); +const errors_1 = __webpack_require__(5961); +const number_1 = __webpack_require__(6150); +const BooleanCoercer = (0, superstruct_1.coerce)((0, superstruct_1.boolean)(), (0, superstruct_1.union)([(0, superstruct_1.literal)('true'), (0, superstruct_1.literal)('false')]), (value) => value === 'true'); +/** + * Normalize a boolean value. 
This accepts the boolean as: + * + * - A boolean literal. + * - The string "true" or "false". + * + * @param value - The value to get a boolean for. + * @returns The parsed boolean value. This is `BigInt(1)` for truthy values, or + * `BigInt(0)` for falsy values. + */ +const getBooleanValue = (value) => { + try { + const booleanValue = (0, superstruct_1.create)(value, BooleanCoercer); + if (booleanValue) { + return BigInt(1); + } + return BigInt(0); + } + catch { + throw new errors_1.ParserError(`Invalid boolean value. Expected a boolean literal, or the string "true" or "false", but received "${value}".`); + } +}; +exports.getBooleanValue = getBooleanValue; +exports.bool = { + isDynamic: false, + /** + * Get if the given value is a valid boolean type. Since `bool` is a simple + * type, this is just a check that the value is "bool". + * + * @param type - The type to check. + * @returns Whether the type is a valid boolean type. + */ + isType: (type) => type === 'bool', + /** + * Get the byte length of an encoded boolean. Since `bool` is a simple + * type, this always returns 32. + * + * Note that actual booleans are only 1 byte long, but the encoding of + * the `bool` type is always 32 bytes long. + * + * @returns The byte length of an encoded boolean. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given boolean to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The boolean to encode. + * @param args.packed - Whether the value is packed. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded boolean added to it. + */ + encode({ buffer, value, packed, tight }) { + const booleanValue = (0, exports.getBooleanValue)(value); + // For packed encoding, we add a single byte (`0x00` or `0x01`) to the byte + // array. 
+ if (packed) { + return (0, utils_1.concatBytes)([buffer, (0, utils_1.bigIntToBytes)(booleanValue)]); + } + // Booleans are encoded as 32-byte integers, so we use the number parser + // to encode the boolean value. + return number_1.number.encode({ + type: 'uint256', + buffer, + value: booleanValue, + packed, + tight, + }); + }, + /** + * Decode the given byte array to a boolean. + * + * @param args - The decoding arguments. + * @returns The decoded boolean. + */ + decode(args) { + // Booleans are encoded as 32-byte integers, so we use the number parser + // to decode the boolean value. + return number_1.number.decode({ ...args, type: 'uint256' }) === BigInt(1); + }, +}; +//# sourceMappingURL=bool.js.map + +/***/ }), + +/***/ 99356: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.bytes = void 0; +const utils_1 = __webpack_require__(22049); +const utils_2 = __webpack_require__(26365); +exports.bytes = { + isDynamic: true, + /** + * Check if a type is a bytes type. Since `bytes` is a simple type, this is + * just a check that the type is "bytes". + * + * @param type - The type to check. + * @returns Whether the type is a bytes type. + */ + isType: (type) => type === 'bytes', + /** + * Get the byte length of an encoded bytes value. Since `bytes` is a simple + * type, this always returns 32. + * + * Note that actual length of a bytes value is variable, but the encoded + * static value (pointer) is always 32 bytes long. + * + * @returns The byte length of an encoded bytes value. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given bytes value to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The bytes value to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded bytes value added to it. 
+ */ + encode({ buffer, value, packed }) { + const bufferValue = (0, utils_1.createBytes)(value); + // For packed encoding, we can just add the bytes value to the byte array, + // without adding any padding or alignment. There is also no need to + // encode the length of the bytes. + if (packed) { + return (0, utils_1.concatBytes)([buffer, bufferValue]); + } + const paddedSize = Math.ceil(bufferValue.byteLength / 32) * 32; + // Bytes of length `k` are encoded as `k pad_right(bytes)`. + return (0, utils_1.concatBytes)([ + buffer, + (0, utils_2.padStart)((0, utils_1.numberToBytes)(bufferValue.byteLength)), + (0, utils_2.padEnd)(bufferValue, paddedSize), + ]); + }, + /** + * Decode the given byte array to a bytes value. + * + * @param args - The decoding arguments. + * @param args.value - The byte array to decode. + * @returns The decoded bytes value as a `Uint8Array`. + */ + decode({ value }) { + const bytesValue = value.subarray(0, 32); + const length = (0, utils_1.bytesToNumber)(bytesValue); + // Since we're returning a `Uint8Array`, we use `slice` to copy the bytes + // into a new array. + return value.slice(32, 32 + length); + }, +}; +//# sourceMappingURL=bytes.js.map + +/***/ }), + +/***/ 83415: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fixedBytes = exports.getByteLength = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const utils_2 = __webpack_require__(26365); +const BYTES_REGEX = /^bytes([0-9]{1,2})$/u; +/** + * Get the length of the specified type. If a length is not specified, or if the + * length is out of range (0 < n <= 32), this will throw an error. + * + * @param type - The type to get the length for. + * @returns The byte length of the type. + */ +const getByteLength = (type) => { + const bytes = type.match(BYTES_REGEX)?.[1]; + (0, utils_1.assert)(bytes, `Invalid byte length. 
Expected a number between 1 and 32, but received "${type}".`); + const length = Number(bytes); + (0, utils_1.assert)(length > 0 && length <= 32, new errors_1.ParserError(`Invalid byte length. Expected a number between 1 and 32, but received "${type}".`)); + return length; +}; +exports.getByteLength = getByteLength; +exports.fixedBytes = { + isDynamic: false, + /** + * Check if a type is a fixed bytes type. + * + * @param type - The type to check. + * @returns Whether the type is a fixed bytes type. + */ + isType(type) { + return BYTES_REGEX.test(type); + }, + /** + * Get the byte length of an encoded fixed bytes type. + * + * @returns The byte length of the type. + */ + getByteLength() { + return 32; + }, + /** + * Encode a fixed bytes value. + * + * @param args - The arguments to encode. + * @param args.type - The type of the value. + * @param args.buffer - The byte array to add to. + * @param args.value - The value to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded value added to it. + */ + encode({ type, buffer, value, packed }) { + const length = (0, exports.getByteLength)(type); + const bufferValue = (0, utils_1.createBytes)(value); + (0, utils_1.assert)(bufferValue.length <= length, new errors_1.ParserError(`Expected a value of length ${length}, but received a value of length ${bufferValue.length}.`)); + // For packed encoding, the value is padded to the length of the type, and + // then added to the byte array. + if (packed) { + return (0, utils_1.concatBytes)([buffer, (0, utils_2.padEnd)(bufferValue, length)]); + } + return (0, utils_1.concatBytes)([buffer, (0, utils_2.padEnd)(bufferValue)]); + }, + /** + * Decode a fixed bytes value. + * + * @param args - The arguments to decode. + * @param args.type - The type of the value. + * @param args.value - The value to decode. + * @returns The decoded value as a `Uint8Array`. 
+ */ + decode({ type, value }) { + const length = (0, exports.getByteLength)(type); + // Since we're returning a `Uint8Array`, we use `slice` to copy the bytes + // into a new array. + return value.slice(0, length); + }, +}; +//# sourceMappingURL=fixed-bytes.js.map + +/***/ }), + +/***/ 27827: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fn = exports.getFunction = void 0; +const utils_1 = __webpack_require__(22049); +const superstruct_1 = __webpack_require__(2150); +const errors_1 = __webpack_require__(5961); +const fixed_bytes_1 = __webpack_require__(83415); +/** + * A struct that represents a Solidity function. The value must be a hex string + * or a byte array. The created value will always be an object with an `address` + * and `selector` property. + */ +const FunctionStruct = (0, superstruct_1.coerce)((0, superstruct_1.object)({ + address: utils_1.StrictHexStruct, + selector: utils_1.StrictHexStruct, +}), (0, superstruct_1.union)([utils_1.StrictHexStruct, (0, superstruct_1.instance)(Uint8Array)]), (value) => { + const bytes = (0, utils_1.createBytes)(value); + (0, utils_1.assert)(bytes.length === 24, new errors_1.ParserError(`Invalid Solidity function. Expected function to be 24 bytes long, but received ${bytes.length} bytes.`)); + return { + address: (0, utils_1.bytesToHex)(bytes.subarray(0, 20)), + selector: (0, utils_1.bytesToHex)(bytes.subarray(20, 24)), + }; +}); +/** + * Normalize a function. This accepts the function as: + * + * - A {@link SolidityFunction} object. + * - A hexadecimal string. + * - A byte array. + * + * @param input - The function-like input. + * @returns The function as buffer. 
+ */ +const getFunction = (input) => { + const value = (0, superstruct_1.create)(input, FunctionStruct); + return (0, utils_1.concatBytes)([(0, utils_1.hexToBytes)(value.address), (0, utils_1.hexToBytes)(value.selector)]); +}; +exports.getFunction = getFunction; +exports.fn = { + isDynamic: false, + /** + * Check if a type is a function type. Since `function` is a simple type, this + * is just a check that the type is "function". + * + * @param type - The type to check. + * @returns Whether the type is a function type. + */ + isType: (type) => type === 'function', + /** + * Get the byte length of an encoded function. Since `function` is a simple + * type, this always returns 32. + * + * Note that actual functions are only 24 bytes long, but the encoding of + * the `function` type is always 32 bytes long. + * + * @returns The byte length of an encoded function. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given function to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The function to encode. + * @param args.packed - Whether to use packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded function added to it. + */ + encode({ buffer, value, packed, tight }) { + const fnValue = (0, exports.getFunction)(value); + // Functions are encoded as `bytes24`, so we use the fixedBytes parser to + // encode the function. + return fixed_bytes_1.fixedBytes.encode({ + type: 'bytes24', + buffer, + value: fnValue, + packed, + tight, + }); + }, + /** + * Decode the given byte array to a function. + * + * @param args - The decoding arguments. + * @param args.value - The byte array to decode. + * @returns The decoded function as a {@link SolidityFunction} object. 
+ */ + decode({ value }) { + return { + address: (0, utils_1.bytesToHex)(value.slice(0, 20)), + selector: (0, utils_1.bytesToHex)(value.slice(20, 24)), + }; + }, +}; +//# sourceMappingURL=function.js.map + +/***/ }), + +/***/ 46207: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__webpack_require__(91563), exports); +__exportStar(__webpack_require__(186), exports); +__exportStar(__webpack_require__(47435), exports); +__exportStar(__webpack_require__(99356), exports); +__exportStar(__webpack_require__(83415), exports); +__exportStar(__webpack_require__(27827), exports); +__exportStar(__webpack_require__(6150), exports); +__exportStar(__webpack_require__(28160), exports); +__exportStar(__webpack_require__(8446), exports); +__exportStar(__webpack_require__(30717), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 6150: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.number = exports.getBigInt = exports.assertNumberLength = exports.getLength = exports.isSigned = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); 
+const utils_2 = __webpack_require__(26365);
+const NUMBER_REGEX = /^u?int(?<length>[0-9]*)?$/u;
+/**
+ * Check if a number type is signed.
+ *
+ * @param type - The type to check.
+ * @returns Whether the type is signed.
+ */
+const isSigned = (type) => {
+    return !type.startsWith('u');
+};
+exports.isSigned = isSigned;
+/**
+ * Get the length of the specified type. If a length is not specified, if the
+ * length is out of range (8 <= n <= 256), or if the length is not a multiple of
+ * 8, this will throw an error.
+ *
+ * @param type - The type to get the length for.
+ * @returns The bit length of the type.
+ */
+const getLength = (type) => {
+    if (type === 'int' || type === 'uint') {
+        return 256;
+    }
+    const match = type.match(NUMBER_REGEX);
+    (0, utils_1.assert)(match?.groups?.length, new errors_1.ParserError(`Invalid number type. Expected a number type, but received "${type}".`));
+    const length = parseInt(match.groups.length, 10);
+    (0, utils_1.assert)(length >= 8 && length <= 256, new errors_1.ParserError(`Invalid number length. Expected a number between 8 and 256, but received "${type}".`));
+    (0, utils_1.assert)(length % 8 === 0, new errors_1.ParserError(`Invalid number length. Expected a multiple of 8, but received "${type}".`));
+    return length;
+};
+exports.getLength = getLength;
+/**
+ * Assert that the byte length of the given value is in range for the given
+ * number type.
+ *
+ * @param value - The value to check.
+ * @param type - The type of the value.
+ * @throws If the value is out of range for the type.
+ */
+const assertNumberLength = (value, type) => {
+    const length = (0, exports.getLength)(type);
+    const maxValue = BigInt(2) ** BigInt(length - ((0, exports.isSigned)(type) ? 1 : 0)) - BigInt(1);
+    if ((0, exports.isSigned)(type)) {
+        // Signed types must be in the range of `-(2^(length - 1))` to
+        // `2^(length - 1) - 1`.
+ (0, utils_1.assert)(value >= -(maxValue + BigInt(1)) && value <= maxValue, new errors_1.ParserError(`Number "${value}" is out of range for type "${type}".`)); + return; + } + // Unsigned types must be in the range of `0` to `2^length - 1`. + (0, utils_1.assert)(value <= maxValue, new errors_1.ParserError(`Number "${value}" is out of range for type "${type}".`)); +}; +exports.assertNumberLength = assertNumberLength; +/** + * Normalize a `bigint` value. This accepts the value as: + * + * - A `bigint`. + * - A `number`. + * - A decimal string, i.e., a string that does not start with "0x". + * - A hexadecimal string, i.e., a string that starts with "0x". + * + * @param value - The number-like value to parse. + * @returns The value parsed as bigint. + */ +const getBigInt = (value) => { + try { + return (0, utils_1.createBigInt)(value); + } + catch { + throw new errors_1.ParserError(`Invalid number. Expected a valid number value, but received "${value}".`); + } +}; +exports.getBigInt = getBigInt; +exports.number = { + isDynamic: false, + /** + * Check if a type is a number type. + * + * @param type - The type to check. + * @returns Whether the type is a number type. + */ + isType(type) { + return NUMBER_REGEX.test(type); + }, + /** + * Get the byte length of an encoded number type. Since `int` and `uint` are + * simple types, this will always return 32. + * + * @returns The byte length of the type. + */ + getByteLength() { + return 32; + }, + /** + * Encode a number value. + * + * @param args - The arguments to encode. + * @param args.type - The type of the value. + * @param args.buffer - The byte array to add to. + * @param args.value - The value to encode. + * @param args.packed - Whether to use packed encoding. + * @returns The bytes with the encoded value added to it. 
+ */ + encode({ type, buffer, value, packed }) { + const bigIntValue = (0, exports.getBigInt)(value); + (0, exports.assertNumberLength)(bigIntValue, type); + if ((0, exports.isSigned)(type)) { + // For packed encoding, the value is padded to the length of the type, and + // then added to the byte array. + if (packed) { + const length = (0, exports.getLength)(type) / 8; + return (0, utils_1.concatBytes)([buffer, (0, utils_1.signedBigIntToBytes)(bigIntValue, length)]); + } + return (0, utils_1.concatBytes)([ + buffer, + (0, utils_2.padStart)((0, utils_1.signedBigIntToBytes)(bigIntValue, 32)), + ]); + } + // For packed encoding, the value is padded to the length of the type, and + // then added to the byte array. + if (packed) { + const length = (0, exports.getLength)(type) / 8; + return (0, utils_1.concatBytes)([ + buffer, + (0, utils_2.padStart)((0, utils_1.bigIntToBytes)(bigIntValue), length), + ]); + } + return (0, utils_1.concatBytes)([buffer, (0, utils_2.padStart)((0, utils_1.bigIntToBytes)(bigIntValue))]); + }, + /** + * Decode a number value. + * + * @param args - The decoding arguments. + * @param args.type - The type of the value. + * @param args.value - The value to decode. + * @returns The decoded value. 
+ */ + decode({ type, value }) { + const buffer = value.subarray(0, 32); + if ((0, exports.isSigned)(type)) { + const numberValue = (0, utils_1.bytesToSignedBigInt)(buffer); + (0, exports.assertNumberLength)(numberValue, type); + return numberValue; + } + const numberValue = (0, utils_1.bytesToBigInt)(buffer); + (0, exports.assertNumberLength)(numberValue, type); + return numberValue; + }, +}; +//# sourceMappingURL=number.js.map + +/***/ }), + +/***/ 28160: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +//# sourceMappingURL=parser.js.map + +/***/ }), + +/***/ 8446: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.string = void 0; +const utils_1 = __webpack_require__(22049); +const bytes_1 = __webpack_require__(99356); +exports.string = { + isDynamic: true, + /** + * Check if a type is a string type. Since `string` is a simple type, this + * is just a check if the type is "string". + * + * @param type - The type to check. + * @returns Whether the type is a string type. + */ + isType: (type) => type === 'string', + /** + * Get the byte length of an encoded string type. Since `string` is a simple + * type, this will always return 32. + * + * Note that actual strings are variable in length, but the encoded static + * value (pointer) is always 32 bytes long. + * + * @returns The byte length of an encoded string. + */ + getByteLength() { + return 32; + }, + /** + * Encode the given string value to a byte array. + * + * @param args - The encoding arguments. + * @param args.buffer - The byte array to add to. + * @param args.value - The string value to encode. + * @param args.packed - Whether to use packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded string value added to it. 
+ */ + encode({ buffer, value, packed, tight }) { + // Strings are encoded as UTF-8 bytes, so we use the bytes parser to encode + // the string as bytes. + return bytes_1.bytes.encode({ + type: 'bytes', + buffer, + value: (0, utils_1.stringToBytes)(value), + packed, + tight, + }); + }, + /** + * Decode the given byte array to a string value. + * + * @param args - The decoding arguments. + * @returns The decoded string value. + */ + decode(args) { + // Strings are encoded as UTF-8 bytes, so we use the bytes parser to decode + // the bytes, and convert them to a string. + return (0, utils_1.bytesToString)(bytes_1.bytes.decode(args)); + }, +}; +//# sourceMappingURL=string.js.map + +/***/ }), + +/***/ 30717: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.tuple = exports.getTupleElements = void 0; +const utils_1 = __webpack_require__(22049); +const errors_1 = __webpack_require__(5961); +const packer_1 = __webpack_require__(37700); +const TUPLE_REGEX = /^\((.+)\)$/u; +const isTupleType = (type) => TUPLE_REGEX.test(type); +/** + * Get elements from a tuple type. + * + * @param type - The tuple type to get the types for. + * @returns The elements of the tuple as string array. + */ +const getTupleElements = (type) => { + (0, utils_1.assert)(type.startsWith('(') && type.endsWith(')'), new errors_1.ParserError(`Invalid tuple type. 
Expected tuple type, but received "${type}".`)); + const elements = []; + let current = ''; + let depth = 0; + for (let i = 1; i < type.length - 1; i++) { + const char = type[i]; + if (char === ',' && depth === 0) { + elements.push(current.trim()); + current = ''; + } + else { + current += char; + if (char === '(') { + depth += 1; + } + else if (char === ')') { + depth -= 1; + } + } + } + if (current.trim()) { + elements.push(current.trim()); + } + return elements; +}; +exports.getTupleElements = getTupleElements; +exports.tuple = { + /** + * Check if the tuple is dynamic. Tuples are dynamic if one or more elements + * of the tuple are dynamic. + * + * @param type - The type to check. + * @returns Whether the tuple is dynamic. + */ + isDynamic(type) { + const elements = (0, exports.getTupleElements)(type); + return elements.some((element) => { + const parser = (0, packer_1.getParser)(element); + return (0, packer_1.isDynamicParser)(parser, element); + }); + }, + /** + * Check if a type is a tuple type. + * + * @param type - The type to check. + * @returns Whether the type is a tuple type. + */ + isType(type) { + return isTupleType(type); + }, + /** + * Get the byte length of a tuple type. If the tuple is dynamic, this will + * always return 32. If the tuple is static, this will return the sum of the + * byte lengths of the tuple elements. + * + * @param type - The type to get the byte length for. + * @returns The byte length of the tuple type. + */ + getByteLength(type) { + if ((0, packer_1.isDynamicParser)(this, type)) { + return 32; + } + const elements = (0, exports.getTupleElements)(type); + return elements.reduce((total, element) => { + return total + (0, packer_1.getParser)(element).getByteLength(element); + }, 0); + }, + /** + * Encode a tuple value. + * + * @param args - The encoding arguments. + * @param args.type - The type of the value. + * @param args.buffer - The byte array to add to. + * @param args.value - The value to encode. 
+ * @param args.packed - Whether to use non-standard packed encoding. + * @param args.tight - Whether to use non-standard tight encoding. + * @returns The bytes with the encoded value added to it. + */ + encode({ type, buffer, value, packed, tight }) { + const elements = (0, exports.getTupleElements)(type); + return (0, packer_1.pack)({ + types: elements, + values: value, + byteArray: buffer, + packed, + tight, + }); + }, + /** + * Decode a tuple value. + * + * @param args - The decoding arguments. + * @param args.type - The type of the value. + * @param args.value - The value to decode. + * @param args.skip - A function to skip a number of bytes. + * @returns The decoded value. + */ + decode({ type, value, skip }) { + const elements = (0, exports.getTupleElements)(type); + const length = this.getByteLength(type) - 32; + skip(length); + return (0, packer_1.unpack)(elements, value); + }, +}; +//# sourceMappingURL=tuple.js.map + +/***/ }), + +/***/ 15744: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +//# sourceMappingURL=abi.js.map + +/***/ }), + +/***/ 11126: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__webpack_require__(15744), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 59194: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.padEnd = exports.padStart = exports.set = void 0; +const utils_1 = __webpack_require__(22049); +const BUFFER_WIDTH = 32; +/** + * Set `buffer` in `target` at the specified position. + * + * @param target - The buffer to set to. + * @param buffer - The buffer to set in the target. + * @param position - The position at which to set the target. + * @returns The combined buffer. + */ +const set = (target, buffer, position) => { + return (0, utils_1.concatBytes)([ + target.subarray(0, position), + buffer, + target.subarray(position + buffer.length), + ]); +}; +exports.set = set; +/** + * Add padding to a buffer. If the buffer is larger than `length`, this function won't do anything. If it's smaller, the + * buffer will be padded to the specified length, with extra zeroes at the start. + * + * @param buffer - The buffer to add padding to. + * @param length - The number of bytes to pad the buffer to. + * @returns The padded buffer. + */ +const padStart = (buffer, length = BUFFER_WIDTH) => { + const padding = new Uint8Array(Math.max(length - buffer.length, 0)).fill(0x00); + return (0, utils_1.concatBytes)([padding, buffer]); +}; +exports.padStart = padStart; +/** + * Add padding to a buffer. 
If the buffer is larger than `length`, this function won't do anything. If it's smaller, the + * buffer will be padded to the specified length, with extra zeroes at the end. + * + * @param buffer - The buffer to add padding to. + * @param length - The number of bytes to pad the buffer to. + * @returns The padded buffer. + */ +const padEnd = (buffer, length = BUFFER_WIDTH) => { + const padding = new Uint8Array(Math.max(length - buffer.length, 0)).fill(0x00); + return (0, utils_1.concatBytes)([buffer, padding]); +}; +exports.padEnd = padEnd; +//# sourceMappingURL=buffer.js.map + +/***/ }), + +/***/ 26365: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__webpack_require__(59194), exports); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 98537: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEncryptionPublicKey = exports.decryptSafely = exports.decrypt = exports.encryptSafely = exports.encrypt = void 0; +const nacl = __importStar(__webpack_require__(88947)); +const naclUtil = __importStar(__webpack_require__(76386)); +const utils_1 = __webpack_require__(54907); +/** + * Encrypt a message. + * + * @param options - The encryption options. + * @param options.publicKey - The public key of the message recipient. + * @param options.data - The message data. + * @param options.version - The type of encryption to use. + * @returns The encrypted data. 
+ */ +function encrypt({ publicKey, data, version, }) { + if ((0, utils_1.isNullish)(publicKey)) { + throw new Error('Missing publicKey parameter'); + } + else if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(version)) { + throw new Error('Missing version parameter'); + } + switch (version) { + case 'x25519-xsalsa20-poly1305': { + if (typeof data !== 'string') { + throw new Error('Message data must be given as a string'); + } + // generate ephemeral keypair + const ephemeralKeyPair = nacl.box.keyPair(); + // assemble encryption parameters - from string to UInt8 + let pubKeyUInt8Array; + try { + pubKeyUInt8Array = naclUtil.decodeBase64(publicKey); + } + catch (err) { + throw new Error('Bad public key'); + } + const msgParamsUInt8Array = naclUtil.decodeUTF8(data); + const nonce = nacl.randomBytes(nacl.box.nonceLength); + // encrypt + const encryptedMessage = nacl.box(msgParamsUInt8Array, nonce, pubKeyUInt8Array, ephemeralKeyPair.secretKey); + // handle encrypted data + const output = { + version: 'x25519-xsalsa20-poly1305', + nonce: naclUtil.encodeBase64(nonce), + ephemPublicKey: naclUtil.encodeBase64(ephemeralKeyPair.publicKey), + ciphertext: naclUtil.encodeBase64(encryptedMessage), + }; + // return encrypted msg data + return output; + } + default: + throw new Error('Encryption type/version not supported'); + } +} +exports.encrypt = encrypt; +/** + * Encrypt a message in a way that obscures the message length. + * + * The message is padded to a multiple of 2048 before being encrypted so that the length of the + * resulting encrypted message can't be used to guess the exact length of the original message. + * + * @param options - The encryption options. + * @param options.publicKey - The public key of the message recipient. + * @param options.data - The message data. + * @param options.version - The type of encryption to use. + * @returns The encrypted data. 
+ */ +function encryptSafely({ publicKey, data, version, }) { + if ((0, utils_1.isNullish)(publicKey)) { + throw new Error('Missing publicKey parameter'); + } + else if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(version)) { + throw new Error('Missing version parameter'); + } + const DEFAULT_PADDING_LENGTH = 2 ** 11; + const NACL_EXTRA_BYTES = 16; + if (typeof data === 'object' && data && 'toJSON' in data) { + // remove toJSON attack vector + // TODO, check all possible children + throw new Error('Cannot encrypt with toJSON property. Please remove toJSON property'); + } + // add padding + const dataWithPadding = { + data, + padding: '', + }; + // calculate padding + const dataLength = Buffer.byteLength(JSON.stringify(dataWithPadding), 'utf-8'); + const modVal = dataLength % DEFAULT_PADDING_LENGTH; + let padLength = 0; + // Only pad if necessary + if (modVal > 0) { + padLength = DEFAULT_PADDING_LENGTH - modVal - NACL_EXTRA_BYTES; // nacl extra bytes + } + dataWithPadding.padding = '0'.repeat(padLength); + const paddedMessage = JSON.stringify(dataWithPadding); + return encrypt({ publicKey, data: paddedMessage, version }); +} +exports.encryptSafely = encryptSafely; +/** + * Decrypt a message. + * + * @param options - The decryption options. + * @param options.encryptedData - The encrypted data. + * @param options.privateKey - The private key to decrypt with. + * @returns The decrypted message. 
+ */ +function decrypt({ encryptedData, privateKey, }) { + if ((0, utils_1.isNullish)(encryptedData)) { + throw new Error('Missing encryptedData parameter'); + } + else if ((0, utils_1.isNullish)(privateKey)) { + throw new Error('Missing privateKey parameter'); + } + switch (encryptedData.version) { + case 'x25519-xsalsa20-poly1305': { + // string to buffer to UInt8Array + const receiverPrivateKeyUint8Array = naclDecodeHex(privateKey); + const receiverEncryptionPrivateKey = nacl.box.keyPair.fromSecretKey(receiverPrivateKeyUint8Array).secretKey; + // assemble decryption parameters + const nonce = naclUtil.decodeBase64(encryptedData.nonce); + const ciphertext = naclUtil.decodeBase64(encryptedData.ciphertext); + const ephemPublicKey = naclUtil.decodeBase64(encryptedData.ephemPublicKey); + // decrypt + const decryptedMessage = nacl.box.open(ciphertext, nonce, ephemPublicKey, receiverEncryptionPrivateKey); + // return decrypted msg data + try { + if (!decryptedMessage) { + throw new Error(); + } + const output = naclUtil.encodeUTF8(decryptedMessage); + // TODO: This is probably extraneous but was kept to minimize changes during refactor + if (!output) { + throw new Error(); + } + return output; + } + catch (err) { + if (err && typeof err.message === 'string' && err.message.length) { + throw new Error(`Decryption failed: ${err.message}`); + } + throw new Error(`Decryption failed.`); + } + } + default: + throw new Error('Encryption type/version not supported.'); + } +} +exports.decrypt = decrypt; +/** + * Decrypt a message that has been encrypted using `encryptSafely`. + * + * @param options - The decryption options. + * @param options.encryptedData - The encrypted data. + * @param options.privateKey - The private key to decrypt with. + * @returns The decrypted message. 
+ */ +function decryptSafely({ encryptedData, privateKey, }) { + if ((0, utils_1.isNullish)(encryptedData)) { + throw new Error('Missing encryptedData parameter'); + } + else if ((0, utils_1.isNullish)(privateKey)) { + throw new Error('Missing privateKey parameter'); + } + const dataWithPadding = JSON.parse(decrypt({ encryptedData, privateKey })); + return dataWithPadding.data; +} +exports.decryptSafely = decryptSafely; +/** + * Get the encryption public key for the given key. + * + * @param privateKey - The private key to generate the encryption public key with. + * @returns The encryption public key. + */ +function getEncryptionPublicKey(privateKey) { + const privateKeyUint8Array = naclDecodeHex(privateKey); + const encryptionPublicKey = nacl.box.keyPair.fromSecretKey(privateKeyUint8Array).publicKey; + return naclUtil.encodeBase64(encryptionPublicKey); +} +exports.getEncryptionPublicKey = getEncryptionPublicKey; +/** + * Convert a hex string to the UInt8Array format used by nacl. + * + * @param msgHex - The string to convert. + * @returns The converted string. + */ +function naclDecodeHex(msgHex) { + const msgBase64 = Buffer.from(msgHex, 'hex').toString('base64'); + return naclUtil.decodeBase64(msgBase64); +} +//# sourceMappingURL=encryption.js.map + +/***/ }), + +/***/ 51594: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.normalize = exports.concatSig = void 0; +__exportStar(__webpack_require__(20252), exports); +__exportStar(__webpack_require__(10169), exports); +__exportStar(__webpack_require__(98537), exports); +var utils_1 = __webpack_require__(54907); +Object.defineProperty(exports, "concatSig", ({ enumerable: true, get: function () { return utils_1.concatSig; } })); +Object.defineProperty(exports, "normalize", ({ enumerable: true, get: function () { return utils_1.normalize; } })); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 20252: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.extractPublicKey = exports.recoverPersonalSignature = exports.personalSign = void 0; +const util_1 = __webpack_require__(68683); +const utils_1 = __webpack_require__(54907); +/** + * Create an Ethereum-specific signature for a message. + * + * This function is equivalent to the `eth_sign` Ethereum JSON-RPC method as specified in EIP-1417, + * as well as the MetaMask's `personal_sign` method. + * + * @param options - The personal sign options. + * @param options.privateKey - The key to sign with. + * @param options.data - The hex data to sign. + * @returns The '0x'-prefixed hex encoded signature. 
+ */ +function personalSign({ privateKey, data, }) { + if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(privateKey)) { + throw new Error('Missing privateKey parameter'); + } + const message = (0, utils_1.legacyToBuffer)(data); + const msgHash = (0, util_1.hashPersonalMessage)(message); + const sig = (0, util_1.ecsign)(msgHash, privateKey); + const serialized = (0, utils_1.concatSig)((0, util_1.toBuffer)(sig.v), sig.r, sig.s); + return serialized; +} +exports.personalSign = personalSign; +/** + * Recover the address of the account used to create the given Ethereum signature. The message + * must have been signed using the `personalSign` function, or an equivalent function. + * + * @param options - The signature recovery options. + * @param options.data - The hex data that was signed. + * @param options.signature - The '0x'-prefixed hex encoded message signature. + * @returns The '0x'-prefixed hex encoded address of the message signer. + */ +function recoverPersonalSignature({ data, signature, }) { + if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(signature)) { + throw new Error('Missing signature parameter'); + } + const publicKey = getPublicKeyFor(data, signature); + const sender = (0, util_1.publicToAddress)(publicKey); + const senderHex = (0, util_1.bufferToHex)(sender); + return senderHex; +} +exports.recoverPersonalSignature = recoverPersonalSignature; +/** + * Recover the public key of the account used to create the given Ethereum signature. The message + * must have been signed using the `personalSign` function, or an equivalent function. + * + * @param options - The public key recovery options. + * @param options.data - The hex data that was signed. + * @param options.signature - The '0x'-prefixed hex encoded message signature. + * @returns The '0x'-prefixed hex encoded public key of the message signer. 
+ */ +function extractPublicKey({ data, signature, }) { + if ((0, utils_1.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_1.isNullish)(signature)) { + throw new Error('Missing signature parameter'); + } + const publicKey = getPublicKeyFor(data, signature); + return `0x${publicKey.toString('hex')}`; +} +exports.extractPublicKey = extractPublicKey; +/** + * Get the public key for the given signature and message. + * + * @param message - The message that was signed. + * @param signature - The '0x'-prefixed hex encoded message signature. + * @returns The public key of the signer. + */ +function getPublicKeyFor(message, signature) { + const messageHash = (0, util_1.hashPersonalMessage)((0, utils_1.legacyToBuffer)(message)); + return (0, utils_1.recoverPublicKey)(messageHash, signature); +} +//# sourceMappingURL=personal-sign.js.map + +/***/ }), + +/***/ 10169: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.recoverTypedSignature = exports.signTypedData = exports.typedSignatureHash = exports.TypedDataUtils = exports.TYPED_MESSAGE_SCHEMA = exports.SignTypedDataVersion = void 0; +const util_1 = __webpack_require__(68683); +const abi_utils_1 = __webpack_require__(93256); +const parsers_1 = __webpack_require__(46207); +const utils_1 = __webpack_require__(26365); +const utils_2 = __webpack_require__(22049); +const keccak_1 = __webpack_require__(32019); +const utils_3 = __webpack_require__(54907); +/** + * Represents the version of `signTypedData` being used. + * + * V1 is based upon [an early version of + * EIP-712](https://github.com/ethereum/EIPs/pull/712/commits/21abe254fe0452d8583d5b132b1d7be87c0439ca) + * that lacked some later security improvements, and should generally be neglected in favor of + * later versions. + * + * V3 is based on EIP-712, except that arrays and recursive data structures are not supported. 
+ * + * V4 is based on EIP-712, and includes full support of arrays and recursive data structures. + */ +var SignTypedDataVersion; +(function (SignTypedDataVersion) { + SignTypedDataVersion["V1"] = "V1"; + SignTypedDataVersion["V3"] = "V3"; + SignTypedDataVersion["V4"] = "V4"; +})(SignTypedDataVersion = exports.SignTypedDataVersion || (exports.SignTypedDataVersion = {})); +exports.TYPED_MESSAGE_SCHEMA = { + type: 'object', + properties: { + types: { + type: 'object', + additionalProperties: { + type: 'array', + items: { + type: 'object', + properties: { + name: { type: 'string' }, + type: { type: 'string' }, + }, + required: ['name', 'type'], + }, + }, + }, + primaryType: { type: 'string' }, + domain: { type: 'object' }, + message: { type: 'object' }, + }, + required: ['types', 'primaryType', 'domain', 'message'], +}; +/** + * Validate that the given value is a valid version string. + * + * @param version - The version value to validate. + * @param allowedVersions - A list of allowed versions. If omitted, all versions are assumed to be + * allowed. + */ +function validateVersion(version, allowedVersions) { + if (!Object.keys(SignTypedDataVersion).includes(version)) { + throw new Error(`Invalid version: '${version}'`); + } + else if (allowedVersions && !allowedVersions.includes(version)) { + throw new Error(`SignTypedDataVersion not allowed: '${version}'. Allowed versions are: ${allowedVersions.join(', ')}`); + } +} +/** + * Parse a string, number, or bigint value into a `Uint8Array`. + * + * @param type - The type of the value. + * @param value - The value to parse. + * @returns The parsed value. + */ +function parseNumber(type, value) { + (0, utils_2.assert)(value !== null, `Unable to encode value: Invalid number. 
Expected a valid number value, but received "${value}".`); + const bigIntValue = BigInt(value); + const length = (0, parsers_1.getLength)(type); + const maxValue = BigInt(2) ** BigInt(length) - BigInt(1); + // Note that this is not accurate, since the actual maximum value for unsigned + // integers is `2 ^ (length - 1) - 1`, but this is required for backwards + // compatibility with the old implementation. + (0, utils_2.assert)(bigIntValue >= -maxValue && bigIntValue <= maxValue, `Unable to encode value: Number "${value}" is out of range for type "${type}".`); + return bigIntValue; +} +/** + * Parse an address string to a `Uint8Array`. The behaviour of this is quite + * strange, in that it does not parse the address as hexadecimal string, nor as + * UTF-8. It does some weird stuff with the string and char codes, and then + * returns the result as a `Uint8Array`. + * + * This is based on the old `ethereumjs-abi` implementation, which essentially + * calls `new BN(address, 10)` on the address string, the equivalent of calling + * `parseInt(address, 10)` in JavaScript. This is not a valid way to parse an + * address and would result in `NaN` in plain JavaScript, but it is the + * behaviour of the old implementation, and so we must preserve it for backwards + * compatibility. + * + * @param address - The address to parse. + * @returns The parsed address. + */ +function reallyStrangeAddressToBytes(address) { + let addressValue = BigInt(0); + for (let i = 0; i < address.length; i++) { + const character = BigInt(address.charCodeAt(i) - 48); + addressValue *= BigInt(10); + // 'a' + if (character >= 49) { + addressValue += character - BigInt(49) + BigInt(0xa); + // 'A' + } + else if (character >= 17) { + addressValue += character - BigInt(17) + BigInt(0xa); + // '0' - '9' + } + else { + addressValue += character; + } + } + return (0, utils_1.padStart)((0, utils_2.bigIntToBytes)(addressValue), 20); +} +/** + * Encode a single field. 
+ * + * @param types - All type definitions. + * @param name - The name of the field to encode. + * @param type - The type of the field being encoded. + * @param value - The value to encode. + * @param version - The EIP-712 version the encoding should comply with. + * @returns Encoded representation of the field. + */ +function encodeField(types, name, type, +// TODO: constrain type on `value` +value, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + if (types[type] !== undefined) { + return [ + 'bytes32', + // TODO: return Buffer, remove string from return type + version === SignTypedDataVersion.V4 && value == null // eslint-disable-line no-eq-null + ? '0x0000000000000000000000000000000000000000000000000000000000000000' + : (0, util_1.arrToBufArr)((0, keccak_1.keccak256)(encodeData(type, value, types, version))), + ]; + } + // `function` is supported in `@metamask/abi-utils`, but not allowed by + // EIP-712, so we throw an error here. + if (type === 'function') { + throw new Error('Unsupported or invalid type: "function"'); + } + if (value === undefined) { + throw new Error(`missing value for field ${name} of type ${type}`); + } + if (type === 'address') { + if (typeof value === 'number') { + return ['address', (0, utils_1.padStart)((0, utils_2.numberToBytes)(value), 20)]; + } + else if ((0, utils_2.isStrictHexString)(value)) { + return ['address', (0, utils_2.add0x)(value)]; + } + else if (typeof value === 'string') { + return ['address', reallyStrangeAddressToBytes(value).subarray(0, 20)]; + } + } + if (type === 'bool') { + return ['bool', Boolean(value)]; + } + if (type === 'bytes') { + if (typeof value === 'number') { + value = (0, utils_2.numberToBytes)(value); + } + else if ((0, utils_2.isStrictHexString)(value) || value === '0x') { + value = (0, utils_2.hexToBytes)(value); + } + else if (typeof value === 'string') { + value = (0, utils_2.stringToBytes)(value); + } + return ['bytes32', (0, util_1.arrToBufArr)((0, 
keccak_1.keccak256)(value))]; + } + if (type.startsWith('bytes') && type !== 'bytes' && !type.includes('[')) { + if (typeof value === 'number') { + if (value < 0) { + return ['bytes32', new Uint8Array(32)]; + } + return ['bytes32', (0, utils_2.bigIntToBytes)(BigInt(value))]; + } + else if ((0, utils_2.isStrictHexString)(value)) { + return ['bytes32', (0, utils_2.hexToBytes)(value)]; + } + return ['bytes32', value]; + } + if (type.startsWith('int') && !type.includes('[')) { + const bigIntValue = parseNumber(type, value); + if (bigIntValue >= BigInt(0)) { + return ['uint256', bigIntValue]; + } + return ['int256', bigIntValue]; + } + if (type === 'string') { + if (typeof value === 'number') { + value = (0, utils_2.numberToBytes)(value); + } + else { + value = (0, utils_2.stringToBytes)(value !== null && value !== void 0 ? value : ''); + } + return ['bytes32', (0, util_1.arrToBufArr)((0, keccak_1.keccak256)(value))]; + } + if (type.endsWith(']')) { + if (version === SignTypedDataVersion.V3) { + throw new Error('Arrays are unimplemented in encodeData; use V4 extension'); + } + const parsedType = type.slice(0, type.lastIndexOf('[')); + const typeValuePairs = value.map((item) => encodeField(types, name, parsedType, item, version)); + return [ + 'bytes32', + (0, util_1.arrToBufArr)((0, keccak_1.keccak256)((0, abi_utils_1.encode)(typeValuePairs.map(([t]) => t), typeValuePairs.map(([, v]) => v)))), + ]; + } + return [type, value]; +} +/** + * Encodes an object by encoding and concatenating each of its members. + * + * @param primaryType - The root type. + * @param data - The object to encode. + * @param types - Type definitions for all types included in the message. + * @param version - The EIP-712 version the encoding should comply with. + * @returns An encoded representation of an object. 
+ */ +function encodeData(primaryType, data, types, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const encodedTypes = ['bytes32']; + const encodedValues = [ + hashType(primaryType, types), + ]; + for (const field of types[primaryType]) { + if (version === SignTypedDataVersion.V3 && data[field.name] === undefined) { + continue; + } + const [type, value] = encodeField(types, field.name, field.type, data[field.name], version); + encodedTypes.push(type); + encodedValues.push(value); + } + return (0, util_1.arrToBufArr)((0, abi_utils_1.encode)(encodedTypes, encodedValues)); +} +/** + * Encodes the type of an object by encoding a comma delimited list of its members. + * + * @param primaryType - The root type to encode. + * @param types - Type definitions for all types included in the message. + * @returns An encoded representation of the primary type. + */ +function encodeType(primaryType, types) { + let result = ''; + const unsortedDeps = findTypeDependencies(primaryType, types); + unsortedDeps.delete(primaryType); + const deps = [primaryType, ...Array.from(unsortedDeps).sort()]; + for (const type of deps) { + const children = types[type]; + if (!children) { + throw new Error(`No type definition specified: ${type}`); + } + result += `${type}(${types[type] + .map(({ name, type: t }) => `${t} ${name}`) + .join(',')})`; + } + return result; +} +/** + * Finds all types within a type definition object. + * + * @param primaryType - The root type. + * @param types - Type definitions for all types included in the message. + * @param results - The current set of accumulated types. + * @returns The set of all types found in the type definition. 
+ */ +function findTypeDependencies(primaryType, types, results = new Set()) { + if (typeof primaryType !== 'string') { + throw new Error(`Invalid findTypeDependencies input ${JSON.stringify(primaryType)}`); + } + const match = primaryType.match(/^\w*/u); + [primaryType] = match; + if (results.has(primaryType) || types[primaryType] === undefined) { + return results; + } + results.add(primaryType); + for (const field of types[primaryType]) { + findTypeDependencies(field.type, types, results); + } + return results; +} +/** + * Hashes an object. + * + * @param primaryType - The root type. + * @param data - The object to hash. + * @param types - Type definitions for all types included in the message. + * @param version - The EIP-712 version the encoding should comply with. + * @returns The hash of the object. + */ +function hashStruct(primaryType, data, types, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const encoded = encodeData(primaryType, data, types, version); + const hashed = (0, keccak_1.keccak256)(encoded); + const buf = (0, util_1.arrToBufArr)(hashed); + return buf; +} +/** + * Hashes the type of an object. + * + * @param primaryType - The root type to hash. + * @param types - Type definitions for all types included in the message. + * @returns The hash of the object type. + */ +function hashType(primaryType, types) { + const encodedHashType = (0, utils_2.stringToBytes)(encodeType(primaryType, types)); + return (0, util_1.arrToBufArr)((0, keccak_1.keccak256)(encodedHashType)); +} +/** + * Removes properties from a message object that are not defined per EIP-712. + * + * @param data - The typed message object. + * @returns The typed message object with only allowed fields. 
+ */ +function sanitizeData(data) { + const sanitizedData = {}; + for (const key in exports.TYPED_MESSAGE_SCHEMA.properties) { + if (data[key]) { + sanitizedData[key] = data[key]; + } + } + if ('types' in sanitizedData) { + // TODO: Fix types + sanitizedData.types = Object.assign({ EIP712Domain: [] }, sanitizedData.types); + } + return sanitizedData; +} +/** + * Create a EIP-712 Domain Hash. + * This hash is used at the top of the EIP-712 encoding. + * + * @param typedData - The typed message to hash. + * @param version - The EIP-712 version the encoding should comply with. + * @returns The hash of the domain object. + */ +function eip712DomainHash(typedData, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const sanitizedData = sanitizeData(typedData); + const { domain } = sanitizedData; + const domainType = { EIP712Domain: sanitizedData.types.EIP712Domain }; + return hashStruct('EIP712Domain', domain, domainType, version); +} +/** + * Hash a typed message according to EIP-712. The returned message starts with the EIP-712 prefix, + * which is "1901", followed by the hash of the domain separator, then the data (if any). + * The result is hashed again and returned. + * + * This function does not sign the message. The resulting hash must still be signed to create an + * EIP-712 signature. + * + * @param typedData - The typed message to hash. + * @param version - The EIP-712 version the encoding should comply with. + * @returns The hash of the typed message. + */ +function eip712Hash(typedData, version) { + validateVersion(version, [SignTypedDataVersion.V3, SignTypedDataVersion.V4]); + const sanitizedData = sanitizeData(typedData); + const parts = [(0, utils_2.hexToBytes)('1901')]; + parts.push(eip712DomainHash(typedData, version)); + if (sanitizedData.primaryType !== 'EIP712Domain') { + parts.push(hashStruct( + // TODO: Validate that this is a string, so this type cast can be removed. 
+ sanitizedData.primaryType, sanitizedData.message, sanitizedData.types, version)); + } + return (0, util_1.arrToBufArr)((0, keccak_1.keccak256)((0, utils_2.concatBytes)(parts))); +} +/** + * A collection of utility functions used for signing typed data. + */ +exports.TypedDataUtils = { + encodeData, + encodeType, + findTypeDependencies, + hashStruct, + hashType, + sanitizeData, + eip712Hash, + eip712DomainHash, +}; +/** + * Generate the "V1" hash for the provided typed message. + * + * The hash will be generated in accordance with an earlier version of the EIP-712 + * specification. This hash is used in `signTypedData_v1`. + * + * @param typedData - The typed message. + * @returns The '0x'-prefixed hex encoded hash representing the type of the provided message. + */ +function typedSignatureHash(typedData) { + const hashBuffer = _typedSignatureHash(typedData); + return (0, utils_2.bytesToHex)(hashBuffer); +} +exports.typedSignatureHash = typedSignatureHash; +/** + * Normalize a value, so that `@metamask/abi-utils` can handle it. This + * matches the behaviour of the `ethereumjs-abi` library. + * + * @param type - The type of the value to normalize. + * @param value - The value to normalize. + * @returns The normalized value. 
+ */ +function normalizeValue(type, value) { + if ((0, parsers_1.isArrayType)(type) && Array.isArray(value)) { + const [innerType] = (0, parsers_1.getArrayType)(type); + return value.map((item) => normalizeValue(innerType, item)); + } + if (type === 'address') { + if (typeof value === 'number') { + return (0, utils_1.padStart)((0, utils_2.numberToBytes)(value), 20); + } + if ((0, utils_2.isStrictHexString)(value)) { + return (0, utils_1.padStart)((0, utils_2.hexToBytes)(value).subarray(0, 20), 20); + } + if (value instanceof Uint8Array) { + return (0, utils_1.padStart)(value.subarray(0, 20), 20); + } + } + if (type === 'bool') { + return Boolean(value); + } + if (type.startsWith('bytes') && type !== 'bytes') { + const length = (0, parsers_1.getByteLength)(type); + if (typeof value === 'number') { + if (value < 0) { + // `solidityPack(['bytesN'], [-1])` returns `0x00..00`. + return new Uint8Array(); + } + return (0, utils_2.numberToBytes)(value).subarray(0, length); + } + if ((0, utils_2.isStrictHexString)(value)) { + return (0, utils_2.hexToBytes)(value).subarray(0, length); + } + if (value instanceof Uint8Array) { + return value.subarray(0, length); + } + } + if (type.startsWith('uint')) { + if (typeof value === 'number') { + return Math.abs(value); + } + } + if (type.startsWith('int')) { + if (typeof value === 'number') { + const length = (0, parsers_1.getLength)(type); + return BigInt.asIntN(length, BigInt(value)); + } + } + return value; +} +/** + * For some reason `ethereumjs-abi` treats `address` and `address[]` differently + * so we need to normalize `address[]` differently. + * + * @param values - The values to normalize. + * @returns The normalized values. 
+ */ +function normalizeAddresses(values) { + return values.map((value) => { + if (typeof value === 'number') { + return (0, utils_1.padStart)((0, utils_2.numberToBytes)(value), 32); + } + if ((0, utils_2.isStrictHexString)(value)) { + return (0, utils_1.padStart)((0, utils_2.hexToBytes)(value).subarray(0, 32), 32); + } + if (value instanceof Uint8Array) { + return (0, utils_1.padStart)(value.subarray(0, 32), 32); + } + return value; + }); +} +/** + * For some reason `ethereumjs-abi` treats `intN` and `intN[]` differently + * so we need to normalize `intN[]` differently. + * + * @param type - The type of the value to normalize. + * @param values - The values to normalize. + * @returns The normalized values. + */ +function normalizeIntegers(type, values) { + return values.map((value) => { + if (typeof value === 'string' || + typeof value === 'number' || + typeof value === 'bigint') { + const bigIntValue = parseNumber(type, value); + if (bigIntValue >= BigInt(0)) { + return (0, utils_1.padStart)((0, utils_2.bigIntToBytes)(bigIntValue), 32); + } + const length = (0, parsers_1.getLength)(type); + const asIntN = BigInt.asIntN(length, bigIntValue); + return (0, utils_2.signedBigIntToBytes)(asIntN, 32); + } + return value; + }); +} +/** + * Generate the "V1" hash for the provided typed message. + * + * The hash will be generated in accordance with an earlier version of the EIP-712 + * specification. This hash is used in `signTypedData_v1`. + * + * @param typedData - The typed message. + * @returns The hash representing the type of the provided message. + */ +function _typedSignatureHash(typedData) { + const error = new Error('Expect argument to be non-empty array'); + if (typeof typedData !== 'object' || + !('length' in typedData) || + !typedData.length) { + throw error; + } + const normalizedData = typedData.map(({ name, type, value }) => { + // Handle an edge case with `address[]` types. 
+ if (type === 'address[]') { + return { + name, + type: 'bytes32[]', + value: normalizeAddresses(value), + }; + } + // Handle an edge case with `intN[]` types. + if (type.startsWith('int') && (0, parsers_1.isArrayType)(type)) { + const [innerType, length] = (0, parsers_1.getArrayType)(type); + return { + name, + type: `bytes32[${length !== null && length !== void 0 ? length : ''}]`, + value: normalizeIntegers(innerType, value), + }; + } + return { + name, + type, + value: normalizeValue(type, value), + }; + }); + const data = normalizedData.map((e) => { + if (e.type !== 'bytes') { + return e.value; + } + return (0, utils_3.legacyToBuffer)(e.value); + }); + const types = normalizedData.map((e) => { + if (e.type === 'function') { + throw new Error('Unsupported or invalid type: "function"'); + } + return e.type; + }); + const schema = typedData.map((e) => { + if (!e.name) { + throw error; + } + return `${e.type} ${e.name}`; + }); + return (0, util_1.arrToBufArr)((0, keccak_1.keccak256)((0, abi_utils_1.encodePacked)(['bytes32', 'bytes32'], [ + (0, keccak_1.keccak256)((0, abi_utils_1.encodePacked)(['string[]'], [schema], true)), + (0, keccak_1.keccak256)((0, abi_utils_1.encodePacked)(types, data, true)), + ]))); +} +/** + * Sign typed data according to EIP-712. The signing differs based upon the `version`. + * + * V1 is based upon [an early version of + * EIP-712](https://github.com/ethereum/EIPs/pull/712/commits/21abe254fe0452d8583d5b132b1d7be87c0439ca) + * that lacked some later security improvements, and should generally be neglected in favor of + * later versions. + * + * V3 is based on [EIP-712](https://eips.ethereum.org/EIPS/eip-712), except that arrays and + * recursive data structures are not supported. + * + * V4 is based on [EIP-712](https://eips.ethereum.org/EIPS/eip-712), and includes full support of + * arrays and recursive data structures. + * + * @param options - The signing options. + * @param options.privateKey - The private key to sign with. 
+ * @param options.data - The typed data to sign. + * @param options.version - The signing version to use. + * @returns The '0x'-prefixed hex encoded signature. + */ +function signTypedData({ privateKey, data, version, }) { + validateVersion(version); + if ((0, utils_3.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_3.isNullish)(privateKey)) { + throw new Error('Missing private key parameter'); + } + const messageHash = version === SignTypedDataVersion.V1 + ? _typedSignatureHash(data) + : exports.TypedDataUtils.eip712Hash(data, version); + const sig = (0, util_1.ecsign)(messageHash, privateKey); + return (0, utils_3.concatSig)((0, util_1.arrToBufArr)((0, utils_2.bigIntToBytes)(sig.v)), sig.r, sig.s); +} +exports.signTypedData = signTypedData; +/** + * Recover the address of the account that created the given EIP-712 + * signature. The version provided must match the version used to + * create the signature. + * + * @param options - The signature recovery options. + * @param options.data - The typed data that was signed. + * @param options.signature - The '0x-prefixed hex encoded message signature. + * @param options.version - The signing version to use. + * @returns The '0x'-prefixed hex address of the signer. + */ +function recoverTypedSignature({ data, signature, version, }) { + validateVersion(version); + if ((0, utils_3.isNullish)(data)) { + throw new Error('Missing data parameter'); + } + else if ((0, utils_3.isNullish)(signature)) { + throw new Error('Missing signature parameter'); + } + const messageHash = version === SignTypedDataVersion.V1 + ? 
_typedSignatureHash(data) + : exports.TypedDataUtils.eip712Hash(data, version); + const publicKey = (0, utils_3.recoverPublicKey)(messageHash, signature); + const sender = (0, util_1.publicToAddress)(publicKey); + return (0, utils_2.bytesToHex)(sender); +} +exports.recoverTypedSignature = recoverTypedSignature; +//# sourceMappingURL=sign-typed-data.js.map + +/***/ }), + +/***/ 54907: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.normalize = exports.recoverPublicKey = exports.concatSig = exports.legacyToBuffer = exports.isNullish = exports.padWithZeroes = void 0; +const util_1 = __webpack_require__(68683); +const utils_1 = __webpack_require__(22049); +/** + * Pads the front of the given hex string with zeroes until it reaches the + * target length. If the input string is already longer than or equal to the + * target length, it is returned unmodified. + * + * If the input string is "0x"-prefixed or not a hex string, an error will be + * thrown. + * + * @param hexString - The hexadecimal string to pad with zeroes. + * @param targetLength - The target length of the hexadecimal string. + * @returns The input string front-padded with zeroes, or the original string + * if it was already greater than or equal to to the target length. + */ +function padWithZeroes(hexString, targetLength) { + if (hexString !== '' && !/^[a-f0-9]+$/iu.test(hexString)) { + throw new Error(`Expected an unprefixed hex string. Received: ${hexString}`); + } + if (targetLength < 0) { + throw new Error(`Expected a non-negative integer target length. Received: ${targetLength}`); + } + return String.prototype.padStart.call(hexString, targetLength, '0'); +} +exports.padWithZeroes = padWithZeroes; +/** + * Returns `true` if the given value is nullish. + * + * @param value - The value being checked. 
+ * @returns Whether the value is nullish. + */ +function isNullish(value) { + return value === null || value === undefined; +} +exports.isNullish = isNullish; +/** + * Convert a value to a Buffer. This function should be equivalent to the `toBuffer` function in + * `ethereumjs-util@5.2.1`. + * + * @param value - The value to convert to a Buffer. + * @returns The given value as a Buffer. + */ +function legacyToBuffer(value) { + return typeof value === 'string' && !(0, util_1.isHexString)(value) + ? Buffer.from(value) + : (0, util_1.toBuffer)(value); +} +exports.legacyToBuffer = legacyToBuffer; +/** + * Concatenate an extended ECDSA signature into a single '0x'-prefixed hex string. + * + * @param v - The 'v' portion of the signature. + * @param r - The 'r' portion of the signature. + * @param s - The 's' portion of the signature. + * @returns The concatenated ECDSA signature as a '0x'-prefixed string. + */ +function concatSig(v, r, s) { + const rSig = (0, util_1.fromSigned)(r); + const sSig = (0, util_1.fromSigned)(s); + const vSig = (0, util_1.bufferToInt)(v); + const rStr = padWithZeroes((0, util_1.toUnsigned)(rSig).toString('hex'), 64); + const sStr = padWithZeroes((0, util_1.toUnsigned)(sSig).toString('hex'), 64); + const vStr = (0, utils_1.remove0x)((0, utils_1.numberToHex)(vSig)); + return (0, utils_1.add0x)(rStr.concat(sStr, vStr)); +} +exports.concatSig = concatSig; +/** + * Recover the public key from the given signature and message hash. + * + * @param messageHash - The hash of the signed message. + * @param signature - The signature. + * @returns The public key of the signer. + */ +function recoverPublicKey(messageHash, signature) { + const sigParams = (0, util_1.fromRpcSig)(signature); + return (0, util_1.ecrecover)(messageHash, sigParams.v, sigParams.r, sigParams.s); +} +exports.recoverPublicKey = recoverPublicKey; +/** + * Normalize the input to a lower-cased '0x'-prefixed hex string. + * + * @param input - The value to normalize. 
+ * @returns The normalized value. + */ +function normalize(input) { + if (isNullish(input)) { + return undefined; + } + if (typeof input === 'number') { + if (input < 0) { + return '0x'; + } + const buffer = (0, utils_1.numberToBytes)(input); + input = (0, utils_1.bytesToHex)(buffer); + } + if (typeof input !== 'string') { + let msg = 'eth-sig-util.normalize() requires hex string or integer input.'; + msg += ` received ${typeof input}: ${input}`; + throw new Error(msg); + } + return (0, utils_1.add0x)(input.toLowerCase()); +} +exports.normalize = normalize; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 61275: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }// src/logging.ts +var _debug = __webpack_require__(17833); var _debug2 = _interopRequireDefault(_debug); +var globalLogger = _debug2.default.call(void 0, "metamask"); +function createProjectLogger(projectName) { + return globalLogger.extend(projectName); +} +function createModuleLogger(projectLogger, moduleName) { + return projectLogger.extend(moduleName); +} + + + + +exports.createProjectLogger = createProjectLogger; exports.createModuleLogger = createModuleLogger; +//# sourceMappingURL=chunk-2LBGT4GH.js.map + +/***/ }), + +/***/ 85244: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true}));var __accessCheck = (obj, member, msg) => { + if (!member.has(obj)) + throw TypeError("Cannot " + msg); +}; +var __privateGet = (obj, member, getter) => { + __accessCheck(obj, member, "read from private field"); + return getter ? getter.call(obj) : member.get(obj); +}; +var __privateAdd = (obj, member, value) => { + if (member.has(obj)) + throw TypeError("Cannot add the same private member more than once"); + member instanceof WeakSet ? 
member.add(obj) : member.set(obj, value); +}; +var __privateSet = (obj, member, value, setter) => { + __accessCheck(obj, member, "write to private field"); + setter ? setter.call(obj, value) : member.set(obj, value); + return value; +}; + + + + + +exports.__privateGet = __privateGet; exports.__privateAdd = __privateAdd; exports.__privateSet = __privateSet; +//# sourceMappingURL=chunk-3W5G4CYI.js.map + +/***/ }), + +/***/ 73631: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + +// src/versions.ts + + + + + + +var _semver = __webpack_require__(99589); +var _superstruct = __webpack_require__(2150); +var VersionStruct = _superstruct.refine.call(void 0, + _superstruct.string.call(void 0, ), + "Version", + (value) => { + if (_semver.valid.call(void 0, value) === null) { + return `Expected SemVer version, got "${value}"`; + } + return true; + } +); +var VersionRangeStruct = _superstruct.refine.call(void 0, + _superstruct.string.call(void 0, ), + "Version range", + (value) => { + if (_semver.validRange.call(void 0, value) === null) { + return `Expected SemVer range, got "${value}"`; + } + return true; + } +); +function isValidSemVerVersion(version) { + return _superstruct.is.call(void 0, version, VersionStruct); +} +function isValidSemVerRange(versionRange) { + return _superstruct.is.call(void 0, versionRange, VersionRangeStruct); +} +function assertIsSemVerVersion(version) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, version, VersionStruct); +} +function assertIsSemVerRange(range) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, range, VersionRangeStruct); +} +function gtVersion(version1, version2) { + return _semver.gt.call(void 0, version1, version2); +} +function gtRange(version, range) { + return _semver.gtr.call(void 0, version, range); +} +function satisfiesVersionRange(version, versionRange) { + return 
_semver.satisfies.call(void 0, version, versionRange, { + includePrerelease: true + }); +} + + + + + + + + + + + +exports.VersionStruct = VersionStruct; exports.VersionRangeStruct = VersionRangeStruct; exports.isValidSemVerVersion = isValidSemVerVersion; exports.isValidSemVerRange = isValidSemVerRange; exports.assertIsSemVerVersion = assertIsSemVerVersion; exports.assertIsSemVerRange = assertIsSemVerRange; exports.gtVersion = gtVersion; exports.gtRange = gtRange; exports.satisfiesVersionRange = satisfiesVersionRange; +//# sourceMappingURL=chunk-4D6XQBHA.js.map + +/***/ }), + +/***/ 69116: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true}));// src/time.ts +var Duration = /* @__PURE__ */ ((Duration2) => { + Duration2[Duration2["Millisecond"] = 1] = "Millisecond"; + Duration2[Duration2["Second"] = 1e3] = "Second"; + Duration2[Duration2["Minute"] = 6e4] = "Minute"; + Duration2[Duration2["Hour"] = 36e5] = "Hour"; + Duration2[Duration2["Day"] = 864e5] = "Day"; + Duration2[Duration2["Week"] = 6048e5] = "Week"; + Duration2[Duration2["Year"] = 31536e6] = "Year"; + return Duration2; +})(Duration || {}); +var isNonNegativeInteger = (number) => Number.isInteger(number) && number >= 0; +var assertIsNonNegativeInteger = (number, name) => { + if (!isNonNegativeInteger(number)) { + throw new Error( + `"${name}" must be a non-negative integer. 
Received: "${number}".` + ); + } +}; +function inMilliseconds(count, duration) { + assertIsNonNegativeInteger(count, "count"); + return count * duration; +} +function timeSince(timestamp) { + assertIsNonNegativeInteger(timestamp, "timestamp"); + return Date.now() - timestamp; +} + + + + + +exports.Duration = Duration; exports.inMilliseconds = inMilliseconds; exports.timeSince = timeSince; +//# sourceMappingURL=chunk-4RMX5YWE.js.map + +/***/ }), + +/***/ 87982: +/***/ (() => { + +"use strict"; +//# sourceMappingURL=chunk-5AVWINSB.js.map + +/***/ }), + +/***/ 21848: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + +// src/base64.ts +var _superstruct = __webpack_require__(2150); +var base64 = (struct, options = {}) => { + const paddingRequired = _nullishCoalesce(options.paddingRequired, () => ( false)); + const characterSet = _nullishCoalesce(options.characterSet, () => ( "base64")); + let letters; + if (characterSet === "base64") { + letters = String.raw`[A-Za-z0-9+\/]`; + } else { + _chunk6ZDHSOUVjs.assert.call(void 0, characterSet === "base64url"); + letters = String.raw`[-_A-Za-z0-9]`; + } + let re; + if (paddingRequired) { + re = new RegExp( + `^(?:${letters}{4})*(?:${letters}{3}=|${letters}{2}==)?$`, + "u" + ); + } else { + re = new RegExp( + `^(?:${letters}{4})*(?:${letters}{2,3}|${letters}{3}=|${letters}{2}==)?$`, + "u" + ); + } + return _superstruct.pattern.call(void 0, struct, re); +}; + + + +exports.base64 = base64; +//# sourceMappingURL=chunk-6NZW4WK4.js.map + +/***/ }), + +/***/ 40932: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); function _optionalChain(ops) { let lastAccessLHS = undefined; let value 
= ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } + +var _chunkIZC266HSjs = __webpack_require__(1486); + +// src/assert.ts +var _superstruct = __webpack_require__(2150); +function isConstructable(fn) { + return Boolean(typeof _optionalChain([fn, 'optionalAccess', _ => _.prototype, 'optionalAccess', _2 => _2.constructor, 'optionalAccess', _3 => _3.name]) === "string"); +} +function getErrorMessageWithoutTrailingPeriod(error) { + return _chunkIZC266HSjs.getErrorMessage.call(void 0, error).replace(/\.$/u, ""); +} +function getError(ErrorWrapper, message) { + if (isConstructable(ErrorWrapper)) { + return new ErrorWrapper({ + message + }); + } + return ErrorWrapper({ + message + }); +} +var AssertionError = class extends Error { + constructor(options) { + super(options.message); + this.code = "ERR_ASSERTION"; + } +}; +function assert(value, message = "Assertion failed.", ErrorWrapper = AssertionError) { + if (!value) { + if (message instanceof Error) { + throw message; + } + throw getError(ErrorWrapper, message); + } +} +function assertStruct(value, struct, errorPrefix = "Assertion failed", ErrorWrapper = AssertionError) { + try { + _superstruct.assert.call(void 0, value, struct); + } catch (error) { + throw getError( + ErrorWrapper, + `${errorPrefix}: ${getErrorMessageWithoutTrailingPeriod(error)}.` + ); + } +} +function assertExhaustive(_object) { + throw new Error( + "Invalid branch reached. Should be detected during compilation." 
+ ); +} + + + + + + +exports.AssertionError = AssertionError; exports.assert = assert; exports.assertStruct = assertStruct; exports.assertExhaustive = assertExhaustive; +//# sourceMappingURL=chunk-6ZDHSOUV.js.map + +/***/ }), + +/***/ 39705: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true}));// src/promise.ts +function createDeferredPromise({ + suppressUnhandledRejection = false +} = {}) { + let resolve; + let reject; + const promise = new Promise( + (innerResolve, innerReject) => { + resolve = innerResolve; + reject = innerReject; + } + ); + if (suppressUnhandledRejection) { + promise.catch((_error) => { + }); + } + return { promise, resolve, reject }; +} + + + +exports.createDeferredPromise = createDeferredPromise; +//# sourceMappingURL=chunk-C6HGFEYL.js.map + +/***/ }), + +/***/ 1203: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + + + +var _chunkQEPVHEP7js = __webpack_require__(75363); + + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + +// src/coercers.ts + + + + + + + + + +var _superstruct = __webpack_require__(2150); +var NumberLikeStruct = _superstruct.union.call(void 0, [_superstruct.number.call(void 0, ), _superstruct.bigint.call(void 0, ), _superstruct.string.call(void 0, ), _chunkQEPVHEP7js.StrictHexStruct]); +var NumberCoercer = _superstruct.coerce.call(void 0, _superstruct.number.call(void 0, ), NumberLikeStruct, Number); +var BigIntCoercer = _superstruct.coerce.call(void 0, _superstruct.bigint.call(void 0, ), NumberLikeStruct, BigInt); +var BytesLikeStruct = _superstruct.union.call(void 0, [_chunkQEPVHEP7js.StrictHexStruct, _superstruct.instance.call(void 0, Uint8Array)]); +var BytesCoercer = _superstruct.coerce.call(void 0, + _superstruct.instance.call(void 0, Uint8Array), + _superstruct.union.call(void 0, [_chunkQEPVHEP7js.StrictHexStruct]), + 
_chunkQEPVHEP7js.hexToBytes +); +var HexCoercer = _superstruct.coerce.call(void 0, _chunkQEPVHEP7js.StrictHexStruct, _superstruct.instance.call(void 0, Uint8Array), _chunkQEPVHEP7js.bytesToHex); +function createNumber(value) { + try { + const result = _superstruct.create.call(void 0, value, NumberCoercer); + _chunk6ZDHSOUVjs.assert.call(void 0, + Number.isFinite(result), + `Expected a number-like value, got "${value}".` + ); + return result; + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error(`Expected a number-like value, got "${value}".`); + } + throw error; + } +} +function createBigInt(value) { + try { + return _superstruct.create.call(void 0, value, BigIntCoercer); + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error( + `Expected a number-like value, got "${String(error.value)}".` + ); + } + throw error; + } +} +function createBytes(value) { + if (typeof value === "string" && value.toLowerCase() === "0x") { + return new Uint8Array(); + } + try { + return _superstruct.create.call(void 0, value, BytesCoercer); + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error( + `Expected a bytes-like value, got "${String(error.value)}".` + ); + } + throw error; + } +} +function createHex(value) { + if (value instanceof Uint8Array && value.length === 0 || typeof value === "string" && value.toLowerCase() === "0x") { + return "0x"; + } + try { + return _superstruct.create.call(void 0, value, HexCoercer); + } catch (error) { + if (error instanceof _superstruct.StructError) { + throw new Error( + `Expected a bytes-like value, got "${String(error.value)}".` + ); + } + throw error; + } +} + + + + + + +exports.createNumber = createNumber; exports.createBigInt = createBigInt; exports.createBytes = createBytes; exports.createHex = createHex; +//# sourceMappingURL=chunk-DHVKFDHQ.js.map + +/***/ }), + +/***/ 1508: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => 
{ + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + +var _chunk6NZW4WK4js = __webpack_require__(21848); + +// src/checksum.ts +var _superstruct = __webpack_require__(2150); +var ChecksumStruct = _superstruct.size.call(void 0, + _chunk6NZW4WK4js.base64.call(void 0, _superstruct.string.call(void 0, ), { paddingRequired: true }), + 44, + 44 +); + + + +exports.ChecksumStruct = ChecksumStruct; +//# sourceMappingURL=chunk-E4C7EW4R.js.map + +/***/ }), + +/***/ 51423: +/***/ (() => { + +"use strict"; +//# sourceMappingURL=chunk-EQMZL4XU.js.map + +/***/ }), + +/***/ 1486: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + + +var _chunkQVEKZRZ2js = __webpack_require__(96526); + +// src/errors.ts +var _ponycause = __webpack_require__(71843); +function isError(error) { + return error instanceof Error || _chunkQVEKZRZ2js.isObject.call(void 0, error) && error.constructor.name === "Error"; +} +function isErrorWithCode(error) { + return typeof error === "object" && error !== null && "code" in error; +} +function isErrorWithMessage(error) { + return typeof error === "object" && error !== null && "message" in error; +} +function isErrorWithStack(error) { + return typeof error === "object" && error !== null && "stack" in error; +} +function getErrorMessage(error) { + if (isErrorWithMessage(error) && typeof error.message === "string") { + return error.message; + } + if (_chunkQVEKZRZ2js.isNullOrUndefined.call(void 0, error)) { + return ""; + } + return String(error); +} +function wrapError(originalError, message) { + if (isError(originalError)) { + let error; + if (Error.length === 2) { + error = new Error(message, { cause: originalError }); + } else { + error = new (0, _ponycause.ErrorWithCause)(message, { cause: originalError }); + } + if (isErrorWithCode(originalError)) { + error.code = originalError.code; + } + return error; + } + if (message.length 
> 0) { + return new Error(`${String(originalError)}: ${message}`); + } + return new Error(String(originalError)); +} + + + + + + + +exports.isErrorWithCode = isErrorWithCode; exports.isErrorWithMessage = isErrorWithMessage; exports.isErrorWithStack = isErrorWithStack; exports.getErrorMessage = getErrorMessage; exports.wrapError = wrapError; +//# sourceMappingURL=chunk-IZC266HS.js.map + +/***/ }), + +/***/ 58383: +/***/ (() => { + +"use strict"; +//# sourceMappingURL=chunk-LC2CRSWD.js.map + +/***/ }), + +/***/ 87427: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + + +var _chunkQVEKZRZ2js = __webpack_require__(96526); + +// src/json.ts + + + + + + + + + + + + + + + + + + + + +var _superstruct = __webpack_require__(2150); +var object = (schema) => ( + // The type is slightly different from a regular object struct, because we + // want to make properties with `undefined` in their type optional, but not + // `undefined` itself. This means that we need a type cast. 
+ _superstruct.object.call(void 0, schema) +); +function hasOptional({ path, branch }) { + const field = path[path.length - 1]; + return _chunkQVEKZRZ2js.hasProperty.call(void 0, branch[branch.length - 2], field); +} +function exactOptional(struct) { + return new (0, _superstruct.Struct)({ + ...struct, + type: `optional ${struct.type}`, + validator: (value, context) => !hasOptional(context) || struct.validator(value, context), + refiner: (value, context) => !hasOptional(context) || struct.refiner(value, context) + }); +} +var finiteNumber = () => _superstruct.define.call(void 0, "finite number", (value) => { + return _superstruct.is.call(void 0, value, _superstruct.number.call(void 0, )) && Number.isFinite(value); +}); +var UnsafeJsonStruct = _superstruct.union.call(void 0, [ + _superstruct.literal.call(void 0, null), + _superstruct.boolean.call(void 0, ), + finiteNumber(), + _superstruct.string.call(void 0, ), + _superstruct.array.call(void 0, _superstruct.lazy.call(void 0, () => UnsafeJsonStruct)), + _superstruct.record.call(void 0, + _superstruct.string.call(void 0, ), + _superstruct.lazy.call(void 0, () => UnsafeJsonStruct) + ) +]); +var JsonStruct = _superstruct.coerce.call(void 0, UnsafeJsonStruct, _superstruct.any.call(void 0, ), (value) => { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, value, UnsafeJsonStruct); + return JSON.parse( + JSON.stringify(value, (propKey, propValue) => { + if (propKey === "__proto__" || propKey === "constructor") { + return void 0; + } + return propValue; + }) + ); +}); +function isValidJson(value) { + try { + getSafeJson(value); + return true; + } catch (e) { + return false; + } +} +function getSafeJson(value) { + return _superstruct.create.call(void 0, value, JsonStruct); +} +function getJsonSize(value) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, value, JsonStruct, "Invalid JSON value"); + const json = JSON.stringify(value); + return new TextEncoder().encode(json).byteLength; +} +var jsonrpc2 = "2.0"; +var 
JsonRpcVersionStruct = _superstruct.literal.call(void 0, jsonrpc2); +var JsonRpcIdStruct = _superstruct.nullable.call(void 0, _superstruct.union.call(void 0, [_superstruct.number.call(void 0, ), _superstruct.string.call(void 0, )])); +var JsonRpcErrorStruct = object({ + code: _superstruct.integer.call(void 0, ), + message: _superstruct.string.call(void 0, ), + data: exactOptional(JsonStruct), + stack: exactOptional(_superstruct.string.call(void 0, )) +}); +var JsonRpcParamsStruct = _superstruct.union.call(void 0, [_superstruct.record.call(void 0, _superstruct.string.call(void 0, ), JsonStruct), _superstruct.array.call(void 0, JsonStruct)]); +var JsonRpcRequestStruct = object({ + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + method: _superstruct.string.call(void 0, ), + params: exactOptional(JsonRpcParamsStruct) +}); +var JsonRpcNotificationStruct = object({ + jsonrpc: JsonRpcVersionStruct, + method: _superstruct.string.call(void 0, ), + params: exactOptional(JsonRpcParamsStruct) +}); +function isJsonRpcNotification(value) { + return _superstruct.is.call(void 0, value, JsonRpcNotificationStruct); +} +function assertIsJsonRpcNotification(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcNotificationStruct, + "Invalid JSON-RPC notification", + ErrorWrapper + ); +} +function isJsonRpcRequest(value) { + return _superstruct.is.call(void 0, value, JsonRpcRequestStruct); +} +function assertIsJsonRpcRequest(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcRequestStruct, + "Invalid JSON-RPC request", + ErrorWrapper + ); +} +var PendingJsonRpcResponseStruct = _superstruct.object.call(void 0, { + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + result: _superstruct.optional.call(void 0, _superstruct.unknown.call(void 0, )), + error: _superstruct.optional.call(void 0, JsonRpcErrorStruct) +}); +var JsonRpcSuccessStruct = object({ + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + 
result: JsonStruct +}); +var JsonRpcFailureStruct = object({ + id: JsonRpcIdStruct, + jsonrpc: JsonRpcVersionStruct, + error: JsonRpcErrorStruct +}); +var JsonRpcResponseStruct = _superstruct.union.call(void 0, [ + JsonRpcSuccessStruct, + JsonRpcFailureStruct +]); +function isPendingJsonRpcResponse(response) { + return _superstruct.is.call(void 0, response, PendingJsonRpcResponseStruct); +} +function assertIsPendingJsonRpcResponse(response, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + response, + PendingJsonRpcResponseStruct, + "Invalid pending JSON-RPC response", + ErrorWrapper + ); +} +function isJsonRpcResponse(response) { + return _superstruct.is.call(void 0, response, JsonRpcResponseStruct); +} +function assertIsJsonRpcResponse(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcResponseStruct, + "Invalid JSON-RPC response", + ErrorWrapper + ); +} +function isJsonRpcSuccess(value) { + return _superstruct.is.call(void 0, value, JsonRpcSuccessStruct); +} +function assertIsJsonRpcSuccess(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcSuccessStruct, + "Invalid JSON-RPC success response", + ErrorWrapper + ); +} +function isJsonRpcFailure(value) { + return _superstruct.is.call(void 0, value, JsonRpcFailureStruct); +} +function assertIsJsonRpcFailure(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcFailureStruct, + "Invalid JSON-RPC failure response", + ErrorWrapper + ); +} +function isJsonRpcError(value) { + return _superstruct.is.call(void 0, value, JsonRpcErrorStruct); +} +function assertIsJsonRpcError(value, ErrorWrapper) { + _chunk6ZDHSOUVjs.assertStruct.call(void 0, + value, + JsonRpcErrorStruct, + "Invalid JSON-RPC error", + ErrorWrapper + ); +} +function getJsonRpcIdValidator(options) { + const { permitEmptyString, permitFractions, permitNull } = { + permitEmptyString: true, + permitFractions: false, + permitNull: true, + ...options 
+ }; + const isValidJsonRpcId = (id) => { + return Boolean( + typeof id === "number" && (permitFractions || Number.isInteger(id)) || typeof id === "string" && (permitEmptyString || id.length > 0) || permitNull && id === null + ); + }; + return isValidJsonRpcId; +} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +exports.object = object; exports.exactOptional = exactOptional; exports.UnsafeJsonStruct = UnsafeJsonStruct; exports.JsonStruct = JsonStruct; exports.isValidJson = isValidJson; exports.getSafeJson = getSafeJson; exports.getJsonSize = getJsonSize; exports.jsonrpc2 = jsonrpc2; exports.JsonRpcVersionStruct = JsonRpcVersionStruct; exports.JsonRpcIdStruct = JsonRpcIdStruct; exports.JsonRpcErrorStruct = JsonRpcErrorStruct; exports.JsonRpcParamsStruct = JsonRpcParamsStruct; exports.JsonRpcRequestStruct = JsonRpcRequestStruct; exports.JsonRpcNotificationStruct = JsonRpcNotificationStruct; exports.isJsonRpcNotification = isJsonRpcNotification; exports.assertIsJsonRpcNotification = assertIsJsonRpcNotification; exports.isJsonRpcRequest = isJsonRpcRequest; exports.assertIsJsonRpcRequest = assertIsJsonRpcRequest; exports.PendingJsonRpcResponseStruct = PendingJsonRpcResponseStruct; exports.JsonRpcSuccessStruct = JsonRpcSuccessStruct; exports.JsonRpcFailureStruct = JsonRpcFailureStruct; exports.JsonRpcResponseStruct = JsonRpcResponseStruct; exports.isPendingJsonRpcResponse = isPendingJsonRpcResponse; exports.assertIsPendingJsonRpcResponse = assertIsPendingJsonRpcResponse; exports.isJsonRpcResponse = isJsonRpcResponse; exports.assertIsJsonRpcResponse = assertIsJsonRpcResponse; exports.isJsonRpcSuccess = isJsonRpcSuccess; exports.assertIsJsonRpcSuccess = assertIsJsonRpcSuccess; exports.isJsonRpcFailure = isJsonRpcFailure; exports.assertIsJsonRpcFailure = assertIsJsonRpcFailure; exports.isJsonRpcError = isJsonRpcError; exports.assertIsJsonRpcError = assertIsJsonRpcError; exports.getJsonRpcIdValidator = getJsonRpcIdValidator; +//# 
sourceMappingURL=chunk-OLLG4H35.js.map + +/***/ }), + +/***/ 75363: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +Object.defineProperty(exports, "__esModule", ({value: true})); function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + +// src/hex.ts +var _sha3 = __webpack_require__(2214); +var _superstruct = __webpack_require__(2150); + +// src/bytes.ts +var _base = __webpack_require__(63203); +var HEX_MINIMUM_NUMBER_CHARACTER = 48; +var HEX_MAXIMUM_NUMBER_CHARACTER = 58; +var HEX_CHARACTER_OFFSET = 87; +function getPrecomputedHexValuesBuilder() { + const lookupTable = []; + return () => { + if (lookupTable.length === 0) { + for (let i = 0; i < 256; i++) { + lookupTable.push(i.toString(16).padStart(2, "0")); + } + } + return lookupTable; + }; +} +var getPrecomputedHexValues = getPrecomputedHexValuesBuilder(); +function isBytes(value) { + return value instanceof Uint8Array; +} +function assertIsBytes(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, isBytes(value), "Value must be a Uint8Array."); +} +function bytesToHex(bytes) { + assertIsBytes(bytes); + if (bytes.length === 0) { + return "0x"; + } + const lookupTable = getPrecomputedHexValues(); + const hexadecimal = new Array(bytes.length); + for (let i = 0; i < bytes.length; i++) { + hexadecimal[i] = lookupTable[bytes[i]]; + } + return add0x(hexadecimal.join("")); +} +function 
bytesToBigInt(bytes) { + assertIsBytes(bytes); + const hexadecimal = bytesToHex(bytes); + return BigInt(hexadecimal); +} +function bytesToSignedBigInt(bytes) { + assertIsBytes(bytes); + let value = BigInt(0); + for (const byte of bytes) { + value = (value << BigInt(8)) + BigInt(byte); + } + return BigInt.asIntN(bytes.length * 8, value); +} +function bytesToNumber(bytes) { + assertIsBytes(bytes); + const bigint = bytesToBigInt(bytes); + _chunk6ZDHSOUVjs.assert.call(void 0, + bigint <= BigInt(Number.MAX_SAFE_INTEGER), + "Number is not a safe integer. Use `bytesToBigInt` instead." + ); + return Number(bigint); +} +function bytesToString(bytes) { + assertIsBytes(bytes); + return new TextDecoder().decode(bytes); +} +function bytesToBase64(bytes) { + assertIsBytes(bytes); + return _base.base64.encode(bytes); +} +function hexToBytes(value) { + if (_optionalChain([value, 'optionalAccess', _ => _.toLowerCase, 'optionalCall', _2 => _2()]) === "0x") { + return new Uint8Array(); + } + assertIsHexString(value); + const strippedValue = remove0x(value).toLowerCase(); + const normalizedValue = strippedValue.length % 2 === 0 ? strippedValue : `0${strippedValue}`; + const bytes = new Uint8Array(normalizedValue.length / 2); + for (let i = 0; i < bytes.length; i++) { + const c1 = normalizedValue.charCodeAt(i * 2); + const c2 = normalizedValue.charCodeAt(i * 2 + 1); + const n1 = c1 - (c1 < HEX_MAXIMUM_NUMBER_CHARACTER ? HEX_MINIMUM_NUMBER_CHARACTER : HEX_CHARACTER_OFFSET); + const n2 = c2 - (c2 < HEX_MAXIMUM_NUMBER_CHARACTER ? 
HEX_MINIMUM_NUMBER_CHARACTER : HEX_CHARACTER_OFFSET); + bytes[i] = n1 * 16 + n2; + } + return bytes; +} +function bigIntToBytes(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "bigint", "Value must be a bigint."); + _chunk6ZDHSOUVjs.assert.call(void 0, value >= BigInt(0), "Value must be a non-negative bigint."); + const hexadecimal = value.toString(16); + return hexToBytes(hexadecimal); +} +function bigIntFits(value, bytes) { + _chunk6ZDHSOUVjs.assert.call(void 0, bytes > 0); + const mask = value >> BigInt(31); + return !((~value & mask) + (value & ~mask) >> BigInt(bytes * 8 + ~0)); +} +function signedBigIntToBytes(value, byteLength) { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "bigint", "Value must be a bigint."); + _chunk6ZDHSOUVjs.assert.call(void 0, typeof byteLength === "number", "Byte length must be a number."); + _chunk6ZDHSOUVjs.assert.call(void 0, byteLength > 0, "Byte length must be greater than 0."); + _chunk6ZDHSOUVjs.assert.call(void 0, + bigIntFits(value, byteLength), + "Byte length is too small to represent the given value." + ); + let numberValue = value; + const bytes = new Uint8Array(byteLength); + for (let i = 0; i < bytes.length; i++) { + bytes[i] = Number(BigInt.asUintN(8, numberValue)); + numberValue >>= BigInt(8); + } + return bytes.reverse(); +} +function numberToBytes(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "number", "Value must be a number."); + _chunk6ZDHSOUVjs.assert.call(void 0, value >= 0, "Value must be a non-negative number."); + _chunk6ZDHSOUVjs.assert.call(void 0, + Number.isSafeInteger(value), + "Value is not a safe integer. Use `bigIntToBytes` instead." 
+ ); + const hexadecimal = value.toString(16); + return hexToBytes(hexadecimal); +} +function stringToBytes(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "string", "Value must be a string."); + return new TextEncoder().encode(value); +} +function base64ToBytes(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "string", "Value must be a string."); + return _base.base64.decode(value); +} +function valueToBytes(value) { + if (typeof value === "bigint") { + return bigIntToBytes(value); + } + if (typeof value === "number") { + return numberToBytes(value); + } + if (typeof value === "string") { + if (value.startsWith("0x")) { + return hexToBytes(value); + } + return stringToBytes(value); + } + if (isBytes(value)) { + return value; + } + throw new TypeError(`Unsupported value type: "${typeof value}".`); +} +function concatBytes(values) { + const normalizedValues = new Array(values.length); + let byteLength = 0; + for (let i = 0; i < values.length; i++) { + const value = valueToBytes(values[i]); + normalizedValues[i] = value; + byteLength += value.length; + } + const bytes = new Uint8Array(byteLength); + for (let i = 0, offset = 0; i < normalizedValues.length; i++) { + bytes.set(normalizedValues[i], offset); + offset += normalizedValues[i].length; + } + return bytes; +} +function createDataView(bytes) { + if (typeof Buffer !== "undefined" && bytes instanceof Buffer) { + const buffer = bytes.buffer.slice( + bytes.byteOffset, + bytes.byteOffset + bytes.byteLength + ); + return new DataView(buffer); + } + return new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength); +} + +// src/hex.ts +var HexStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), /^(?:0x)?[0-9a-f]+$/iu); +var StrictHexStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), /^0x[0-9a-f]+$/iu); +var HexAddressStruct = _superstruct.pattern.call(void 0, + _superstruct.string.call(void 0, ), + /^0x[0-9a-f]{40}$/u +); +var 
HexChecksumAddressStruct = _superstruct.pattern.call(void 0, + _superstruct.string.call(void 0, ), + /^0x[0-9a-fA-F]{40}$/u +); +function isHexString(value) { + return _superstruct.is.call(void 0, value, HexStruct); +} +function isStrictHexString(value) { + return _superstruct.is.call(void 0, value, StrictHexStruct); +} +function assertIsHexString(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, isHexString(value), "Value must be a hexadecimal string."); +} +function assertIsStrictHexString(value) { + _chunk6ZDHSOUVjs.assert.call(void 0, + isStrictHexString(value), + 'Value must be a hexadecimal string, starting with "0x".' + ); +} +function isValidHexAddress(possibleAddress) { + return _superstruct.is.call(void 0, possibleAddress, HexAddressStruct) || isValidChecksumAddress(possibleAddress); +} +function getChecksumAddress(address) { + _chunk6ZDHSOUVjs.assert.call(void 0, _superstruct.is.call(void 0, address, HexChecksumAddressStruct), "Invalid hex address."); + const unPrefixed = remove0x(address.toLowerCase()); + const unPrefixedHash = remove0x(bytesToHex(_sha3.keccak_256.call(void 0, unPrefixed))); + return `0x${unPrefixed.split("").map((character, nibbleIndex) => { + const hashCharacter = unPrefixedHash[nibbleIndex]; + _chunk6ZDHSOUVjs.assert.call(void 0, _superstruct.is.call(void 0, hashCharacter, _superstruct.string.call(void 0, )), "Hash shorter than address."); + return parseInt(hashCharacter, 16) > 7 ? 
character.toUpperCase() : character; + }).join("")}`; +} +function isValidChecksumAddress(possibleChecksum) { + if (!_superstruct.is.call(void 0, possibleChecksum, HexChecksumAddressStruct)) { + return false; + } + return getChecksumAddress(possibleChecksum) === possibleChecksum; +} +function add0x(hexadecimal) { + if (hexadecimal.startsWith("0x")) { + return hexadecimal; + } + if (hexadecimal.startsWith("0X")) { + return `0x${hexadecimal.substring(2)}`; + } + return `0x${hexadecimal}`; +} +function remove0x(hexadecimal) { + if (hexadecimal.startsWith("0x") || hexadecimal.startsWith("0X")) { + return hexadecimal.substring(2); + } + return hexadecimal; +} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +exports.HexStruct = HexStruct; exports.StrictHexStruct = StrictHexStruct; exports.HexAddressStruct = HexAddressStruct; exports.HexChecksumAddressStruct = HexChecksumAddressStruct; exports.isHexString = isHexString; exports.isStrictHexString = isStrictHexString; exports.assertIsHexString = assertIsHexString; exports.assertIsStrictHexString = assertIsStrictHexString; exports.isValidHexAddress = isValidHexAddress; exports.getChecksumAddress = getChecksumAddress; exports.isValidChecksumAddress = isValidChecksumAddress; exports.add0x = add0x; exports.remove0x = remove0x; exports.isBytes = isBytes; exports.assertIsBytes = assertIsBytes; exports.bytesToHex = bytesToHex; exports.bytesToBigInt = bytesToBigInt; exports.bytesToSignedBigInt = bytesToSignedBigInt; exports.bytesToNumber = bytesToNumber; exports.bytesToString = bytesToString; exports.bytesToBase64 = bytesToBase64; exports.hexToBytes = hexToBytes; exports.bigIntToBytes = bigIntToBytes; exports.signedBigIntToBytes = signedBigIntToBytes; exports.numberToBytes = numberToBytes; exports.stringToBytes = stringToBytes; exports.base64ToBytes = base64ToBytes; exports.valueToBytes = valueToBytes; exports.concatBytes = concatBytes; exports.createDataView = createDataView; +//# 
sourceMappingURL=chunk-QEPVHEP7.js.map + +/***/ }), + +/***/ 96526: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } }// src/misc.ts +function isNonEmptyArray(value) { + return Array.isArray(value) && value.length > 0; +} +function isNullOrUndefined(value) { + return value === null || value === void 0; +} +function isObject(value) { + return Boolean(value) && typeof value === "object" && !Array.isArray(value); +} +var hasProperty = (objectToCheck, name) => Object.hasOwnProperty.call(objectToCheck, name); +function getKnownPropertyNames(object) { + return Object.getOwnPropertyNames(object); +} +var JsonSize = /* @__PURE__ */ ((JsonSize2) => { + JsonSize2[JsonSize2["Null"] = 4] = "Null"; + JsonSize2[JsonSize2["Comma"] = 1] = "Comma"; + JsonSize2[JsonSize2["Wrapper"] = 1] = "Wrapper"; + JsonSize2[JsonSize2["True"] = 4] = "True"; + JsonSize2[JsonSize2["False"] = 5] = "False"; + JsonSize2[JsonSize2["Quote"] = 1] = "Quote"; + JsonSize2[JsonSize2["Colon"] = 1] = "Colon"; + JsonSize2[JsonSize2["Date"] = 24] = "Date"; + return JsonSize2; +})(JsonSize || {}); +var ESCAPE_CHARACTERS_REGEXP = /"|\\|\n|\r|\t/gu; +function isPlainObject(value) { + if (typeof value !== "object" || value === null) { + return false; + } + try { + let proto = value; + while (Object.getPrototypeOf(proto) !== null) { + proto = Object.getPrototypeOf(proto); + } + return Object.getPrototypeOf(value) === proto; + } catch (_) { + return false; + } +} +function isASCII(character) { + return character.charCodeAt(0) <= 127; +} +function calculateStringSize(value) { + const size = value.split("").reduce((total, character) => { + if (isASCII(character)) { + return total + 1; + } + return total + 2; + }, 0); + return size + (_nullishCoalesce(value.match(ESCAPE_CHARACTERS_REGEXP), () => ( []))).length; +} +function 
calculateNumberSize(value) { + return value.toString().length; +} + + + + + + + + + + + + + +exports.isNonEmptyArray = isNonEmptyArray; exports.isNullOrUndefined = isNullOrUndefined; exports.isObject = isObject; exports.hasProperty = hasProperty; exports.getKnownPropertyNames = getKnownPropertyNames; exports.JsonSize = JsonSize; exports.ESCAPE_CHARACTERS_REGEXP = ESCAPE_CHARACTERS_REGEXP; exports.isPlainObject = isPlainObject; exports.isASCII = isASCII; exports.calculateStringSize = calculateStringSize; exports.calculateNumberSize = calculateNumberSize; +//# sourceMappingURL=chunk-QVEKZRZ2.js.map + +/***/ }), + +/***/ 61305: +/***/ (() => { + +"use strict"; +//# sourceMappingURL=chunk-RKRGAFXY.js.map + +/***/ }), + +/***/ 43207: +/***/ (() => { + +"use strict"; +//# sourceMappingURL=chunk-UOTVU7OQ.js.map + +/***/ }), + +/***/ 41535: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + + +var _chunkQEPVHEP7js = __webpack_require__(75363); + + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + +// src/number.ts +var numberToHex = (value) => { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "number", "Value must be a number."); + _chunk6ZDHSOUVjs.assert.call(void 0, value >= 0, "Value must be a non-negative number."); + _chunk6ZDHSOUVjs.assert.call(void 0, + Number.isSafeInteger(value), + "Value is not a safe integer. Use `bigIntToHex` instead." 
+ ); + return _chunkQEPVHEP7js.add0x.call(void 0, value.toString(16)); +}; +var bigIntToHex = (value) => { + _chunk6ZDHSOUVjs.assert.call(void 0, typeof value === "bigint", "Value must be a bigint."); + _chunk6ZDHSOUVjs.assert.call(void 0, value >= 0, "Value must be a non-negative bigint."); + return _chunkQEPVHEP7js.add0x.call(void 0, value.toString(16)); +}; +var hexToNumber = (value) => { + _chunkQEPVHEP7js.assertIsHexString.call(void 0, value); + const numberValue = parseInt(value, 16); + _chunk6ZDHSOUVjs.assert.call(void 0, + Number.isSafeInteger(numberValue), + "Value is not a safe integer. Use `hexToBigInt` instead." + ); + return numberValue; +}; +var hexToBigInt = (value) => { + _chunkQEPVHEP7js.assertIsHexString.call(void 0, value); + return BigInt(_chunkQEPVHEP7js.add0x.call(void 0, value)); +}; + + + + + + +exports.numberToHex = numberToHex; exports.bigIntToHex = bigIntToHex; exports.hexToNumber = hexToNumber; exports.hexToBigInt = hexToBigInt; +//# sourceMappingURL=chunk-VFXTVNXN.js.map + +/***/ }), + +/***/ 2489: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; }// src/caip-types.ts +var _superstruct = __webpack_require__(2150); +var CAIP_CHAIN_ID_REGEX = /^(?[-a-z0-9]{3,8}):(?[-_a-zA-Z0-9]{1,32})$/u; +var CAIP_NAMESPACE_REGEX = /^[-a-z0-9]{3,8}$/u; +var CAIP_REFERENCE_REGEX = /^[-_a-zA-Z0-9]{1,32}$/u; +var CAIP_ACCOUNT_ID_REGEX = 
/^(?(?[-a-z0-9]{3,8}):(?[-_a-zA-Z0-9]{1,32})):(?[-.%a-zA-Z0-9]{1,128})$/u; +var CAIP_ACCOUNT_ADDRESS_REGEX = /^[-.%a-zA-Z0-9]{1,128}$/u; +var CaipChainIdStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), CAIP_CHAIN_ID_REGEX); +var CaipNamespaceStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), CAIP_NAMESPACE_REGEX); +var CaipReferenceStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), CAIP_REFERENCE_REGEX); +var CaipAccountIdStruct = _superstruct.pattern.call(void 0, _superstruct.string.call(void 0, ), CAIP_ACCOUNT_ID_REGEX); +var CaipAccountAddressStruct = _superstruct.pattern.call(void 0, + _superstruct.string.call(void 0, ), + CAIP_ACCOUNT_ADDRESS_REGEX +); +var KnownCaipNamespace = /* @__PURE__ */ ((KnownCaipNamespace2) => { + KnownCaipNamespace2["Eip155"] = "eip155"; + return KnownCaipNamespace2; +})(KnownCaipNamespace || {}); +function isCaipChainId(value) { + return _superstruct.is.call(void 0, value, CaipChainIdStruct); +} +function isCaipNamespace(value) { + return _superstruct.is.call(void 0, value, CaipNamespaceStruct); +} +function isCaipReference(value) { + return _superstruct.is.call(void 0, value, CaipReferenceStruct); +} +function isCaipAccountId(value) { + return _superstruct.is.call(void 0, value, CaipAccountIdStruct); +} +function isCaipAccountAddress(value) { + return _superstruct.is.call(void 0, value, CaipAccountAddressStruct); +} +function parseCaipChainId(caipChainId) { + const match = CAIP_CHAIN_ID_REGEX.exec(caipChainId); + if (!_optionalChain([match, 'optionalAccess', _ => _.groups])) { + throw new Error("Invalid CAIP chain ID."); + } + return { + namespace: match.groups.namespace, + reference: match.groups.reference + }; +} +function parseCaipAccountId(caipAccountId) { + const match = CAIP_ACCOUNT_ID_REGEX.exec(caipAccountId); + if (!_optionalChain([match, 'optionalAccess', _2 => _2.groups])) { + throw new Error("Invalid CAIP account ID."); + } + return { 
+ address: match.groups.accountAddress, + chainId: match.groups.chainId, + chain: { + namespace: match.groups.namespace, + reference: match.groups.reference + } + }; +} +function toCaipChainId(namespace, reference) { + if (!isCaipNamespace(namespace)) { + throw new Error( + `Invalid "namespace", must match: ${CAIP_NAMESPACE_REGEX.toString()}` + ); + } + if (!isCaipReference(reference)) { + throw new Error( + `Invalid "reference", must match: ${CAIP_REFERENCE_REGEX.toString()}` + ); + } + return `${namespace}:${reference}`; +} + + + + + + + + + + + + + + + + + + + + + +exports.CAIP_CHAIN_ID_REGEX = CAIP_CHAIN_ID_REGEX; exports.CAIP_NAMESPACE_REGEX = CAIP_NAMESPACE_REGEX; exports.CAIP_REFERENCE_REGEX = CAIP_REFERENCE_REGEX; exports.CAIP_ACCOUNT_ID_REGEX = CAIP_ACCOUNT_ID_REGEX; exports.CAIP_ACCOUNT_ADDRESS_REGEX = CAIP_ACCOUNT_ADDRESS_REGEX; exports.CaipChainIdStruct = CaipChainIdStruct; exports.CaipNamespaceStruct = CaipNamespaceStruct; exports.CaipReferenceStruct = CaipReferenceStruct; exports.CaipAccountIdStruct = CaipAccountIdStruct; exports.CaipAccountAddressStruct = CaipAccountAddressStruct; exports.KnownCaipNamespace = KnownCaipNamespace; exports.isCaipChainId = isCaipChainId; exports.isCaipNamespace = isCaipNamespace; exports.isCaipReference = isCaipReference; exports.isCaipAccountId = isCaipAccountId; exports.isCaipAccountAddress = isCaipAccountAddress; exports.parseCaipChainId = parseCaipChainId; exports.parseCaipAccountId = parseCaipAccountId; exports.toCaipChainId = toCaipChainId; +//# sourceMappingURL=chunk-YWAID473.js.map + +/***/ }), + +/***/ 51584: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true})); + + + +var _chunk3W5G4CYIjs = __webpack_require__(85244); + +// src/collections.ts +var _map; +var FrozenMap = class { + constructor(entries) { + _chunk3W5G4CYIjs.__privateAdd.call(void 0, this, _map, void 0); + _chunk3W5G4CYIjs.__privateSet.call(void 0, this, 
_map, new Map(entries)); + Object.freeze(this); + } + get size() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).size; + } + [Symbol.iterator]() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map)[Symbol.iterator](); + } + entries() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).entries(); + } + forEach(callbackfn, thisArg) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).forEach( + (value, key, _map2) => callbackfn.call(thisArg, value, key, this) + ); + } + get(key) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).get(key); + } + has(key) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).has(key); + } + keys() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).keys(); + } + values() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _map).values(); + } + toString() { + return `FrozenMap(${this.size}) {${this.size > 0 ? ` ${[...this.entries()].map(([key, value]) => `${String(key)} => ${String(value)}`).join(", ")} ` : ""}}`; + } +}; +_map = new WeakMap(); +var _set; +var FrozenSet = class { + constructor(values) { + _chunk3W5G4CYIjs.__privateAdd.call(void 0, this, _set, void 0); + _chunk3W5G4CYIjs.__privateSet.call(void 0, this, _set, new Set(values)); + Object.freeze(this); + } + get size() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).size; + } + [Symbol.iterator]() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set)[Symbol.iterator](); + } + entries() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).entries(); + } + forEach(callbackfn, thisArg) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).forEach( + (value, value2, _set2) => callbackfn.call(thisArg, value, value2, this) + ); + } + has(value) { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).has(value); + } + keys() { + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).keys(); + } + values() 
{ + return _chunk3W5G4CYIjs.__privateGet.call(void 0, this, _set).values(); + } + toString() { + return `FrozenSet(${this.size}) {${this.size > 0 ? ` ${[...this.values()].map((member) => String(member)).join(", ")} ` : ""}}`; + } +}; +_set = new WeakMap(); +Object.freeze(FrozenMap); +Object.freeze(FrozenMap.prototype); +Object.freeze(FrozenSet); +Object.freeze(FrozenSet.prototype); + + + + +exports.FrozenMap = FrozenMap; exports.FrozenSet = FrozenSet; +//# sourceMappingURL=chunk-Z2RGWDD7.js.map + +/***/ }), + +/***/ 22049: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +Object.defineProperty(exports, "__esModule", ({value: true}));__webpack_require__(87982); + + + + + +var _chunkVFXTVNXNjs = __webpack_require__(41535); +__webpack_require__(58383); + + +var _chunkC6HGFEYLjs = __webpack_require__(39705); + + + + +var _chunk4RMX5YWEjs = __webpack_require__(69116); +__webpack_require__(43207); + + + + + + + + + + +var _chunk4D6XQBHAjs = __webpack_require__(73631); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +var _chunkOLLG4H35js = __webpack_require__(87427); +__webpack_require__(61305); + + + +var _chunk2LBGT4GHjs = __webpack_require__(61275); + + + + + + + + + + + + + + + + + + + + +var _chunkYWAID473js = __webpack_require__(2489); + + +var _chunkE4C7EW4Rjs = __webpack_require__(1508); + + +var _chunk6NZW4WK4js = __webpack_require__(21848); + + + + + +var _chunkDHVKFDHQjs = __webpack_require__(1203); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +var _chunkQEPVHEP7js = __webpack_require__(75363); + + + + + +var _chunk6ZDHSOUVjs = __webpack_require__(40932); + + + + + + +var _chunkIZC266HSjs = __webpack_require__(1486); + + + + + + + + + + + + +var _chunkQVEKZRZ2js = __webpack_require__(96526); + + + +var _chunkZ2RGWDD7js = __webpack_require__(51584); +__webpack_require__(85244); +__webpack_require__(51423); + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +exports.AssertionError = _chunk6ZDHSOUVjs.AssertionError; exports.CAIP_ACCOUNT_ADDRESS_REGEX = _chunkYWAID473js.CAIP_ACCOUNT_ADDRESS_REGEX; exports.CAIP_ACCOUNT_ID_REGEX = _chunkYWAID473js.CAIP_ACCOUNT_ID_REGEX; exports.CAIP_CHAIN_ID_REGEX = _chunkYWAID473js.CAIP_CHAIN_ID_REGEX; exports.CAIP_NAMESPACE_REGEX = _chunkYWAID473js.CAIP_NAMESPACE_REGEX; exports.CAIP_REFERENCE_REGEX = _chunkYWAID473js.CAIP_REFERENCE_REGEX; exports.CaipAccountAddressStruct = _chunkYWAID473js.CaipAccountAddressStruct; exports.CaipAccountIdStruct = _chunkYWAID473js.CaipAccountIdStruct; exports.CaipChainIdStruct = _chunkYWAID473js.CaipChainIdStruct; exports.CaipNamespaceStruct = _chunkYWAID473js.CaipNamespaceStruct; exports.CaipReferenceStruct = _chunkYWAID473js.CaipReferenceStruct; exports.ChecksumStruct = _chunkE4C7EW4Rjs.ChecksumStruct; exports.Duration = _chunk4RMX5YWEjs.Duration; exports.ESCAPE_CHARACTERS_REGEXP = _chunkQVEKZRZ2js.ESCAPE_CHARACTERS_REGEXP; exports.FrozenMap = _chunkZ2RGWDD7js.FrozenMap; exports.FrozenSet = _chunkZ2RGWDD7js.FrozenSet; exports.HexAddressStruct = _chunkQEPVHEP7js.HexAddressStruct; exports.HexChecksumAddressStruct = _chunkQEPVHEP7js.HexChecksumAddressStruct; exports.HexStruct = _chunkQEPVHEP7js.HexStruct; exports.JsonRpcErrorStruct = _chunkOLLG4H35js.JsonRpcErrorStruct; exports.JsonRpcFailureStruct = _chunkOLLG4H35js.JsonRpcFailureStruct; exports.JsonRpcIdStruct = _chunkOLLG4H35js.JsonRpcIdStruct; exports.JsonRpcNotificationStruct = _chunkOLLG4H35js.JsonRpcNotificationStruct; exports.JsonRpcParamsStruct = _chunkOLLG4H35js.JsonRpcParamsStruct; exports.JsonRpcRequestStruct = _chunkOLLG4H35js.JsonRpcRequestStruct; exports.JsonRpcResponseStruct = _chunkOLLG4H35js.JsonRpcResponseStruct; exports.JsonRpcSuccessStruct = _chunkOLLG4H35js.JsonRpcSuccessStruct; exports.JsonRpcVersionStruct = 
_chunkOLLG4H35js.JsonRpcVersionStruct; exports.JsonSize = _chunkQVEKZRZ2js.JsonSize; exports.JsonStruct = _chunkOLLG4H35js.JsonStruct; exports.KnownCaipNamespace = _chunkYWAID473js.KnownCaipNamespace; exports.PendingJsonRpcResponseStruct = _chunkOLLG4H35js.PendingJsonRpcResponseStruct; exports.StrictHexStruct = _chunkQEPVHEP7js.StrictHexStruct; exports.UnsafeJsonStruct = _chunkOLLG4H35js.UnsafeJsonStruct; exports.VersionRangeStruct = _chunk4D6XQBHAjs.VersionRangeStruct; exports.VersionStruct = _chunk4D6XQBHAjs.VersionStruct; exports.add0x = _chunkQEPVHEP7js.add0x; exports.assert = _chunk6ZDHSOUVjs.assert; exports.assertExhaustive = _chunk6ZDHSOUVjs.assertExhaustive; exports.assertIsBytes = _chunkQEPVHEP7js.assertIsBytes; exports.assertIsHexString = _chunkQEPVHEP7js.assertIsHexString; exports.assertIsJsonRpcError = _chunkOLLG4H35js.assertIsJsonRpcError; exports.assertIsJsonRpcFailure = _chunkOLLG4H35js.assertIsJsonRpcFailure; exports.assertIsJsonRpcNotification = _chunkOLLG4H35js.assertIsJsonRpcNotification; exports.assertIsJsonRpcRequest = _chunkOLLG4H35js.assertIsJsonRpcRequest; exports.assertIsJsonRpcResponse = _chunkOLLG4H35js.assertIsJsonRpcResponse; exports.assertIsJsonRpcSuccess = _chunkOLLG4H35js.assertIsJsonRpcSuccess; exports.assertIsPendingJsonRpcResponse = _chunkOLLG4H35js.assertIsPendingJsonRpcResponse; exports.assertIsSemVerRange = _chunk4D6XQBHAjs.assertIsSemVerRange; exports.assertIsSemVerVersion = _chunk4D6XQBHAjs.assertIsSemVerVersion; exports.assertIsStrictHexString = _chunkQEPVHEP7js.assertIsStrictHexString; exports.assertStruct = _chunk6ZDHSOUVjs.assertStruct; exports.base64 = _chunk6NZW4WK4js.base64; exports.base64ToBytes = _chunkQEPVHEP7js.base64ToBytes; exports.bigIntToBytes = _chunkQEPVHEP7js.bigIntToBytes; exports.bigIntToHex = _chunkVFXTVNXNjs.bigIntToHex; exports.bytesToBase64 = _chunkQEPVHEP7js.bytesToBase64; exports.bytesToBigInt = _chunkQEPVHEP7js.bytesToBigInt; exports.bytesToHex = _chunkQEPVHEP7js.bytesToHex; exports.bytesToNumber = 
_chunkQEPVHEP7js.bytesToNumber; exports.bytesToSignedBigInt = _chunkQEPVHEP7js.bytesToSignedBigInt; exports.bytesToString = _chunkQEPVHEP7js.bytesToString; exports.calculateNumberSize = _chunkQVEKZRZ2js.calculateNumberSize; exports.calculateStringSize = _chunkQVEKZRZ2js.calculateStringSize; exports.concatBytes = _chunkQEPVHEP7js.concatBytes; exports.createBigInt = _chunkDHVKFDHQjs.createBigInt; exports.createBytes = _chunkDHVKFDHQjs.createBytes; exports.createDataView = _chunkQEPVHEP7js.createDataView; exports.createDeferredPromise = _chunkC6HGFEYLjs.createDeferredPromise; exports.createHex = _chunkDHVKFDHQjs.createHex; exports.createModuleLogger = _chunk2LBGT4GHjs.createModuleLogger; exports.createNumber = _chunkDHVKFDHQjs.createNumber; exports.createProjectLogger = _chunk2LBGT4GHjs.createProjectLogger; exports.exactOptional = _chunkOLLG4H35js.exactOptional; exports.getChecksumAddress = _chunkQEPVHEP7js.getChecksumAddress; exports.getErrorMessage = _chunkIZC266HSjs.getErrorMessage; exports.getJsonRpcIdValidator = _chunkOLLG4H35js.getJsonRpcIdValidator; exports.getJsonSize = _chunkOLLG4H35js.getJsonSize; exports.getKnownPropertyNames = _chunkQVEKZRZ2js.getKnownPropertyNames; exports.getSafeJson = _chunkOLLG4H35js.getSafeJson; exports.gtRange = _chunk4D6XQBHAjs.gtRange; exports.gtVersion = _chunk4D6XQBHAjs.gtVersion; exports.hasProperty = _chunkQVEKZRZ2js.hasProperty; exports.hexToBigInt = _chunkVFXTVNXNjs.hexToBigInt; exports.hexToBytes = _chunkQEPVHEP7js.hexToBytes; exports.hexToNumber = _chunkVFXTVNXNjs.hexToNumber; exports.inMilliseconds = _chunk4RMX5YWEjs.inMilliseconds; exports.isASCII = _chunkQVEKZRZ2js.isASCII; exports.isBytes = _chunkQEPVHEP7js.isBytes; exports.isCaipAccountAddress = _chunkYWAID473js.isCaipAccountAddress; exports.isCaipAccountId = _chunkYWAID473js.isCaipAccountId; exports.isCaipChainId = _chunkYWAID473js.isCaipChainId; exports.isCaipNamespace = _chunkYWAID473js.isCaipNamespace; exports.isCaipReference = _chunkYWAID473js.isCaipReference; 
exports.isErrorWithCode = _chunkIZC266HSjs.isErrorWithCode; exports.isErrorWithMessage = _chunkIZC266HSjs.isErrorWithMessage; exports.isErrorWithStack = _chunkIZC266HSjs.isErrorWithStack; exports.isHexString = _chunkQEPVHEP7js.isHexString; exports.isJsonRpcError = _chunkOLLG4H35js.isJsonRpcError; exports.isJsonRpcFailure = _chunkOLLG4H35js.isJsonRpcFailure; exports.isJsonRpcNotification = _chunkOLLG4H35js.isJsonRpcNotification; exports.isJsonRpcRequest = _chunkOLLG4H35js.isJsonRpcRequest; exports.isJsonRpcResponse = _chunkOLLG4H35js.isJsonRpcResponse; exports.isJsonRpcSuccess = _chunkOLLG4H35js.isJsonRpcSuccess; exports.isNonEmptyArray = _chunkQVEKZRZ2js.isNonEmptyArray; exports.isNullOrUndefined = _chunkQVEKZRZ2js.isNullOrUndefined; exports.isObject = _chunkQVEKZRZ2js.isObject; exports.isPendingJsonRpcResponse = _chunkOLLG4H35js.isPendingJsonRpcResponse; exports.isPlainObject = _chunkQVEKZRZ2js.isPlainObject; exports.isStrictHexString = _chunkQEPVHEP7js.isStrictHexString; exports.isValidChecksumAddress = _chunkQEPVHEP7js.isValidChecksumAddress; exports.isValidHexAddress = _chunkQEPVHEP7js.isValidHexAddress; exports.isValidJson = _chunkOLLG4H35js.isValidJson; exports.isValidSemVerRange = _chunk4D6XQBHAjs.isValidSemVerRange; exports.isValidSemVerVersion = _chunk4D6XQBHAjs.isValidSemVerVersion; exports.jsonrpc2 = _chunkOLLG4H35js.jsonrpc2; exports.numberToBytes = _chunkQEPVHEP7js.numberToBytes; exports.numberToHex = _chunkVFXTVNXNjs.numberToHex; exports.object = _chunkOLLG4H35js.object; exports.parseCaipAccountId = _chunkYWAID473js.parseCaipAccountId; exports.parseCaipChainId = _chunkYWAID473js.parseCaipChainId; exports.remove0x = _chunkQEPVHEP7js.remove0x; exports.satisfiesVersionRange = _chunk4D6XQBHAjs.satisfiesVersionRange; exports.signedBigIntToBytes = _chunkQEPVHEP7js.signedBigIntToBytes; exports.stringToBytes = _chunkQEPVHEP7js.stringToBytes; exports.timeSince = _chunk4RMX5YWEjs.timeSince; exports.toCaipChainId = _chunkYWAID473js.toCaipChainId; 
exports.valueToBytes = _chunkQEPVHEP7js.valueToBytes; exports.wrapError = _chunkIZC266HSjs.wrapError; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 82102: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.output = exports.exists = exports.hash = exports.bytes = exports.bool = exports.number = exports.isBytes = void 0; +function number(n) { + if (!Number.isSafeInteger(n) || n < 0) + throw new Error(`positive integer expected, not ${n}`); +} +exports.number = number; +function bool(b) { + if (typeof b !== 'boolean') + throw new Error(`boolean expected, not ${b}`); +} +exports.bool = bool; +// copied from utils +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +exports.isBytes = isBytes; +function bytes(b, ...lengths) { + if (!isBytes(b)) + throw new Error('Uint8Array expected'); + if (lengths.length > 0 && !lengths.includes(b.length)) + throw new Error(`Uint8Array expected of length ${lengths}, not of length=${b.length}`); +} +exports.bytes = bytes; +function hash(h) { + if (typeof h !== 'function' || typeof h.create !== 'function') + throw new Error('Hash should be wrapped by utils.wrapConstructor'); + number(h.outputLen); + number(h.blockLen); +} +exports.hash = hash; +function exists(instance, checkFinished = true) { + if (instance.destroyed) + throw new Error('Hash instance has been destroyed'); + if (checkFinished && instance.finished) + throw new Error('Hash#digest() has already been called'); +} +exports.exists = exists; +function output(out, instance) { + bytes(out); + const min = instance.outputLen; + if (out.length < min) { + throw new Error(`digestInto() expects output buffer of length at least ${min}`); + } +} +exports.output = output; +const assert = { number, bool, bytes, hash, exists, output }; +exports["default"] = assert; +//# sourceMappingURL=_assert.js.map 
+ +/***/ }), + +/***/ 17335: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.add5L = exports.add5H = exports.add4H = exports.add4L = exports.add3H = exports.add3L = exports.add = exports.rotlBL = exports.rotlBH = exports.rotlSL = exports.rotlSH = exports.rotr32L = exports.rotr32H = exports.rotrBL = exports.rotrBH = exports.rotrSL = exports.rotrSH = exports.shrSL = exports.shrSH = exports.toBig = exports.split = exports.fromBig = void 0; +const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); +const _32n = /* @__PURE__ */ BigInt(32); +// We are not using BigUint64Array, because they are extremely slow as per 2022 +function fromBig(n, le = false) { + if (le) + return { h: Number(n & U32_MASK64), l: Number((n >> _32n) & U32_MASK64) }; + return { h: Number((n >> _32n) & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 }; +} +exports.fromBig = fromBig; +function split(lst, le = false) { + let Ah = new Uint32Array(lst.length); + let Al = new Uint32Array(lst.length); + for (let i = 0; i < lst.length; i++) { + const { h, l } = fromBig(lst[i], le); + [Ah[i], Al[i]] = [h, l]; + } + return [Ah, Al]; +} +exports.split = split; +const toBig = (h, l) => (BigInt(h >>> 0) << _32n) | BigInt(l >>> 0); +exports.toBig = toBig; +// for Shift in [0, 32) +const shrSH = (h, _l, s) => h >>> s; +exports.shrSH = shrSH; +const shrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +exports.shrSL = shrSL; +// Right rotate for Shift in [1, 32) +const rotrSH = (h, l, s) => (h >>> s) | (l << (32 - s)); +exports.rotrSH = rotrSH; +const rotrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +exports.rotrSL = rotrSL; +// Right rotate for Shift in (32, 64), NOTE: 32 is special case. 
+const rotrBH = (h, l, s) => (h << (64 - s)) | (l >>> (s - 32)); +exports.rotrBH = rotrBH; +const rotrBL = (h, l, s) => (h >>> (s - 32)) | (l << (64 - s)); +exports.rotrBL = rotrBL; +// Right rotate for shift===32 (just swaps l&h) +const rotr32H = (_h, l) => l; +exports.rotr32H = rotr32H; +const rotr32L = (h, _l) => h; +exports.rotr32L = rotr32L; +// Left rotate for Shift in [1, 32) +const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s)); +exports.rotlSH = rotlSH; +const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s)); +exports.rotlSL = rotlSL; +// Left rotate for Shift in (32, 64), NOTE: 32 is special case. +const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); +exports.rotlBH = rotlBH; +const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); +exports.rotlBL = rotlBL; +// JS uses 32-bit signed integers for bitwise operations which means we cannot +// simple take carry out of low bit sum by shift, we need to use division. +function add(Ah, Al, Bh, Bl) { + const l = (Al >>> 0) + (Bl >>> 0); + return { h: (Ah + Bh + ((l / 2 ** 32) | 0)) | 0, l: l | 0 }; +} +exports.add = add; +// Addition with more than 2 elements +const add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0); +exports.add3L = add3L; +const add3H = (low, Ah, Bh, Ch) => (Ah + Bh + Ch + ((low / 2 ** 32) | 0)) | 0; +exports.add3H = add3H; +const add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0); +exports.add4L = add4L; +const add4H = (low, Ah, Bh, Ch, Dh) => (Ah + Bh + Ch + Dh + ((low / 2 ** 32) | 0)) | 0; +exports.add4H = add4H; +const add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0); +exports.add5L = add5L; +const add5H = (low, Ah, Bh, Ch, Dh, Eh) => (Ah + Bh + Ch + Dh + Eh + ((low / 2 ** 32) | 0)) | 0; +exports.add5H = add5H; +// prettier-ignore +const u64 = { + fromBig, split, toBig, + shrSH, shrSL, + rotrSH, rotrSL, rotrBH, rotrBL, + rotr32H, rotr32L, + rotlSH, rotlSL, rotlBH, rotlBL, + add, add3L, 
add3H, add4L, add4H, add5H, add5L, +}; +exports["default"] = u64; +//# sourceMappingURL=_u64.js.map + +/***/ }), + +/***/ 6256: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.crypto = void 0; +exports.crypto = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined; +//# sourceMappingURL=crypto.js.map + +/***/ }), + +/***/ 2214: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.shake256 = exports.shake128 = exports.keccak_512 = exports.keccak_384 = exports.keccak_256 = exports.keccak_224 = exports.sha3_512 = exports.sha3_384 = exports.sha3_256 = exports.sha3_224 = exports.Keccak = exports.keccakP = void 0; +const _assert_js_1 = __webpack_require__(82102); +const _u64_js_1 = __webpack_require__(17335); +const utils_js_1 = __webpack_require__(79520); +// SHA3 (keccak) is based on a new design: basically, the internal state is bigger than output size. +// It's called a sponge function. 
+// Various per round constants calculations +const SHA3_PI = []; +const SHA3_ROTL = []; +const _SHA3_IOTA = []; +const _0n = /* @__PURE__ */ BigInt(0); +const _1n = /* @__PURE__ */ BigInt(1); +const _2n = /* @__PURE__ */ BigInt(2); +const _7n = /* @__PURE__ */ BigInt(7); +const _256n = /* @__PURE__ */ BigInt(256); +const _0x71n = /* @__PURE__ */ BigInt(0x71); +for (let round = 0, R = _1n, x = 1, y = 0; round < 24; round++) { + // Pi + [x, y] = [y, (2 * x + 3 * y) % 5]; + SHA3_PI.push(2 * (5 * y + x)); + // Rotational + SHA3_ROTL.push((((round + 1) * (round + 2)) / 2) % 64); + // Iota + let t = _0n; + for (let j = 0; j < 7; j++) { + R = ((R << _1n) ^ ((R >> _7n) * _0x71n)) % _256n; + if (R & _2n) + t ^= _1n << ((_1n << /* @__PURE__ */ BigInt(j)) - _1n); + } + _SHA3_IOTA.push(t); +} +const [SHA3_IOTA_H, SHA3_IOTA_L] = /* @__PURE__ */ (0, _u64_js_1.split)(_SHA3_IOTA, true); +// Left rotation (without 0, 32, 64) +const rotlH = (h, l, s) => (s > 32 ? (0, _u64_js_1.rotlBH)(h, l, s) : (0, _u64_js_1.rotlSH)(h, l, s)); +const rotlL = (h, l, s) => (s > 32 ? 
(0, _u64_js_1.rotlBL)(h, l, s) : (0, _u64_js_1.rotlSL)(h, l, s)); +// Same as keccakf1600, but allows to skip some rounds +function keccakP(s, rounds = 24) { + const B = new Uint32Array(5 * 2); + // NOTE: all indices are x2 since we store state as u32 instead of u64 (bigints to slow in js) + for (let round = 24 - rounds; round < 24; round++) { + // Theta θ + for (let x = 0; x < 10; x++) + B[x] = s[x] ^ s[x + 10] ^ s[x + 20] ^ s[x + 30] ^ s[x + 40]; + for (let x = 0; x < 10; x += 2) { + const idx1 = (x + 8) % 10; + const idx0 = (x + 2) % 10; + const B0 = B[idx0]; + const B1 = B[idx0 + 1]; + const Th = rotlH(B0, B1, 1) ^ B[idx1]; + const Tl = rotlL(B0, B1, 1) ^ B[idx1 + 1]; + for (let y = 0; y < 50; y += 10) { + s[x + y] ^= Th; + s[x + y + 1] ^= Tl; + } + } + // Rho (ρ) and Pi (π) + let curH = s[2]; + let curL = s[3]; + for (let t = 0; t < 24; t++) { + const shift = SHA3_ROTL[t]; + const Th = rotlH(curH, curL, shift); + const Tl = rotlL(curH, curL, shift); + const PI = SHA3_PI[t]; + curH = s[PI]; + curL = s[PI + 1]; + s[PI] = Th; + s[PI + 1] = Tl; + } + // Chi (χ) + for (let y = 0; y < 50; y += 10) { + for (let x = 0; x < 10; x++) + B[x] = s[y + x]; + for (let x = 0; x < 10; x++) + s[y + x] ^= ~B[(x + 2) % 10] & B[(x + 4) % 10]; + } + // Iota (ι) + s[0] ^= SHA3_IOTA_H[round]; + s[1] ^= SHA3_IOTA_L[round]; + } + B.fill(0); +} +exports.keccakP = keccakP; +class Keccak extends utils_js_1.Hash { + // NOTE: we accept arguments in bytes instead of bits here. + constructor(blockLen, suffix, outputLen, enableXOF = false, rounds = 24) { + super(); + this.blockLen = blockLen; + this.suffix = suffix; + this.outputLen = outputLen; + this.enableXOF = enableXOF; + this.rounds = rounds; + this.pos = 0; + this.posOut = 0; + this.finished = false; + this.destroyed = false; + // Can be passed from user as dkLen + (0, _assert_js_1.number)(outputLen); + // 1600 = 5x5 matrix of 64bit. 
1600 bits === 200 bytes + if (0 >= this.blockLen || this.blockLen >= 200) + throw new Error('Sha3 supports only keccak-f1600 function'); + this.state = new Uint8Array(200); + this.state32 = (0, utils_js_1.u32)(this.state); + } + keccak() { + if (!utils_js_1.isLE) + (0, utils_js_1.byteSwap32)(this.state32); + keccakP(this.state32, this.rounds); + if (!utils_js_1.isLE) + (0, utils_js_1.byteSwap32)(this.state32); + this.posOut = 0; + this.pos = 0; + } + update(data) { + (0, _assert_js_1.exists)(this); + const { blockLen, state } = this; + data = (0, utils_js_1.toBytes)(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + for (let i = 0; i < take; i++) + state[this.pos++] ^= data[pos++]; + if (this.pos === blockLen) + this.keccak(); + } + return this; + } + finish() { + if (this.finished) + return; + this.finished = true; + const { state, suffix, pos, blockLen } = this; + // Do the padding + state[pos] ^= suffix; + if ((suffix & 0x80) !== 0 && pos === blockLen - 1) + this.keccak(); + state[blockLen - 1] ^= 0x80; + this.keccak(); + } + writeInto(out) { + (0, _assert_js_1.exists)(this, false); + (0, _assert_js_1.bytes)(out); + this.finish(); + const bufferOut = this.state; + const { blockLen } = this; + for (let pos = 0, len = out.length; pos < len;) { + if (this.posOut >= blockLen) + this.keccak(); + const take = Math.min(blockLen - this.posOut, len - pos); + out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos); + this.posOut += take; + pos += take; + } + return out; + } + xofInto(out) { + // Sha3/Keccak usage with XOF is probably mistake, only SHAKE instances can do XOF + if (!this.enableXOF) + throw new Error('XOF is not possible for this instance'); + return this.writeInto(out); + } + xof(bytes) { + (0, _assert_js_1.number)(bytes); + return this.xofInto(new Uint8Array(bytes)); + } + digestInto(out) { + (0, _assert_js_1.output)(out, this); + if (this.finished) + throw new 
Error('digest() was already called'); + this.writeInto(out); + this.destroy(); + return out; + } + digest() { + return this.digestInto(new Uint8Array(this.outputLen)); + } + destroy() { + this.destroyed = true; + this.state.fill(0); + } + _cloneInto(to) { + const { blockLen, suffix, outputLen, rounds, enableXOF } = this; + to || (to = new Keccak(blockLen, suffix, outputLen, enableXOF, rounds)); + to.state32.set(this.state32); + to.pos = this.pos; + to.posOut = this.posOut; + to.finished = this.finished; + to.rounds = rounds; + // Suffix can change in cSHAKE + to.suffix = suffix; + to.outputLen = outputLen; + to.enableXOF = enableXOF; + to.destroyed = this.destroyed; + return to; + } +} +exports.Keccak = Keccak; +const gen = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapConstructor)(() => new Keccak(blockLen, suffix, outputLen)); +exports.sha3_224 = gen(0x06, 144, 224 / 8); +/** + * SHA3-256 hash function + * @param message - that would be hashed + */ +exports.sha3_256 = gen(0x06, 136, 256 / 8); +exports.sha3_384 = gen(0x06, 104, 384 / 8); +exports.sha3_512 = gen(0x06, 72, 512 / 8); +exports.keccak_224 = gen(0x01, 144, 224 / 8); +/** + * keccak-256 hash function. Different from SHA3-256. + * @param message - that would be hashed + */ +exports.keccak_256 = gen(0x01, 136, 256 / 8); +exports.keccak_384 = gen(0x01, 104, 384 / 8); +exports.keccak_512 = gen(0x01, 72, 512 / 8); +const genShake = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapXOFConstructorWithOpts)((opts = {}) => new Keccak(blockLen, suffix, opts.dkLen === undefined ? outputLen : opts.dkLen, true)); +exports.shake128 = genShake(0x1f, 168, 128 / 8); +exports.shake256 = genShake(0x1f, 136, 256 / 8); +//# sourceMappingURL=sha3.js.map + +/***/ }), + +/***/ 79520: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +/*! 
noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.randomBytes = exports.wrapXOFConstructorWithOpts = exports.wrapConstructorWithOpts = exports.wrapConstructor = exports.checkOpts = exports.Hash = exports.concatBytes = exports.toBytes = exports.utf8ToBytes = exports.asyncLoop = exports.nextTick = exports.hexToBytes = exports.bytesToHex = exports.byteSwap32 = exports.byteSwapIfBE = exports.byteSwap = exports.isLE = exports.rotl = exports.rotr = exports.createView = exports.u32 = exports.u8 = exports.isBytes = void 0; +// We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. +// node.js versions earlier than v19 don't declare it in global scope. +// For node.js, package.json#exports field mapping rewrites import +// from `crypto` to `cryptoNode`, which imports native module. +// Makes the utils un-importable in browsers without a bundler. +// Once node.js 18 is deprecated (2025-04-30), we can just drop the import. 
+const crypto_1 = __webpack_require__(6256); +const _assert_js_1 = __webpack_require__(82102); +// export { isBytes } from './_assert.js'; +// We can't reuse isBytes from _assert, because somehow this causes huge perf issues +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +exports.isBytes = isBytes; +// Cast array to different type +const u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); +exports.u8 = u8; +const u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); +exports.u32 = u32; +// Cast array to view +const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); +exports.createView = createView; +// The rotate right (circular right shift) operation for uint32 +const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift); +exports.rotr = rotr; +// The rotate left (circular left shift) operation for uint32 +const rotl = (word, shift) => (word << shift) | ((word >>> (32 - shift)) >>> 0); +exports.rotl = rotl; +exports.isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44; +// The byte swap operation for uint32 +const byteSwap = (word) => ((word << 24) & 0xff000000) | + ((word << 8) & 0xff0000) | + ((word >>> 8) & 0xff00) | + ((word >>> 24) & 0xff); +exports.byteSwap = byteSwap; +// Conditionally byte swap if on a big-endian platform +exports.byteSwapIfBE = exports.isLE ? 
(n) => n : (n) => (0, exports.byteSwap)(n); +// In place byte swap for Uint32Array +function byteSwap32(arr) { + for (let i = 0; i < arr.length; i++) { + arr[i] = (0, exports.byteSwap)(arr[i]); + } +} +exports.byteSwap32 = byteSwap32; +// Array where index 0xf0 (240) is mapped to string 'f0' +const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); +/** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ +function bytesToHex(bytes) { + (0, _assert_js_1.bytes)(bytes); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; +} +exports.bytesToHex = bytesToHex; +// We use optimized technique to convert hex string to byte array +const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 }; +function asciiToBase16(char) { + if (char >= asciis._0 && char <= asciis._9) + return char - asciis._0; + if (char >= asciis._A && char <= asciis._F) + return char - (asciis._A - 10); + if (char >= asciis._a && char <= asciis._f) + return char - (asciis._a - 10); + return; +} +/** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ +function hexToBytes(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const hl = hex.length; + const al = hl / 2; + if (hl % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + hl); + const array = new Uint8Array(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = asciiToBase16(hex.charCodeAt(hi)); + const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); + if (n1 === undefined || n2 === undefined) { + const char = hex[hi] + hex[hi + 1]; + throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); + } + array[ai] = n1 * 16 + n2; + } + return array; +} +exports.hexToBytes = hexToBytes; +// There is no setImmediate in browser and 
setTimeout is slow. +// call of async fn will return Promise, which will be fullfiled only on +// next scheduler queue processing step and this is exactly what we need. +const nextTick = async () => { }; +exports.nextTick = nextTick; +// Returns control to thread each 'tick' ms to avoid blocking +async function asyncLoop(iters, tick, cb) { + let ts = Date.now(); + for (let i = 0; i < iters; i++) { + cb(i); + // Date.now() is not monotonic, so in case if clock goes backwards we return return control too + const diff = Date.now() - ts; + if (diff >= 0 && diff < tick) + continue; + await (0, exports.nextTick)(); + ts += diff; + } +} +exports.asyncLoop = asyncLoop; +/** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ +function utf8ToBytes(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 +} +exports.utf8ToBytes = utf8ToBytes; +/** + * Normalizes (non-hex) string or Uint8Array to Uint8Array. + * Warning: when Uint8Array is passed, it would NOT get copied. + * Keep in mind for future mutable operations. + */ +function toBytes(data) { + if (typeof data === 'string') + data = utf8ToBytes(data); + (0, _assert_js_1.bytes)(data); + return data; +} +exports.toBytes = toBytes; +/** + * Copies several Uint8Arrays into one. 
+ */ +function concatBytes(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + (0, _assert_js_1.bytes)(a); + sum += a.length; + } + const res = new Uint8Array(sum); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; +} +exports.concatBytes = concatBytes; +// For runtime check if class implements interface +class Hash { + // Safe version that clones internal state + clone() { + return this._cloneInto(); + } +} +exports.Hash = Hash; +const toStr = {}.toString; +function checkOpts(defaults, opts) { + if (opts !== undefined && toStr.call(opts) !== '[object Object]') + throw new Error('Options should be object or undefined'); + const merged = Object.assign(defaults, opts); + return merged; +} +exports.checkOpts = checkOpts; +function wrapConstructor(hashCons) { + const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); + const tmp = hashCons(); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = () => hashCons(); + return hashC; +} +exports.wrapConstructor = wrapConstructor; +function wrapConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; +} +exports.wrapConstructorWithOpts = wrapConstructorWithOpts; +function wrapXOFConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; +} +exports.wrapXOFConstructorWithOpts = wrapXOFConstructorWithOpts; +/** + * Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS. 
+ */ +function randomBytes(bytesLength = 32) { + if (crypto_1.crypto && typeof crypto_1.crypto.getRandomValues === 'function') { + return crypto_1.crypto.getRandomValues(new Uint8Array(bytesLength)); + } + throw new Error('crypto.getRandomValues must be defined'); +} +exports.randomBytes = randomBytes; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 73562: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createCurve = exports.getHash = void 0; +/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +const hmac_1 = __webpack_require__(39615); +const utils_1 = __webpack_require__(99175); +const weierstrass_js_1 = __webpack_require__(91705); +// connects noble-curves to noble-hashes +function getHash(hash) { + return { + hash, + hmac: (key, ...msgs) => (0, hmac_1.hmac)(hash, key, (0, utils_1.concatBytes)(...msgs)), + randomBytes: utils_1.randomBytes, + }; +} +exports.getHash = getHash; +function createCurve(curveDef, defHash) { + const create = (hash) => (0, weierstrass_js_1.weierstrass)({ ...curveDef, ...getHash(hash) }); + return Object.freeze({ ...create(defHash), create }); +} +exports.createCurve = createCurve; +//# sourceMappingURL=_shortw_utils.js.map + +/***/ }), + +/***/ 62422: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateBasic = exports.wNAF = void 0; +/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// Abelian group utilities +const modular_js_1 = __webpack_require__(24967); +const utils_js_1 = __webpack_require__(91484); +const _0n = BigInt(0); +const _1n = BigInt(1); +// Elliptic curve multiplication of Point by scalar. Fragile. +// Scalars should always be less than curve order: this should be checked inside of a curve itself. 
+// Creates precomputation tables for fast multiplication: +// - private scalar is split by fixed size windows of W bits +// - every window point is collected from window's table & added to accumulator +// - since windows are different, same point inside tables won't be accessed more than once per calc +// - each multiplication is 'Math.ceil(CURVE_ORDER / 𝑊) + 1' point additions (fixed for any scalar) +// - +1 window is neccessary for wNAF +// - wNAF reduces table size: 2x less memory + 2x faster generation, but 10% slower multiplication +// TODO: Research returning 2d JS array of windows, instead of a single window. This would allow +// windows to be in different memory locations +function wNAF(c, bits) { + const constTimeNegate = (condition, item) => { + const neg = item.negate(); + return condition ? neg : item; + }; + const opts = (W) => { + const windows = Math.ceil(bits / W) + 1; // +1, because + const windowSize = 2 ** (W - 1); // -1 because we skip zero + return { windows, windowSize }; + }; + return { + constTimeNegate, + // non-const time multiplication ladder + unsafeLadder(elm, n) { + let p = c.ZERO; + let d = elm; + while (n > _0n) { + if (n & _1n) + p = p.add(d); + d = d.double(); + n >>= _1n; + } + return p; + }, + /** + * Creates a wNAF precomputation window. Used for caching. + * Default window size is set by `utils.precompute()` and is equal to 8. + * Number of precomputed points depends on the curve size: + * 2^(𝑊−1) * (Math.ceil(𝑛 / 𝑊) + 1), where: + * - 𝑊 is the window size + * - 𝑛 is the bitlength of the curve order. + * For a 256-bit curve and window size 8, the number of precomputed points is 128 * 33 = 4224. 
+ * @returns precomputed point tables flattened to a single array + */ + precomputeWindow(elm, W) { + const { windows, windowSize } = opts(W); + const points = []; + let p = elm; + let base = p; + for (let window = 0; window < windows; window++) { + base = p; + points.push(base); + // =1, because we skip zero + for (let i = 1; i < windowSize; i++) { + base = base.add(p); + points.push(base); + } + p = base.double(); + } + return points; + }, + /** + * Implements ec multiplication using precomputed tables and w-ary non-adjacent form. + * @param W window size + * @param precomputes precomputed tables + * @param n scalar (we don't check here, but should be less than curve order) + * @returns real and fake (for const-time) points + */ + wNAF(W, precomputes, n) { + // TODO: maybe check that scalar is less than group order? wNAF behavious is undefined otherwise + // But need to carefully remove other checks before wNAF. ORDER == bits here + const { windows, windowSize } = opts(W); + let p = c.ZERO; + let f = c.BASE; + const mask = BigInt(2 ** W - 1); // Create mask with W ones: 0b1111 for W=4 etc. + const maxNumber = 2 ** W; + const shiftBy = BigInt(W); + for (let window = 0; window < windows; window++) { + const offset = window * windowSize; + // Extract W bits. + let wbits = Number(n & mask); + // Shift number by W bits. + n >>= shiftBy; + // If the bits are bigger than max size, we'll split those. + // +224 => 256 - 32 + if (wbits > windowSize) { + wbits -= maxNumber; + n += _1n; + } + // This code was first written with assumption that 'f' and 'p' will never be infinity point: + // since each addition is multiplied by 2 ** W, it cannot cancel each other. However, + // there is negate now: it is possible that negated element from low value + // would be the same as high element, which will create carry into next window. + // It's not obvious how this can fail, but still worth investigating later. + // Check if we're onto Zero point. 
+ // Add random point inside current window to f. + const offset1 = offset; + const offset2 = offset + Math.abs(wbits) - 1; // -1 because we skip zero + const cond1 = window % 2 !== 0; + const cond2 = wbits < 0; + if (wbits === 0) { + // The most important part for const-time getPublicKey + f = f.add(constTimeNegate(cond1, precomputes[offset1])); + } + else { + p = p.add(constTimeNegate(cond2, precomputes[offset2])); + } + } + // JIT-compiler should not eliminate f here, since it will later be used in normalizeZ() + // Even if the variable is still unused, there are some checks which will + // throw an exception, so compiler needs to prove they won't happen, which is hard. + // At this point there is a way to F be infinity-point even if p is not, + // which makes it less const-time: around 1 bigint multiply. + return { p, f }; + }, + wNAFCached(P, precomputesMap, n, transform) { + // @ts-ignore + const W = P._WINDOW_SIZE || 1; + // Calculate precomputes on a first run, reuse them after + let comp = precomputesMap.get(P); + if (!comp) { + comp = this.precomputeWindow(P, W); + if (W !== 1) { + precomputesMap.set(P, transform(comp)); + } + } + return this.wNAF(W, comp, n); + }, + }; +} +exports.wNAF = wNAF; +function validateBasic(curve) { + (0, modular_js_1.validateField)(curve.Fp); + (0, utils_js_1.validateObject)(curve, { + n: 'bigint', + h: 'bigint', + Gx: 'field', + Gy: 'field', + }, { + nBitLength: 'isSafeInteger', + nByteLength: 'isSafeInteger', + }); + // Set defaults + return Object.freeze({ + ...(0, modular_js_1.nLength)(curve.n, curve.nBitLength), + ...curve, + ...{ p: curve.Fp.ORDER }, + }); +} +exports.validateBasic = validateBasic; +//# sourceMappingURL=curve.js.map + +/***/ }), + +/***/ 71761: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createHasher = exports.isogenyMap = exports.hash_to_field = exports.expand_message_xof = 
exports.expand_message_xmd = void 0; +const modular_js_1 = __webpack_require__(24967); +const utils_js_1 = __webpack_require__(91484); +function validateDST(dst) { + if ((0, utils_js_1.isBytes)(dst)) + return dst; + if (typeof dst === 'string') + return (0, utils_js_1.utf8ToBytes)(dst); + throw new Error('DST must be Uint8Array or string'); +} +// Octet Stream to Integer. "spec" implementation of os2ip is 2.5x slower vs bytesToNumberBE. +const os2ip = utils_js_1.bytesToNumberBE; +// Integer to Octet Stream (numberToBytesBE) +function i2osp(value, length) { + if (value < 0 || value >= 1 << (8 * length)) { + throw new Error(`bad I2OSP call: value=${value} length=${length}`); + } + const res = Array.from({ length }).fill(0); + for (let i = length - 1; i >= 0; i--) { + res[i] = value & 0xff; + value >>>= 8; + } + return new Uint8Array(res); +} +function strxor(a, b) { + const arr = new Uint8Array(a.length); + for (let i = 0; i < a.length; i++) { + arr[i] = a[i] ^ b[i]; + } + return arr; +} +function abytes(item) { + if (!(0, utils_js_1.isBytes)(item)) + throw new Error('Uint8Array expected'); +} +function isNum(item) { + if (!Number.isSafeInteger(item)) + throw new Error('number expected'); +} +// Produces a uniformly random byte string using a cryptographic hash function H that outputs b bits +// https://www.rfc-editor.org/rfc/rfc9380#section-5.3.1 +function expand_message_xmd(msg, DST, lenInBytes, H) { + abytes(msg); + abytes(DST); + isNum(lenInBytes); + // https://www.rfc-editor.org/rfc/rfc9380#section-5.3.3 + if (DST.length > 255) + DST = H((0, utils_js_1.concatBytes)((0, utils_js_1.utf8ToBytes)('H2C-OVERSIZE-DST-'), DST)); + const { outputLen: b_in_bytes, blockLen: r_in_bytes } = H; + const ell = Math.ceil(lenInBytes / b_in_bytes); + if (ell > 255) + throw new Error('Invalid xmd length'); + const DST_prime = (0, utils_js_1.concatBytes)(DST, i2osp(DST.length, 1)); + const Z_pad = i2osp(0, r_in_bytes); + const l_i_b_str = i2osp(lenInBytes, 2); // len_in_bytes_str + 
const b = new Array(ell); + const b_0 = H((0, utils_js_1.concatBytes)(Z_pad, msg, l_i_b_str, i2osp(0, 1), DST_prime)); + b[0] = H((0, utils_js_1.concatBytes)(b_0, i2osp(1, 1), DST_prime)); + for (let i = 1; i <= ell; i++) { + const args = [strxor(b_0, b[i - 1]), i2osp(i + 1, 1), DST_prime]; + b[i] = H((0, utils_js_1.concatBytes)(...args)); + } + const pseudo_random_bytes = (0, utils_js_1.concatBytes)(...b); + return pseudo_random_bytes.slice(0, lenInBytes); +} +exports.expand_message_xmd = expand_message_xmd; +// Produces a uniformly random byte string using an extendable-output function (XOF) H. +// 1. The collision resistance of H MUST be at least k bits. +// 2. H MUST be an XOF that has been proved indifferentiable from +// a random oracle under a reasonable cryptographic assumption. +// https://www.rfc-editor.org/rfc/rfc9380#section-5.3.2 +function expand_message_xof(msg, DST, lenInBytes, k, H) { + abytes(msg); + abytes(DST); + isNum(lenInBytes); + // https://www.rfc-editor.org/rfc/rfc9380#section-5.3.3 + // DST = H('H2C-OVERSIZE-DST-' || a_very_long_DST, Math.ceil((lenInBytes * k) / 8)); + if (DST.length > 255) { + const dkLen = Math.ceil((2 * k) / 8); + DST = H.create({ dkLen }).update((0, utils_js_1.utf8ToBytes)('H2C-OVERSIZE-DST-')).update(DST).digest(); + } + if (lenInBytes > 65535 || DST.length > 255) + throw new Error('expand_message_xof: invalid lenInBytes'); + return (H.create({ dkLen: lenInBytes }) + .update(msg) + .update(i2osp(lenInBytes, 2)) + // 2. 
DST_prime = DST || I2OSP(len(DST), 1) + .update(DST) + .update(i2osp(DST.length, 1)) + .digest()); +} +exports.expand_message_xof = expand_message_xof; +/** + * Hashes arbitrary-length byte strings to a list of one or more elements of a finite field F + * https://www.rfc-editor.org/rfc/rfc9380#section-5.2 + * @param msg a byte string containing the message to hash + * @param count the number of elements of F to output + * @param options `{DST: string, p: bigint, m: number, k: number, expand: 'xmd' | 'xof', hash: H}`, see above + * @returns [u_0, ..., u_(count - 1)], a list of field elements. + */ +function hash_to_field(msg, count, options) { + (0, utils_js_1.validateObject)(options, { + DST: 'stringOrUint8Array', + p: 'bigint', + m: 'isSafeInteger', + k: 'isSafeInteger', + hash: 'hash', + }); + const { p, k, m, hash, expand, DST: _DST } = options; + abytes(msg); + isNum(count); + const DST = validateDST(_DST); + const log2p = p.toString(2).length; + const L = Math.ceil((log2p + k) / 8); // section 5.1 of ietf draft link above + const len_in_bytes = count * m * L; + let prb; // pseudo_random_bytes + if (expand === 'xmd') { + prb = expand_message_xmd(msg, DST, len_in_bytes, hash); + } + else if (expand === 'xof') { + prb = expand_message_xof(msg, DST, len_in_bytes, k, hash); + } + else if (expand === '_internal_pass') { + // for internal tests only + prb = msg; + } + else { + throw new Error('expand must be "xmd" or "xof"'); + } + const u = new Array(count); + for (let i = 0; i < count; i++) { + const e = new Array(m); + for (let j = 0; j < m; j++) { + const elm_offset = L * (j + i * m); + const tv = prb.subarray(elm_offset, elm_offset + L); + e[j] = (0, modular_js_1.mod)(os2ip(tv), p); + } + u[i] = e; + } + return u; +} +exports.hash_to_field = hash_to_field; +function isogenyMap(field, map) { + // Make same order as in spec + const COEFF = map.map((i) => Array.from(i).reverse()); + return (x, y) => { + const [xNum, xDen, yNum, yDen] = COEFF.map((val) => 
val.reduce((acc, i) => field.add(field.mul(acc, x), i))); + x = field.div(xNum, xDen); // xNum / xDen + y = field.mul(y, field.div(yNum, yDen)); // y * (yNum / yDev) + return { x, y }; + }; +} +exports.isogenyMap = isogenyMap; +function createHasher(Point, mapToCurve, def) { + if (typeof mapToCurve !== 'function') + throw new Error('mapToCurve() must be defined'); + return { + // Encodes byte string to elliptic curve. + // hash_to_curve from https://www.rfc-editor.org/rfc/rfc9380#section-3 + hashToCurve(msg, options) { + const u = hash_to_field(msg, 2, { ...def, DST: def.DST, ...options }); + const u0 = Point.fromAffine(mapToCurve(u[0])); + const u1 = Point.fromAffine(mapToCurve(u[1])); + const P = u0.add(u1).clearCofactor(); + P.assertValidity(); + return P; + }, + // Encodes byte string to elliptic curve. + // encode_to_curve from https://www.rfc-editor.org/rfc/rfc9380#section-3 + encodeToCurve(msg, options) { + const u = hash_to_field(msg, 1, { ...def, DST: def.encodeDST, ...options }); + const P = Point.fromAffine(mapToCurve(u[0])).clearCofactor(); + P.assertValidity(); + return P; + }, + }; +} +exports.createHasher = createHasher; +//# sourceMappingURL=hash-to-curve.js.map + +/***/ }), + +/***/ 24967: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.mapHashToField = exports.getMinHashLength = exports.getFieldBytesLength = exports.hashToPrivateScalar = exports.FpSqrtEven = exports.FpSqrtOdd = exports.Field = exports.nLength = exports.FpIsSquare = exports.FpDiv = exports.FpInvertBatch = exports.FpPow = exports.validateField = exports.isNegativeLE = exports.FpSqrt = exports.tonelliShanks = exports.invert = exports.pow2 = exports.pow = exports.mod = void 0; +/*! 
noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// Utilities for modular arithmetics and finite fields +const utils_js_1 = __webpack_require__(91484); +// prettier-ignore +const _0n = BigInt(0), _1n = BigInt(1), _2n = BigInt(2), _3n = BigInt(3); +// prettier-ignore +const _4n = BigInt(4), _5n = BigInt(5), _8n = BigInt(8); +// prettier-ignore +const _9n = BigInt(9), _16n = BigInt(16); +// Calculates a modulo b +function mod(a, b) { + const result = a % b; + return result >= _0n ? result : b + result; +} +exports.mod = mod; +/** + * Efficiently raise num to power and do modular division. + * Unsafe in some contexts: uses ladder, so can expose bigint bits. + * @example + * pow(2n, 6n, 11n) // 64n % 11n == 9n + */ +// TODO: use field version && remove +function pow(num, power, modulo) { + if (modulo <= _0n || power < _0n) + throw new Error('Expected power/modulo > 0'); + if (modulo === _1n) + return _0n; + let res = _1n; + while (power > _0n) { + if (power & _1n) + res = (res * num) % modulo; + num = (num * num) % modulo; + power >>= _1n; + } + return res; +} +exports.pow = pow; +// Does x ^ (2 ^ power) mod p. pow2(30, 4) == 30 ^ (2 ^ 4) +function pow2(x, power, modulo) { + let res = x; + while (power-- > _0n) { + res *= res; + res %= modulo; + } + return res; +} +exports.pow2 = pow2; +// Inverses number over modulo +function invert(number, modulo) { + if (number === _0n || modulo <= _0n) { + throw new Error(`invert: expected positive integers, got n=${number} mod=${modulo}`); + } + // Euclidean GCD https://brilliant.org/wiki/extended-euclidean-algorithm/ + // Fermat's little theorem "CT-like" version inv(n) = n^(m-2) mod m is 30x slower. 
+ let a = mod(number, modulo); + let b = modulo; + // prettier-ignore + let x = _0n, y = _1n, u = _1n, v = _0n; + while (a !== _0n) { + // JIT applies optimization if those two lines follow each other + const q = b / a; + const r = b % a; + const m = x - u * q; + const n = y - v * q; + // prettier-ignore + b = a, a = r, x = u, y = v, u = m, v = n; + } + const gcd = b; + if (gcd !== _1n) + throw new Error('invert: does not exist'); + return mod(x, modulo); +} +exports.invert = invert; +/** + * Tonelli-Shanks square root search algorithm. + * 1. https://eprint.iacr.org/2012/685.pdf (page 12) + * 2. Square Roots from 1; 24, 51, 10 to Dan Shanks + * Will start an infinite loop if field order P is not prime. + * @param P field order + * @returns function that takes field Fp (created from P) and number n + */ +function tonelliShanks(P) { + // Legendre constant: used to calculate Legendre symbol (a | p), + // which denotes the value of a^((p-1)/2) (mod p). + // (a | p) ≡ 1 if a is a square (mod p) + // (a | p) ≡ -1 if a is not a square (mod p) + // (a | p) ≡ 0 if a ≡ 0 (mod p) + const legendreC = (P - _1n) / _2n; + let Q, S, Z; + // Step 1: By factoring out powers of 2 from p - 1, + // find q and s such that p - 1 = q*(2^s) with q odd + for (Q = P - _1n, S = 0; Q % _2n === _0n; Q /= _2n, S++) + ; + // Step 2: Select a non-square z such that (z | p) ≡ -1 and set c ≡ zq + for (Z = _2n; Z < P && pow(Z, legendreC, P) !== P - _1n; Z++) + ; + // Fast-path + if (S === 1) { + const p1div4 = (P + _1n) / _4n; + return function tonelliFast(Fp, n) { + const root = Fp.pow(n, p1div4); + if (!Fp.eql(Fp.sqr(root), n)) + throw new Error('Cannot find square root'); + return root; + }; + } + // Slow-path + const Q1div2 = (Q + _1n) / _2n; + return function tonelliSlow(Fp, n) { + // Step 0: Check that n is indeed a square: (n | p) should not be ≡ -1 + if (Fp.pow(n, legendreC) === Fp.neg(Fp.ONE)) + throw new Error('Cannot find square root'); + let r = S; + // TODO: will fail at Fp2/etc + let g 
= Fp.pow(Fp.mul(Fp.ONE, Z), Q); // will update both x and b + let x = Fp.pow(n, Q1div2); // first guess at the square root + let b = Fp.pow(n, Q); // first guess at the fudge factor + while (!Fp.eql(b, Fp.ONE)) { + if (Fp.eql(b, Fp.ZERO)) + return Fp.ZERO; // https://en.wikipedia.org/wiki/Tonelli%E2%80%93Shanks_algorithm (4. If t = 0, return r = 0) + // Find m such b^(2^m)==1 + let m = 1; + for (let t2 = Fp.sqr(b); m < r; m++) { + if (Fp.eql(t2, Fp.ONE)) + break; + t2 = Fp.sqr(t2); // t2 *= t2 + } + // NOTE: r-m-1 can be bigger than 32, need to convert to bigint before shift, otherwise there will be overflow + const ge = Fp.pow(g, _1n << BigInt(r - m - 1)); // ge = 2^(r-m-1) + g = Fp.sqr(ge); // g = ge * ge + x = Fp.mul(x, ge); // x *= ge + b = Fp.mul(b, g); // b *= g + r = m; + } + return x; + }; +} +exports.tonelliShanks = tonelliShanks; +function FpSqrt(P) { + // NOTE: different algorithms can give different roots, it is up to user to decide which one they want. + // For example there is FpSqrtOdd/FpSqrtEven to choice root based on oddness (used for hash-to-curve). + // P ≡ 3 (mod 4) + // √n = n^((P+1)/4) + if (P % _4n === _3n) { + // Not all roots possible! 
+ // const ORDER = + // 0x1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaabn; + // const NUM = 72057594037927816n; + const p1div4 = (P + _1n) / _4n; + return function sqrt3mod4(Fp, n) { + const root = Fp.pow(n, p1div4); + // Throw if root**2 != n + if (!Fp.eql(Fp.sqr(root), n)) + throw new Error('Cannot find square root'); + return root; + }; + } + // Atkin algorithm for q ≡ 5 (mod 8), https://eprint.iacr.org/2012/685.pdf (page 10) + if (P % _8n === _5n) { + const c1 = (P - _5n) / _8n; + return function sqrt5mod8(Fp, n) { + const n2 = Fp.mul(n, _2n); + const v = Fp.pow(n2, c1); + const nv = Fp.mul(n, v); + const i = Fp.mul(Fp.mul(nv, _2n), v); + const root = Fp.mul(nv, Fp.sub(i, Fp.ONE)); + if (!Fp.eql(Fp.sqr(root), n)) + throw new Error('Cannot find square root'); + return root; + }; + } + // P ≡ 9 (mod 16) + if (P % _16n === _9n) { + // NOTE: tonelli is too slow for bls-Fp2 calculations even on start + // Means we cannot use sqrt for constants at all! + // + // const c1 = Fp.sqrt(Fp.negate(Fp.ONE)); // 1. c1 = sqrt(-1) in F, i.e., (c1^2) == -1 in F + // const c2 = Fp.sqrt(c1); // 2. c2 = sqrt(c1) in F, i.e., (c2^2) == c1 in F + // const c3 = Fp.sqrt(Fp.negate(c1)); // 3. c3 = sqrt(-c1) in F, i.e., (c3^2) == -c1 in F + // const c4 = (P + _7n) / _16n; // 4. c4 = (q + 7) / 16 # Integer arithmetic + // sqrt = (x) => { + // let tv1 = Fp.pow(x, c4); // 1. tv1 = x^c4 + // let tv2 = Fp.mul(c1, tv1); // 2. tv2 = c1 * tv1 + // const tv3 = Fp.mul(c2, tv1); // 3. tv3 = c2 * tv1 + // let tv4 = Fp.mul(c3, tv1); // 4. tv4 = c3 * tv1 + // const e1 = Fp.equals(Fp.square(tv2), x); // 5. e1 = (tv2^2) == x + // const e2 = Fp.equals(Fp.square(tv3), x); // 6. e2 = (tv3^2) == x + // tv1 = Fp.cmov(tv1, tv2, e1); // 7. tv1 = CMOV(tv1, tv2, e1) # Select tv2 if (tv2^2) == x + // tv2 = Fp.cmov(tv4, tv3, e2); // 8. tv2 = CMOV(tv4, tv3, e2) # Select tv3 if (tv3^2) == x + // const e3 = Fp.equals(Fp.square(tv2), x); // 9. 
e3 = (tv2^2) == x + // return Fp.cmov(tv1, tv2, e3); // 10. z = CMOV(tv1, tv2, e3) # Select the sqrt from tv1 and tv2 + // } + } + // Other cases: Tonelli-Shanks algorithm + return tonelliShanks(P); +} +exports.FpSqrt = FpSqrt; +// Little-endian check for first LE bit (last BE bit); +const isNegativeLE = (num, modulo) => (mod(num, modulo) & _1n) === _1n; +exports.isNegativeLE = isNegativeLE; +// prettier-ignore +const FIELD_FIELDS = [ + 'create', 'isValid', 'is0', 'neg', 'inv', 'sqrt', 'sqr', + 'eql', 'add', 'sub', 'mul', 'pow', 'div', + 'addN', 'subN', 'mulN', 'sqrN' +]; +function validateField(field) { + const initial = { + ORDER: 'bigint', + MASK: 'bigint', + BYTES: 'isSafeInteger', + BITS: 'isSafeInteger', + }; + const opts = FIELD_FIELDS.reduce((map, val) => { + map[val] = 'function'; + return map; + }, initial); + return (0, utils_js_1.validateObject)(field, opts); +} +exports.validateField = validateField; +// Generic field functions +/** + * Same as `pow` but for Fp: non-constant-time. + * Unsafe in some contexts: uses ladder, so can expose bigint bits. + */ +function FpPow(f, num, power) { + // Should have same speed as pow for bigints + // TODO: benchmark! + if (power < _0n) + throw new Error('Expected power > 0'); + if (power === _0n) + return f.ONE; + if (power === _1n) + return num; + let p = f.ONE; + let d = num; + while (power > _0n) { + if (power & _1n) + p = f.mul(p, d); + d = f.sqr(d); + power >>= _1n; + } + return p; +} +exports.FpPow = FpPow; +/** + * Efficiently invert an array of Field elements. + * `inv(0)` will return `undefined` here: make sure to throw an error. 
+ */ +function FpInvertBatch(f, nums) { + const tmp = new Array(nums.length); + // Walk from first to last, multiply them by each other MOD p + const lastMultiplied = nums.reduce((acc, num, i) => { + if (f.is0(num)) + return acc; + tmp[i] = acc; + return f.mul(acc, num); + }, f.ONE); + // Invert last element + const inverted = f.inv(lastMultiplied); + // Walk from last to first, multiply them by inverted each other MOD p + nums.reduceRight((acc, num, i) => { + if (f.is0(num)) + return acc; + tmp[i] = f.mul(acc, tmp[i]); + return f.mul(acc, num); + }, inverted); + return tmp; +} +exports.FpInvertBatch = FpInvertBatch; +function FpDiv(f, lhs, rhs) { + return f.mul(lhs, typeof rhs === 'bigint' ? invert(rhs, f.ORDER) : f.inv(rhs)); +} +exports.FpDiv = FpDiv; +// This function returns True whenever the value x is a square in the field F. +function FpIsSquare(f) { + const legendreConst = (f.ORDER - _1n) / _2n; // Integer arithmetic + return (x) => { + const p = f.pow(x, legendreConst); + return f.eql(p, f.ZERO) || f.eql(p, f.ONE); + }; +} +exports.FpIsSquare = FpIsSquare; +// CURVE.n lengths +function nLength(n, nBitLength) { + // Bit size, byte size of CURVE.n + const _nBitLength = nBitLength !== undefined ? nBitLength : n.toString(2).length; + const nByteLength = Math.ceil(_nBitLength / 8); + return { nBitLength: _nBitLength, nByteLength }; +} +exports.nLength = nLength; +/** + * Initializes a finite field over prime. **Non-primes are not supported.** + * Do not init in loop: slow. Very fragile: always run a benchmark on a change. 
+ * Major performance optimizations: + * * a) denormalized operations like mulN instead of mul + * * b) same object shape: never add or remove keys + * * c) Object.freeze + * @param ORDER prime positive bigint + * @param bitLen how many bits the field consumes + * @param isLE (def: false) if encoding / decoding should be in little-endian + * @param redef optional faster redefinitions of sqrt and other methods + */ +function Field(ORDER, bitLen, isLE = false, redef = {}) { + if (ORDER <= _0n) + throw new Error(`Expected Field ORDER > 0, got ${ORDER}`); + const { nBitLength: BITS, nByteLength: BYTES } = nLength(ORDER, bitLen); + if (BYTES > 2048) + throw new Error('Field lengths over 2048 bytes are not supported'); + const sqrtP = FpSqrt(ORDER); + const f = Object.freeze({ + ORDER, + BITS, + BYTES, + MASK: (0, utils_js_1.bitMask)(BITS), + ZERO: _0n, + ONE: _1n, + create: (num) => mod(num, ORDER), + isValid: (num) => { + if (typeof num !== 'bigint') + throw new Error(`Invalid field element: expected bigint, got ${typeof num}`); + return _0n <= num && num < ORDER; // 0 is valid element, but it's not invertible + }, + is0: (num) => num === _0n, + isOdd: (num) => (num & _1n) === _1n, + neg: (num) => mod(-num, ORDER), + eql: (lhs, rhs) => lhs === rhs, + sqr: (num) => mod(num * num, ORDER), + add: (lhs, rhs) => mod(lhs + rhs, ORDER), + sub: (lhs, rhs) => mod(lhs - rhs, ORDER), + mul: (lhs, rhs) => mod(lhs * rhs, ORDER), + pow: (num, power) => FpPow(f, num, power), + div: (lhs, rhs) => mod(lhs * invert(rhs, ORDER), ORDER), + // Same as above, but doesn't normalize + sqrN: (num) => num * num, + addN: (lhs, rhs) => lhs + rhs, + subN: (lhs, rhs) => lhs - rhs, + mulN: (lhs, rhs) => lhs * rhs, + inv: (num) => invert(num, ORDER), + sqrt: redef.sqrt || ((n) => sqrtP(f, n)), + invertBatch: (lst) => FpInvertBatch(f, lst), + // TODO: do we really need constant cmov? + // We don't have const-time bigints anyway, so probably will be not very useful + cmov: (a, b, c) => (c ? 
b : a), + toBytes: (num) => (isLE ? (0, utils_js_1.numberToBytesLE)(num, BYTES) : (0, utils_js_1.numberToBytesBE)(num, BYTES)), + fromBytes: (bytes) => { + if (bytes.length !== BYTES) + throw new Error(`Fp.fromBytes: expected ${BYTES}, got ${bytes.length}`); + return isLE ? (0, utils_js_1.bytesToNumberLE)(bytes) : (0, utils_js_1.bytesToNumberBE)(bytes); + }, + }); + return Object.freeze(f); +} +exports.Field = Field; +function FpSqrtOdd(Fp, elm) { + if (!Fp.isOdd) + throw new Error(`Field doesn't have isOdd`); + const root = Fp.sqrt(elm); + return Fp.isOdd(root) ? root : Fp.neg(root); +} +exports.FpSqrtOdd = FpSqrtOdd; +function FpSqrtEven(Fp, elm) { + if (!Fp.isOdd) + throw new Error(`Field doesn't have isOdd`); + const root = Fp.sqrt(elm); + return Fp.isOdd(root) ? Fp.neg(root) : root; +} +exports.FpSqrtEven = FpSqrtEven; +/** + * "Constant-time" private key generation utility. + * Same as mapKeyToField, but accepts less bytes (40 instead of 48 for 32-byte field). + * Which makes it slightly more biased, less secure. + * @deprecated use mapKeyToField instead + */ +function hashToPrivateScalar(hash, groupOrder, isLE = false) { + hash = (0, utils_js_1.ensureBytes)('privateHash', hash); + const hashLen = hash.length; + const minLen = nLength(groupOrder).nByteLength + 8; + if (minLen < 24 || hashLen < minLen || hashLen > 1024) + throw new Error(`hashToPrivateScalar: expected ${minLen}-1024 bytes of input, got ${hashLen}`); + const num = isLE ? (0, utils_js_1.bytesToNumberLE)(hash) : (0, utils_js_1.bytesToNumberBE)(hash); + return mod(num, groupOrder - _1n) + _1n; +} +exports.hashToPrivateScalar = hashToPrivateScalar; +/** + * Returns total number of bytes consumed by the field element. + * For example, 32 bytes for usual 256-bit weierstrass curve. 
+ * @param fieldOrder number of field elements, usually CURVE.n + * @returns byte length of field + */ +function getFieldBytesLength(fieldOrder) { + if (typeof fieldOrder !== 'bigint') + throw new Error('field order must be bigint'); + const bitLength = fieldOrder.toString(2).length; + return Math.ceil(bitLength / 8); +} +exports.getFieldBytesLength = getFieldBytesLength; +/** + * Returns minimal amount of bytes that can be safely reduced + * by field order. + * Should be 2^-128 for 128-bit curve such as P256. + * @param fieldOrder number of field elements, usually CURVE.n + * @returns byte length of target hash + */ +function getMinHashLength(fieldOrder) { + const length = getFieldBytesLength(fieldOrder); + return length + Math.ceil(length / 2); +} +exports.getMinHashLength = getMinHashLength; +/** + * "Constant-time" private key generation utility. + * Can take (n + n/2) or more bytes of uniform input e.g. from CSPRNG or KDF + * and convert them into private scalar, with the modulo bias being negligible. + * Needs at least 48 bytes of input for 32-byte private key. + * https://research.kudelskisecurity.com/2020/07/28/the-definitive-guide-to-modulo-bias-and-how-to-avoid-it/ + * FIPS 186-5, A.2 https://csrc.nist.gov/publications/detail/fips/186/5/final + * RFC 9380, https://www.rfc-editor.org/rfc/rfc9380#section-5 + * @param hash hash output from SHA3 or a similar function + * @param groupOrder size of subgroup - (e.g. secp256k1.CURVE.n) + * @param isLE interpret hash bytes as LE num + * @returns valid private scalar + */ +function mapHashToField(key, fieldOrder, isLE = false) { + const len = key.length; + const fieldLen = getFieldBytesLength(fieldOrder); + const minLen = getMinHashLength(fieldOrder); + // No small numbers: need to understand bias story. No huge numbers: easier to detect JS timings. + if (len < 16 || len < minLen || len > 1024) + throw new Error(`expected ${minLen}-1024 bytes of input, got ${len}`); + const num = isLE ? 
(0, utils_js_1.bytesToNumberBE)(key) : (0, utils_js_1.bytesToNumberLE)(key); + // `mod(x, 11)` can sometimes produce 0. `mod(x, 10) + 1` is the same, but no 0 + const reduced = mod(num, fieldOrder - _1n) + _1n; + return isLE ? (0, utils_js_1.numberToBytesLE)(reduced, fieldLen) : (0, utils_js_1.numberToBytesBE)(reduced, fieldLen); +} +exports.mapHashToField = mapHashToField; +//# sourceMappingURL=modular.js.map + +/***/ }), + +/***/ 91484: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.validateObject = exports.createHmacDrbg = exports.bitMask = exports.bitSet = exports.bitGet = exports.bitLen = exports.utf8ToBytes = exports.equalBytes = exports.concatBytes = exports.ensureBytes = exports.numberToVarBytesBE = exports.numberToBytesLE = exports.numberToBytesBE = exports.bytesToNumberLE = exports.bytesToNumberBE = exports.hexToBytes = exports.hexToNumber = exports.numberToHexUnpadded = exports.bytesToHex = exports.isBytes = void 0; +/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// 100 lines of code in the file are duplicated from noble-hashes (utils). +// This is OK: `abstract` directory does not use noble-hashes. +// User may opt-in into using different hashing library. This way, noble-hashes +// won't be included into their bundle. 
+const _0n = BigInt(0); +const _1n = BigInt(1); +const _2n = BigInt(2); +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +exports.isBytes = isBytes; +// Array where index 0xf0 (240) is mapped to string 'f0' +const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); +/** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ +function bytesToHex(bytes) { + if (!isBytes(bytes)) + throw new Error('Uint8Array expected'); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; +} +exports.bytesToHex = bytesToHex; +function numberToHexUnpadded(num) { + const hex = num.toString(16); + return hex.length & 1 ? `0${hex}` : hex; +} +exports.numberToHexUnpadded = numberToHexUnpadded; +function hexToNumber(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + // Big Endian + return BigInt(hex === '' ? 
'0' : `0x${hex}`); +} +exports.hexToNumber = hexToNumber; +// We use optimized technique to convert hex string to byte array +const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 }; +function asciiToBase16(char) { + if (char >= asciis._0 && char <= asciis._9) + return char - asciis._0; + if (char >= asciis._A && char <= asciis._F) + return char - (asciis._A - 10); + if (char >= asciis._a && char <= asciis._f) + return char - (asciis._a - 10); + return; +} +/** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ +function hexToBytes(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const hl = hex.length; + const al = hl / 2; + if (hl % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + hl); + const array = new Uint8Array(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = asciiToBase16(hex.charCodeAt(hi)); + const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); + if (n1 === undefined || n2 === undefined) { + const char = hex[hi] + hex[hi + 1]; + throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); + } + array[ai] = n1 * 16 + n2; + } + return array; +} +exports.hexToBytes = hexToBytes; +// BE: Big Endian, LE: Little Endian +function bytesToNumberBE(bytes) { + return hexToNumber(bytesToHex(bytes)); +} +exports.bytesToNumberBE = bytesToNumberBE; +function bytesToNumberLE(bytes) { + if (!isBytes(bytes)) + throw new Error('Uint8Array expected'); + return hexToNumber(bytesToHex(Uint8Array.from(bytes).reverse())); +} +exports.bytesToNumberLE = bytesToNumberLE; +function numberToBytesBE(n, len) { + return hexToBytes(n.toString(16).padStart(len * 2, '0')); +} +exports.numberToBytesBE = numberToBytesBE; +function numberToBytesLE(n, len) { + return numberToBytesBE(n, len).reverse(); +} +exports.numberToBytesLE = numberToBytesLE; +// Unpadded, rarely used +function numberToVarBytesBE(n) { + return 
hexToBytes(numberToHexUnpadded(n)); +} +exports.numberToVarBytesBE = numberToVarBytesBE; +/** + * Takes hex string or Uint8Array, converts to Uint8Array. + * Validates output length. + * Will throw error for other types. + * @param title descriptive title for an error e.g. 'private key' + * @param hex hex string or Uint8Array + * @param expectedLength optional, will compare to result array's length + * @returns + */ +function ensureBytes(title, hex, expectedLength) { + let res; + if (typeof hex === 'string') { + try { + res = hexToBytes(hex); + } + catch (e) { + throw new Error(`${title} must be valid hex string, got "${hex}". Cause: ${e}`); + } + } + else if (isBytes(hex)) { + // Uint8Array.from() instead of hash.slice() because node.js Buffer + // is instance of Uint8Array, and its slice() creates **mutable** copy + res = Uint8Array.from(hex); + } + else { + throw new Error(`${title} must be hex string or Uint8Array`); + } + const len = res.length; + if (typeof expectedLength === 'number' && len !== expectedLength) + throw new Error(`${title} expected ${expectedLength} bytes, got ${len}`); + return res; +} +exports.ensureBytes = ensureBytes; +/** + * Copies several Uint8Arrays into one. 
+ */ +function concatBytes(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + if (!isBytes(a)) + throw new Error('Uint8Array expected'); + sum += a.length; + } + let res = new Uint8Array(sum); + let pad = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; +} +exports.concatBytes = concatBytes; +// Compares 2 u8a-s in kinda constant time +function equalBytes(a, b) { + if (a.length !== b.length) + return false; + let diff = 0; + for (let i = 0; i < a.length; i++) + diff |= a[i] ^ b[i]; + return diff === 0; +} +exports.equalBytes = equalBytes; +/** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ +function utf8ToBytes(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 +} +exports.utf8ToBytes = utf8ToBytes; +// Bit operations +/** + * Calculates amount of bits in a bigint. + * Same as `n.toString(2).length` + */ +function bitLen(n) { + let len; + for (len = 0; n > _0n; n >>= _1n, len += 1) + ; + return len; +} +exports.bitLen = bitLen; +/** + * Gets single bit at position. + * NOTE: first bit position is 0 (same as arrays) + * Same as `!!+Array.from(n.toString(2)).reverse()[pos]` + */ +function bitGet(n, pos) { + return (n >> BigInt(pos)) & _1n; +} +exports.bitGet = bitGet; +/** + * Sets single bit at position. + */ +const bitSet = (n, pos, value) => { + return n | ((value ? _1n : _0n) << BigInt(pos)); +}; +exports.bitSet = bitSet; +/** + * Calculate mask for N bits. Not using ** operator with bigints because of old engines. 
+ * Same as BigInt(`0b${Array(i).fill('1').join('')}`) + */ +const bitMask = (n) => (_2n << BigInt(n - 1)) - _1n; +exports.bitMask = bitMask; +// DRBG +const u8n = (data) => new Uint8Array(data); // creates Uint8Array +const u8fr = (arr) => Uint8Array.from(arr); // another shortcut +/** + * Minimal HMAC-DRBG from NIST 800-90 for RFC6979 sigs. + * @returns function that will call DRBG until 2nd arg returns something meaningful + * @example + * const drbg = createHmacDRBG(32, 32, hmac); + * drbg(seed, bytesToKey); // bytesToKey must return Key or undefined + */ +function createHmacDrbg(hashLen, qByteLen, hmacFn) { + if (typeof hashLen !== 'number' || hashLen < 2) + throw new Error('hashLen must be a number'); + if (typeof qByteLen !== 'number' || qByteLen < 2) + throw new Error('qByteLen must be a number'); + if (typeof hmacFn !== 'function') + throw new Error('hmacFn must be a function'); + // Step B, Step C: set hashLen to 8*ceil(hlen/8) + let v = u8n(hashLen); // Minimal non-full-spec HMAC-DRBG from NIST 800-90 for RFC6979 sigs. + let k = u8n(hashLen); // Steps B and C of RFC6979 3.2: set hashLen, in our case always same + let i = 0; // Iterations counter, will throw when over 1000 + const reset = () => { + v.fill(1); + k.fill(0); + i = 0; + }; + const h = (...b) => hmacFn(k, v, ...b); // hmac(k)(v, ...values) + const reseed = (seed = u8n()) => { + // HMAC-DRBG reseed() function. 
Steps D-G + k = h(u8fr([0x00]), seed); // k = hmac(k || v || 0x00 || seed) + v = h(); // v = hmac(k || v) + if (seed.length === 0) + return; + k = h(u8fr([0x01]), seed); // k = hmac(k || v || 0x01 || seed) + v = h(); // v = hmac(k || v) + }; + const gen = () => { + // HMAC-DRBG generate() function + if (i++ >= 1000) + throw new Error('drbg: tried 1000 values'); + let len = 0; + const out = []; + while (len < qByteLen) { + v = h(); + const sl = v.slice(); + out.push(sl); + len += v.length; + } + return concatBytes(...out); + }; + const genUntil = (seed, pred) => { + reset(); + reseed(seed); // Steps D-G + let res = undefined; // Step H: grind until k is in [1..n-1] + while (!(res = pred(gen()))) + reseed(); + reset(); + return res; + }; + return genUntil; +} +exports.createHmacDrbg = createHmacDrbg; +// Validating curves and fields +const validatorFns = { + bigint: (val) => typeof val === 'bigint', + function: (val) => typeof val === 'function', + boolean: (val) => typeof val === 'boolean', + string: (val) => typeof val === 'string', + stringOrUint8Array: (val) => typeof val === 'string' || isBytes(val), + isSafeInteger: (val) => Number.isSafeInteger(val), + array: (val) => Array.isArray(val), + field: (val, object) => object.Fp.isValid(val), + hash: (val) => typeof val === 'function' && Number.isSafeInteger(val.outputLen), +}; +// type Record = { [P in K]: T; } +function validateObject(object, validators, optValidators = {}) { + const checkField = (fieldName, type, isOptional) => { + const checkVal = validatorFns[type]; + if (typeof checkVal !== 'function') + throw new Error(`Invalid validator "${type}", expected function`); + const val = object[fieldName]; + if (isOptional && val === undefined) + return; + if (!checkVal(val, object)) { + throw new Error(`Invalid param ${String(fieldName)}=${val} (${typeof val}), expected ${type}`); + } + }; + for (const [fieldName, type] of Object.entries(validators)) + checkField(fieldName, type, false); + for (const [fieldName, 
type] of Object.entries(optValidators)) + checkField(fieldName, type, true); + return object; +} +exports.validateObject = validateObject; +// validate type tests +// const o: { a: number; b: number; c: number } = { a: 1, b: 5, c: 6 }; +// const z0 = validateObject(o, { a: 'isSafeInteger' }, { c: 'bigint' }); // Ok! +// // Should fail type-check +// const z1 = validateObject(o, { a: 'tmp' }, { c: 'zz' }); +// const z2 = validateObject(o, { a: 'isSafeInteger' }, { c: 'zz' }); +// const z3 = validateObject(o, { test: 'boolean', z: 'bug' }); +// const z4 = validateObject(o, { a: 'boolean', z: 'bug' }); +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 91705: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.mapToCurveSimpleSWU = exports.SWUFpSqrtRatio = exports.weierstrass = exports.weierstrassPoints = exports.DER = void 0; +/*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +// Short Weierstrass curve. 
The formula is: y² = x³ + ax + b +const mod = __webpack_require__(24967); +const ut = __webpack_require__(91484); +const utils_js_1 = __webpack_require__(91484); +const curve_js_1 = __webpack_require__(62422); +function validatePointOpts(curve) { + const opts = (0, curve_js_1.validateBasic)(curve); + ut.validateObject(opts, { + a: 'field', + b: 'field', + }, { + allowedPrivateKeyLengths: 'array', + wrapPrivateKey: 'boolean', + isTorsionFree: 'function', + clearCofactor: 'function', + allowInfinityPoint: 'boolean', + fromBytes: 'function', + toBytes: 'function', + }); + const { endo, Fp, a } = opts; + if (endo) { + if (!Fp.eql(a, Fp.ZERO)) { + throw new Error('Endomorphism can only be defined for Koblitz curves that have a=0'); + } + if (typeof endo !== 'object' || + typeof endo.beta !== 'bigint' || + typeof endo.splitScalar !== 'function') { + throw new Error('Expected endomorphism with beta: bigint and splitScalar: function'); + } + } + return Object.freeze({ ...opts }); +} +// ASN.1 DER encoding utilities +const { bytesToNumberBE: b2n, hexToBytes: h2b } = ut; +exports.DER = { + // asn.1 DER encoding utils + Err: class DERErr extends Error { + constructor(m = '') { + super(m); + } + }, + _parseInt(data) { + const { Err: E } = exports.DER; + if (data.length < 2 || data[0] !== 0x02) + throw new E('Invalid signature integer tag'); + const len = data[1]; + const res = data.subarray(2, len + 2); + if (!len || res.length !== len) + throw new E('Invalid signature integer: wrong length'); + // https://crypto.stackexchange.com/a/57734 Leftmost bit of first byte is 'negative' flag, + // since we always use positive integers here. 
It must always be empty: + // - add zero byte if exists + // - if next byte doesn't have a flag, leading zero is not allowed (minimal encoding) + if (res[0] & 0b10000000) + throw new E('Invalid signature integer: negative'); + if (res[0] === 0x00 && !(res[1] & 0b10000000)) + throw new E('Invalid signature integer: unnecessary leading zero'); + return { d: b2n(res), l: data.subarray(len + 2) }; // d is data, l is left + }, + toSig(hex) { + // parse DER signature + const { Err: E } = exports.DER; + const data = typeof hex === 'string' ? h2b(hex) : hex; + if (!ut.isBytes(data)) + throw new Error('ui8a expected'); + let l = data.length; + if (l < 2 || data[0] != 0x30) + throw new E('Invalid signature tag'); + if (data[1] !== l - 2) + throw new E('Invalid signature: incorrect length'); + const { d: r, l: sBytes } = exports.DER._parseInt(data.subarray(2)); + const { d: s, l: rBytesLeft } = exports.DER._parseInt(sBytes); + if (rBytesLeft.length) + throw new E('Invalid signature: left bytes after parsing'); + return { r, s }; + }, + hexFromSig(sig) { + // Add leading zero if first byte has negative bit enabled. More details in '_parseInt' + const slice = (s) => (Number.parseInt(s[0], 16) & 0b1000 ? '00' + s : s); + const h = (num) => { + const hex = num.toString(16); + return hex.length & 1 ? 
`0${hex}` : hex; + }; + const s = slice(h(sig.s)); + const r = slice(h(sig.r)); + const shl = s.length / 2; + const rhl = r.length / 2; + const sl = h(shl); + const rl = h(rhl); + return `30${h(rhl + shl + 4)}02${rl}${r}02${sl}${s}`; + }, +}; +// Be friendly to bad ECMAScript parsers by not using bigint literals +// prettier-ignore +const _0n = BigInt(0), _1n = BigInt(1), _2n = BigInt(2), _3n = BigInt(3), _4n = BigInt(4); +function weierstrassPoints(opts) { + const CURVE = validatePointOpts(opts); + const { Fp } = CURVE; // All curves has same field / group length as for now, but they can differ + const toBytes = CURVE.toBytes || + ((_c, point, _isCompressed) => { + const a = point.toAffine(); + return ut.concatBytes(Uint8Array.from([0x04]), Fp.toBytes(a.x), Fp.toBytes(a.y)); + }); + const fromBytes = CURVE.fromBytes || + ((bytes) => { + // const head = bytes[0]; + const tail = bytes.subarray(1); + // if (head !== 0x04) throw new Error('Only non-compressed encoding is supported'); + const x = Fp.fromBytes(tail.subarray(0, Fp.BYTES)); + const y = Fp.fromBytes(tail.subarray(Fp.BYTES, 2 * Fp.BYTES)); + return { x, y }; + }); + /** + * y² = x³ + ax + b: Short weierstrass curve formula + * @returns y² + */ + function weierstrassEquation(x) { + const { a, b } = CURVE; + const x2 = Fp.sqr(x); // x * x + const x3 = Fp.mul(x2, x); // x2 * x + return Fp.add(Fp.add(x3, Fp.mul(x, a)), b); // x3 + a * x + b + } + // Validate whether the passed curve params are valid. + // We check if curve equation works for generator point. + // `assertValidity()` won't work: `isTorsionFree()` is not available at this point in bls12-381. + // ProjectivePoint class has not been initialized yet. 
+ if (!Fp.eql(Fp.sqr(CURVE.Gy), weierstrassEquation(CURVE.Gx))) + throw new Error('bad generator point: equation left != right'); + // Valid group elements reside in range 1..n-1 + function isWithinCurveOrder(num) { + return typeof num === 'bigint' && _0n < num && num < CURVE.n; + } + function assertGE(num) { + if (!isWithinCurveOrder(num)) + throw new Error('Expected valid bigint: 0 < bigint < curve.n'); + } + // Validates if priv key is valid and converts it to bigint. + // Supports options allowedPrivateKeyLengths and wrapPrivateKey. + function normPrivateKeyToScalar(key) { + const { allowedPrivateKeyLengths: lengths, nByteLength, wrapPrivateKey, n } = CURVE; + if (lengths && typeof key !== 'bigint') { + if (ut.isBytes(key)) + key = ut.bytesToHex(key); + // Normalize to hex string, pad. E.g. P521 would norm 130-132 char hex to 132-char bytes + if (typeof key !== 'string' || !lengths.includes(key.length)) + throw new Error('Invalid key'); + key = key.padStart(nByteLength * 2, '0'); + } + let num; + try { + num = + typeof key === 'bigint' + ? key + : ut.bytesToNumberBE((0, utils_js_1.ensureBytes)('private key', key, nByteLength)); + } + catch (error) { + throw new Error(`private key must be ${nByteLength} bytes, hex or bigint, not ${typeof key}`); + } + if (wrapPrivateKey) + num = mod.mod(num, n); // disabled by default, enabled for BLS + assertGE(num); // num in range [1..N-1] + return num; + } + const pointPrecomputes = new Map(); + function assertPrjPoint(other) { + if (!(other instanceof Point)) + throw new Error('ProjectivePoint expected'); + } + /** + * Projective Point works in 3d / projective (homogeneous) coordinates: (x, y, z) ∋ (x=x/z, y=y/z) + * Default Point works in 2d / affine coordinates: (x, y) + * We're doing calculations in projective, because its operations don't require costly inversion. 
+ */ + class Point { + constructor(px, py, pz) { + this.px = px; + this.py = py; + this.pz = pz; + if (px == null || !Fp.isValid(px)) + throw new Error('x required'); + if (py == null || !Fp.isValid(py)) + throw new Error('y required'); + if (pz == null || !Fp.isValid(pz)) + throw new Error('z required'); + } + // Does not validate if the point is on-curve. + // Use fromHex instead, or call assertValidity() later. + static fromAffine(p) { + const { x, y } = p || {}; + if (!p || !Fp.isValid(x) || !Fp.isValid(y)) + throw new Error('invalid affine point'); + if (p instanceof Point) + throw new Error('projective point not allowed'); + const is0 = (i) => Fp.eql(i, Fp.ZERO); + // fromAffine(x:0, y:0) would produce (x:0, y:0, z:1), but we need (x:0, y:1, z:0) + if (is0(x) && is0(y)) + return Point.ZERO; + return new Point(x, y, Fp.ONE); + } + get x() { + return this.toAffine().x; + } + get y() { + return this.toAffine().y; + } + /** + * Takes a bunch of Projective Points but executes only one + * inversion on all of them. Inversion is very slow operation, + * so this improves performance massively. + * Optimization: converts a list of projective points to a list of identical points with Z=1. + */ + static normalizeZ(points) { + const toInv = Fp.invertBatch(points.map((p) => p.pz)); + return points.map((p, i) => p.toAffine(toInv[i])).map(Point.fromAffine); + } + /** + * Converts hash string or Uint8Array to Point. + * @param hex short/long ECDSA hex + */ + static fromHex(hex) { + const P = Point.fromAffine(fromBytes((0, utils_js_1.ensureBytes)('pointHex', hex))); + P.assertValidity(); + return P; + } + // Multiplies generator point by privateKey. + static fromPrivateKey(privateKey) { + return Point.BASE.multiply(normPrivateKeyToScalar(privateKey)); + } + // "Private method", don't use it directly + _setWindowSize(windowSize) { + this._WINDOW_SIZE = windowSize; + pointPrecomputes.delete(this); + } + // A point on curve is valid if it conforms to equation. 
+ assertValidity() { + if (this.is0()) { + // (0, 1, 0) aka ZERO is invalid in most contexts. + // In BLS, ZERO can be serialized, so we allow it. + // (0, 0, 0) is wrong representation of ZERO and is always invalid. + if (CURVE.allowInfinityPoint && !Fp.is0(this.py)) + return; + throw new Error('bad point: ZERO'); + } + // Some 3rd-party test vectors require different wording between here & `fromCompressedHex` + const { x, y } = this.toAffine(); + // Check if x, y are valid field elements + if (!Fp.isValid(x) || !Fp.isValid(y)) + throw new Error('bad point: x or y not FE'); + const left = Fp.sqr(y); // y² + const right = weierstrassEquation(x); // x³ + ax + b + if (!Fp.eql(left, right)) + throw new Error('bad point: equation left != right'); + if (!this.isTorsionFree()) + throw new Error('bad point: not in prime-order subgroup'); + } + hasEvenY() { + const { y } = this.toAffine(); + if (Fp.isOdd) + return !Fp.isOdd(y); + throw new Error("Field doesn't support isOdd"); + } + /** + * Compare one point to another. + */ + equals(other) { + assertPrjPoint(other); + const { px: X1, py: Y1, pz: Z1 } = this; + const { px: X2, py: Y2, pz: Z2 } = other; + const U1 = Fp.eql(Fp.mul(X1, Z2), Fp.mul(X2, Z1)); + const U2 = Fp.eql(Fp.mul(Y1, Z2), Fp.mul(Y2, Z1)); + return U1 && U2; + } + /** + * Flips point to one corresponding to (x, -y) in Affine coordinates. + */ + negate() { + return new Point(this.px, Fp.neg(this.py), this.pz); + } + // Renes-Costello-Batina exception-free doubling formula. + // There is 30% faster Jacobian formula, but it is not complete. + // https://eprint.iacr.org/2015/1060, algorithm 3 + // Cost: 8M + 3S + 3*a + 2*b3 + 15add. 
+ double() { + const { a, b } = CURVE; + const b3 = Fp.mul(b, _3n); + const { px: X1, py: Y1, pz: Z1 } = this; + let X3 = Fp.ZERO, Y3 = Fp.ZERO, Z3 = Fp.ZERO; // prettier-ignore + let t0 = Fp.mul(X1, X1); // step 1 + let t1 = Fp.mul(Y1, Y1); + let t2 = Fp.mul(Z1, Z1); + let t3 = Fp.mul(X1, Y1); + t3 = Fp.add(t3, t3); // step 5 + Z3 = Fp.mul(X1, Z1); + Z3 = Fp.add(Z3, Z3); + X3 = Fp.mul(a, Z3); + Y3 = Fp.mul(b3, t2); + Y3 = Fp.add(X3, Y3); // step 10 + X3 = Fp.sub(t1, Y3); + Y3 = Fp.add(t1, Y3); + Y3 = Fp.mul(X3, Y3); + X3 = Fp.mul(t3, X3); + Z3 = Fp.mul(b3, Z3); // step 15 + t2 = Fp.mul(a, t2); + t3 = Fp.sub(t0, t2); + t3 = Fp.mul(a, t3); + t3 = Fp.add(t3, Z3); + Z3 = Fp.add(t0, t0); // step 20 + t0 = Fp.add(Z3, t0); + t0 = Fp.add(t0, t2); + t0 = Fp.mul(t0, t3); + Y3 = Fp.add(Y3, t0); + t2 = Fp.mul(Y1, Z1); // step 25 + t2 = Fp.add(t2, t2); + t0 = Fp.mul(t2, t3); + X3 = Fp.sub(X3, t0); + Z3 = Fp.mul(t2, t1); + Z3 = Fp.add(Z3, Z3); // step 30 + Z3 = Fp.add(Z3, Z3); + return new Point(X3, Y3, Z3); + } + // Renes-Costello-Batina exception-free addition formula. + // There is 30% faster Jacobian formula, but it is not complete. + // https://eprint.iacr.org/2015/1060, algorithm 1 + // Cost: 12M + 0S + 3*a + 3*b3 + 23add. 
+ add(other) { + assertPrjPoint(other); + const { px: X1, py: Y1, pz: Z1 } = this; + const { px: X2, py: Y2, pz: Z2 } = other; + let X3 = Fp.ZERO, Y3 = Fp.ZERO, Z3 = Fp.ZERO; // prettier-ignore + const a = CURVE.a; + const b3 = Fp.mul(CURVE.b, _3n); + let t0 = Fp.mul(X1, X2); // step 1 + let t1 = Fp.mul(Y1, Y2); + let t2 = Fp.mul(Z1, Z2); + let t3 = Fp.add(X1, Y1); + let t4 = Fp.add(X2, Y2); // step 5 + t3 = Fp.mul(t3, t4); + t4 = Fp.add(t0, t1); + t3 = Fp.sub(t3, t4); + t4 = Fp.add(X1, Z1); + let t5 = Fp.add(X2, Z2); // step 10 + t4 = Fp.mul(t4, t5); + t5 = Fp.add(t0, t2); + t4 = Fp.sub(t4, t5); + t5 = Fp.add(Y1, Z1); + X3 = Fp.add(Y2, Z2); // step 15 + t5 = Fp.mul(t5, X3); + X3 = Fp.add(t1, t2); + t5 = Fp.sub(t5, X3); + Z3 = Fp.mul(a, t4); + X3 = Fp.mul(b3, t2); // step 20 + Z3 = Fp.add(X3, Z3); + X3 = Fp.sub(t1, Z3); + Z3 = Fp.add(t1, Z3); + Y3 = Fp.mul(X3, Z3); + t1 = Fp.add(t0, t0); // step 25 + t1 = Fp.add(t1, t0); + t2 = Fp.mul(a, t2); + t4 = Fp.mul(b3, t4); + t1 = Fp.add(t1, t2); + t2 = Fp.sub(t0, t2); // step 30 + t2 = Fp.mul(a, t2); + t4 = Fp.add(t4, t2); + t0 = Fp.mul(t1, t4); + Y3 = Fp.add(Y3, t0); + t0 = Fp.mul(t5, t4); // step 35 + X3 = Fp.mul(t3, X3); + X3 = Fp.sub(X3, t0); + t0 = Fp.mul(t3, t1); + Z3 = Fp.mul(t5, Z3); + Z3 = Fp.add(Z3, t0); // step 40 + return new Point(X3, Y3, Z3); + } + subtract(other) { + return this.add(other.negate()); + } + is0() { + return this.equals(Point.ZERO); + } + wNAF(n) { + return wnaf.wNAFCached(this, pointPrecomputes, n, (comp) => { + const toInv = Fp.invertBatch(comp.map((p) => p.pz)); + return comp.map((p, i) => p.toAffine(toInv[i])).map(Point.fromAffine); + }); + } + /** + * Non-constant-time multiplication. Uses double-and-add algorithm. + * It's faster, but should only be used when you don't care about + * an exposed private key e.g. sig verification, which works over *public* keys. 
+ */ + multiplyUnsafe(n) { + const I = Point.ZERO; + if (n === _0n) + return I; + assertGE(n); // Will throw on 0 + if (n === _1n) + return this; + const { endo } = CURVE; + if (!endo) + return wnaf.unsafeLadder(this, n); + // Apply endomorphism + let { k1neg, k1, k2neg, k2 } = endo.splitScalar(n); + let k1p = I; + let k2p = I; + let d = this; + while (k1 > _0n || k2 > _0n) { + if (k1 & _1n) + k1p = k1p.add(d); + if (k2 & _1n) + k2p = k2p.add(d); + d = d.double(); + k1 >>= _1n; + k2 >>= _1n; + } + if (k1neg) + k1p = k1p.negate(); + if (k2neg) + k2p = k2p.negate(); + k2p = new Point(Fp.mul(k2p.px, endo.beta), k2p.py, k2p.pz); + return k1p.add(k2p); + } + /** + * Constant time multiplication. + * Uses wNAF method. Windowed method may be 10% faster, + * but takes 2x longer to generate and consumes 2x memory. + * Uses precomputes when available. + * Uses endomorphism for Koblitz curves. + * @param scalar by which the point would be multiplied + * @returns New point + */ + multiply(scalar) { + assertGE(scalar); + let n = scalar; + let point, fake; // Fake point is used to const-time mult + const { endo } = CURVE; + if (endo) { + const { k1neg, k1, k2neg, k2 } = endo.splitScalar(n); + let { p: k1p, f: f1p } = this.wNAF(k1); + let { p: k2p, f: f2p } = this.wNAF(k2); + k1p = wnaf.constTimeNegate(k1neg, k1p); + k2p = wnaf.constTimeNegate(k2neg, k2p); + k2p = new Point(Fp.mul(k2p.px, endo.beta), k2p.py, k2p.pz); + point = k1p.add(k2p); + fake = f1p.add(f2p); + } + else { + const { p, f } = this.wNAF(n); + point = p; + fake = f; + } + // Normalize `z` for both points, but return only real one + return Point.normalizeZ([point, fake])[0]; + } + /** + * Efficiently calculate `aP + bQ`. Unsafe, can expose private key, if used incorrectly. + * Not using Strauss-Shamir trick: precomputation tables are faster. + * The trick could be useful if both P and Q are not G (not in our case). 
+ * @returns non-zero affine point + */ + multiplyAndAddUnsafe(Q, a, b) { + const G = Point.BASE; // No Strauss-Shamir trick: we have 10% faster G precomputes + const mul = (P, a // Select faster multiply() method + ) => (a === _0n || a === _1n || !P.equals(G) ? P.multiplyUnsafe(a) : P.multiply(a)); + const sum = mul(this, a).add(mul(Q, b)); + return sum.is0() ? undefined : sum; + } + // Converts Projective point to affine (x, y) coordinates. + // Can accept precomputed Z^-1 - for example, from invertBatch. + // (x, y, z) ∋ (x=x/z, y=y/z) + toAffine(iz) { + const { px: x, py: y, pz: z } = this; + const is0 = this.is0(); + // If invZ was 0, we return zero point. However we still want to execute + // all operations, so we replace invZ with a random number, 1. + if (iz == null) + iz = is0 ? Fp.ONE : Fp.inv(z); + const ax = Fp.mul(x, iz); + const ay = Fp.mul(y, iz); + const zz = Fp.mul(z, iz); + if (is0) + return { x: Fp.ZERO, y: Fp.ZERO }; + if (!Fp.eql(zz, Fp.ONE)) + throw new Error('invZ was invalid'); + return { x: ax, y: ay }; + } + isTorsionFree() { + const { h: cofactor, isTorsionFree } = CURVE; + if (cofactor === _1n) + return true; // No subgroups, always torsion-free + if (isTorsionFree) + return isTorsionFree(Point, this); + throw new Error('isTorsionFree() has not been declared for the elliptic curve'); + } + clearCofactor() { + const { h: cofactor, clearCofactor } = CURVE; + if (cofactor === _1n) + return this; // Fast-path + if (clearCofactor) + return clearCofactor(Point, this); + return this.multiplyUnsafe(CURVE.h); + } + toRawBytes(isCompressed = true) { + this.assertValidity(); + return toBytes(Point, this, isCompressed); + } + toHex(isCompressed = true) { + return ut.bytesToHex(this.toRawBytes(isCompressed)); + } + } + Point.BASE = new Point(CURVE.Gx, CURVE.Gy, Fp.ONE); + Point.ZERO = new Point(Fp.ZERO, Fp.ONE, Fp.ZERO); + const _bits = CURVE.nBitLength; + const wnaf = (0, curve_js_1.wNAF)(Point, CURVE.endo ? 
Math.ceil(_bits / 2) : _bits); + // Validate if generator point is on curve + return { + CURVE, + ProjectivePoint: Point, + normPrivateKeyToScalar, + weierstrassEquation, + isWithinCurveOrder, + }; +} +exports.weierstrassPoints = weierstrassPoints; +function validateOpts(curve) { + const opts = (0, curve_js_1.validateBasic)(curve); + ut.validateObject(opts, { + hash: 'hash', + hmac: 'function', + randomBytes: 'function', + }, { + bits2int: 'function', + bits2int_modN: 'function', + lowS: 'boolean', + }); + return Object.freeze({ lowS: true, ...opts }); +} +function weierstrass(curveDef) { + const CURVE = validateOpts(curveDef); + const { Fp, n: CURVE_ORDER } = CURVE; + const compressedLen = Fp.BYTES + 1; // e.g. 33 for 32 + const uncompressedLen = 2 * Fp.BYTES + 1; // e.g. 65 for 32 + function isValidFieldElement(num) { + return _0n < num && num < Fp.ORDER; // 0 is banned since it's not invertible FE + } + function modN(a) { + return mod.mod(a, CURVE_ORDER); + } + function invN(a) { + return mod.invert(a, CURVE_ORDER); + } + const { ProjectivePoint: Point, normPrivateKeyToScalar, weierstrassEquation, isWithinCurveOrder, } = weierstrassPoints({ + ...CURVE, + toBytes(_c, point, isCompressed) { + const a = point.toAffine(); + const x = Fp.toBytes(a.x); + const cat = ut.concatBytes; + if (isCompressed) { + return cat(Uint8Array.from([point.hasEvenY() ? 
0x02 : 0x03]), x); + } + else { + return cat(Uint8Array.from([0x04]), x, Fp.toBytes(a.y)); + } + }, + fromBytes(bytes) { + const len = bytes.length; + const head = bytes[0]; + const tail = bytes.subarray(1); + // this.assertValidity() is done inside of fromHex + if (len === compressedLen && (head === 0x02 || head === 0x03)) { + const x = ut.bytesToNumberBE(tail); + if (!isValidFieldElement(x)) + throw new Error('Point is not on curve'); + const y2 = weierstrassEquation(x); // y² = x³ + ax + b + let y = Fp.sqrt(y2); // y = y² ^ (p+1)/4 + const isYOdd = (y & _1n) === _1n; + // ECDSA + const isHeadOdd = (head & 1) === 1; + if (isHeadOdd !== isYOdd) + y = Fp.neg(y); + return { x, y }; + } + else if (len === uncompressedLen && head === 0x04) { + const x = Fp.fromBytes(tail.subarray(0, Fp.BYTES)); + const y = Fp.fromBytes(tail.subarray(Fp.BYTES, 2 * Fp.BYTES)); + return { x, y }; + } + else { + throw new Error(`Point of length ${len} was invalid. Expected ${compressedLen} compressed bytes or ${uncompressedLen} uncompressed bytes`); + } + }, + }); + const numToNByteStr = (num) => ut.bytesToHex(ut.numberToBytesBE(num, CURVE.nByteLength)); + function isBiggerThanHalfOrder(number) { + const HALF = CURVE_ORDER >> _1n; + return number > HALF; + } + function normalizeS(s) { + return isBiggerThanHalfOrder(s) ? modN(-s) : s; + } + // slice bytes num + const slcNum = (b, from, to) => ut.bytesToNumberBE(b.slice(from, to)); + /** + * ECDSA signature with its (r, s) properties. Supports DER & compact representations. 
+ */ + class Signature { + constructor(r, s, recovery) { + this.r = r; + this.s = s; + this.recovery = recovery; + this.assertValidity(); + } + // pair (bytes of r, bytes of s) + static fromCompact(hex) { + const l = CURVE.nByteLength; + hex = (0, utils_js_1.ensureBytes)('compactSignature', hex, l * 2); + return new Signature(slcNum(hex, 0, l), slcNum(hex, l, 2 * l)); + } + // DER encoded ECDSA signature + // https://bitcoin.stackexchange.com/questions/57644/what-are-the-parts-of-a-bitcoin-transaction-input-script + static fromDER(hex) { + const { r, s } = exports.DER.toSig((0, utils_js_1.ensureBytes)('DER', hex)); + return new Signature(r, s); + } + assertValidity() { + // can use assertGE here + if (!isWithinCurveOrder(this.r)) + throw new Error('r must be 0 < r < CURVE.n'); + if (!isWithinCurveOrder(this.s)) + throw new Error('s must be 0 < s < CURVE.n'); + } + addRecoveryBit(recovery) { + return new Signature(this.r, this.s, recovery); + } + recoverPublicKey(msgHash) { + const { r, s, recovery: rec } = this; + const h = bits2int_modN((0, utils_js_1.ensureBytes)('msgHash', msgHash)); // Truncate hash + if (rec == null || ![0, 1, 2, 3].includes(rec)) + throw new Error('recovery id invalid'); + const radj = rec === 2 || rec === 3 ? r + CURVE.n : r; + if (radj >= Fp.ORDER) + throw new Error('recovery id 2 or 3 invalid'); + const prefix = (rec & 1) === 0 ? '02' : '03'; + const R = Point.fromHex(prefix + numToNByteStr(radj)); + const ir = invN(radj); // r^-1 + const u1 = modN(-h * ir); // -hr^-1 + const u2 = modN(s * ir); // sr^-1 + const Q = Point.BASE.multiplyAndAddUnsafe(R, u1, u2); // (sr^-1)R-(hr^-1)G = -(hr^-1)G + (sr^-1) + if (!Q) + throw new Error('point at infinify'); // unsafe is fine: no priv data leaked + Q.assertValidity(); + return Q; + } + // Signatures should be low-s, to prevent malleability. + hasHighS() { + return isBiggerThanHalfOrder(this.s); + } + normalizeS() { + return this.hasHighS() ? 
new Signature(this.r, modN(-this.s), this.recovery) : this; + } + // DER-encoded + toDERRawBytes() { + return ut.hexToBytes(this.toDERHex()); + } + toDERHex() { + return exports.DER.hexFromSig({ r: this.r, s: this.s }); + } + // padded bytes of r, then padded bytes of s + toCompactRawBytes() { + return ut.hexToBytes(this.toCompactHex()); + } + toCompactHex() { + return numToNByteStr(this.r) + numToNByteStr(this.s); + } + } + const utils = { + isValidPrivateKey(privateKey) { + try { + normPrivateKeyToScalar(privateKey); + return true; + } + catch (error) { + return false; + } + }, + normPrivateKeyToScalar: normPrivateKeyToScalar, + /** + * Produces cryptographically secure private key from random of size + * (groupLen + ceil(groupLen / 2)) with modulo bias being negligible. + */ + randomPrivateKey: () => { + const length = mod.getMinHashLength(CURVE.n); + return mod.mapHashToField(CURVE.randomBytes(length), CURVE.n); + }, + /** + * Creates precompute table for an arbitrary EC point. Makes point "cached". + * Allows to massively speed-up `point.multiply(scalar)`. + * @returns cached point + * @example + * const fast = utils.precompute(8, ProjectivePoint.fromHex(someonesPubKey)); + * fast.multiply(privKey); // much faster ECDH now + */ + precompute(windowSize = 8, point = Point.BASE) { + point._setWindowSize(windowSize); + point.multiply(BigInt(3)); // 3 is arbitrary, just need any number here + return point; + }, + }; + /** + * Computes public key for a private key. Checks for validity of the private key. + * @param privateKey private key + * @param isCompressed whether to return compact (default), or full key + * @returns Public key, full when isCompressed=false; short when isCompressed=true + */ + function getPublicKey(privateKey, isCompressed = true) { + return Point.fromPrivateKey(privateKey).toRawBytes(isCompressed); + } + /** + * Quick and dirty check for item being public key. Does not validate hex, or being on-curve. 
+ */ + function isProbPub(item) { + const arr = ut.isBytes(item); + const str = typeof item === 'string'; + const len = (arr || str) && item.length; + if (arr) + return len === compressedLen || len === uncompressedLen; + if (str) + return len === 2 * compressedLen || len === 2 * uncompressedLen; + if (item instanceof Point) + return true; + return false; + } + /** + * ECDH (Elliptic Curve Diffie Hellman). + * Computes shared public key from private key and public key. + * Checks: 1) private key validity 2) shared key is on-curve. + * Does NOT hash the result. + * @param privateA private key + * @param publicB different public key + * @param isCompressed whether to return compact (default), or full key + * @returns shared public key + */ + function getSharedSecret(privateA, publicB, isCompressed = true) { + if (isProbPub(privateA)) + throw new Error('first arg must be private key'); + if (!isProbPub(publicB)) + throw new Error('second arg must be public key'); + const b = Point.fromHex(publicB); // check for being on-curve + return b.multiply(normPrivateKeyToScalar(privateA)).toRawBytes(isCompressed); + } + // RFC6979: ensure ECDSA msg is X bytes and < N. RFC suggests optional truncating via bits2octets. + // FIPS 186-4 4.6 suggests the leftmost min(nBitLen, outLen) bits, which matches bits2int. + // bits2int can produce res>N, we can do mod(res, N) since the bitLen is the same. + // int2octets can't be used; pads small msgs with 0: unacceptatble for trunc as per RFC vectors + const bits2int = CURVE.bits2int || + function (bytes) { + // For curves with nBitLength % 8 !== 0: bits2octets(bits2octets(m)) !== bits2octets(m) + // for some cases, since bytes.length * 8 is not actual bitLength. + const num = ut.bytesToNumberBE(bytes); // check for == u8 done here + const delta = bytes.length * 8 - CURVE.nBitLength; // truncate to nBitLength leftmost bits + return delta > 0 ? 
num >> BigInt(delta) : num; + }; + const bits2int_modN = CURVE.bits2int_modN || + function (bytes) { + return modN(bits2int(bytes)); // can't use bytesToNumberBE here + }; + // NOTE: pads output with zero as per spec + const ORDER_MASK = ut.bitMask(CURVE.nBitLength); + /** + * Converts to bytes. Checks if num in `[0..ORDER_MASK-1]` e.g.: `[0..2^256-1]`. + */ + function int2octets(num) { + if (typeof num !== 'bigint') + throw new Error('bigint expected'); + if (!(_0n <= num && num < ORDER_MASK)) + throw new Error(`bigint expected < 2^${CURVE.nBitLength}`); + // works with order, can have different size than numToField! + return ut.numberToBytesBE(num, CURVE.nByteLength); + } + // Steps A, D of RFC6979 3.2 + // Creates RFC6979 seed; converts msg/privKey to numbers. + // Used only in sign, not in verify. + // NOTE: we cannot assume here that msgHash has same amount of bytes as curve order, this will be wrong at least for P521. + // Also it can be bigger for P224 + SHA256 + function prepSig(msgHash, privateKey, opts = defaultSigOpts) { + if (['recovered', 'canonical'].some((k) => k in opts)) + throw new Error('sign() legacy options not supported'); + const { hash, randomBytes } = CURVE; + let { lowS, prehash, extraEntropy: ent } = opts; // generates low-s sigs by default + if (lowS == null) + lowS = true; // RFC6979 3.2: we skip step A, because we already provide hash + msgHash = (0, utils_js_1.ensureBytes)('msgHash', msgHash); + if (prehash) + msgHash = (0, utils_js_1.ensureBytes)('prehashed msgHash', hash(msgHash)); + // We can't later call bits2octets, since nested bits2int is broken for curves + // with nBitLength % 8 !== 0. Because of that, we unwrap it here as int2octets call. + // const bits2octets = (bits) => int2octets(bits2int_modN(bits)) + const h1int = bits2int_modN(msgHash); + const d = normPrivateKeyToScalar(privateKey); // validate private key, convert to bigint + const seedArgs = [int2octets(d), int2octets(h1int)]; + // extraEntropy. 
RFC6979 3.6: additional k' (optional). + if (ent != null) { + // K = HMAC_K(V || 0x00 || int2octets(x) || bits2octets(h1) || k') + const e = ent === true ? randomBytes(Fp.BYTES) : ent; // generate random bytes OR pass as-is + seedArgs.push((0, utils_js_1.ensureBytes)('extraEntropy', e)); // check for being bytes + } + const seed = ut.concatBytes(...seedArgs); // Step D of RFC6979 3.2 + const m = h1int; // NOTE: no need to call bits2int second time here, it is inside truncateHash! + // Converts signature params into point w r/s, checks result for validity. + function k2sig(kBytes) { + // RFC 6979 Section 3.2, step 3: k = bits2int(T) + const k = bits2int(kBytes); // Cannot use fields methods, since it is group element + if (!isWithinCurveOrder(k)) + return; // Important: all mod() calls here must be done over N + const ik = invN(k); // k^-1 mod n + const q = Point.BASE.multiply(k).toAffine(); // q = Gk + const r = modN(q.x); // r = q.x mod n + if (r === _0n) + return; + // Can use scalar blinding b^-1(bm + bdr) where b ∈ [1,q−1] according to + // https://tches.iacr.org/index.php/TCHES/article/view/7337/6509. We've decided against it: + // a) dependency on CSPRNG b) 15% slowdown c) doesn't really help since bigints are not CT + const s = modN(ik * modN(m + r * d)); // Not using blinding here + if (s === _0n) + return; + let recovery = (q.x === r ? 0 : 2) | Number(q.y & _1n); // recovery bit (2 or 3, when q.x > n) + let normS = s; + if (lowS && isBiggerThanHalfOrder(s)) { + normS = normalizeS(s); // if lowS was passed, ensure s is always + recovery ^= 1; // // in the bottom half of N + } + return new Signature(r, normS, recovery); // use normS, not s + } + return { seed, k2sig }; + } + const defaultSigOpts = { lowS: CURVE.lowS, prehash: false }; + const defaultVerOpts = { lowS: CURVE.lowS, prehash: false }; + /** + * Signs message hash with a private key. 
+ * ``` + * sign(m, d, k) where + * (x, y) = G × k + * r = x mod n + * s = (m + dr)/k mod n + * ``` + * @param msgHash NOT message. msg needs to be hashed to `msgHash`, or use `prehash`. + * @param privKey private key + * @param opts lowS for non-malleable sigs. extraEntropy for mixing randomness into k. prehash will hash first arg. + * @returns signature with recovery param + */ + function sign(msgHash, privKey, opts = defaultSigOpts) { + const { seed, k2sig } = prepSig(msgHash, privKey, opts); // Steps A, D of RFC6979 3.2. + const C = CURVE; + const drbg = ut.createHmacDrbg(C.hash.outputLen, C.nByteLength, C.hmac); + return drbg(seed, k2sig); // Steps B, C, D, E, F, G + } + // Enable precomputes. Slows down first publicKey computation by 20ms. + Point.BASE._setWindowSize(8); + // utils.precompute(8, ProjectivePoint.BASE) + /** + * Verifies a signature against message hash and public key. + * Rejects lowS signatures by default: to override, + * specify option `{lowS: false}`. Implements section 4.1.4 from https://www.secg.org/sec1-v2.pdf: + * + * ``` + * verify(r, s, h, P) where + * U1 = hs^-1 mod n + * U2 = rs^-1 mod n + * R = U1⋅G - U2⋅P + * mod(R.x, n) == r + * ``` + */ + function verify(signature, msgHash, publicKey, opts = defaultVerOpts) { + const sg = signature; + msgHash = (0, utils_js_1.ensureBytes)('msgHash', msgHash); + publicKey = (0, utils_js_1.ensureBytes)('publicKey', publicKey); + if ('strict' in opts) + throw new Error('options.strict was renamed to lowS'); + const { lowS, prehash } = opts; + let _sig = undefined; + let P; + try { + if (typeof sg === 'string' || ut.isBytes(sg)) { + // Signature can be represented in 2 ways: compact (2*nByteLength) & DER (variable-length). + // Since DER can also be 2*nByteLength bytes, we check for it first. 
+ try { + _sig = Signature.fromDER(sg); + } + catch (derError) { + if (!(derError instanceof exports.DER.Err)) + throw derError; + _sig = Signature.fromCompact(sg); + } + } + else if (typeof sg === 'object' && typeof sg.r === 'bigint' && typeof sg.s === 'bigint') { + const { r, s } = sg; + _sig = new Signature(r, s); + } + else { + throw new Error('PARSE'); + } + P = Point.fromHex(publicKey); + } + catch (error) { + if (error.message === 'PARSE') + throw new Error(`signature must be Signature instance, Uint8Array or hex string`); + return false; + } + if (lowS && _sig.hasHighS()) + return false; + if (prehash) + msgHash = CURVE.hash(msgHash); + const { r, s } = _sig; + const h = bits2int_modN(msgHash); // Cannot use fields methods, since it is group element + const is = invN(s); // s^-1 + const u1 = modN(h * is); // u1 = hs^-1 mod n + const u2 = modN(r * is); // u2 = rs^-1 mod n + const R = Point.BASE.multiplyAndAddUnsafe(P, u1, u2)?.toAffine(); // R = u1⋅G + u2⋅P + if (!R) + return false; + const v = modN(R.x); + return v === r; + } + return { + CURVE, + getPublicKey, + getSharedSecret, + sign, + verify, + ProjectivePoint: Point, + Signature, + utils, + }; +} +exports.weierstrass = weierstrass; +/** + * Implementation of the Shallue and van de Woestijne method for any weierstrass curve. + * TODO: check if there is a way to merge this with uvRatio in Edwards; move to modular. + * b = True and y = sqrt(u / v) if (u / v) is square in F, and + * b = False and y = sqrt(Z * (u / v)) otherwise. + * @param Fp + * @param Z + * @returns + */ +function SWUFpSqrtRatio(Fp, Z) { + // Generic implementation + const q = Fp.ORDER; + let l = _0n; + for (let o = q - _1n; o % _2n === _0n; o /= _2n) + l += _1n; + const c1 = l; // 1. c1, the largest integer such that 2^c1 divides q - 1. + // We need 2n ** c1 and 2n ** (c1-1). We can't use **; but we can use <<. 
+ // 2n ** c1 == 2n << (c1-1) + const _2n_pow_c1_1 = _2n << (c1 - _1n - _1n); + const _2n_pow_c1 = _2n_pow_c1_1 * _2n; + const c2 = (q - _1n) / _2n_pow_c1; // 2. c2 = (q - 1) / (2^c1) # Integer arithmetic + const c3 = (c2 - _1n) / _2n; // 3. c3 = (c2 - 1) / 2 # Integer arithmetic + const c4 = _2n_pow_c1 - _1n; // 4. c4 = 2^c1 - 1 # Integer arithmetic + const c5 = _2n_pow_c1_1; // 5. c5 = 2^(c1 - 1) # Integer arithmetic + const c6 = Fp.pow(Z, c2); // 6. c6 = Z^c2 + const c7 = Fp.pow(Z, (c2 + _1n) / _2n); // 7. c7 = Z^((c2 + 1) / 2) + let sqrtRatio = (u, v) => { + let tv1 = c6; // 1. tv1 = c6 + let tv2 = Fp.pow(v, c4); // 2. tv2 = v^c4 + let tv3 = Fp.sqr(tv2); // 3. tv3 = tv2^2 + tv3 = Fp.mul(tv3, v); // 4. tv3 = tv3 * v + let tv5 = Fp.mul(u, tv3); // 5. tv5 = u * tv3 + tv5 = Fp.pow(tv5, c3); // 6. tv5 = tv5^c3 + tv5 = Fp.mul(tv5, tv2); // 7. tv5 = tv5 * tv2 + tv2 = Fp.mul(tv5, v); // 8. tv2 = tv5 * v + tv3 = Fp.mul(tv5, u); // 9. tv3 = tv5 * u + let tv4 = Fp.mul(tv3, tv2); // 10. tv4 = tv3 * tv2 + tv5 = Fp.pow(tv4, c5); // 11. tv5 = tv4^c5 + let isQR = Fp.eql(tv5, Fp.ONE); // 12. isQR = tv5 == 1 + tv2 = Fp.mul(tv3, c7); // 13. tv2 = tv3 * c7 + tv5 = Fp.mul(tv4, tv1); // 14. tv5 = tv4 * tv1 + tv3 = Fp.cmov(tv2, tv3, isQR); // 15. tv3 = CMOV(tv2, tv3, isQR) + tv4 = Fp.cmov(tv5, tv4, isQR); // 16. tv4 = CMOV(tv5, tv4, isQR) + // 17. for i in (c1, c1 - 1, ..., 2): + for (let i = c1; i > _1n; i--) { + let tv5 = i - _2n; // 18. tv5 = i - 2 + tv5 = _2n << (tv5 - _1n); // 19. tv5 = 2^tv5 + let tvv5 = Fp.pow(tv4, tv5); // 20. tv5 = tv4^tv5 + const e1 = Fp.eql(tvv5, Fp.ONE); // 21. e1 = tv5 == 1 + tv2 = Fp.mul(tv3, tv1); // 22. tv2 = tv3 * tv1 + tv1 = Fp.mul(tv1, tv1); // 23. tv1 = tv1 * tv1 + tvv5 = Fp.mul(tv4, tv1); // 24. tv5 = tv4 * tv1 + tv3 = Fp.cmov(tv2, tv3, e1); // 25. tv3 = CMOV(tv2, tv3, e1) + tv4 = Fp.cmov(tvv5, tv4, e1); // 26. 
tv4 = CMOV(tv5, tv4, e1) + } + return { isValid: isQR, value: tv3 }; + }; + if (Fp.ORDER % _4n === _3n) { + // sqrt_ratio_3mod4(u, v) + const c1 = (Fp.ORDER - _3n) / _4n; // 1. c1 = (q - 3) / 4 # Integer arithmetic + const c2 = Fp.sqrt(Fp.neg(Z)); // 2. c2 = sqrt(-Z) + sqrtRatio = (u, v) => { + let tv1 = Fp.sqr(v); // 1. tv1 = v^2 + const tv2 = Fp.mul(u, v); // 2. tv2 = u * v + tv1 = Fp.mul(tv1, tv2); // 3. tv1 = tv1 * tv2 + let y1 = Fp.pow(tv1, c1); // 4. y1 = tv1^c1 + y1 = Fp.mul(y1, tv2); // 5. y1 = y1 * tv2 + const y2 = Fp.mul(y1, c2); // 6. y2 = y1 * c2 + const tv3 = Fp.mul(Fp.sqr(y1), v); // 7. tv3 = y1^2; 8. tv3 = tv3 * v + const isQR = Fp.eql(tv3, u); // 9. isQR = tv3 == u + let y = Fp.cmov(y2, y1, isQR); // 10. y = CMOV(y2, y1, isQR) + return { isValid: isQR, value: y }; // 11. return (isQR, y) isQR ? y : y*c2 + }; + } + // No curves uses that + // if (Fp.ORDER % _8n === _5n) // sqrt_ratio_5mod8 + return sqrtRatio; +} +exports.SWUFpSqrtRatio = SWUFpSqrtRatio; +/** + * Simplified Shallue-van de Woestijne-Ulas Method + * https://www.rfc-editor.org/rfc/rfc9380#section-6.6.2 + */ +function mapToCurveSimpleSWU(Fp, opts) { + mod.validateField(Fp); + if (!Fp.isValid(opts.A) || !Fp.isValid(opts.B) || !Fp.isValid(opts.Z)) + throw new Error('mapToCurveSimpleSWU: invalid opts'); + const sqrtRatio = SWUFpSqrtRatio(Fp, opts.Z); + if (!Fp.isOdd) + throw new Error('Fp.isOdd is not implemented!'); + // Input: u, an element of F. + // Output: (x, y), a point on E. + return (u) => { + // prettier-ignore + let tv1, tv2, tv3, tv4, tv5, tv6, x, y; + tv1 = Fp.sqr(u); // 1. tv1 = u^2 + tv1 = Fp.mul(tv1, opts.Z); // 2. tv1 = Z * tv1 + tv2 = Fp.sqr(tv1); // 3. tv2 = tv1^2 + tv2 = Fp.add(tv2, tv1); // 4. tv2 = tv2 + tv1 + tv3 = Fp.add(tv2, Fp.ONE); // 5. tv3 = tv2 + 1 + tv3 = Fp.mul(tv3, opts.B); // 6. tv3 = B * tv3 + tv4 = Fp.cmov(opts.Z, Fp.neg(tv2), !Fp.eql(tv2, Fp.ZERO)); // 7. tv4 = CMOV(Z, -tv2, tv2 != 0) + tv4 = Fp.mul(tv4, opts.A); // 8. 
tv4 = A * tv4 + tv2 = Fp.sqr(tv3); // 9. tv2 = tv3^2 + tv6 = Fp.sqr(tv4); // 10. tv6 = tv4^2 + tv5 = Fp.mul(tv6, opts.A); // 11. tv5 = A * tv6 + tv2 = Fp.add(tv2, tv5); // 12. tv2 = tv2 + tv5 + tv2 = Fp.mul(tv2, tv3); // 13. tv2 = tv2 * tv3 + tv6 = Fp.mul(tv6, tv4); // 14. tv6 = tv6 * tv4 + tv5 = Fp.mul(tv6, opts.B); // 15. tv5 = B * tv6 + tv2 = Fp.add(tv2, tv5); // 16. tv2 = tv2 + tv5 + x = Fp.mul(tv1, tv3); // 17. x = tv1 * tv3 + const { isValid, value } = sqrtRatio(tv2, tv6); // 18. (is_gx1_square, y1) = sqrt_ratio(tv2, tv6) + y = Fp.mul(tv1, u); // 19. y = tv1 * u -> Z * u^3 * y1 + y = Fp.mul(y, value); // 20. y = y * y1 + x = Fp.cmov(x, tv3, isValid); // 21. x = CMOV(x, tv3, is_gx1_square) + y = Fp.cmov(y, value, isValid); // 22. y = CMOV(y, y1, is_gx1_square) + const e1 = Fp.isOdd(u) === Fp.isOdd(y); // 23. e1 = sgn0(u) == sgn0(y) + y = Fp.cmov(Fp.neg(y), y, e1); // 24. y = CMOV(-y, y, e1) + x = Fp.div(x, tv4); // 25. x = x / tv4 + return { x, y }; + }; +} +exports.mapToCurveSimpleSWU = mapToCurveSimpleSWU; +//# sourceMappingURL=weierstrass.js.map + +/***/ }), + +/***/ 8510: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.encodeToCurve = exports.hashToCurve = exports.schnorr = exports.secp256k1 = void 0; +/*! 
noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +const sha256_1 = __webpack_require__(22623); +const utils_1 = __webpack_require__(99175); +const modular_js_1 = __webpack_require__(24967); +const weierstrass_js_1 = __webpack_require__(91705); +const utils_js_1 = __webpack_require__(91484); +const hash_to_curve_js_1 = __webpack_require__(71761); +const _shortw_utils_js_1 = __webpack_require__(73562); +const secp256k1P = BigInt('0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f'); +const secp256k1N = BigInt('0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141'); +const _1n = BigInt(1); +const _2n = BigInt(2); +const divNearest = (a, b) => (a + b / _2n) / b; +/** + * √n = n^((p+1)/4) for fields p = 3 mod 4. We unwrap the loop and multiply bit-by-bit. + * (P+1n/4n).toString(2) would produce bits [223x 1, 0, 22x 1, 4x 0, 11, 00] + */ +function sqrtMod(y) { + const P = secp256k1P; + // prettier-ignore + const _3n = BigInt(3), _6n = BigInt(6), _11n = BigInt(11), _22n = BigInt(22); + // prettier-ignore + const _23n = BigInt(23), _44n = BigInt(44), _88n = BigInt(88); + const b2 = (y * y * y) % P; // x^3, 11 + const b3 = (b2 * b2 * y) % P; // x^7 + const b6 = ((0, modular_js_1.pow2)(b3, _3n, P) * b3) % P; + const b9 = ((0, modular_js_1.pow2)(b6, _3n, P) * b3) % P; + const b11 = ((0, modular_js_1.pow2)(b9, _2n, P) * b2) % P; + const b22 = ((0, modular_js_1.pow2)(b11, _11n, P) * b11) % P; + const b44 = ((0, modular_js_1.pow2)(b22, _22n, P) * b22) % P; + const b88 = ((0, modular_js_1.pow2)(b44, _44n, P) * b44) % P; + const b176 = ((0, modular_js_1.pow2)(b88, _88n, P) * b88) % P; + const b220 = ((0, modular_js_1.pow2)(b176, _44n, P) * b44) % P; + const b223 = ((0, modular_js_1.pow2)(b220, _3n, P) * b3) % P; + const t1 = ((0, modular_js_1.pow2)(b223, _23n, P) * b22) % P; + const t2 = ((0, modular_js_1.pow2)(t1, _6n, P) * b2) % P; + const root = (0, modular_js_1.pow2)(t2, _2n, P); + if (!Fp.eql(Fp.sqr(root), y)) + throw new 
Error('Cannot find square root'); + return root; +} +const Fp = (0, modular_js_1.Field)(secp256k1P, undefined, undefined, { sqrt: sqrtMod }); +exports.secp256k1 = (0, _shortw_utils_js_1.createCurve)({ + a: BigInt(0), // equation params: a, b + b: BigInt(7), // Seem to be rigid: bitcointalk.org/index.php?topic=289795.msg3183975#msg3183975 + Fp, // Field's prime: 2n**256n - 2n**32n - 2n**9n - 2n**8n - 2n**7n - 2n**6n - 2n**4n - 1n + n: secp256k1N, // Curve order, total count of valid points in the field + // Base point (x, y) aka generator point + Gx: BigInt('55066263022277343669578718895168534326250603453777594175500187360389116729240'), + Gy: BigInt('32670510020758816978083085130507043184471273380659243275938904335757337482424'), + h: BigInt(1), // Cofactor + lowS: true, // Allow only low-S signatures by default in sign() and verify() + /** + * secp256k1 belongs to Koblitz curves: it has efficiently computable endomorphism. + * Endomorphism uses 2x less RAM, speeds up precomputation by 2x and ECDH / key recovery by 20%. + * For precomputed wNAF it trades off 1/2 init time & 1/3 ram for 20% perf hit. 
+ * Explanation: https://gist.github.com/paulmillr/eb670806793e84df628a7c434a873066 + */ + endo: { + beta: BigInt('0x7ae96a2b657c07106e64479eac3434e99cf0497512f58995c1396c28719501ee'), + splitScalar: (k) => { + const n = secp256k1N; + const a1 = BigInt('0x3086d221a7d46bcde86c90e49284eb15'); + const b1 = -_1n * BigInt('0xe4437ed6010e88286f547fa90abfe4c3'); + const a2 = BigInt('0x114ca50f7a8e2f3f657c1108d9d44cfd8'); + const b2 = a1; + const POW_2_128 = BigInt('0x100000000000000000000000000000000'); // (2n**128n).toString(16) + const c1 = divNearest(b2 * k, n); + const c2 = divNearest(-b1 * k, n); + let k1 = (0, modular_js_1.mod)(k - c1 * a1 - c2 * a2, n); + let k2 = (0, modular_js_1.mod)(-c1 * b1 - c2 * b2, n); + const k1neg = k1 > POW_2_128; + const k2neg = k2 > POW_2_128; + if (k1neg) + k1 = n - k1; + if (k2neg) + k2 = n - k2; + if (k1 > POW_2_128 || k2 > POW_2_128) { + throw new Error('splitScalar: Endomorphism failed, k=' + k); + } + return { k1neg, k1, k2neg, k2 }; + }, + }, +}, sha256_1.sha256); +// Schnorr signatures are superior to ECDSA from above. Below is Schnorr-specific BIP0340 code. +// https://github.com/bitcoin/bips/blob/master/bip-0340.mediawiki +const _0n = BigInt(0); +const fe = (x) => typeof x === 'bigint' && _0n < x && x < secp256k1P; +const ge = (x) => typeof x === 'bigint' && _0n < x && x < secp256k1N; +/** An object mapping tags to their tagged hash prefix of [SHA256(tag) | SHA256(tag)] */ +const TAGGED_HASH_PREFIXES = {}; +function taggedHash(tag, ...messages) { + let tagP = TAGGED_HASH_PREFIXES[tag]; + if (tagP === undefined) { + const tagH = (0, sha256_1.sha256)(Uint8Array.from(tag, (c) => c.charCodeAt(0))); + tagP = (0, utils_js_1.concatBytes)(tagH, tagH); + TAGGED_HASH_PREFIXES[tag] = tagP; + } + return (0, sha256_1.sha256)((0, utils_js_1.concatBytes)(tagP, ...messages)); +} +// ECDSA compact points are 33-byte. 
Schnorr is 32: we strip first byte 0x02 or 0x03 +const pointToBytes = (point) => point.toRawBytes(true).slice(1); +const numTo32b = (n) => (0, utils_js_1.numberToBytesBE)(n, 32); +const modP = (x) => (0, modular_js_1.mod)(x, secp256k1P); +const modN = (x) => (0, modular_js_1.mod)(x, secp256k1N); +const Point = exports.secp256k1.ProjectivePoint; +const GmulAdd = (Q, a, b) => Point.BASE.multiplyAndAddUnsafe(Q, a, b); +// Calculate point, scalar and bytes +function schnorrGetExtPubKey(priv) { + let d_ = exports.secp256k1.utils.normPrivateKeyToScalar(priv); // same method executed in fromPrivateKey + let p = Point.fromPrivateKey(d_); // P = d'⋅G; 0 < d' < n check is done inside + const scalar = p.hasEvenY() ? d_ : modN(-d_); + return { scalar: scalar, bytes: pointToBytes(p) }; +} +/** + * lift_x from BIP340. Convert 32-byte x coordinate to elliptic curve point. + * @returns valid point checked for being on-curve + */ +function lift_x(x) { + if (!fe(x)) + throw new Error('bad x: need 0 < x < p'); // Fail if x ≥ p. + const xx = modP(x * x); + const c = modP(xx * x + BigInt(7)); // Let c = x³ + 7 mod p. + let y = sqrtMod(c); // Let y = c^(p+1)/4 mod p. + if (y % _2n !== _0n) + y = modP(-y); // Return the unique point P such that x(P) = x and + const p = new Point(x, y, _1n); // y(P) = y if y mod 2 = 0 or y(P) = p-y otherwise. + p.assertValidity(); + return p; +} +/** + * Create tagged hash, convert it to bigint, reduce modulo-n. + */ +function challenge(...args) { + return modN((0, utils_js_1.bytesToNumberBE)(taggedHash('BIP0340/challenge', ...args))); +} +/** + * Schnorr public key is just `x` coordinate of Point as per BIP340. + */ +function schnorrGetPublicKey(privateKey) { + return schnorrGetExtPubKey(privateKey).bytes; // d'=int(sk). Fail if d'=0 or d'≥n. Ret bytes(d'⋅G) +} +/** + * Creates Schnorr signature as per BIP340. Verifies itself before returning anything. + * auxRand is optional and is not the sole source of k generation: bad CSPRNG won't be dangerous. 
+ */ +function schnorrSign(message, privateKey, auxRand = (0, utils_1.randomBytes)(32)) { + const m = (0, utils_js_1.ensureBytes)('message', message); + const { bytes: px, scalar: d } = schnorrGetExtPubKey(privateKey); // checks for isWithinCurveOrder + const a = (0, utils_js_1.ensureBytes)('auxRand', auxRand, 32); // Auxiliary random data a: a 32-byte array + const t = numTo32b(d ^ (0, utils_js_1.bytesToNumberBE)(taggedHash('BIP0340/aux', a))); // Let t be the byte-wise xor of bytes(d) and hash/aux(a) + const rand = taggedHash('BIP0340/nonce', t, px, m); // Let rand = hash/nonce(t || bytes(P) || m) + const k_ = modN((0, utils_js_1.bytesToNumberBE)(rand)); // Let k' = int(rand) mod n + if (k_ === _0n) + throw new Error('sign failed: k is zero'); // Fail if k' = 0. + const { bytes: rx, scalar: k } = schnorrGetExtPubKey(k_); // Let R = k'⋅G. + const e = challenge(rx, px, m); // Let e = int(hash/challenge(bytes(R) || bytes(P) || m)) mod n. + const sig = new Uint8Array(64); // Let sig = bytes(R) || bytes((k + ed) mod n). + sig.set(rx, 0); + sig.set(numTo32b(modN(k + e * d)), 32); + // If Verify(bytes(P), m, sig) (see below) returns failure, abort + if (!schnorrVerify(sig, m, px)) + throw new Error('sign: Invalid signature produced'); + return sig; +} +/** + * Verifies Schnorr signature. + * Will swallow errors & return false except for initial type validation of arguments. + */ +function schnorrVerify(signature, message, publicKey) { + const sig = (0, utils_js_1.ensureBytes)('signature', signature, 64); + const m = (0, utils_js_1.ensureBytes)('message', message); + const pub = (0, utils_js_1.ensureBytes)('publicKey', publicKey, 32); + try { + const P = lift_x((0, utils_js_1.bytesToNumberBE)(pub)); // P = lift_x(int(pk)); fail if that fails + const r = (0, utils_js_1.bytesToNumberBE)(sig.subarray(0, 32)); // Let r = int(sig[0:32]); fail if r ≥ p. 
+ if (!fe(r)) + return false; + const s = (0, utils_js_1.bytesToNumberBE)(sig.subarray(32, 64)); // Let s = int(sig[32:64]); fail if s ≥ n. + if (!ge(s)) + return false; + const e = challenge(numTo32b(r), pointToBytes(P), m); // int(challenge(bytes(r)||bytes(P)||m))%n + const R = GmulAdd(P, s, modN(-e)); // R = s⋅G - e⋅P + if (!R || !R.hasEvenY() || R.toAffine().x !== r) + return false; // -eP == (n-e)P + return true; // Fail if is_infinite(R) / not has_even_y(R) / x(R) ≠ r. + } + catch (error) { + return false; + } +} +exports.schnorr = (() => ({ + getPublicKey: schnorrGetPublicKey, + sign: schnorrSign, + verify: schnorrVerify, + utils: { + randomPrivateKey: exports.secp256k1.utils.randomPrivateKey, + lift_x, + pointToBytes, + numberToBytesBE: utils_js_1.numberToBytesBE, + bytesToNumberBE: utils_js_1.bytesToNumberBE, + taggedHash, + mod: modular_js_1.mod, + }, +}))(); +const isoMap = /* @__PURE__ */ (() => (0, hash_to_curve_js_1.isogenyMap)(Fp, [ + // xNum + [ + '0x8e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38daaaaa8c7', + '0x7d3d4c80bc321d5b9f315cea7fd44c5d595d2fc0bf63b92dfff1044f17c6581', + '0x534c328d23f234e6e2a413deca25caece4506144037c40314ecbd0b53d9dd262', + '0x8e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38e38daaaaa88c', + ], + // xDen + [ + '0xd35771193d94918a9ca34ccbb7b640dd86cd409542f8487d9fe6b745781eb49b', + '0xedadc6f64383dc1df7c4b2d51b54225406d36b641f5e41bbc52a56612a8c6d14', + '0x0000000000000000000000000000000000000000000000000000000000000001', // LAST 1 + ], + // yNum + [ + '0x4bda12f684bda12f684bda12f684bda12f684bda12f684bda12f684b8e38e23c', + '0xc75e0c32d5cb7c0fa9d0a54b12a0a6d5647ab046d686da6fdffc90fc201d71a3', + '0x29a6194691f91a73715209ef6512e576722830a201be2018a765e85a9ecee931', + '0x2f684bda12f684bda12f684bda12f684bda12f684bda12f684bda12f38e38d84', + ], + // yDen + [ + '0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffff93b', + '0x7a06534bb8bdb49fd5e9e6632722c2989467c1bfc8e8d978dfb425d2685c2573', + 
'0x6484aa716545ca2cf3a70c3fa8fe337e0a3d21162f0d6299a7bf8192bfd2a76f', + '0x0000000000000000000000000000000000000000000000000000000000000001', // LAST 1 + ], +].map((i) => i.map((j) => BigInt(j)))))(); +const mapSWU = /* @__PURE__ */ (() => (0, weierstrass_js_1.mapToCurveSimpleSWU)(Fp, { + A: BigInt('0x3f8731abdd661adca08a5558f0f5d272e953d363cb6f0e5d405447c01a444533'), + B: BigInt('1771'), + Z: Fp.create(BigInt('-11')), +}))(); +const htf = /* @__PURE__ */ (() => (0, hash_to_curve_js_1.createHasher)(exports.secp256k1.ProjectivePoint, (scalars) => { + const { x, y } = mapSWU(Fp.create(scalars[0])); + return isoMap(x, y); +}, { + DST: 'secp256k1_XMD:SHA-256_SSWU_RO_', + encodeDST: 'secp256k1_XMD:SHA-256_SSWU_NU_', + p: Fp.ORDER, + m: 1, + k: 128, + expand: 'xmd', + hash: sha256_1.sha256, +}))(); +exports.hashToCurve = (() => htf.hashToCurve)(); +exports.encodeToCurve = (() => htf.encodeToCurve)(); +//# sourceMappingURL=secp256k1.js.map + +/***/ }), + +/***/ 67557: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.output = exports.exists = exports.hash = exports.bytes = exports.bool = exports.number = void 0; +function number(n) { + if (!Number.isSafeInteger(n) || n < 0) + throw new Error(`Wrong positive integer: ${n}`); +} +exports.number = number; +function bool(b) { + if (typeof b !== 'boolean') + throw new Error(`Expected boolean, not ${b}`); +} +exports.bool = bool; +// copied from utils +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +function bytes(b, ...lengths) { + if (!isBytes(b)) + throw new Error('Expected Uint8Array'); + if (lengths.length > 0 && !lengths.includes(b.length)) + throw new Error(`Expected Uint8Array of length ${lengths}, not of length=${b.length}`); +} +exports.bytes = bytes; +function hash(hash) { + if (typeof hash !== 'function' || typeof hash.create !== 
'function') + throw new Error('Hash should be wrapped by utils.wrapConstructor'); + number(hash.outputLen); + number(hash.blockLen); +} +exports.hash = hash; +function exists(instance, checkFinished = true) { + if (instance.destroyed) + throw new Error('Hash instance has been destroyed'); + if (checkFinished && instance.finished) + throw new Error('Hash#digest() has already been called'); +} +exports.exists = exists; +function output(out, instance) { + bytes(out); + const min = instance.outputLen; + if (out.length < min) { + throw new Error(`digestInto() expects output buffer of length at least ${min}`); + } +} +exports.output = output; +const assert = { number, bool, bytes, hash, exists, output }; +exports["default"] = assert; +//# sourceMappingURL=_assert.js.map + +/***/ }), + +/***/ 90915: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SHA2 = void 0; +const _assert_js_1 = __webpack_require__(67557); +const utils_js_1 = __webpack_require__(99175); +// Polyfill for Safari 14 +function setBigUint64(view, byteOffset, value, isLE) { + if (typeof view.setBigUint64 === 'function') + return view.setBigUint64(byteOffset, value, isLE); + const _32n = BigInt(32); + const _u32_max = BigInt(0xffffffff); + const wh = Number((value >> _32n) & _u32_max); + const wl = Number(value & _u32_max); + const h = isLE ? 4 : 0; + const l = isLE ? 
0 : 4; + view.setUint32(byteOffset + h, wh, isLE); + view.setUint32(byteOffset + l, wl, isLE); +} +// Base SHA2 class (RFC 6234) +class SHA2 extends utils_js_1.Hash { + constructor(blockLen, outputLen, padOffset, isLE) { + super(); + this.blockLen = blockLen; + this.outputLen = outputLen; + this.padOffset = padOffset; + this.isLE = isLE; + this.finished = false; + this.length = 0; + this.pos = 0; + this.destroyed = false; + this.buffer = new Uint8Array(blockLen); + this.view = (0, utils_js_1.createView)(this.buffer); + } + update(data) { + (0, _assert_js_1.exists)(this); + const { view, buffer, blockLen } = this; + data = (0, utils_js_1.toBytes)(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + // Fast path: we have at least one block in input, cast it to view and process + if (take === blockLen) { + const dataView = (0, utils_js_1.createView)(data); + for (; blockLen <= len - pos; pos += blockLen) + this.process(dataView, pos); + continue; + } + buffer.set(data.subarray(pos, pos + take), this.pos); + this.pos += take; + pos += take; + if (this.pos === blockLen) { + this.process(view, 0); + this.pos = 0; + } + } + this.length += data.length; + this.roundClean(); + return this; + } + digestInto(out) { + (0, _assert_js_1.exists)(this); + (0, _assert_js_1.output)(out, this); + this.finished = true; + // Padding + // We can avoid allocation of buffer for padding completely if it + // was previously not allocated here. But it won't change performance. 
+ const { buffer, view, blockLen, isLE } = this; + let { pos } = this; + // append the bit '1' to the message + buffer[pos++] = 0b10000000; + this.buffer.subarray(pos).fill(0); + // we have less than padOffset left in buffer, so we cannot put length in current block, need process it and pad again + if (this.padOffset > blockLen - pos) { + this.process(view, 0); + pos = 0; + } + // Pad until full block byte with zeros + for (let i = pos; i < blockLen; i++) + buffer[i] = 0; + // Note: sha512 requires length to be 128bit integer, but length in JS will overflow before that + // You need to write around 2 exabytes (u64_max / 8 / (1024**6)) for this to happen. + // So we just write lowest 64 bits of that value. + setBigUint64(view, blockLen - 8, BigInt(this.length * 8), isLE); + this.process(view, 0); + const oview = (0, utils_js_1.createView)(out); + const len = this.outputLen; + // NOTE: we do division by 4 later, which should be fused in single op with modulo by JIT + if (len % 4) + throw new Error('_sha2: outputLen should be aligned to 32bit'); + const outLen = len / 4; + const state = this.get(); + if (outLen > state.length) + throw new Error('_sha2: outputLen bigger than state'); + for (let i = 0; i < outLen; i++) + oview.setUint32(4 * i, state[i], isLE); + } + digest() { + const { buffer, outputLen } = this; + this.digestInto(buffer); + const res = buffer.slice(0, outputLen); + this.destroy(); + return res; + } + _cloneInto(to) { + to || (to = new this.constructor()); + to.set(...this.get()); + const { blockLen, buffer, length, finished, destroyed, pos } = this; + to.length = length; + to.pos = pos; + to.finished = finished; + to.destroyed = destroyed; + if (length % blockLen) + to.buffer.set(buffer); + return to; + } +} +exports.SHA2 = SHA2; +//# sourceMappingURL=_sha2.js.map + +/***/ }), + +/***/ 22318: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.add5L = 
exports.add5H = exports.add4H = exports.add4L = exports.add3H = exports.add3L = exports.add = exports.rotlBL = exports.rotlBH = exports.rotlSL = exports.rotlSH = exports.rotr32L = exports.rotr32H = exports.rotrBL = exports.rotrBH = exports.rotrSL = exports.rotrSH = exports.shrSL = exports.shrSH = exports.toBig = exports.split = exports.fromBig = void 0; +const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); +const _32n = /* @__PURE__ */ BigInt(32); +// We are not using BigUint64Array, because they are extremely slow as per 2022 +function fromBig(n, le = false) { + if (le) + return { h: Number(n & U32_MASK64), l: Number((n >> _32n) & U32_MASK64) }; + return { h: Number((n >> _32n) & U32_MASK64) | 0, l: Number(n & U32_MASK64) | 0 }; +} +exports.fromBig = fromBig; +function split(lst, le = false) { + let Ah = new Uint32Array(lst.length); + let Al = new Uint32Array(lst.length); + for (let i = 0; i < lst.length; i++) { + const { h, l } = fromBig(lst[i], le); + [Ah[i], Al[i]] = [h, l]; + } + return [Ah, Al]; +} +exports.split = split; +const toBig = (h, l) => (BigInt(h >>> 0) << _32n) | BigInt(l >>> 0); +exports.toBig = toBig; +// for Shift in [0, 32) +const shrSH = (h, _l, s) => h >>> s; +exports.shrSH = shrSH; +const shrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +exports.shrSL = shrSL; +// Right rotate for Shift in [1, 32) +const rotrSH = (h, l, s) => (h >>> s) | (l << (32 - s)); +exports.rotrSH = rotrSH; +const rotrSL = (h, l, s) => (h << (32 - s)) | (l >>> s); +exports.rotrSL = rotrSL; +// Right rotate for Shift in (32, 64), NOTE: 32 is special case. 
+const rotrBH = (h, l, s) => (h << (64 - s)) | (l >>> (s - 32)); +exports.rotrBH = rotrBH; +const rotrBL = (h, l, s) => (h >>> (s - 32)) | (l << (64 - s)); +exports.rotrBL = rotrBL; +// Right rotate for shift===32 (just swaps l&h) +const rotr32H = (_h, l) => l; +exports.rotr32H = rotr32H; +const rotr32L = (h, _l) => h; +exports.rotr32L = rotr32L; +// Left rotate for Shift in [1, 32) +const rotlSH = (h, l, s) => (h << s) | (l >>> (32 - s)); +exports.rotlSH = rotlSH; +const rotlSL = (h, l, s) => (l << s) | (h >>> (32 - s)); +exports.rotlSL = rotlSL; +// Left rotate for Shift in (32, 64), NOTE: 32 is special case. +const rotlBH = (h, l, s) => (l << (s - 32)) | (h >>> (64 - s)); +exports.rotlBH = rotlBH; +const rotlBL = (h, l, s) => (h << (s - 32)) | (l >>> (64 - s)); +exports.rotlBL = rotlBL; +// JS uses 32-bit signed integers for bitwise operations which means we cannot +// simple take carry out of low bit sum by shift, we need to use division. +function add(Ah, Al, Bh, Bl) { + const l = (Al >>> 0) + (Bl >>> 0); + return { h: (Ah + Bh + ((l / 2 ** 32) | 0)) | 0, l: l | 0 }; +} +exports.add = add; +// Addition with more than 2 elements +const add3L = (Al, Bl, Cl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0); +exports.add3L = add3L; +const add3H = (low, Ah, Bh, Ch) => (Ah + Bh + Ch + ((low / 2 ** 32) | 0)) | 0; +exports.add3H = add3H; +const add4L = (Al, Bl, Cl, Dl) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0); +exports.add4L = add4L; +const add4H = (low, Ah, Bh, Ch, Dh) => (Ah + Bh + Ch + Dh + ((low / 2 ** 32) | 0)) | 0; +exports.add4H = add4H; +const add5L = (Al, Bl, Cl, Dl, El) => (Al >>> 0) + (Bl >>> 0) + (Cl >>> 0) + (Dl >>> 0) + (El >>> 0); +exports.add5L = add5L; +const add5H = (low, Ah, Bh, Ch, Dh, Eh) => (Ah + Bh + Ch + Dh + Eh + ((low / 2 ** 32) | 0)) | 0; +exports.add5H = add5H; +// prettier-ignore +const u64 = { + fromBig, split, toBig, + shrSH, shrSL, + rotrSH, rotrSL, rotrBH, rotrBL, + rotr32H, rotr32L, + rotlSH, rotlSL, rotlBH, rotlBL, + add, add3L, 
add3H, add4L, add4H, add5H, add5L, +}; +exports["default"] = u64; +//# sourceMappingURL=_u64.js.map + +/***/ }), + +/***/ 25145: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.crypto = void 0; +exports.crypto = typeof globalThis === 'object' && 'crypto' in globalThis ? globalThis.crypto : undefined; +//# sourceMappingURL=crypto.js.map + +/***/ }), + +/***/ 39615: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hmac = exports.HMAC = void 0; +const _assert_js_1 = __webpack_require__(67557); +const utils_js_1 = __webpack_require__(99175); +// HMAC (RFC 2104) +class HMAC extends utils_js_1.Hash { + constructor(hash, _key) { + super(); + this.finished = false; + this.destroyed = false; + (0, _assert_js_1.hash)(hash); + const key = (0, utils_js_1.toBytes)(_key); + this.iHash = hash.create(); + if (typeof this.iHash.update !== 'function') + throw new Error('Expected instance of class which extends utils.Hash'); + this.blockLen = this.iHash.blockLen; + this.outputLen = this.iHash.outputLen; + const blockLen = this.blockLen; + const pad = new Uint8Array(blockLen); + // blockLen can be bigger than outputLen + pad.set(key.length > blockLen ? 
hash.create().update(key).digest() : key); + for (let i = 0; i < pad.length; i++) + pad[i] ^= 0x36; + this.iHash.update(pad); + // By doing update (processing of first block) of outer hash here we can re-use it between multiple calls via clone + this.oHash = hash.create(); + // Undo internal XOR && apply outer XOR + for (let i = 0; i < pad.length; i++) + pad[i] ^= 0x36 ^ 0x5c; + this.oHash.update(pad); + pad.fill(0); + } + update(buf) { + (0, _assert_js_1.exists)(this); + this.iHash.update(buf); + return this; + } + digestInto(out) { + (0, _assert_js_1.exists)(this); + (0, _assert_js_1.bytes)(out, this.outputLen); + this.finished = true; + this.iHash.digestInto(out); + this.oHash.update(out); + this.oHash.digestInto(out); + this.destroy(); + } + digest() { + const out = new Uint8Array(this.oHash.outputLen); + this.digestInto(out); + return out; + } + _cloneInto(to) { + // Create new instance without calling constructor since key already in state and we don't know it. + to || (to = Object.create(Object.getPrototypeOf(this), {})); + const { oHash, iHash, finished, destroyed, blockLen, outputLen } = this; + to = to; + to.finished = finished; + to.destroyed = destroyed; + to.blockLen = blockLen; + to.outputLen = outputLen; + to.oHash = oHash._cloneInto(to.oHash); + to.iHash = iHash._cloneInto(to.iHash); + return to; + } + destroy() { + this.destroyed = true; + this.oHash.destroy(); + this.iHash.destroy(); + } +} +exports.HMAC = HMAC; +/** + * HMAC: RFC2104 message authentication code. + * @param hash - function that would be used e.g. 
sha256 + * @param key - message key + * @param message - message data + */ +const hmac = (hash, key, message) => new HMAC(hash, key).update(message).digest(); +exports.hmac = hmac; +exports.hmac.create = (hash, key) => new HMAC(hash, key); +//# sourceMappingURL=hmac.js.map + +/***/ }), + +/***/ 22623: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.sha224 = exports.sha256 = void 0; +const _sha2_js_1 = __webpack_require__(90915); +const utils_js_1 = __webpack_require__(99175); +// SHA2-256 need to try 2^128 hashes to execute birthday attack. +// BTC network is doing 2^67 hashes/sec as per early 2023. +// Choice: a ? b : c +const Chi = (a, b, c) => (a & b) ^ (~a & c); +// Majority function, true if any two inpust is true +const Maj = (a, b, c) => (a & b) ^ (a & c) ^ (b & c); +// Round constants: +// first 32 bits of the fractional parts of the cube roots of the first 64 primes 2..311) +// prettier-ignore +const SHA256_K = /* @__PURE__ */ new Uint32Array([ + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, + 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, + 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, + 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, + 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, + 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, + 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 +]); +// Initial state (first 32 bits of the fractional parts of the square roots of the first 8 primes 2..19): +// prettier-ignore 
+const IV = /* @__PURE__ */ new Uint32Array([ + 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19 +]); +// Temporary buffer, not used to store anything between runs +// Named this way because it matches specification. +const SHA256_W = /* @__PURE__ */ new Uint32Array(64); +class SHA256 extends _sha2_js_1.SHA2 { + constructor() { + super(64, 32, 8, false); + // We cannot use array here since array allows indexing by variable + // which means optimizer/compiler cannot use registers. + this.A = IV[0] | 0; + this.B = IV[1] | 0; + this.C = IV[2] | 0; + this.D = IV[3] | 0; + this.E = IV[4] | 0; + this.F = IV[5] | 0; + this.G = IV[6] | 0; + this.H = IV[7] | 0; + } + get() { + const { A, B, C, D, E, F, G, H } = this; + return [A, B, C, D, E, F, G, H]; + } + // prettier-ignore + set(A, B, C, D, E, F, G, H) { + this.A = A | 0; + this.B = B | 0; + this.C = C | 0; + this.D = D | 0; + this.E = E | 0; + this.F = F | 0; + this.G = G | 0; + this.H = H | 0; + } + process(view, offset) { + // Extend the first 16 words into the remaining 48 words w[16..63] of the message schedule array + for (let i = 0; i < 16; i++, offset += 4) + SHA256_W[i] = view.getUint32(offset, false); + for (let i = 16; i < 64; i++) { + const W15 = SHA256_W[i - 15]; + const W2 = SHA256_W[i - 2]; + const s0 = (0, utils_js_1.rotr)(W15, 7) ^ (0, utils_js_1.rotr)(W15, 18) ^ (W15 >>> 3); + const s1 = (0, utils_js_1.rotr)(W2, 17) ^ (0, utils_js_1.rotr)(W2, 19) ^ (W2 >>> 10); + SHA256_W[i] = (s1 + SHA256_W[i - 7] + s0 + SHA256_W[i - 16]) | 0; + } + // Compression function main loop, 64 rounds + let { A, B, C, D, E, F, G, H } = this; + for (let i = 0; i < 64; i++) { + const sigma1 = (0, utils_js_1.rotr)(E, 6) ^ (0, utils_js_1.rotr)(E, 11) ^ (0, utils_js_1.rotr)(E, 25); + const T1 = (H + sigma1 + Chi(E, F, G) + SHA256_K[i] + SHA256_W[i]) | 0; + const sigma0 = (0, utils_js_1.rotr)(A, 2) ^ (0, utils_js_1.rotr)(A, 13) ^ (0, utils_js_1.rotr)(A, 22); + const T2 = (sigma0 + Maj(A, 
B, C)) | 0; + H = G; + G = F; + F = E; + E = (D + T1) | 0; + D = C; + C = B; + B = A; + A = (T1 + T2) | 0; + } + // Add the compressed chunk to the current hash value + A = (A + this.A) | 0; + B = (B + this.B) | 0; + C = (C + this.C) | 0; + D = (D + this.D) | 0; + E = (E + this.E) | 0; + F = (F + this.F) | 0; + G = (G + this.G) | 0; + H = (H + this.H) | 0; + this.set(A, B, C, D, E, F, G, H); + } + roundClean() { + SHA256_W.fill(0); + } + destroy() { + this.set(0, 0, 0, 0, 0, 0, 0, 0); + this.buffer.fill(0); + } +} +// Constants from https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf +class SHA224 extends SHA256 { + constructor() { + super(); + this.A = 0xc1059ed8 | 0; + this.B = 0x367cd507 | 0; + this.C = 0x3070dd17 | 0; + this.D = 0xf70e5939 | 0; + this.E = 0xffc00b31 | 0; + this.F = 0x68581511 | 0; + this.G = 0x64f98fa7 | 0; + this.H = 0xbefa4fa4 | 0; + this.outputLen = 28; + } +} +/** + * SHA2-256 hash function + * @param message - data that would be hashed + */ +exports.sha256 = (0, utils_js_1.wrapConstructor)(() => new SHA256()); +exports.sha224 = (0, utils_js_1.wrapConstructor)(() => new SHA224()); +//# sourceMappingURL=sha256.js.map + +/***/ }), + +/***/ 32955: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.shake256 = exports.shake128 = exports.keccak_512 = exports.keccak_384 = exports.keccak_256 = exports.keccak_224 = exports.sha3_512 = exports.sha3_384 = exports.sha3_256 = exports.sha3_224 = exports.Keccak = exports.keccakP = void 0; +const _assert_js_1 = __webpack_require__(67557); +const _u64_js_1 = __webpack_require__(22318); +const utils_js_1 = __webpack_require__(99175); +// SHA3 (keccak) is based on a new design: basically, the internal state is bigger than output size. +// It's called a sponge function. 
+// Various per round constants calculations +const [SHA3_PI, SHA3_ROTL, _SHA3_IOTA] = [[], [], []]; +const _0n = /* @__PURE__ */ BigInt(0); +const _1n = /* @__PURE__ */ BigInt(1); +const _2n = /* @__PURE__ */ BigInt(2); +const _7n = /* @__PURE__ */ BigInt(7); +const _256n = /* @__PURE__ */ BigInt(256); +const _0x71n = /* @__PURE__ */ BigInt(0x71); +for (let round = 0, R = _1n, x = 1, y = 0; round < 24; round++) { + // Pi + [x, y] = [y, (2 * x + 3 * y) % 5]; + SHA3_PI.push(2 * (5 * y + x)); + // Rotational + SHA3_ROTL.push((((round + 1) * (round + 2)) / 2) % 64); + // Iota + let t = _0n; + for (let j = 0; j < 7; j++) { + R = ((R << _1n) ^ ((R >> _7n) * _0x71n)) % _256n; + if (R & _2n) + t ^= _1n << ((_1n << /* @__PURE__ */ BigInt(j)) - _1n); + } + _SHA3_IOTA.push(t); +} +const [SHA3_IOTA_H, SHA3_IOTA_L] = /* @__PURE__ */ (0, _u64_js_1.split)(_SHA3_IOTA, true); +// Left rotation (without 0, 32, 64) +const rotlH = (h, l, s) => (s > 32 ? (0, _u64_js_1.rotlBH)(h, l, s) : (0, _u64_js_1.rotlSH)(h, l, s)); +const rotlL = (h, l, s) => (s > 32 ? 
(0, _u64_js_1.rotlBL)(h, l, s) : (0, _u64_js_1.rotlSL)(h, l, s)); +// Same as keccakf1600, but allows to skip some rounds +function keccakP(s, rounds = 24) { + const B = new Uint32Array(5 * 2); + // NOTE: all indices are x2 since we store state as u32 instead of u64 (bigints to slow in js) + for (let round = 24 - rounds; round < 24; round++) { + // Theta θ + for (let x = 0; x < 10; x++) + B[x] = s[x] ^ s[x + 10] ^ s[x + 20] ^ s[x + 30] ^ s[x + 40]; + for (let x = 0; x < 10; x += 2) { + const idx1 = (x + 8) % 10; + const idx0 = (x + 2) % 10; + const B0 = B[idx0]; + const B1 = B[idx0 + 1]; + const Th = rotlH(B0, B1, 1) ^ B[idx1]; + const Tl = rotlL(B0, B1, 1) ^ B[idx1 + 1]; + for (let y = 0; y < 50; y += 10) { + s[x + y] ^= Th; + s[x + y + 1] ^= Tl; + } + } + // Rho (ρ) and Pi (π) + let curH = s[2]; + let curL = s[3]; + for (let t = 0; t < 24; t++) { + const shift = SHA3_ROTL[t]; + const Th = rotlH(curH, curL, shift); + const Tl = rotlL(curH, curL, shift); + const PI = SHA3_PI[t]; + curH = s[PI]; + curL = s[PI + 1]; + s[PI] = Th; + s[PI + 1] = Tl; + } + // Chi (χ) + for (let y = 0; y < 50; y += 10) { + for (let x = 0; x < 10; x++) + B[x] = s[y + x]; + for (let x = 0; x < 10; x++) + s[y + x] ^= ~B[(x + 2) % 10] & B[(x + 4) % 10]; + } + // Iota (ι) + s[0] ^= SHA3_IOTA_H[round]; + s[1] ^= SHA3_IOTA_L[round]; + } + B.fill(0); +} +exports.keccakP = keccakP; +class Keccak extends utils_js_1.Hash { + // NOTE: we accept arguments in bytes instead of bits here. + constructor(blockLen, suffix, outputLen, enableXOF = false, rounds = 24) { + super(); + this.blockLen = blockLen; + this.suffix = suffix; + this.outputLen = outputLen; + this.enableXOF = enableXOF; + this.rounds = rounds; + this.pos = 0; + this.posOut = 0; + this.finished = false; + this.destroyed = false; + // Can be passed from user as dkLen + (0, _assert_js_1.number)(outputLen); + // 1600 = 5x5 matrix of 64bit. 
1600 bits === 200 bytes + if (0 >= this.blockLen || this.blockLen >= 200) + throw new Error('Sha3 supports only keccak-f1600 function'); + this.state = new Uint8Array(200); + this.state32 = (0, utils_js_1.u32)(this.state); + } + keccak() { + keccakP(this.state32, this.rounds); + this.posOut = 0; + this.pos = 0; + } + update(data) { + (0, _assert_js_1.exists)(this); + const { blockLen, state } = this; + data = (0, utils_js_1.toBytes)(data); + const len = data.length; + for (let pos = 0; pos < len;) { + const take = Math.min(blockLen - this.pos, len - pos); + for (let i = 0; i < take; i++) + state[this.pos++] ^= data[pos++]; + if (this.pos === blockLen) + this.keccak(); + } + return this; + } + finish() { + if (this.finished) + return; + this.finished = true; + const { state, suffix, pos, blockLen } = this; + // Do the padding + state[pos] ^= suffix; + if ((suffix & 0x80) !== 0 && pos === blockLen - 1) + this.keccak(); + state[blockLen - 1] ^= 0x80; + this.keccak(); + } + writeInto(out) { + (0, _assert_js_1.exists)(this, false); + (0, _assert_js_1.bytes)(out); + this.finish(); + const bufferOut = this.state; + const { blockLen } = this; + for (let pos = 0, len = out.length; pos < len;) { + if (this.posOut >= blockLen) + this.keccak(); + const take = Math.min(blockLen - this.posOut, len - pos); + out.set(bufferOut.subarray(this.posOut, this.posOut + take), pos); + this.posOut += take; + pos += take; + } + return out; + } + xofInto(out) { + // Sha3/Keccak usage with XOF is probably mistake, only SHAKE instances can do XOF + if (!this.enableXOF) + throw new Error('XOF is not possible for this instance'); + return this.writeInto(out); + } + xof(bytes) { + (0, _assert_js_1.number)(bytes); + return this.xofInto(new Uint8Array(bytes)); + } + digestInto(out) { + (0, _assert_js_1.output)(out, this); + if (this.finished) + throw new Error('digest() was already called'); + this.writeInto(out); + this.destroy(); + return out; + } + digest() { + return this.digestInto(new 
Uint8Array(this.outputLen)); + } + destroy() { + this.destroyed = true; + this.state.fill(0); + } + _cloneInto(to) { + const { blockLen, suffix, outputLen, rounds, enableXOF } = this; + to || (to = new Keccak(blockLen, suffix, outputLen, enableXOF, rounds)); + to.state32.set(this.state32); + to.pos = this.pos; + to.posOut = this.posOut; + to.finished = this.finished; + to.rounds = rounds; + // Suffix can change in cSHAKE + to.suffix = suffix; + to.outputLen = outputLen; + to.enableXOF = enableXOF; + to.destroyed = this.destroyed; + return to; + } +} +exports.Keccak = Keccak; +const gen = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapConstructor)(() => new Keccak(blockLen, suffix, outputLen)); +exports.sha3_224 = gen(0x06, 144, 224 / 8); +/** + * SHA3-256 hash function + * @param message - that would be hashed + */ +exports.sha3_256 = gen(0x06, 136, 256 / 8); +exports.sha3_384 = gen(0x06, 104, 384 / 8); +exports.sha3_512 = gen(0x06, 72, 512 / 8); +exports.keccak_224 = gen(0x01, 144, 224 / 8); +/** + * keccak-256 hash function. Different from SHA3-256. + * @param message - that would be hashed + */ +exports.keccak_256 = gen(0x01, 136, 256 / 8); +exports.keccak_384 = gen(0x01, 104, 384 / 8); +exports.keccak_512 = gen(0x01, 72, 512 / 8); +const genShake = (suffix, blockLen, outputLen) => (0, utils_js_1.wrapXOFConstructorWithOpts)((opts = {}) => new Keccak(blockLen, suffix, opts.dkLen === undefined ? outputLen : opts.dkLen, true)); +exports.shake128 = genShake(0x1f, 168, 128 / 8); +exports.shake256 = genShake(0x1f, 136, 256 / 8); +//# sourceMappingURL=sha3.js.map + +/***/ }), + +/***/ 99175: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +/*! 
noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.randomBytes = exports.wrapXOFConstructorWithOpts = exports.wrapConstructorWithOpts = exports.wrapConstructor = exports.checkOpts = exports.Hash = exports.concatBytes = exports.toBytes = exports.utf8ToBytes = exports.asyncLoop = exports.nextTick = exports.hexToBytes = exports.bytesToHex = exports.isLE = exports.rotr = exports.createView = exports.u32 = exports.u8 = void 0; +// We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. +// node.js versions earlier than v19 don't declare it in global scope. +// For node.js, package.json#exports field mapping rewrites import +// from `crypto` to `cryptoNode`, which imports native module. +// Makes the utils un-importable in browsers without a bundler. +// Once node.js 18 is deprecated (2025-04-30), we can just drop the import. +const crypto_1 = __webpack_require__(25145); +// Cast array to different type +const u8 = (arr) => new Uint8Array(arr.buffer, arr.byteOffset, arr.byteLength); +exports.u8 = u8; +const u32 = (arr) => new Uint32Array(arr.buffer, arr.byteOffset, Math.floor(arr.byteLength / 4)); +exports.u32 = u32; +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +// Cast array to view +const createView = (arr) => new DataView(arr.buffer, arr.byteOffset, arr.byteLength); +exports.createView = createView; +// The rotate right (circular right shift) operation for uint32 +const rotr = (word, shift) => (word << (32 - shift)) | (word >>> shift); +exports.rotr = rotr; +// big-endian hardware is rare. Just in case someone still decides to run hashes: +// early-throw an error because we don't support BE yet. +// Other libraries would silently corrupt the data instead of throwing an error, +// when they don't support it. 
+exports.isLE = new Uint8Array(new Uint32Array([0x11223344]).buffer)[0] === 0x44; +if (!exports.isLE) + throw new Error('Non little-endian hardware is not supported'); +// Array where index 0xf0 (240) is mapped to string 'f0' +const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(16).padStart(2, '0')); +/** + * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' + */ +function bytesToHex(bytes) { + if (!isBytes(bytes)) + throw new Error('Uint8Array expected'); + // pre-caching improves the speed 6x + let hex = ''; + for (let i = 0; i < bytes.length; i++) { + hex += hexes[bytes[i]]; + } + return hex; +} +exports.bytesToHex = bytesToHex; +// We use optimized technique to convert hex string to byte array +const asciis = { _0: 48, _9: 57, _A: 65, _F: 70, _a: 97, _f: 102 }; +function asciiToBase16(char) { + if (char >= asciis._0 && char <= asciis._9) + return char - asciis._0; + if (char >= asciis._A && char <= asciis._F) + return char - (asciis._A - 10); + if (char >= asciis._a && char <= asciis._f) + return char - (asciis._a - 10); + return; +} +/** + * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) + */ +function hexToBytes(hex) { + if (typeof hex !== 'string') + throw new Error('hex string expected, got ' + typeof hex); + const hl = hex.length; + const al = hl / 2; + if (hl % 2) + throw new Error('padded hex string expected, got unpadded hex of length ' + hl); + const array = new Uint8Array(al); + for (let ai = 0, hi = 0; ai < al; ai++, hi += 2) { + const n1 = asciiToBase16(hex.charCodeAt(hi)); + const n2 = asciiToBase16(hex.charCodeAt(hi + 1)); + if (n1 === undefined || n2 === undefined) { + const char = hex[hi] + hex[hi + 1]; + throw new Error('hex string expected, got non-hex character "' + char + '" at index ' + hi); + } + array[ai] = n1 * 16 + n2; + } + return array; +} +exports.hexToBytes = hexToBytes; +// There is no setImmediate in browser and setTimeout is slow. 
+// call of async fn will return Promise, which will be fullfiled only on +// next scheduler queue processing step and this is exactly what we need. +const nextTick = async () => { }; +exports.nextTick = nextTick; +// Returns control to thread each 'tick' ms to avoid blocking +async function asyncLoop(iters, tick, cb) { + let ts = Date.now(); + for (let i = 0; i < iters; i++) { + cb(i); + // Date.now() is not monotonic, so in case if clock goes backwards we return return control too + const diff = Date.now() - ts; + if (diff >= 0 && diff < tick) + continue; + await (0, exports.nextTick)(); + ts += diff; + } +} +exports.asyncLoop = asyncLoop; +/** + * @example utf8ToBytes('abc') // new Uint8Array([97, 98, 99]) + */ +function utf8ToBytes(str) { + if (typeof str !== 'string') + throw new Error(`utf8ToBytes expected string, got ${typeof str}`); + return new Uint8Array(new TextEncoder().encode(str)); // https://bugzil.la/1681809 +} +exports.utf8ToBytes = utf8ToBytes; +/** + * Normalizes (non-hex) string or Uint8Array to Uint8Array. + * Warning: when Uint8Array is passed, it would NOT get copied. + * Keep in mind for future mutable operations. + */ +function toBytes(data) { + if (typeof data === 'string') + data = utf8ToBytes(data); + if (!isBytes(data)) + throw new Error(`expected Uint8Array, got ${typeof data}`); + return data; +} +exports.toBytes = toBytes; +/** + * Copies several Uint8Arrays into one. 
+ */ +function concatBytes(...arrays) { + let sum = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + if (!isBytes(a)) + throw new Error('Uint8Array expected'); + sum += a.length; + } + const res = new Uint8Array(sum); + for (let i = 0, pad = 0; i < arrays.length; i++) { + const a = arrays[i]; + res.set(a, pad); + pad += a.length; + } + return res; +} +exports.concatBytes = concatBytes; +// For runtime check if class implements interface +class Hash { + // Safe version that clones internal state + clone() { + return this._cloneInto(); + } +} +exports.Hash = Hash; +const toStr = {}.toString; +function checkOpts(defaults, opts) { + if (opts !== undefined && toStr.call(opts) !== '[object Object]') + throw new Error('Options should be object or undefined'); + const merged = Object.assign(defaults, opts); + return merged; +} +exports.checkOpts = checkOpts; +function wrapConstructor(hashCons) { + const hashC = (msg) => hashCons().update(toBytes(msg)).digest(); + const tmp = hashCons(); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = () => hashCons(); + return hashC; +} +exports.wrapConstructor = wrapConstructor; +function wrapConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; +} +exports.wrapConstructorWithOpts = wrapConstructorWithOpts; +function wrapXOFConstructorWithOpts(hashCons) { + const hashC = (msg, opts) => hashCons(opts).update(toBytes(msg)).digest(); + const tmp = hashCons({}); + hashC.outputLen = tmp.outputLen; + hashC.blockLen = tmp.blockLen; + hashC.create = (opts) => hashCons(opts); + return hashC; +} +exports.wrapXOFConstructorWithOpts = wrapXOFConstructorWithOpts; +/** + * Secure PRNG. Uses `crypto.getRandomValues`, which defers to OS. 
+ */ +function randomBytes(bytesLength = 32) { + if (crypto_1.crypto && typeof crypto_1.crypto.getRandomValues === 'function') { + return crypto_1.crypto.getRandomValues(new Uint8Array(bytesLength)); + } + throw new Error('crypto.getRandomValues must be defined'); +} +exports.randomBytes = randomBytes; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 63203: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/*! scure-base - MIT License (c) 2022 Paul Miller (paulmillr.com) */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.bytes = exports.stringToBytes = exports.str = exports.bytesToString = exports.hex = exports.utf8 = exports.bech32m = exports.bech32 = exports.base58check = exports.createBase58check = exports.base58xmr = exports.base58xrp = exports.base58flickr = exports.base58 = exports.base64urlnopad = exports.base64url = exports.base64nopad = exports.base64 = exports.base32crockford = exports.base32hex = exports.base32 = exports.base16 = exports.utils = exports.assertNumber = void 0; +// Utilities +/** + * @__NO_SIDE_EFFECTS__ + */ +function assertNumber(n) { + if (!Number.isSafeInteger(n)) + throw new Error(`Wrong integer: ${n}`); +} +exports.assertNumber = assertNumber; +function isBytes(a) { + return (a instanceof Uint8Array || + (a != null && typeof a === 'object' && a.constructor.name === 'Uint8Array')); +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function chain(...args) { + const id = (a) => a; + // Wrap call in closure so JIT can inline calls + const wrap = (a, b) => (c) => a(b(c)); + // Construct chain of args[-1].encode(args[-2].encode([...])) + const encode = args.map((x) => x.encode).reduceRight(wrap, id); + // Construct chain of args[0].decode(args[1].decode(...)) + const decode = args.map((x) => x.decode).reduce(wrap, id); + return { encode, decode }; +} +/** + * Encodes integer radix representation to array of strings using alphabet and back + * @__NO_SIDE_EFFECTS__ + */ +function alphabet(alphabet) { 
+ return { + encode: (digits) => { + if (!Array.isArray(digits) || (digits.length && typeof digits[0] !== 'number')) + throw new Error('alphabet.encode input should be an array of numbers'); + return digits.map((i) => { + assertNumber(i); + if (i < 0 || i >= alphabet.length) + throw new Error(`Digit index outside alphabet: ${i} (alphabet: ${alphabet.length})`); + return alphabet[i]; + }); + }, + decode: (input) => { + if (!Array.isArray(input) || (input.length && typeof input[0] !== 'string')) + throw new Error('alphabet.decode input should be array of strings'); + return input.map((letter) => { + if (typeof letter !== 'string') + throw new Error(`alphabet.decode: not string element=${letter}`); + const index = alphabet.indexOf(letter); + if (index === -1) + throw new Error(`Unknown letter: "${letter}". Allowed: ${alphabet}`); + return index; + }); + }, + }; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function join(separator = '') { + if (typeof separator !== 'string') + throw new Error('join separator should be string'); + return { + encode: (from) => { + if (!Array.isArray(from) || (from.length && typeof from[0] !== 'string')) + throw new Error('join.encode input should be array of strings'); + for (let i of from) + if (typeof i !== 'string') + throw new Error(`join.encode: non-string input=${i}`); + return from.join(separator); + }, + decode: (to) => { + if (typeof to !== 'string') + throw new Error('join.decode input should be string'); + return to.split(separator); + }, + }; +} +/** + * Pad strings array so it has integer number of bits + * @__NO_SIDE_EFFECTS__ + */ +function padding(bits, chr = '=') { + assertNumber(bits); + if (typeof chr !== 'string') + throw new Error('padding chr should be string'); + return { + encode(data) { + if (!Array.isArray(data) || (data.length && typeof data[0] !== 'string')) + throw new Error('padding.encode input should be array of strings'); + for (let i of data) + if (typeof i !== 'string') + throw new Error(`padding.encode: 
non-string input=${i}`); + while ((data.length * bits) % 8) + data.push(chr); + return data; + }, + decode(input) { + if (!Array.isArray(input) || (input.length && typeof input[0] !== 'string')) + throw new Error('padding.encode input should be array of strings'); + for (let i of input) + if (typeof i !== 'string') + throw new Error(`padding.decode: non-string input=${i}`); + let end = input.length; + if ((end * bits) % 8) + throw new Error('Invalid padding: string should have whole number of bytes'); + for (; end > 0 && input[end - 1] === chr; end--) { + if (!(((end - 1) * bits) % 8)) + throw new Error('Invalid padding: string has too much padding'); + } + return input.slice(0, end); + }, + }; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function normalize(fn) { + if (typeof fn !== 'function') + throw new Error('normalize fn should be function'); + return { encode: (from) => from, decode: (to) => fn(to) }; +} +/** + * Slow: O(n^2) time complexity + * @__NO_SIDE_EFFECTS__ + */ +function convertRadix(data, from, to) { + // base 1 is impossible + if (from < 2) + throw new Error(`convertRadix: wrong from=${from}, base cannot be less than 2`); + if (to < 2) + throw new Error(`convertRadix: wrong to=${to}, base cannot be less than 2`); + if (!Array.isArray(data)) + throw new Error('convertRadix: data should be array'); + if (!data.length) + return []; + let pos = 0; + const res = []; + const digits = Array.from(data); + digits.forEach((d) => { + assertNumber(d); + if (d < 0 || d >= from) + throw new Error(`Wrong integer: ${d}`); + }); + while (true) { + let carry = 0; + let done = true; + for (let i = pos; i < digits.length; i++) { + const digit = digits[i]; + const digitBase = from * carry + digit; + if (!Number.isSafeInteger(digitBase) || + (from * carry) / from !== carry || + digitBase - digit !== from * carry) { + throw new Error('convertRadix: carry overflow'); + } + carry = digitBase % to; + const rounded = Math.floor(digitBase / to); + digits[i] = rounded; + if 
(!Number.isSafeInteger(rounded) || rounded * to + carry !== digitBase) + throw new Error('convertRadix: carry overflow'); + if (!done) + continue; + else if (!rounded) + pos = i; + else + done = false; + } + res.push(carry); + if (done) + break; + } + for (let i = 0; i < data.length - 1 && data[i] === 0; i++) + res.push(0); + return res.reverse(); +} +const gcd = /* @__NO_SIDE_EFFECTS__ */ (a, b) => (!b ? a : gcd(b, a % b)); +const radix2carry = /*@__NO_SIDE_EFFECTS__ */ (from, to) => from + (to - gcd(from, to)); +/** + * Implemented with numbers, because BigInt is 5x slower + * @__NO_SIDE_EFFECTS__ + */ +function convertRadix2(data, from, to, padding) { + if (!Array.isArray(data)) + throw new Error('convertRadix2: data should be array'); + if (from <= 0 || from > 32) + throw new Error(`convertRadix2: wrong from=${from}`); + if (to <= 0 || to > 32) + throw new Error(`convertRadix2: wrong to=${to}`); + if (radix2carry(from, to) > 32) { + throw new Error(`convertRadix2: carry overflow from=${from} to=${to} carryBits=${radix2carry(from, to)}`); + } + let carry = 0; + let pos = 0; // bitwise position in current element + const mask = 2 ** to - 1; + const res = []; + for (const n of data) { + assertNumber(n); + if (n >= 2 ** from) + throw new Error(`convertRadix2: invalid data word=${n} from=${from}`); + carry = (carry << from) | n; + if (pos + from > 32) + throw new Error(`convertRadix2: carry overflow pos=${pos} from=${from}`); + pos += from; + for (; pos >= to; pos -= to) + res.push(((carry >> (pos - to)) & mask) >>> 0); + carry &= 2 ** pos - 1; // clean carry, otherwise it will cause overflow + } + carry = (carry << (to - pos)) & mask; + if (!padding && pos >= from) + throw new Error('Excess padding'); + if (!padding && carry) + throw new Error(`Non-zero padding: ${carry}`); + if (padding && pos > 0) + res.push(carry >>> 0); + return res; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function radix(num) { + assertNumber(num); + return { + encode: (bytes) => { + if 
(!isBytes(bytes)) + throw new Error('radix.encode input should be Uint8Array'); + return convertRadix(Array.from(bytes), 2 ** 8, num); + }, + decode: (digits) => { + if (!Array.isArray(digits) || (digits.length && typeof digits[0] !== 'number')) + throw new Error('radix.decode input should be array of numbers'); + return Uint8Array.from(convertRadix(digits, num, 2 ** 8)); + }, + }; +} +/** + * If both bases are power of same number (like `2**8 <-> 2**64`), + * there is a linear algorithm. For now we have implementation for power-of-two bases only. + * @__NO_SIDE_EFFECTS__ + */ +function radix2(bits, revPadding = false) { + assertNumber(bits); + if (bits <= 0 || bits > 32) + throw new Error('radix2: bits should be in (0..32]'); + if (radix2carry(8, bits) > 32 || radix2carry(bits, 8) > 32) + throw new Error('radix2: carry overflow'); + return { + encode: (bytes) => { + if (!isBytes(bytes)) + throw new Error('radix2.encode input should be Uint8Array'); + return convertRadix2(Array.from(bytes), 8, bits, !revPadding); + }, + decode: (digits) => { + if (!Array.isArray(digits) || (digits.length && typeof digits[0] !== 'number')) + throw new Error('radix2.decode input should be array of numbers'); + return Uint8Array.from(convertRadix2(digits, bits, 8, revPadding)); + }, + }; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function unsafeWrapper(fn) { + if (typeof fn !== 'function') + throw new Error('unsafeWrapper fn should be function'); + return function (...args) { + try { + return fn.apply(null, args); + } + catch (e) { } + }; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function checksum(len, fn) { + assertNumber(len); + if (typeof fn !== 'function') + throw new Error('checksum fn should be function'); + return { + encode(data) { + if (!isBytes(data)) + throw new Error('checksum.encode: input should be Uint8Array'); + const checksum = fn(data).slice(0, len); + const res = new Uint8Array(data.length + len); + res.set(data); + res.set(checksum, data.length); + return res; + }, + 
decode(data) { + if (!isBytes(data)) + throw new Error('checksum.decode: input should be Uint8Array'); + const payload = data.slice(0, -len); + const newChecksum = fn(payload).slice(0, len); + const oldChecksum = data.slice(-len); + for (let i = 0; i < len; i++) + if (newChecksum[i] !== oldChecksum[i]) + throw new Error('Invalid checksum'); + return payload; + }, + }; +} +// prettier-ignore +exports.utils = { + alphabet, chain, checksum, convertRadix, convertRadix2, radix, radix2, join, padding, +}; +// RFC 4648 aka RFC 3548 +// --------------------- +exports.base16 = chain(radix2(4), alphabet('0123456789ABCDEF'), join('')); +exports.base32 = chain(radix2(5), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'), padding(5), join('')); +exports.base32hex = chain(radix2(5), alphabet('0123456789ABCDEFGHIJKLMNOPQRSTUV'), padding(5), join('')); +exports.base32crockford = chain(radix2(5), alphabet('0123456789ABCDEFGHJKMNPQRSTVWXYZ'), join(''), normalize((s) => s.toUpperCase().replace(/O/g, '0').replace(/[IL]/g, '1'))); +exports.base64 = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'), padding(6), join('')); +exports.base64nopad = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'), join('')); +exports.base64url = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'), padding(6), join('')); +exports.base64urlnopad = chain(radix2(6), alphabet('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'), join('')); +// base58 code +// ----------- +const genBase58 = (abc) => chain(radix(58), alphabet(abc), join('')); +exports.base58 = genBase58('123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'); +exports.base58flickr = genBase58('123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ'); +exports.base58xrp = genBase58('rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz'); +// xmr ver is done in 8-byte blocks (which equals 11 
chars in decoding). Last (non-full) block padded with '1' to size in XMR_BLOCK_LEN. +// Block encoding significantly reduces quadratic complexity of base58. +// Data len (index) -> encoded block len +const XMR_BLOCK_LEN = [0, 2, 3, 5, 6, 7, 9, 10, 11]; +exports.base58xmr = { + encode(data) { + let res = ''; + for (let i = 0; i < data.length; i += 8) { + const block = data.subarray(i, i + 8); + res += exports.base58.encode(block).padStart(XMR_BLOCK_LEN[block.length], '1'); + } + return res; + }, + decode(str) { + let res = []; + for (let i = 0; i < str.length; i += 11) { + const slice = str.slice(i, i + 11); + const blockLen = XMR_BLOCK_LEN.indexOf(slice.length); + const block = exports.base58.decode(slice); + for (let j = 0; j < block.length - blockLen; j++) { + if (block[j] !== 0) + throw new Error('base58xmr: wrong padding'); + } + res = res.concat(Array.from(block.slice(block.length - blockLen))); + } + return Uint8Array.from(res); + }, +}; +const createBase58check = (sha256) => chain(checksum(4, (data) => sha256(sha256(data))), exports.base58); +exports.createBase58check = createBase58check; +// legacy export, bad name +exports.base58check = exports.createBase58check; +const BECH_ALPHABET = /* @__PURE__ */ chain(alphabet('qpzry9x8gf2tvdw0s3jn54khce6mua7l'), join('')); +const POLYMOD_GENERATORS = [0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3]; +/** + * @__NO_SIDE_EFFECTS__ + */ +function bech32Polymod(pre) { + const b = pre >> 25; + let chk = (pre & 0x1ffffff) << 5; + for (let i = 0; i < POLYMOD_GENERATORS.length; i++) { + if (((b >> i) & 1) === 1) + chk ^= POLYMOD_GENERATORS[i]; + } + return chk; +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function bechChecksum(prefix, words, encodingConst = 1) { + const len = prefix.length; + let chk = 1; + for (let i = 0; i < len; i++) { + const c = prefix.charCodeAt(i); + if (c < 33 || c > 126) + throw new Error(`Invalid prefix (${prefix})`); + chk = bech32Polymod(chk) ^ (c >> 5); + } + chk = bech32Polymod(chk); + 
for (let i = 0; i < len; i++) + chk = bech32Polymod(chk) ^ (prefix.charCodeAt(i) & 0x1f); + for (let v of words) + chk = bech32Polymod(chk) ^ v; + for (let i = 0; i < 6; i++) + chk = bech32Polymod(chk); + chk ^= encodingConst; + return BECH_ALPHABET.encode(convertRadix2([chk % 2 ** 30], 30, 5, false)); +} +/** + * @__NO_SIDE_EFFECTS__ + */ +function genBech32(encoding) { + const ENCODING_CONST = encoding === 'bech32' ? 1 : 0x2bc830a3; + const _words = radix2(5); + const fromWords = _words.decode; + const toWords = _words.encode; + const fromWordsUnsafe = unsafeWrapper(fromWords); + function encode(prefix, words, limit = 90) { + if (typeof prefix !== 'string') + throw new Error(`bech32.encode prefix should be string, not ${typeof prefix}`); + if (!Array.isArray(words) || (words.length && typeof words[0] !== 'number')) + throw new Error(`bech32.encode words should be array of numbers, not ${typeof words}`); + if (prefix.length === 0) + throw new TypeError(`Invalid prefix length ${prefix.length}`); + const actualLength = prefix.length + 7 + words.length; + if (limit !== false && actualLength > limit) + throw new TypeError(`Length ${actualLength} exceeds limit ${limit}`); + const lowered = prefix.toLowerCase(); + const sum = bechChecksum(lowered, words, ENCODING_CONST); + return `${lowered}1${BECH_ALPHABET.encode(words)}${sum}`; + } + function decode(str, limit = 90) { + if (typeof str !== 'string') + throw new Error(`bech32.decode input should be string, not ${typeof str}`); + if (str.length < 8 || (limit !== false && str.length > limit)) + throw new TypeError(`Wrong string length: ${str.length} (${str}). 
Expected (8..${limit})`); + // don't allow mixed case + const lowered = str.toLowerCase(); + if (str !== lowered && str !== str.toUpperCase()) + throw new Error(`String must be lowercase or uppercase`); + const sepIndex = lowered.lastIndexOf('1'); + if (sepIndex === 0 || sepIndex === -1) + throw new Error(`Letter "1" must be present between prefix and data only`); + const prefix = lowered.slice(0, sepIndex); + const data = lowered.slice(sepIndex + 1); + if (data.length < 6) + throw new Error('Data must be at least 6 characters long'); + const words = BECH_ALPHABET.decode(data).slice(0, -6); + const sum = bechChecksum(prefix, words, ENCODING_CONST); + if (!data.endsWith(sum)) + throw new Error(`Invalid checksum in ${str}: expected "${sum}"`); + return { prefix, words }; + } + const decodeUnsafe = unsafeWrapper(decode); + function decodeToBytes(str) { + const { prefix, words } = decode(str, false); + return { prefix, words, bytes: fromWords(words) }; + } + return { encode, decode, decodeToBytes, decodeUnsafe, fromWords, fromWordsUnsafe, toWords }; +} +exports.bech32 = genBech32('bech32'); +exports.bech32m = genBech32('bech32m'); +exports.utf8 = { + encode: (data) => new TextDecoder().decode(data), + decode: (str) => new TextEncoder().encode(str), +}; +exports.hex = chain(radix2(4), alphabet('0123456789abcdef'), join(''), normalize((s) => { + if (typeof s !== 'string' || s.length % 2) + throw new TypeError(`hex.decode: expected string, got ${typeof s} with length ${s.length}`); + return s.toLowerCase(); +})); +// prettier-ignore +const CODERS = { + utf8: exports.utf8, hex: exports.hex, base16: exports.base16, base32: exports.base32, base64: exports.base64, base64url: exports.base64url, base58: exports.base58, base58xmr: exports.base58xmr +}; +const coderTypeError = 'Invalid encoding type. 
Available types: utf8, hex, base16, base32, base64, base64url, base58, base58xmr'; +const bytesToString = (type, bytes) => { + if (typeof type !== 'string' || !CODERS.hasOwnProperty(type)) + throw new TypeError(coderTypeError); + if (!isBytes(bytes)) + throw new TypeError('bytesToString() expects Uint8Array'); + return CODERS[type].encode(bytes); +}; +exports.bytesToString = bytesToString; +exports.str = exports.bytesToString; // as in python, but for bytes only +const stringToBytes = (type, str) => { + if (!CODERS.hasOwnProperty(type)) + throw new TypeError(coderTypeError); + if (typeof str !== 'string') + throw new TypeError('stringToBytes() expects string'); + return CODERS[type].decode(str); +}; +exports.stringToBytes = stringToBytes; +exports.bytes = exports.stringToBytes; +//# sourceMappingURL=index.js.map + +/***/ }), + /***/ 7736: /***/ ((__unused_webpack_module, exports) => { @@ -20183,6 +31352,245 @@ b2wasm.ready(function (err) { }) +/***/ }), + +/***/ 65403: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +var bloom = __webpack_require__(40710); +module.exports = bloom; + +/***/ }), + +/***/ 88217: +/***/ ((module) => { + +/** +* A simple bitview for Array buffer. +* @author: Joy Ghosh. +* @version: 0.0.1 +*/ + +var BitView = function(buffer){ + this.buffer = buffer; + this.unit8 = new Uint8Array(this.buffer); +} + +/** +* Returns the bit value at position 'index'. +*/ +BitView.prototype.get = function(index){ + var value = this.unit8[index >> 3]; + var offset = index & 0x7; + return ((value >> (7-offset)) & 1); +} + +/** +* Sets the bit value at specified position 'index'. +*/ +BitView.prototype.set = function(index){ + var offset = index & 0x7; + this.unit8[index >> 3] |= (0x80 >> offset); +} + +/** +* Clears the bit at position 'index'. +*/ +BitView.prototype.clear = function(index){ + var offset = index & 0x7; + this.unit8[index >> 3] &= ~(0x80 >> offset); +} + +/** +* Returns the byte length of this array buffer. 
+*/ +BitView.prototype.length = function(){ + return this.unit8.byteLength; +} + +/** +* Returns the array buffer. +*/ +BitView.prototype.view = function(){ + return this.unit8; +} + +module.exports = BitView; + + +/***/ }), + +/***/ 40710: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +/** +* Bloom filter. +* @author: Joy Ghosh +* @version: 0.0.1 +*/ + +var BitView = __webpack_require__(88217); +var fnv_1a = __webpack_require__(89897); +var one_at_a_time_hash = __webpack_require__(81397); + +//Constants. +const BITS_IN_BYTE = 8; +const FALSE_POSITIVE_TOLERANCE = 0.000001; + +/** +* Bloom filter object. +* n represents number of elements in this filter. +*/ +var BloomFilter = function(n, false_postive_tolerance = FALSE_POSITIVE_TOLERANCE){ + //Bits in Bloom filter. + this.m = Math.ceil((-2)*n*Math.log(false_postive_tolerance)); + //Number of hash functions. + this.k = Math.ceil(0.7*(this.m/n)); + + //Normalize size. + this.size = (this.m > BITS_IN_BYTE) ? (Math.ceil(this.m/BITS_IN_BYTE)) : 1; //default size is a byte. + + //Initialize bit array for filter. + this.bitview = new BitView(new ArrayBuffer(this.size)); +} + +//Generate hash value. +BloomFilter.prototype.calculateHash = function(x,m,i){ + //Double hash technique. + return ((fnv_1a(x) + (i*one_at_a_time_hash(x)))%m); +} + +//Looks for membership. +BloomFilter.prototype.test = function(data){ + var hash = data; + for(var i=0; i { + +/** +Fowler-Noll-Vo hash function. +@author: Joy Ghosh +@version: 0.0.1 +*/ + +//FNV constants. +const FNV_PRIME = 16777619; +const FNV_OFFSET_BASIS = 2166136261; + +/** +FNV hash function. (32-bit version) +FNV step 1: hash = hash XOR byte_of_data. +FNV step 2: hash = hash * FNV_Prime. +*/ +function fnv_1a(value){ + + var hash = FNV_OFFSET_BASIS; + for(var i=0; i>> 0; +} + +//FNV step 1:hash = hash XOR byte_of_data. +function fnv_xor(hash, byte_of_data){ + return (hash ^ byte_of_data); +} + +//FNV step 2: hash = hash * FNV_Prime. 
+function fnv_multiply(hash){ + hash += (hash << 1) + (hash << 4) + (hash << 7) + (hash << 8) + (hash << 24); + return hash; +} + +module.exports = fnv_1a; + +/***/ }), + +/***/ 81397: +/***/ ((module) => { + +/** +Jenkins one_at_a_time hash function. +@author: Joy Ghosh +@version: 0.0.1 +*/ + +/** +* Jenkins's one at a time hash function. +*/ +function one_at_a_time_hash(key){ + + var hash = 0; + for(var i=0;i> 6); + } + + hash += (hash << 3); + hash = hash ^ (hash >> 11); + hash += (hash << 15); + return hash; +} + +module.exports = one_at_a_time_hash; + /***/ }), /***/ 39404: @@ -28190,6 +39598,1041 @@ SafeBuffer.allocUnsafeSlow = function (size) { } +/***/ }), + +/***/ 5974: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +/* provided dependency */ var process = __webpack_require__(65606); + +/* eslint camelcase: "off" */ + +var assert = __webpack_require__(94148); + +var Zstream = __webpack_require__(44442); +var zlib_deflate = __webpack_require__(58411); +var zlib_inflate = __webpack_require__(71447); +var constants = __webpack_require__(19681); + +for (var key in constants) { + exports[key] = constants[key]; +} + +// zlib modes +exports.NONE = 0; +exports.DEFLATE = 1; +exports.INFLATE = 2; +exports.GZIP = 3; +exports.GUNZIP = 4; +exports.DEFLATERAW = 5; +exports.INFLATERAW = 6; +exports.UNZIP = 7; + +var GZIP_HEADER_ID1 = 0x1f; +var GZIP_HEADER_ID2 = 0x8b; + +/** + * Emulate Node's zlib C++ layer for use by the JS layer in index.js + */ +function Zlib(mode) { + if (typeof mode !== 'number' || mode < exports.DEFLATE || mode > exports.UNZIP) { + throw new TypeError('Bad argument'); + } + + this.dictionary = null; + this.err = 0; + this.flush = 0; + this.init_done = false; + this.level = 0; + this.memLevel = 0; + this.mode = mode; + this.strategy = 0; + this.windowBits = 0; + this.write_in_progress = false; + this.pending_close = false; + 
this.gzip_id_bytes_read = 0; +} + +Zlib.prototype.close = function () { + if (this.write_in_progress) { + this.pending_close = true; + return; + } + + this.pending_close = false; + + assert(this.init_done, 'close before init'); + assert(this.mode <= exports.UNZIP); + + if (this.mode === exports.DEFLATE || this.mode === exports.GZIP || this.mode === exports.DEFLATERAW) { + zlib_deflate.deflateEnd(this.strm); + } else if (this.mode === exports.INFLATE || this.mode === exports.GUNZIP || this.mode === exports.INFLATERAW || this.mode === exports.UNZIP) { + zlib_inflate.inflateEnd(this.strm); + } + + this.mode = exports.NONE; + + this.dictionary = null; +}; + +Zlib.prototype.write = function (flush, input, in_off, in_len, out, out_off, out_len) { + return this._write(true, flush, input, in_off, in_len, out, out_off, out_len); +}; + +Zlib.prototype.writeSync = function (flush, input, in_off, in_len, out, out_off, out_len) { + return this._write(false, flush, input, in_off, in_len, out, out_off, out_len); +}; + +Zlib.prototype._write = function (async, flush, input, in_off, in_len, out, out_off, out_len) { + assert.equal(arguments.length, 8); + + assert(this.init_done, 'write before init'); + assert(this.mode !== exports.NONE, 'already finalized'); + assert.equal(false, this.write_in_progress, 'write already in progress'); + assert.equal(false, this.pending_close, 'close is pending'); + + this.write_in_progress = true; + + assert.equal(false, flush === undefined, 'must provide flush value'); + + this.write_in_progress = true; + + if (flush !== exports.Z_NO_FLUSH && flush !== exports.Z_PARTIAL_FLUSH && flush !== exports.Z_SYNC_FLUSH && flush !== exports.Z_FULL_FLUSH && flush !== exports.Z_FINISH && flush !== exports.Z_BLOCK) { + throw new Error('Invalid flush value'); + } + + if (input == null) { + input = Buffer.alloc(0); + in_len = 0; + in_off = 0; + } + + this.strm.avail_in = in_len; + this.strm.input = input; + this.strm.next_in = in_off; + this.strm.avail_out = 
out_len; + this.strm.output = out; + this.strm.next_out = out_off; + this.flush = flush; + + if (!async) { + // sync version + this._process(); + + if (this._checkError()) { + return this._afterSync(); + } + return; + } + + // async version + var self = this; + process.nextTick(function () { + self._process(); + self._after(); + }); + + return this; +}; + +Zlib.prototype._afterSync = function () { + var avail_out = this.strm.avail_out; + var avail_in = this.strm.avail_in; + + this.write_in_progress = false; + + return [avail_in, avail_out]; +}; + +Zlib.prototype._process = function () { + var next_expected_header_byte = null; + + // If the avail_out is left at 0, then it means that it ran out + // of room. If there was avail_out left over, then it means + // that all of the input was consumed. + switch (this.mode) { + case exports.DEFLATE: + case exports.GZIP: + case exports.DEFLATERAW: + this.err = zlib_deflate.deflate(this.strm, this.flush); + break; + case exports.UNZIP: + if (this.strm.avail_in > 0) { + next_expected_header_byte = this.strm.next_in; + } + + switch (this.gzip_id_bytes_read) { + case 0: + if (next_expected_header_byte === null) { + break; + } + + if (this.strm.input[next_expected_header_byte] === GZIP_HEADER_ID1) { + this.gzip_id_bytes_read = 1; + next_expected_header_byte++; + + if (this.strm.avail_in === 1) { + // The only available byte was already read. + break; + } + } else { + this.mode = exports.INFLATE; + break; + } + + // fallthrough + case 1: + if (next_expected_header_byte === null) { + break; + } + + if (this.strm.input[next_expected_header_byte] === GZIP_HEADER_ID2) { + this.gzip_id_bytes_read = 2; + this.mode = exports.GUNZIP; + } else { + // There is no actual difference between INFLATE and INFLATERAW + // (after initialization). 
+ this.mode = exports.INFLATE; + } + + break; + default: + throw new Error('invalid number of gzip magic number bytes read'); + } + + // fallthrough + case exports.INFLATE: + case exports.GUNZIP: + case exports.INFLATERAW: + this.err = zlib_inflate.inflate(this.strm, this.flush + + // If data was encoded with dictionary + );if (this.err === exports.Z_NEED_DICT && this.dictionary) { + // Load it + this.err = zlib_inflate.inflateSetDictionary(this.strm, this.dictionary); + if (this.err === exports.Z_OK) { + // And try to decode again + this.err = zlib_inflate.inflate(this.strm, this.flush); + } else if (this.err === exports.Z_DATA_ERROR) { + // Both inflateSetDictionary() and inflate() return Z_DATA_ERROR. + // Make it possible for After() to tell a bad dictionary from bad + // input. + this.err = exports.Z_NEED_DICT; + } + } + while (this.strm.avail_in > 0 && this.mode === exports.GUNZIP && this.err === exports.Z_STREAM_END && this.strm.next_in[0] !== 0x00) { + // Bytes remain in input buffer. Perhaps this is another compressed + // member in the same archive, or just trailing garbage. + // Trailing zero bytes are okay, though, since they are frequently + // used for padding. + + this.reset(); + this.err = zlib_inflate.inflate(this.strm, this.flush); + } + break; + default: + throw new Error('Unknown mode ' + this.mode); + } +}; + +Zlib.prototype._checkError = function () { + // Acceptable error states depend on the type of zlib stream. + switch (this.err) { + case exports.Z_OK: + case exports.Z_BUF_ERROR: + if (this.strm.avail_out !== 0 && this.flush === exports.Z_FINISH) { + this._error('unexpected end of file'); + return false; + } + break; + case exports.Z_STREAM_END: + // normal statuses, not fatal + break; + case exports.Z_NEED_DICT: + if (this.dictionary == null) { + this._error('Missing dictionary'); + } else { + this._error('Bad dictionary'); + } + return false; + default: + // something else. 
+ this._error('Zlib error'); + return false; + } + + return true; +}; + +Zlib.prototype._after = function () { + if (!this._checkError()) { + return; + } + + var avail_out = this.strm.avail_out; + var avail_in = this.strm.avail_in; + + this.write_in_progress = false; + + // call the write() cb + this.callback(avail_in, avail_out); + + if (this.pending_close) { + this.close(); + } +}; + +Zlib.prototype._error = function (message) { + if (this.strm.msg) { + message = this.strm.msg; + } + this.onerror(message, this.err + + // no hope of rescue. + );this.write_in_progress = false; + if (this.pending_close) { + this.close(); + } +}; + +Zlib.prototype.init = function (windowBits, level, memLevel, strategy, dictionary) { + assert(arguments.length === 4 || arguments.length === 5, 'init(windowBits, level, memLevel, strategy, [dictionary])'); + + assert(windowBits >= 8 && windowBits <= 15, 'invalid windowBits'); + assert(level >= -1 && level <= 9, 'invalid compression level'); + + assert(memLevel >= 1 && memLevel <= 9, 'invalid memlevel'); + + assert(strategy === exports.Z_FILTERED || strategy === exports.Z_HUFFMAN_ONLY || strategy === exports.Z_RLE || strategy === exports.Z_FIXED || strategy === exports.Z_DEFAULT_STRATEGY, 'invalid strategy'); + + this._init(level, windowBits, memLevel, strategy, dictionary); + this._setDictionary(); +}; + +Zlib.prototype.params = function () { + throw new Error('deflateParams Not supported'); +}; + +Zlib.prototype.reset = function () { + this._reset(); + this._setDictionary(); +}; + +Zlib.prototype._init = function (level, windowBits, memLevel, strategy, dictionary) { + this.level = level; + this.windowBits = windowBits; + this.memLevel = memLevel; + this.strategy = strategy; + + this.flush = exports.Z_NO_FLUSH; + + this.err = exports.Z_OK; + + if (this.mode === exports.GZIP || this.mode === exports.GUNZIP) { + this.windowBits += 16; + } + + if (this.mode === exports.UNZIP) { + this.windowBits += 32; + } + + if (this.mode === 
exports.DEFLATERAW || this.mode === exports.INFLATERAW) { + this.windowBits = -1 * this.windowBits; + } + + this.strm = new Zstream(); + + switch (this.mode) { + case exports.DEFLATE: + case exports.GZIP: + case exports.DEFLATERAW: + this.err = zlib_deflate.deflateInit2(this.strm, this.level, exports.Z_DEFLATED, this.windowBits, this.memLevel, this.strategy); + break; + case exports.INFLATE: + case exports.GUNZIP: + case exports.INFLATERAW: + case exports.UNZIP: + this.err = zlib_inflate.inflateInit2(this.strm, this.windowBits); + break; + default: + throw new Error('Unknown mode ' + this.mode); + } + + if (this.err !== exports.Z_OK) { + this._error('Init error'); + } + + this.dictionary = dictionary; + + this.write_in_progress = false; + this.init_done = true; +}; + +Zlib.prototype._setDictionary = function () { + if (this.dictionary == null) { + return; + } + + this.err = exports.Z_OK; + + switch (this.mode) { + case exports.DEFLATE: + case exports.DEFLATERAW: + this.err = zlib_deflate.deflateSetDictionary(this.strm, this.dictionary); + break; + default: + break; + } + + if (this.err !== exports.Z_OK) { + this._error('Failed to set dictionary'); + } +}; + +Zlib.prototype._reset = function () { + this.err = exports.Z_OK; + + switch (this.mode) { + case exports.DEFLATE: + case exports.DEFLATERAW: + case exports.GZIP: + this.err = zlib_deflate.deflateReset(this.strm); + break; + case exports.INFLATE: + case exports.INFLATERAW: + case exports.GUNZIP: + this.err = zlib_inflate.inflateReset(this.strm); + break; + default: + break; + } + + if (this.err !== exports.Z_OK) { + this._error('Failed to reset stream'); + } +}; + +exports.Zlib = Zlib; + +/***/ }), + +/***/ 78559: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var process = __webpack_require__(65606); + + +var Buffer = (__webpack_require__(48287).Buffer); +var Transform = (__webpack_require__(88310).Transform); +var binding = 
__webpack_require__(5974); +var util = __webpack_require__(40537); +var assert = (__webpack_require__(94148).ok); +var kMaxLength = (__webpack_require__(48287).kMaxLength); +var kRangeErrorMessage = 'Cannot create final Buffer. It would be larger ' + 'than 0x' + kMaxLength.toString(16) + ' bytes'; + +// zlib doesn't provide these, so kludge them in following the same +// const naming scheme zlib uses. +binding.Z_MIN_WINDOWBITS = 8; +binding.Z_MAX_WINDOWBITS = 15; +binding.Z_DEFAULT_WINDOWBITS = 15; + +// fewer than 64 bytes per chunk is stupid. +// technically it could work with as few as 8, but even 64 bytes +// is absurdly low. Usually a MB or more is best. +binding.Z_MIN_CHUNK = 64; +binding.Z_MAX_CHUNK = Infinity; +binding.Z_DEFAULT_CHUNK = 16 * 1024; + +binding.Z_MIN_MEMLEVEL = 1; +binding.Z_MAX_MEMLEVEL = 9; +binding.Z_DEFAULT_MEMLEVEL = 8; + +binding.Z_MIN_LEVEL = -1; +binding.Z_MAX_LEVEL = 9; +binding.Z_DEFAULT_LEVEL = binding.Z_DEFAULT_COMPRESSION; + +// expose all the zlib constants +var bkeys = Object.keys(binding); +for (var bk = 0; bk < bkeys.length; bk++) { + var bkey = bkeys[bk]; + if (bkey.match(/^Z/)) { + Object.defineProperty(exports, bkey, { + enumerable: true, value: binding[bkey], writable: false + }); + } +} + +// translation table for return codes. 
+var codes = { + Z_OK: binding.Z_OK, + Z_STREAM_END: binding.Z_STREAM_END, + Z_NEED_DICT: binding.Z_NEED_DICT, + Z_ERRNO: binding.Z_ERRNO, + Z_STREAM_ERROR: binding.Z_STREAM_ERROR, + Z_DATA_ERROR: binding.Z_DATA_ERROR, + Z_MEM_ERROR: binding.Z_MEM_ERROR, + Z_BUF_ERROR: binding.Z_BUF_ERROR, + Z_VERSION_ERROR: binding.Z_VERSION_ERROR +}; + +var ckeys = Object.keys(codes); +for (var ck = 0; ck < ckeys.length; ck++) { + var ckey = ckeys[ck]; + codes[codes[ckey]] = ckey; +} + +Object.defineProperty(exports, "codes", ({ + enumerable: true, value: Object.freeze(codes), writable: false +})); + +exports.Deflate = Deflate; +exports.Inflate = Inflate; +exports.Gzip = Gzip; +exports.Gunzip = Gunzip; +exports.DeflateRaw = DeflateRaw; +exports.InflateRaw = InflateRaw; +exports.Unzip = Unzip; + +exports.createDeflate = function (o) { + return new Deflate(o); +}; + +exports.createInflate = function (o) { + return new Inflate(o); +}; + +exports.createDeflateRaw = function (o) { + return new DeflateRaw(o); +}; + +exports.createInflateRaw = function (o) { + return new InflateRaw(o); +}; + +exports.createGzip = function (o) { + return new Gzip(o); +}; + +exports.createGunzip = function (o) { + return new Gunzip(o); +}; + +exports.createUnzip = function (o) { + return new Unzip(o); +}; + +// Convenience methods. +// compress/decompress a string or buffer in one step. 
+exports.deflate = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new Deflate(opts), buffer, callback); +}; + +exports.deflateSync = function (buffer, opts) { + return zlibBufferSync(new Deflate(opts), buffer); +}; + +exports.gzip = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new Gzip(opts), buffer, callback); +}; + +exports.gzipSync = function (buffer, opts) { + return zlibBufferSync(new Gzip(opts), buffer); +}; + +exports.deflateRaw = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new DeflateRaw(opts), buffer, callback); +}; + +exports.deflateRawSync = function (buffer, opts) { + return zlibBufferSync(new DeflateRaw(opts), buffer); +}; + +exports.unzip = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new Unzip(opts), buffer, callback); +}; + +exports.unzipSync = function (buffer, opts) { + return zlibBufferSync(new Unzip(opts), buffer); +}; + +exports.inflate = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new Inflate(opts), buffer, callback); +}; + +exports.inflateSync = function (buffer, opts) { + return zlibBufferSync(new Inflate(opts), buffer); +}; + +exports.gunzip = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new Gunzip(opts), buffer, callback); +}; + +exports.gunzipSync = function (buffer, opts) { + return zlibBufferSync(new Gunzip(opts), buffer); +}; + +exports.inflateRaw = function (buffer, opts, callback) { + if (typeof opts === 'function') { + callback = opts; + opts = {}; + } + return zlibBuffer(new InflateRaw(opts), buffer, callback); +}; + +exports.inflateRawSync = 
function (buffer, opts) { + return zlibBufferSync(new InflateRaw(opts), buffer); +}; + +function zlibBuffer(engine, buffer, callback) { + var buffers = []; + var nread = 0; + + engine.on('error', onError); + engine.on('end', onEnd); + + engine.end(buffer); + flow(); + + function flow() { + var chunk; + while (null !== (chunk = engine.read())) { + buffers.push(chunk); + nread += chunk.length; + } + engine.once('readable', flow); + } + + function onError(err) { + engine.removeListener('end', onEnd); + engine.removeListener('readable', flow); + callback(err); + } + + function onEnd() { + var buf; + var err = null; + + if (nread >= kMaxLength) { + err = new RangeError(kRangeErrorMessage); + } else { + buf = Buffer.concat(buffers, nread); + } + + buffers = []; + engine.close(); + callback(err, buf); + } +} + +function zlibBufferSync(engine, buffer) { + if (typeof buffer === 'string') buffer = Buffer.from(buffer); + + if (!Buffer.isBuffer(buffer)) throw new TypeError('Not a string or buffer'); + + var flushFlag = engine._finishFlushFlag; + + return engine._processChunk(buffer, flushFlag); +} + +// generic zlib +// minimal 2-byte header +function Deflate(opts) { + if (!(this instanceof Deflate)) return new Deflate(opts); + Zlib.call(this, opts, binding.DEFLATE); +} + +function Inflate(opts) { + if (!(this instanceof Inflate)) return new Inflate(opts); + Zlib.call(this, opts, binding.INFLATE); +} + +// gzip - bigger header, same deflate compression +function Gzip(opts) { + if (!(this instanceof Gzip)) return new Gzip(opts); + Zlib.call(this, opts, binding.GZIP); +} + +function Gunzip(opts) { + if (!(this instanceof Gunzip)) return new Gunzip(opts); + Zlib.call(this, opts, binding.GUNZIP); +} + +// raw - no header +function DeflateRaw(opts) { + if (!(this instanceof DeflateRaw)) return new DeflateRaw(opts); + Zlib.call(this, opts, binding.DEFLATERAW); +} + +function InflateRaw(opts) { + if (!(this instanceof InflateRaw)) return new InflateRaw(opts); + Zlib.call(this, opts, 
binding.INFLATERAW); +} + +// auto-detect header. +function Unzip(opts) { + if (!(this instanceof Unzip)) return new Unzip(opts); + Zlib.call(this, opts, binding.UNZIP); +} + +function isValidFlushFlag(flag) { + return flag === binding.Z_NO_FLUSH || flag === binding.Z_PARTIAL_FLUSH || flag === binding.Z_SYNC_FLUSH || flag === binding.Z_FULL_FLUSH || flag === binding.Z_FINISH || flag === binding.Z_BLOCK; +} + +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. + +function Zlib(opts, mode) { + var _this = this; + + this._opts = opts = opts || {}; + this._chunkSize = opts.chunkSize || exports.Z_DEFAULT_CHUNK; + + Transform.call(this, opts); + + if (opts.flush && !isValidFlushFlag(opts.flush)) { + throw new Error('Invalid flush flag: ' + opts.flush); + } + if (opts.finishFlush && !isValidFlushFlag(opts.finishFlush)) { + throw new Error('Invalid flush flag: ' + opts.finishFlush); + } + + this._flushFlag = opts.flush || binding.Z_NO_FLUSH; + this._finishFlushFlag = typeof opts.finishFlush !== 'undefined' ? 
opts.finishFlush : binding.Z_FINISH; + + if (opts.chunkSize) { + if (opts.chunkSize < exports.Z_MIN_CHUNK || opts.chunkSize > exports.Z_MAX_CHUNK) { + throw new Error('Invalid chunk size: ' + opts.chunkSize); + } + } + + if (opts.windowBits) { + if (opts.windowBits < exports.Z_MIN_WINDOWBITS || opts.windowBits > exports.Z_MAX_WINDOWBITS) { + throw new Error('Invalid windowBits: ' + opts.windowBits); + } + } + + if (opts.level) { + if (opts.level < exports.Z_MIN_LEVEL || opts.level > exports.Z_MAX_LEVEL) { + throw new Error('Invalid compression level: ' + opts.level); + } + } + + if (opts.memLevel) { + if (opts.memLevel < exports.Z_MIN_MEMLEVEL || opts.memLevel > exports.Z_MAX_MEMLEVEL) { + throw new Error('Invalid memLevel: ' + opts.memLevel); + } + } + + if (opts.strategy) { + if (opts.strategy != exports.Z_FILTERED && opts.strategy != exports.Z_HUFFMAN_ONLY && opts.strategy != exports.Z_RLE && opts.strategy != exports.Z_FIXED && opts.strategy != exports.Z_DEFAULT_STRATEGY) { + throw new Error('Invalid strategy: ' + opts.strategy); + } + } + + if (opts.dictionary) { + if (!Buffer.isBuffer(opts.dictionary)) { + throw new Error('Invalid dictionary: it should be a Buffer instance'); + } + } + + this._handle = new binding.Zlib(mode); + + var self = this; + this._hadError = false; + this._handle.onerror = function (message, errno) { + // there is no way to cleanly recover. + // continuing only obscures problems. 
+ _close(self); + self._hadError = true; + + var error = new Error(message); + error.errno = errno; + error.code = exports.codes[errno]; + self.emit('error', error); + }; + + var level = exports.Z_DEFAULT_COMPRESSION; + if (typeof opts.level === 'number') level = opts.level; + + var strategy = exports.Z_DEFAULT_STRATEGY; + if (typeof opts.strategy === 'number') strategy = opts.strategy; + + this._handle.init(opts.windowBits || exports.Z_DEFAULT_WINDOWBITS, level, opts.memLevel || exports.Z_DEFAULT_MEMLEVEL, strategy, opts.dictionary); + + this._buffer = Buffer.allocUnsafe(this._chunkSize); + this._offset = 0; + this._level = level; + this._strategy = strategy; + + this.once('end', this.close); + + Object.defineProperty(this, '_closed', { + get: function () { + return !_this._handle; + }, + configurable: true, + enumerable: true + }); +} + +util.inherits(Zlib, Transform); + +Zlib.prototype.params = function (level, strategy, callback) { + if (level < exports.Z_MIN_LEVEL || level > exports.Z_MAX_LEVEL) { + throw new RangeError('Invalid compression level: ' + level); + } + if (strategy != exports.Z_FILTERED && strategy != exports.Z_HUFFMAN_ONLY && strategy != exports.Z_RLE && strategy != exports.Z_FIXED && strategy != exports.Z_DEFAULT_STRATEGY) { + throw new TypeError('Invalid strategy: ' + strategy); + } + + if (this._level !== level || this._strategy !== strategy) { + var self = this; + this.flush(binding.Z_SYNC_FLUSH, function () { + assert(self._handle, 'zlib binding closed'); + self._handle.params(level, strategy); + if (!self._hadError) { + self._level = level; + self._strategy = strategy; + if (callback) callback(); + } + }); + } else { + process.nextTick(callback); + } +}; + +Zlib.prototype.reset = function () { + assert(this._handle, 'zlib binding closed'); + return this._handle.reset(); +}; + +// This is the _flush function called by the transform class, +// internally, when the last chunk has been written. 
+Zlib.prototype._flush = function (callback) { + this._transform(Buffer.alloc(0), '', callback); +}; + +Zlib.prototype.flush = function (kind, callback) { + var _this2 = this; + + var ws = this._writableState; + + if (typeof kind === 'function' || kind === undefined && !callback) { + callback = kind; + kind = binding.Z_FULL_FLUSH; + } + + if (ws.ended) { + if (callback) process.nextTick(callback); + } else if (ws.ending) { + if (callback) this.once('end', callback); + } else if (ws.needDrain) { + if (callback) { + this.once('drain', function () { + return _this2.flush(kind, callback); + }); + } + } else { + this._flushFlag = kind; + this.write(Buffer.alloc(0), '', callback); + } +}; + +Zlib.prototype.close = function (callback) { + _close(this, callback); + process.nextTick(emitCloseNT, this); +}; + +function _close(engine, callback) { + if (callback) process.nextTick(callback); + + // Caller may invoke .close after a zlib error (which will null _handle). + if (!engine._handle) return; + + engine._handle.close(); + engine._handle = null; +} + +function emitCloseNT(self) { + self.emit('close'); +} + +Zlib.prototype._transform = function (chunk, encoding, cb) { + var flushFlag; + var ws = this._writableState; + var ending = ws.ending || ws.ended; + var last = ending && (!chunk || ws.length === chunk.length); + + if (chunk !== null && !Buffer.isBuffer(chunk)) return cb(new Error('invalid input')); + + if (!this._handle) return cb(new Error('zlib binding closed')); + + // If it's the last chunk, or a final flush, we use the Z_FINISH flush flag + // (or whatever flag was provided using opts.finishFlush). + // If it's explicitly flushing at some other time, then we use + // Z_FULL_FLUSH. Otherwise, use Z_NO_FLUSH for maximum compression + // goodness. + if (last) flushFlag = this._finishFlushFlag;else { + flushFlag = this._flushFlag; + // once we've flushed the last of the queue, stop flushing and + // go back to the normal behavior. 
+ if (chunk.length >= ws.length) { + this._flushFlag = this._opts.flush || binding.Z_NO_FLUSH; + } + } + + this._processChunk(chunk, flushFlag, cb); +}; + +Zlib.prototype._processChunk = function (chunk, flushFlag, cb) { + var availInBefore = chunk && chunk.length; + var availOutBefore = this._chunkSize - this._offset; + var inOff = 0; + + var self = this; + + var async = typeof cb === 'function'; + + if (!async) { + var buffers = []; + var nread = 0; + + var error; + this.on('error', function (er) { + error = er; + }); + + assert(this._handle, 'zlib binding closed'); + do { + var res = this._handle.writeSync(flushFlag, chunk, // in + inOff, // in_off + availInBefore, // in_len + this._buffer, // out + this._offset, //out_off + availOutBefore); // out_len + } while (!this._hadError && callback(res[0], res[1])); + + if (this._hadError) { + throw error; + } + + if (nread >= kMaxLength) { + _close(this); + throw new RangeError(kRangeErrorMessage); + } + + var buf = Buffer.concat(buffers, nread); + _close(this); + + return buf; + } + + assert(this._handle, 'zlib binding closed'); + var req = this._handle.write(flushFlag, chunk, // in + inOff, // in_off + availInBefore, // in_len + this._buffer, // out + this._offset, //out_off + availOutBefore); // out_len + + req.buffer = chunk; + req.callback = callback; + + function callback(availInAfter, availOutAfter) { + // When the callback is used in an async write, the callback's + // context is the `req` object that was created. The req object + // is === this._handle, and that's why it's important to null + // out the values after they are done being used. `this._handle` + // can stay in memory longer than the callback and buffer are needed. 
+ if (this) { + this.buffer = null; + this.callback = null; + } + + if (self._hadError) return; + + var have = availOutBefore - availOutAfter; + assert(have >= 0, 'have should not go down'); + + if (have > 0) { + var out = self._buffer.slice(self._offset, self._offset + have); + self._offset += have; + // serve some output to the consumer. + if (async) { + self.push(out); + } else { + buffers.push(out); + nread += out.length; + } + } + + // exhausted the output buffer, or used all the input create a new one. + if (availOutAfter === 0 || self._offset >= self._chunkSize) { + availOutBefore = self._chunkSize; + self._offset = 0; + self._buffer = Buffer.allocUnsafe(self._chunkSize); + } + + if (availOutAfter === 0) { + // Not actually done. Need to reprocess. + // Also, update the availInBefore to the availInAfter value, + // so that if we have to hit it a third (fourth, etc.) time, + // it'll have the correct byte counts. + inOff += availInBefore - availInAfter; + availInBefore = availInAfter; + + if (!async) return true; + + var newReq = self._handle.write(flushFlag, chunk, inOff, availInBefore, self._buffer, self._offset, self._chunkSize); + newReq.callback = callback; // this same function + newReq.buffer = chunk; + return; + } + + if (!async) return false; + + // finished with the chunk. 
+ cb(); + } +}; + +util.inherits(Deflate, Zlib); +util.inherits(Inflate, Zlib); +util.inherits(Gzip, Zlib); +util.inherits(Gunzip, Zlib); +util.inherits(DeflateRaw, Zlib); +util.inherits(InflateRaw, Zlib); +util.inherits(Unzip, Zlib); + /***/ }), /***/ 30295: @@ -30323,6 +42766,77 @@ function BufferBigIntNotDefined () { } +/***/ }), + +/***/ 86866: +/***/ ((module) => { + +module.exports = { + "100": "Continue", + "101": "Switching Protocols", + "102": "Processing", + "200": "OK", + "201": "Created", + "202": "Accepted", + "203": "Non-Authoritative Information", + "204": "No Content", + "205": "Reset Content", + "206": "Partial Content", + "207": "Multi-Status", + "208": "Already Reported", + "226": "IM Used", + "300": "Multiple Choices", + "301": "Moved Permanently", + "302": "Found", + "303": "See Other", + "304": "Not Modified", + "305": "Use Proxy", + "307": "Temporary Redirect", + "308": "Permanent Redirect", + "400": "Bad Request", + "401": "Unauthorized", + "402": "Payment Required", + "403": "Forbidden", + "404": "Not Found", + "405": "Method Not Allowed", + "406": "Not Acceptable", + "407": "Proxy Authentication Required", + "408": "Request Timeout", + "409": "Conflict", + "410": "Gone", + "411": "Length Required", + "412": "Precondition Failed", + "413": "Payload Too Large", + "414": "URI Too Long", + "415": "Unsupported Media Type", + "416": "Range Not Satisfiable", + "417": "Expectation Failed", + "418": "I'm a teapot", + "421": "Misdirected Request", + "422": "Unprocessable Entity", + "423": "Locked", + "424": "Failed Dependency", + "425": "Unordered Collection", + "426": "Upgrade Required", + "428": "Precondition Required", + "429": "Too Many Requests", + "431": "Request Header Fields Too Large", + "451": "Unavailable For Legal Reasons", + "500": "Internal Server Error", + "501": "Not Implemented", + "502": "Bad Gateway", + "503": "Service Unavailable", + "504": "Gateway Timeout", + "505": "HTTP Version Not Supported", + "506": "Variant Also 
Negotiates", + "507": "Insufficient Storage", + "508": "Loop Detected", + "509": "Bandwidth Limit Exceeded", + "510": "Not Extended", + "511": "Network Authentication Required" +} + + /***/ }), /***/ 38075: @@ -35222,6 +47736,566 @@ exports.constants = { } +/***/ }), + +/***/ 17833: +/***/ ((module, exports, __webpack_require__) => { + +/* provided dependency */ var console = __webpack_require__(96763); +/* provided dependency */ var process = __webpack_require__(65606); +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. 
+ */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? 
http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". 
+ * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. + * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = __webpack_require__(40736)(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. 
+ */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; + + +/***/ }), + +/***/ 40736: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +/* provided dependency */ var console = __webpack_require__(96763); + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = __webpack_require__(6585); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". + */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. 
+ * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
+ + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. 
+ * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; + + /***/ }), /***/ 30041: @@ -47410,6 +60484,112 @@ module.exports = URIError; +/***/ }), + +/***/ 32019: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.keccak512 = exports.keccak384 = exports.keccak256 = exports.keccak224 = void 0; +const sha3_1 = __webpack_require__(32955); +const utils_js_1 = __webpack_require__(82672); +exports.keccak224 = (0, utils_js_1.wrapHash)(sha3_1.keccak_224); +exports.keccak256 = (() => { + const k = (0, utils_js_1.wrapHash)(sha3_1.keccak_256); + k.create = sha3_1.keccak_256.create; + return k; +})(); +exports.keccak384 = (0, utils_js_1.wrapHash)(sha3_1.keccak_384); +exports.keccak512 = (0, utils_js_1.wrapHash)(sha3_1.keccak_512); + + +/***/ }), + +/***/ 26513: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.secp256k1 = void 0; +var secp256k1_1 = __webpack_require__(8510); +Object.defineProperty(exports, "secp256k1", ({ enumerable: true, get: function () { return secp256k1_1.secp256k1; } })); + + +/***/ }), + +/***/ 82672: +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; +/* module decorator */ module = __webpack_require__.nmd(module); + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.crypto = exports.wrapHash = exports.equalsBytes = exports.hexToBytes = exports.bytesToUtf8 = exports.utf8ToBytes = exports.createView = exports.concatBytes = exports.toHex = exports.bytesToHex = exports.assertBytes = exports.assertBool = void 0; +const _assert_1 = __importDefault(__webpack_require__(67557)); +const utils_1 = __webpack_require__(99175); +const assertBool = _assert_1.default.bool; +exports.assertBool = assertBool; +const assertBytes = _assert_1.default.bytes; +exports.assertBytes = assertBytes; +var utils_2 = __webpack_require__(99175); +Object.defineProperty(exports, "bytesToHex", ({ enumerable: true, get: function () { return utils_2.bytesToHex; } })); +Object.defineProperty(exports, "toHex", ({ enumerable: true, get: function () { return utils_2.bytesToHex; } })); +Object.defineProperty(exports, "concatBytes", ({ enumerable: true, get: function () { return utils_2.concatBytes; } })); +Object.defineProperty(exports, "createView", ({ enumerable: true, get: function () { return utils_2.createView; } })); +Object.defineProperty(exports, "utf8ToBytes", ({ enumerable: true, get: function () { return utils_2.utf8ToBytes; } })); +// buf.toString('utf8') -> bytesToUtf8(buf) +function bytesToUtf8(data) { + if (!(data instanceof Uint8Array)) { + throw new TypeError(`bytesToUtf8 expected Uint8Array, got ${typeof data}`); + } + return new TextDecoder().decode(data); +} +exports.bytesToUtf8 = bytesToUtf8; +function hexToBytes(data) { + const sliced = data.startsWith("0x") ? 
data.substring(2) : data; + return (0, utils_1.hexToBytes)(sliced); +} +exports.hexToBytes = hexToBytes; +// buf.equals(buf2) -> equalsBytes(buf, buf2) +function equalsBytes(a, b) { + if (a.length !== b.length) { + return false; + } + for (let i = 0; i < a.length; i++) { + if (a[i] !== b[i]) { + return false; + } + } + return true; +} +exports.equalsBytes = equalsBytes; +// Internal utils +function wrapHash(hash) { + return (msg) => { + _assert_1.default.bytes(msg); + return hash(msg); + }; +} +exports.wrapHash = wrapHash; +// TODO(v3): switch away from node crypto, remove this unnecessary variable. +exports.crypto = (() => { + const webCrypto = typeof globalThis === "object" && "crypto" in globalThis ? globalThis.crypto : undefined; + const nodeRequire = true && + typeof module.require === "function" && + module.require.bind(module); + return { + node: nodeRequire && !webCrypto ? nodeRequire("crypto") : undefined, + web: webCrypto + }; +})(); + + /***/ }), /***/ 37007: @@ -50168,6 +63348,44 @@ HmacDRBG.prototype.generate = function generate(len, enc, add, addEnc) { }; +/***/ }), + +/***/ 11083: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +var http = __webpack_require__(11568) +var url = __webpack_require__(23276) + +var https = module.exports + +for (var key in http) { + if (http.hasOwnProperty(key)) https[key] = http[key] +} + +https.request = function (params, cb) { + params = validateParams(params) + return http.request.call(this, params, cb) +} + +https.get = function (params, cb) { + params = validateParams(params) + return http.get.call(this, params, cb) +} + +function validateParams (params) { + if (typeof params === 'string') { + params = url.parse(params) + } + if (!params.protocol) { + params.protocol = 'https:' + } + if (params.protocol !== 'https:') { + throw new Error('Protocol "' + params.protocol + '" not supported. 
Expected "https:"') + } + return params +} + + /***/ }), /***/ 251: @@ -51357,6 +64575,348 @@ function escapeJsonPtr(str) { } +/***/ }), + +/***/ 69749: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + + +// A linked list to keep track of recently-used-ness +const Yallist = __webpack_require__(28799) + +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') + +const naiveLength = () => 1 + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. + const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? 
naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. 
+ set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? 
Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = 
self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. + const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache + + /***/ }), /***/ 88276: @@ -51614,6 +65174,233 @@ HashBase.prototype._digest = function () { module.exports = HashBase +/***/ }), + +/***/ 6215: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +/* provided dependency */ var process = __webpack_require__(65606); + +Object.defineProperty(exports, "__esModule", 
({ value: true })); +exports.InvalidStatusCodeError = exports.InvalidCertError = void 0; +const DEFAULT_OPT = Object.freeze({ + redirect: true, + expectStatusCode: 200, + headers: {}, + full: false, + keepAlive: true, + cors: false, + referrer: false, + sslAllowSelfSigned: false, + _redirectCount: 0, +}); +class InvalidCertError extends Error { + constructor(msg, fingerprint256) { + super(msg); + this.fingerprint256 = fingerprint256; + } +} +exports.InvalidCertError = InvalidCertError; +class InvalidStatusCodeError extends Error { + constructor(statusCode) { + super(`Request Failed. Status Code: ${statusCode}`); + this.statusCode = statusCode; + } +} +exports.InvalidStatusCodeError = InvalidStatusCodeError; +function detectType(b, type) { + if (!type || type === 'text' || type === 'json') { + try { + let text = new TextDecoder('utf8', { fatal: true }).decode(b); + if (type === 'text') + return text; + try { + return JSON.parse(text); + } + catch (err) { + if (type === 'json') + throw err; + return text; + } + } + catch (err) { + if (type === 'text' || type === 'json') + throw err; + } + } + return b; +} +let agents = {}; +function fetchNode(url, _options) { + let options = { ...DEFAULT_OPT, ..._options }; + const http = __webpack_require__(11568); + const https = __webpack_require__(11083); + const zlib = __webpack_require__(78559); + const { promisify } = __webpack_require__(40537); + const { resolve: urlResolve } = __webpack_require__(59676); + const isSecure = !!/^https/.test(url); + let opts = { + method: options.method || 'GET', + headers: { 'Accept-Encoding': 'gzip, deflate, br' }, + }; + const compactFP = (s) => s.replace(/:| /g, '').toLowerCase(); + if (options.keepAlive) { + const agentOpt = { + keepAlive: true, + keepAliveMsecs: 30 * 1000, + maxFreeSockets: 1024, + maxCachedSessions: 1024, + }; + const agentKey = [ + isSecure, + isSecure && options.sslPinnedCertificates?.map((i) => compactFP(i)).sort(), + ].join(); + opts.agent = + agents[agentKey] || 
(agents[agentKey] = new (isSecure ? https : http).Agent(agentOpt)); + } + if (options.type === 'json') + opts.headers['Content-Type'] = 'application/json'; + if (options.data) { + if (!options.method) + opts.method = 'POST'; + opts.body = options.type === 'json' ? JSON.stringify(options.data) : options.data; + } + opts.headers = { ...opts.headers, ...options.headers }; + if (options.sslAllowSelfSigned) + opts.rejectUnauthorized = false; + const handleRes = async (res) => { + const status = res.statusCode; + if (options.redirect && 300 <= status && status < 400 && res.headers['location']) { + if (options._redirectCount == 10) + throw new Error('Request failed. Too much redirects.'); + options._redirectCount += 1; + return await fetchNode(urlResolve(url, res.headers['location']), options); + } + if (options.expectStatusCode && status !== options.expectStatusCode) { + res.resume(); + throw new InvalidStatusCodeError(status); + } + let buf = []; + for await (const chunk of res) + buf.push(chunk); + let bytes = Buffer.concat(buf); + const encoding = res.headers['content-encoding']; + if (encoding === 'br') + bytes = await promisify(zlib.brotliDecompress)(bytes); + if (encoding === 'gzip' || encoding === 'deflate') + bytes = await promisify(zlib.unzip)(bytes); + const body = detectType(bytes, options.type); + if (options.full) + return { headers: res.headers, status, body }; + return body; + }; + return new Promise((resolve, reject) => { + const handleError = async (err) => { + if (err && err.code === 'DEPTH_ZERO_SELF_SIGNED_CERT') { + try { + await fetchNode(url, { ...options, sslAllowSelfSigned: true, sslPinnedCertificates: [] }); + } + catch (e) { + if (e && e.fingerprint256) { + err = new InvalidCertError(`Self-signed SSL certificate: ${e.fingerprint256}`, e.fingerprint256); + } + } + } + reject(err); + }; + const req = (isSecure ? 
https : http).request(url, opts, (res) => { + res.on('error', handleError); + (async () => { + try { + resolve(await handleRes(res)); + } + catch (error) { + reject(error); + } + })(); + }); + req.on('error', handleError); + const pinned = options.sslPinnedCertificates?.map((i) => compactFP(i)); + const mfetchSecureConnect = (socket) => { + const fp256 = compactFP(socket.getPeerCertificate()?.fingerprint256 || ''); + if (!fp256 && socket.isSessionReused()) + return; + if (pinned.includes(fp256)) + return; + req.emit('error', new InvalidCertError(`Invalid SSL certificate: ${fp256} Expected: ${pinned}`, fp256)); + return req.abort(); + }; + if (options.sslPinnedCertificates) { + req.on('socket', (socket) => { + const hasListeners = socket + .listeners('secureConnect') + .map((i) => (i.name || '').replace('bound ', '')) + .includes('mfetchSecureConnect'); + if (hasListeners) + return; + socket.on('secureConnect', mfetchSecureConnect.bind(null, socket)); + }); + } + if (options.keepAlive) + req.setNoDelay(true); + if (opts.body) + req.write(opts.body); + req.end(); + }); +} +const SAFE_HEADERS = new Set(['Accept', 'Accept-Language', 'Content-Language', 'Content-Type'].map((i) => i.toLowerCase())); +const FORBIDDEN_HEADERS = new Set(['Accept-Charset', 'Accept-Encoding', 'Access-Control-Request-Headers', 'Access-Control-Request-Method', + 'Connection', 'Content-Length', 'Cookie', 'Cookie2', 'Date', 'DNT', 'Expect', 'Host', 'Keep-Alive', 'Origin', 'Referer', 'TE', 'Trailer', + 'Transfer-Encoding', 'Upgrade', 'Via'].map((i) => i.toLowerCase())); +async function fetchBrowser(url, _options) { + let options = { ...DEFAULT_OPT, ..._options }; + const headers = new Headers(); + if (options.type === 'json') + headers.set('Content-Type', 'application/json'); + let parsed = new URL(url); + if (parsed.username) { + const auth = btoa(`${parsed.username}:${parsed.password}`); + headers.set('Authorization', `Basic ${auth}`); + parsed.username = ''; + parsed.password = ''; + } + url = 
'' + parsed; + for (let k in options.headers) { + const name = k.toLowerCase(); + if (SAFE_HEADERS.has(name) || (options.cors && !FORBIDDEN_HEADERS.has(name))) + headers.set(k, options.headers[k]); + } + let opts = { headers, redirect: options.redirect ? 'follow' : 'manual' }; + if (!options.referrer) + opts.referrerPolicy = 'no-referrer'; + if (options.cors) + opts.mode = 'cors'; + if (options.data) { + if (!options.method) + opts.method = 'POST'; + opts.body = options.type === 'json' ? JSON.stringify(options.data) : options.data; + } + const res = await fetch(url, opts); + if (options.expectStatusCode && res.status !== options.expectStatusCode) + throw new InvalidStatusCodeError(res.status); + const body = detectType(new Uint8Array(await res.arrayBuffer()), options.type); + if (options.full) + return { headers: Object.fromEntries(res.headers.entries()), status: res.status, body }; + return body; +} +const IS_NODE = !!(typeof process == 'object' && + process.versions && + process.versions.node && + process.versions.v8); +function fetchUrl(url, options) { + const fn = IS_NODE ? fetchNode : fetchBrowser; + return fn(url, options); +} +exports["default"] = fetchUrl; + + /***/ }), /***/ 52244: @@ -76730,6 +90517,175 @@ webpackContext.id = 35358; }))); +/***/ }), + +/***/ 6585: +/***/ ((module) => { + +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? 
fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. + * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. 
+ * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} + + /***/ }), /***/ 86889: @@ -77213,6 +91169,5796 @@ exports.homedir = function () { }; +/***/ }), + +/***/ 9805: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + + +var TYPED_OK = (typeof Uint8Array !== 'undefined') && + (typeof Uint16Array !== 'undefined') && + (typeof Int32Array !== 'undefined'); + +function _has(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); +} + +exports.assign = function (obj /*from1, from2, from3, ...*/) { + var sources = Array.prototype.slice.call(arguments, 1); + while (sources.length) { + var source = sources.shift(); + if (!source) { continue; } + + if (typeof source !== 'object') { + throw new TypeError(source + 'must be non-object'); + } + + for (var p in source) { + if (_has(source, p)) { + obj[p] = source[p]; + } + } + } + + return obj; +}; + + +// reduce buffer size, avoiding mem copy +exports.shrinkBuf = function (buf, size) { + if (buf.length === size) { return buf; } + if (buf.subarray) { return buf.subarray(0, size); } + buf.length = size; + return buf; +}; + + +var fnTyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + if (src.subarray && dest.subarray) { + dest.set(src.subarray(src_offs, src_offs + len), dest_offs); + return; + } + // Fallback to ordinary array + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. 
+ flattenChunks: function (chunks) { + var i, l, len, pos, chunk, result; + + // calculate data length + len = 0; + for (i = 0, l = chunks.length; i < l; i++) { + len += chunks[i].length; + } + + // join chunks + result = new Uint8Array(len); + pos = 0; + for (i = 0, l = chunks.length; i < l; i++) { + chunk = chunks[i]; + result.set(chunk, pos); + pos += chunk.length; + } + + return result; + } +}; + +var fnUntyped = { + arraySet: function (dest, src, src_offs, len, dest_offs) { + for (var i = 0; i < len; i++) { + dest[dest_offs + i] = src[src_offs + i]; + } + }, + // Join array of chunks to single array. + flattenChunks: function (chunks) { + return [].concat.apply([], chunks); + } +}; + + +// Enable/Disable typed arrays use, for testing +// +exports.setTyped = function (on) { + if (on) { + exports.Buf8 = Uint8Array; + exports.Buf16 = Uint16Array; + exports.Buf32 = Int32Array; + exports.assign(exports, fnTyped); + } else { + exports.Buf8 = Array; + exports.Buf16 = Array; + exports.Buf32 = Array; + exports.assign(exports, fnUntyped); + } +}; + +exports.setTyped(TYPED_OK); + + +/***/ }), + +/***/ 53269: +/***/ ((module) => { + +"use strict"; + + +// Note: adler32 takes 12% for level 0 and 2% for level 6. +// It isn't worth it to make additional optimizations as in original. +// Small size is preferable. + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. 
If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +function adler32(adler, buf, len, pos) { + var s1 = (adler & 0xffff) |0, + s2 = ((adler >>> 16) & 0xffff) |0, + n = 0; + + while (len !== 0) { + // Set limit ~ twice less than 5552, to keep + // s2 in 31-bits, because we force signed ints. + // in other case %= will fail. + n = len > 2000 ? 2000 : len; + len -= n; + + do { + s1 = (s1 + buf[pos++]) |0; + s2 = (s2 + s1) |0; + } while (--n); + + s1 %= 65521; + s2 %= 65521; + } + + return (s1 | (s2 << 16)) |0; +} + + +module.exports = adler32; + + +/***/ }), + +/***/ 19681: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. 
+ +module.exports = { + + /* Allowed flush values; see deflate() and inflate() below for details */ + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_TREES: 6, + + /* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + //Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + //Z_VERSION_ERROR: -6, + + /* compression levels */ + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + + + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + + /* Possible values of the data_type field (though see inflate()) */ + Z_BINARY: 0, + Z_TEXT: 1, + //Z_ASCII: 1, // = Z_TEXT (deprecated) + Z_UNKNOWN: 2, + + /* The deflate compression method */ + Z_DEFLATED: 8 + //Z_NULL: null // Use -1 or null inline, depending on var type +}; + + +/***/ }), + +/***/ 14823: +/***/ ((module) => { + +"use strict"; + + +// Note: we can't get significant speed boost here. +// So write code to minimize size - no pregenerated tables +// and array tools dependencies. + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. 
If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +// Use ordinary array, since untyped makes no boost here +function makeTable() { + var c, table = []; + + for (var n = 0; n < 256; n++) { + c = n; + for (var k = 0; k < 8; k++) { + c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1)); + } + table[n] = c; + } + + return table; +} + +// Create table on load. Just 255 signed longs. Not a problem. +var crcTable = makeTable(); + + +function crc32(crc, buf, len, pos) { + var t = crcTable, + end = pos + len; + + crc ^= -1; + + for (var i = pos; i < end; i++) { + crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF]; + } + + return (crc ^ (-1)); // >>> 0; +} + + +module.exports = crc32; + + +/***/ }), + +/***/ 58411: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. 
This notice may not be removed or altered from any source distribution. + +var utils = __webpack_require__(9805); +var trees = __webpack_require__(23665); +var adler32 = __webpack_require__(53269); +var crc32 = __webpack_require__(14823); +var msg = __webpack_require__(54674); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +var Z_NO_FLUSH = 0; +var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +//var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. + */ +var Z_OK = 0; +var Z_STREAM_END = 1; +//var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +//var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + + +/* compression levels */ +//var Z_NO_COMPRESSION = 0; +//var Z_BEST_SPEED = 1; +//var Z_BEST_COMPRESSION = 9; +var Z_DEFAULT_COMPRESSION = -1; + + +var Z_FILTERED = 1; +var Z_HUFFMAN_ONLY = 2; +var Z_RLE = 3; +var Z_FIXED = 4; +var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +//var Z_BINARY = 0; +//var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + + +/* The deflate compression method */ +var Z_DEFLATED = 8; + +/*============================================================================*/ + + +var MAX_MEM_LEVEL = 9; +/* Maximum value for memLevel in deflateInit2 */ +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_MEM_LEVEL = 8; + + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ +var LITERALS = 256; +/* number of literal bytes 0..255 */ +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length 
codes, including the END_BLOCK code */ +var D_CODES = 30; +/* number of distance codes */ +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +var MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1); + +var PRESET_DICT = 0x20; + +var INIT_STATE = 42; +var EXTRA_STATE = 69; +var NAME_STATE = 73; +var COMMENT_STATE = 91; +var HCRC_STATE = 103; +var BUSY_STATE = 113; +var FINISH_STATE = 666; + +var BS_NEED_MORE = 1; /* block not completed, need more input or more output */ +var BS_BLOCK_DONE = 2; /* block flush performed */ +var BS_FINISH_STARTED = 3; /* finish started, need only more output at next deflate */ +var BS_FINISH_DONE = 4; /* finish done, accept no more input or output */ + +var OS_CODE = 0x03; // Unix :) . Don't detect, use this default. + +function err(strm, errorCode) { + strm.msg = msg[errorCode]; + return errorCode; +} + +function rank(f) { + return ((f) << 1) - ((f) > 4 ? 9 : 0); +} + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + + +/* ========================================================================= + * Flush as much pending output as possible. All deflate() output goes + * through this function so some applications may wish to modify it + * to avoid allocating a large strm->output buffer and copying into it. + * (See also read_buf()). 
+ */ +function flush_pending(strm) { + var s = strm.state; + + //_tr_flush_bits(s); + var len = s.pending; + if (len > strm.avail_out) { + len = strm.avail_out; + } + if (len === 0) { return; } + + utils.arraySet(strm.output, s.pending_buf, s.pending_out, len, strm.next_out); + strm.next_out += len; + s.pending_out += len; + strm.total_out += len; + strm.avail_out -= len; + s.pending -= len; + if (s.pending === 0) { + s.pending_out = 0; + } +} + + +function flush_block_only(s, last) { + trees._tr_flush_block(s, (s.block_start >= 0 ? s.block_start : -1), s.strstart - s.block_start, last); + s.block_start = s.strstart; + flush_pending(s.strm); +} + + +function put_byte(s, b) { + s.pending_buf[s.pending++] = b; +} + + +/* ========================================================================= + * Put a short in the pending buffer. The 16-bit value is put in MSB order. + * IN assertion: the stream state is correct and there is enough room in + * pending_buf. + */ +function putShortMSB(s, b) { +// put_byte(s, (Byte)(b >> 8)); +// put_byte(s, (Byte)(b & 0xff)); + s.pending_buf[s.pending++] = (b >>> 8) & 0xff; + s.pending_buf[s.pending++] = b & 0xff; +} + + +/* =========================================================================== + * Read a new buffer from the current input stream, update the adler32 + * and total number of bytes read. All deflate() input goes through + * this function so some applications may wish to modify it to avoid + * allocating a large strm->input buffer and copying from it. + * (See also flush_pending()). 
+ */ +function read_buf(strm, buf, start, size) { + var len = strm.avail_in; + + if (len > size) { len = size; } + if (len === 0) { return 0; } + + strm.avail_in -= len; + + // zmemcpy(buf, strm->next_in, len); + utils.arraySet(buf, strm.input, strm.next_in, len, start); + if (strm.state.wrap === 1) { + strm.adler = adler32(strm.adler, buf, len, start); + } + + else if (strm.state.wrap === 2) { + strm.adler = crc32(strm.adler, buf, len, start); + } + + strm.next_in += len; + strm.total_in += len; + + return len; +} + + +/* =========================================================================== + * Set match_start to the longest match starting at the given string and + * return its length. Matches shorter or equal to prev_length are discarded, + * in which case the result is equal to prev_length and match_start is + * garbage. + * IN assertions: cur_match is the head of the hash chain for the current + * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 + * OUT assertion: the match length is not greater than s->lookahead. + */ +function longest_match(s, cur_match) { + var chain_length = s.max_chain_length; /* max hash chain length */ + var scan = s.strstart; /* current string */ + var match; /* matched string */ + var len; /* length of current match */ + var best_len = s.prev_length; /* best match length so far */ + var nice_match = s.nice_match; /* stop if match long enough */ + var limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD)) ? + s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0/*NIL*/; + + var _win = s.window; // shortcut + + var wmask = s.w_mask; + var prev = s.prev; + + /* Stop when cur_match becomes <= limit. To simplify the code, + * we prevent matches with the string of window index 0. + */ + + var strend = s.strstart + MAX_MATCH; + var scan_end1 = _win[scan + best_len - 1]; + var scan_end = _win[scan + best_len]; + + /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16. 
+ * It is easy to get rid of this optimization if necessary. + */ + // Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); + + /* Do not waste too much time if we already have a good match: */ + if (s.prev_length >= s.good_match) { + chain_length >>= 2; + } + /* Do not look for matches beyond the end of the input. This is necessary + * to make deflate deterministic. + */ + if (nice_match > s.lookahead) { nice_match = s.lookahead; } + + // Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); + + do { + // Assert(cur_match < s->strstart, "no future"); + match = cur_match; + + /* Skip to next match if the match length cannot increase + * or if the match length is less than 2. Note that the checks below + * for insufficient lookahead only occur occasionally for performance + * reasons. Therefore uninitialized memory will be accessed, and + * conditional jumps will be made that depend on those values. + * However the length of the match is limited to the lookahead, so + * the output of deflate is not affected by the uninitialized values. + */ + + if (_win[match + best_len] !== scan_end || + _win[match + best_len - 1] !== scan_end1 || + _win[match] !== _win[scan] || + _win[++match] !== _win[scan + 1]) { + continue; + } + + /* The check at best_len-1 can be removed because it will be made + * again later. (This heuristic is not always a win.) + * It is not necessary to compare scan[2] and match[2] since they + * are always equal when the other bytes match, given that + * the hash keys are equal and that HASH_BITS >= 8. + */ + scan += 2; + match++; + // Assert(*scan == *match, "match[2]?"); + + /* We check for insufficient lookahead only every 8th comparison; + * the 256th check will be made at strstart+258. 
+ */ + do { + /*jshint noempty:false*/ + } while (_win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + _win[++scan] === _win[++match] && _win[++scan] === _win[++match] && + scan < strend); + + // Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); + + len = MAX_MATCH - (strend - scan); + scan = strend - MAX_MATCH; + + if (len > best_len) { + s.match_start = cur_match; + best_len = len; + if (len >= nice_match) { + break; + } + scan_end1 = _win[scan + best_len - 1]; + scan_end = _win[scan + best_len]; + } + } while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0); + + if (best_len <= s.lookahead) { + return best_len; + } + return s.lookahead; +} + + +/* =========================================================================== + * Fill the window when the lookahead becomes insufficient. + * Updates strstart and lookahead. + * + * IN assertion: lookahead < MIN_LOOKAHEAD + * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD + * At least one byte has been read, or avail_in == 0; reads are + * performed for at least two bytes (required for the zip translate_eol + * option -- not supported here). 
+ */ +function fill_window(s) { + var _w_size = s.w_size; + var p, n, m, more, str; + + //Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead"); + + do { + more = s.window_size - s.lookahead - s.strstart; + + // JS ints have 32 bit, block below not needed + /* Deal with !@#$% 64K limit: */ + //if (sizeof(int) <= 2) { + // if (more == 0 && s->strstart == 0 && s->lookahead == 0) { + // more = wsize; + // + // } else if (more == (unsigned)(-1)) { + // /* Very unlikely, but possible on 16 bit machine if + // * strstart == 0 && lookahead == 1 (input done a byte at time) + // */ + // more--; + // } + //} + + + /* If the window is almost full and there is insufficient lookahead, + * move the upper half to the lower one to make room in the upper half. + */ + if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) { + + utils.arraySet(s.window, s.window, _w_size, _w_size, 0); + s.match_start -= _w_size; + s.strstart -= _w_size; + /* we now have strstart >= MAX_DIST */ + s.block_start -= _w_size; + + /* Slide the hash table (could be avoided with 32 bit values + at the expense of memory usage). We slide even when level == 0 + to keep the hash table consistent if we switch back to level > 0 + later. (Using level 0 permanently is not an optimal usage of + zlib, so we don't care about this pathological case.) + */ + + n = s.hash_size; + p = n; + do { + m = s.head[--p]; + s.head[p] = (m >= _w_size ? m - _w_size : 0); + } while (--n); + + n = _w_size; + p = n; + do { + m = s.prev[--p]; + s.prev[p] = (m >= _w_size ? m - _w_size : 0); + /* If n is not on any hash chain, prev[n] is garbage but + * its value will never be used. 
+ */ + } while (--n); + + more += _w_size; + } + if (s.strm.avail_in === 0) { + break; + } + + /* If there was no sliding: + * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && + * more == window_size - lookahead - strstart + * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) + * => more >= window_size - 2*WSIZE + 2 + * In the BIG_MEM or MMAP case (not yet supported), + * window_size == input_size + MIN_LOOKAHEAD && + * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. + * Otherwise, window_size == 2*WSIZE so more >= 2. + * If there was sliding, more >= WSIZE. So in all cases, more >= 2. + */ + //Assert(more >= 2, "more < 2"); + n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more); + s.lookahead += n; + + /* Initialize the hash value now that we have some input: */ + if (s.lookahead + s.insert >= MIN_MATCH) { + str = s.strstart - s.insert; + s.ins_h = s.window[str]; + + /* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + 1]) & s.hash_mask; +//#if MIN_MATCH != 3 +// Call update_hash() MIN_MATCH-3 more times +//#endif + while (s.insert) { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = str; + str++; + s.insert--; + if (s.lookahead + s.insert < MIN_MATCH) { + break; + } + } + } + /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, + * but this is not important since only literal bytes will be emitted. + */ + + } while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0); + + /* If the WIN_INIT bytes after the end of the current data have never been + * written, then zero those bytes in order to avoid memory check reports of + * the use of uninitialized (or uninitialised as Julian writes) bytes by + * the longest match routines. 
Update the high water mark for the next + * time through here. WIN_INIT is set to MAX_MATCH since the longest match + * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead. + */ +// if (s.high_water < s.window_size) { +// var curr = s.strstart + s.lookahead; +// var init = 0; +// +// if (s.high_water < curr) { +// /* Previous high water mark below current data -- zero WIN_INIT +// * bytes or up to end of window, whichever is less. +// */ +// init = s.window_size - curr; +// if (init > WIN_INIT) +// init = WIN_INIT; +// zmemzero(s->window + curr, (unsigned)init); +// s->high_water = curr + init; +// } +// else if (s->high_water < (ulg)curr + WIN_INIT) { +// /* High water mark at or above current data, but below current data +// * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up +// * to end of window, whichever is less. +// */ +// init = (ulg)curr + WIN_INIT - s->high_water; +// if (init > s->window_size - s->high_water) +// init = s->window_size - s->high_water; +// zmemzero(s->window + s->high_water, (unsigned)init); +// s->high_water += init; +// } +// } +// +// Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD, +// "not enough room for search"); +} + +/* =========================================================================== + * Copy without compression as much as possible from the input stream, return + * the current block state. + * This function does not insert new strings in the dictionary since + * uncompressible data is probably not useful. This function is used + * only for the level=0 compression option. + * NOTE: this function should be optimized to avoid extra copying from + * window to pending_buf. 
+ */ +function deflate_stored(s, flush) { + /* Stored blocks are limited to 0xffff bytes, pending_buf is limited + * to pending_buf_size, and each stored block has a 5 byte header: + */ + var max_block_size = 0xffff; + + if (max_block_size > s.pending_buf_size - 5) { + max_block_size = s.pending_buf_size - 5; + } + + /* Copy as much as possible from input to output: */ + for (;;) { + /* Fill the window as much as possible: */ + if (s.lookahead <= 1) { + + //Assert(s->strstart < s->w_size+MAX_DIST(s) || + // s->block_start >= (long)s->w_size, "slide too late"); +// if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) || +// s.block_start >= s.w_size)) { +// throw new Error("slide too late"); +// } + + fill_window(s); + if (s.lookahead === 0 && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + + if (s.lookahead === 0) { + break; + } + /* flush the current block */ + } + //Assert(s->block_start >= 0L, "block gone"); +// if (s.block_start < 0) throw new Error("block gone"); + + s.strstart += s.lookahead; + s.lookahead = 0; + + /* Emit a stored block if pending_buf will be full: */ + var max_start = s.block_start + max_block_size; + + if (s.strstart === 0 || s.strstart >= max_start) { + /* strstart == 0 is possible when wraparound on 16-bit machine */ + s.lookahead = s.strstart - max_start; + s.strstart = max_start; + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + + + } + /* Flush if we may have to slide, otherwise block_start may become + * negative and the data will be gone: + */ + if (s.strstart - s.block_start >= (s.w_size - MIN_LOOKAHEAD)) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + + s.insert = 0; + + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + 
} + + if (s.strstart > s.block_start) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_NEED_MORE; +} + +/* =========================================================================== + * Compress as much as possible from the input stream, return the current + * block state. + * This function does not perform lazy evaluation of matches and inserts + * new strings in the dictionary only for unmatched strings or for short + * matches. It is used only for the fast compression options. + */ +function deflate_fast(s, flush) { + var hash_head; /* head of the hash chain */ + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { + break; /* flush the current block */ + } + } + + /* Insert the string window[strstart .. strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. 
+ * At this point we have always match_length < MIN_MATCH + */ + if (hash_head !== 0/*NIL*/ && ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + } + if (s.match_length >= MIN_MATCH) { + // check_match(s, s.strstart, s.match_start, s.match_length); // for debug only + + /*** _tr_tally_dist(s, s.strstart - s.match_start, + s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, s.strstart - s.match_start, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + + /* Insert new strings in the hash table only if the match length + * is not too large. This saves time but degrades compression. + */ + if (s.match_length <= s.max_lazy_match/*max_insert_length*/ && s.lookahead >= MIN_MATCH) { + s.match_length--; /* string at strstart already in table */ + do { + s.strstart++; + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + /* strstart never exceeds WSIZE-MAX_MATCH, so there are + * always MIN_MATCH bytes ahead. + */ + } while (--s.match_length !== 0); + s.strstart++; + } else + { + s.strstart += s.match_length; + s.match_length = 0; + s.ins_h = s.window[s.strstart]; + /* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + 1]) & s.hash_mask; + +//#if MIN_MATCH != 3 +// Call UPDATE_HASH() MIN_MATCH-3 more times +//#endif + /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not + * matter since it will be recomputed at next deflate call. 
+ */ + } + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s.window[s.strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = ((s.strstart < (MIN_MATCH - 1)) ? s.strstart : MIN_MATCH - 1); + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * Same as above, but achieves better compression. We use a lazy + * evaluation for matches: a match is finally adopted only if there is + * no better match at the next window position. + */ +function deflate_slow(s, flush) { + var hash_head; /* head of hash chain */ + var bflush; /* set if current block must be flushed */ + + var max_insert; + + /* Process the input block. */ + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the next match, plus MIN_MATCH bytes to insert the + * string following the next match. + */ + if (s.lookahead < MIN_LOOKAHEAD) { + fill_window(s); + if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* Insert the string window[strstart .. 
strstart+2] in the + * dictionary, and set hash_head to the head of the hash chain: + */ + hash_head = 0/*NIL*/; + if (s.lookahead >= MIN_MATCH) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + + /* Find the longest match, discarding those <= prev_length. + */ + s.prev_length = s.match_length; + s.prev_match = s.match_start; + s.match_length = MIN_MATCH - 1; + + if (hash_head !== 0/*NIL*/ && s.prev_length < s.max_lazy_match && + s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD)/*MAX_DIST(s)*/) { + /* To simplify the code, we prevent matches with the string + * of window index 0 (in particular we have to avoid a match + * of the string with itself at the start of the input file). + */ + s.match_length = longest_match(s, hash_head); + /* longest_match() sets match_start */ + + if (s.match_length <= 5 && + (s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH && s.strstart - s.match_start > 4096/*TOO_FAR*/))) { + + /* If prev_match is also MIN_MATCH, match_start is garbage + * but we will ignore the current match anyway. + */ + s.match_length = MIN_MATCH - 1; + } + } + /* If there was a match at the previous step and the current + * match is not better, output the previous match: + */ + if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) { + max_insert = s.strstart + s.lookahead - MIN_MATCH; + /* Do not insert strings in hash table beyond this. */ + + //check_match(s, s.strstart-1, s.prev_match, s.prev_length); + + /***_tr_tally_dist(s, s.strstart - 1 - s.prev_match, + s.prev_length - MIN_MATCH, bflush);***/ + bflush = trees._tr_tally(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH); + /* Insert in hash table all strings up to the end of the match. + * strstart-1 and strstart are already inserted. 
If there is not + * enough lookahead, the last two strings are not inserted in + * the hash table. + */ + s.lookahead -= s.prev_length - 1; + s.prev_length -= 2; + do { + if (++s.strstart <= max_insert) { + /*** INSERT_STRING(s, s.strstart, hash_head); ***/ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask; + hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h]; + s.head[s.ins_h] = s.strstart; + /***/ + } + } while (--s.prev_length !== 0); + s.match_available = 0; + s.match_length = MIN_MATCH - 1; + s.strstart++; + + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + } else if (s.match_available) { + /* If there was no match at the previous position, output a + * single literal. If there was a match but the current match + * is longer, truncate the previous match to a single literal. + */ + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + if (bflush) { + /*** FLUSH_BLOCK_ONLY(s, 0) ***/ + flush_block_only(s, false); + /***/ + } + s.strstart++; + s.lookahead--; + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + } else { + /* There is no previous match to compare with, wait for + * the next step to decide. + */ + s.match_available = 1; + s.strstart++; + s.lookahead--; + } + } + //Assert (flush != Z_NO_FLUSH, "no flush?"); + if (s.match_available) { + //Tracevv((stderr,"%c", s->window[s->strstart-1])); + /*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]); + + s.match_available = 0; + } + s.insert = s.strstart < MIN_MATCH - 1 ? 
s.strstart : MIN_MATCH - 1; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + + return BS_BLOCK_DONE; +} + + +/* =========================================================================== + * For Z_RLE, simply look for runs of bytes, generate matches only of distance + * one. Do not maintain a hash table. (It will be regenerated if this run of + * deflate switches away from Z_RLE.) + */ +function deflate_rle(s, flush) { + var bflush; /* set if current block must be flushed */ + var prev; /* byte at distance one to match */ + var scan, strend; /* scan goes up to strend for length of run */ + + var _win = s.window; + + for (;;) { + /* Make sure that we always have enough lookahead, except + * at the end of the input file. We need MAX_MATCH bytes + * for the longest run, plus one for the unrolled loop. 
+ */ + if (s.lookahead <= MAX_MATCH) { + fill_window(s); + if (s.lookahead <= MAX_MATCH && flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + if (s.lookahead === 0) { break; } /* flush the current block */ + } + + /* See how many times the previous byte repeats */ + s.match_length = 0; + if (s.lookahead >= MIN_MATCH && s.strstart > 0) { + scan = s.strstart - 1; + prev = _win[scan]; + if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) { + strend = s.strstart + MAX_MATCH; + do { + /*jshint noempty:false*/ + } while (prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + prev === _win[++scan] && prev === _win[++scan] && + scan < strend); + s.match_length = MAX_MATCH - (strend - scan); + if (s.match_length > s.lookahead) { + s.match_length = s.lookahead; + } + } + //Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan"); + } + + /* Emit match if have run of MIN_MATCH or longer, else emit literal */ + if (s.match_length >= MIN_MATCH) { + //check_match(s, s.strstart, s.strstart - 1, s.match_length); + + /*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/ + bflush = trees._tr_tally(s, 1, s.match_length - MIN_MATCH); + + s.lookahead -= s.match_length; + s.strstart += s.match_length; + s.match_length = 0; + } else { + /* No match, output a literal byte */ + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + + s.lookahead--; + s.strstart++; + } + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if 
(s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* =========================================================================== + * For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table. + * (It will be regenerated if this run of deflate switches away from Huffman.) + */ +function deflate_huff(s, flush) { + var bflush; /* set if current block must be flushed */ + + for (;;) { + /* Make sure that we have a literal to write. */ + if (s.lookahead === 0) { + fill_window(s); + if (s.lookahead === 0) { + if (flush === Z_NO_FLUSH) { + return BS_NEED_MORE; + } + break; /* flush the current block */ + } + } + + /* Output a literal byte */ + s.match_length = 0; + //Tracevv((stderr,"%c", s->window[s->strstart])); + /*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/ + bflush = trees._tr_tally(s, 0, s.window[s.strstart]); + s.lookahead--; + s.strstart++; + if (bflush) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + } + s.insert = 0; + if (flush === Z_FINISH) { + /*** FLUSH_BLOCK(s, 1); ***/ + flush_block_only(s, true); + if (s.strm.avail_out === 0) { + return BS_FINISH_STARTED; + } + /***/ + return BS_FINISH_DONE; + } + if (s.last_lit) { + /*** FLUSH_BLOCK(s, 0); ***/ + flush_block_only(s, false); + if (s.strm.avail_out === 0) { + return BS_NEED_MORE; + } + /***/ + } + return BS_BLOCK_DONE; +} + +/* Values for max_lazy_match, good_match and max_chain_length, depending on + * the desired pack level (0..9). The values given below have been tuned to + * exclude worst case performance for pathological files. Better values may be + * found for specific files. 
+ */ +function Config(good_length, max_lazy, nice_length, max_chain, func) { + this.good_length = good_length; + this.max_lazy = max_lazy; + this.nice_length = nice_length; + this.max_chain = max_chain; + this.func = func; +} + +var configuration_table; + +configuration_table = [ + /* good lazy nice chain */ + new Config(0, 0, 0, 0, deflate_stored), /* 0 store only */ + new Config(4, 4, 8, 4, deflate_fast), /* 1 max speed, no lazy matches */ + new Config(4, 5, 16, 8, deflate_fast), /* 2 */ + new Config(4, 6, 32, 32, deflate_fast), /* 3 */ + + new Config(4, 4, 16, 16, deflate_slow), /* 4 lazy matches */ + new Config(8, 16, 32, 32, deflate_slow), /* 5 */ + new Config(8, 16, 128, 128, deflate_slow), /* 6 */ + new Config(8, 32, 128, 256, deflate_slow), /* 7 */ + new Config(32, 128, 258, 1024, deflate_slow), /* 8 */ + new Config(32, 258, 258, 4096, deflate_slow) /* 9 max compression */ +]; + + +/* =========================================================================== + * Initialize the "longest match" routines for a new zlib stream + */ +function lm_init(s) { + s.window_size = 2 * s.w_size; + + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + + /* Set the default configuration parameters: + */ + s.max_lazy_match = configuration_table[s.level].max_lazy; + s.good_match = configuration_table[s.level].good_length; + s.nice_match = configuration_table[s.level].nice_length; + s.max_chain_length = configuration_table[s.level].max_chain; + + s.strstart = 0; + s.block_start = 0; + s.lookahead = 0; + s.insert = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + s.ins_h = 0; +} + + +function DeflateState() { + this.strm = null; /* pointer back to this zlib stream */ + this.status = 0; /* as the name implies */ + this.pending_buf = null; /* output still pending */ + this.pending_buf_size = 0; /* size of pending_buf */ + this.pending_out = 0; /* next pending byte to output to the stream */ + this.pending = 0; /* nb of bytes in the 
pending buffer */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.gzhead = null; /* gzip header information to write */ + this.gzindex = 0; /* where in extra, name, or comment */ + this.method = Z_DEFLATED; /* can only be DEFLATED */ + this.last_flush = -1; /* value of flush param for previous deflate call */ + + this.w_size = 0; /* LZ77 window size (32K by default) */ + this.w_bits = 0; /* log2(w_size) (8..16) */ + this.w_mask = 0; /* w_size - 1 */ + + this.window = null; + /* Sliding window. Input bytes are read into the second half of the window, + * and move to the first half later to keep a dictionary of at least wSize + * bytes. With this organization, matches are limited to a distance of + * wSize-MAX_MATCH bytes, but this ensures that IO is always + * performed with a length multiple of the block size. + */ + + this.window_size = 0; + /* Actual size of window: 2*wSize, except when the user input buffer + * is directly used as sliding window. + */ + + this.prev = null; + /* Link to older string with same hash index. To limit the size of this + * array to 64K, this link is maintained only for the last 32K strings. + * An index in this array is thus a window index modulo 32K. + */ + + this.head = null; /* Heads of the hash chains or NIL. */ + + this.ins_h = 0; /* hash index of string to be inserted */ + this.hash_size = 0; /* number of elements in hash table */ + this.hash_bits = 0; /* log2(hash_size) */ + this.hash_mask = 0; /* hash_size-1 */ + + this.hash_shift = 0; + /* Number of bits by which ins_h must be shifted at each input + * step. It must be such that after MIN_MATCH steps, the oldest + * byte no longer takes part in the hash key, that is: + * hash_shift * MIN_MATCH >= hash_bits + */ + + this.block_start = 0; + /* Window position at the beginning of the current output block. Gets + * negative when the window is moved backwards. 
+ */ + + this.match_length = 0; /* length of best match */ + this.prev_match = 0; /* previous match */ + this.match_available = 0; /* set if previous match exists */ + this.strstart = 0; /* start of string to insert */ + this.match_start = 0; /* start of matching string */ + this.lookahead = 0; /* number of valid bytes ahead in window */ + + this.prev_length = 0; + /* Length of the best match at previous step. Matches not greater than this + * are discarded. This is used in the lazy match evaluation. + */ + + this.max_chain_length = 0; + /* To speed up deflation, hash chains are never searched beyond this + * length. A higher limit improves compression ratio but degrades the + * speed. + */ + + this.max_lazy_match = 0; + /* Attempt to find a better match only when the current match is strictly + * smaller than this value. This mechanism is used only for compression + * levels >= 4. + */ + // That's alias to max_lazy_match, don't use directly + //this.max_insert_length = 0; + /* Insert new strings in the hash table only if the match length is not + * greater than this length. This saves time but degrades compression. + * max_insert_length is used only for compression levels <= 3. 
+ */ + + this.level = 0; /* compression level (1..9) */ + this.strategy = 0; /* favor or force Huffman coding*/ + + this.good_match = 0; + /* Use a faster search when the previous match is longer than this */ + + this.nice_match = 0; /* Stop searching when current match exceeds this */ + + /* used by trees.c: */ + + /* Didn't use ct_data typedef below to suppress compiler warning */ + + // struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */ + // struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */ + // struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */ + + // Use flat array of DOUBLE size, with interleaved fata, + // because JS does not support effective + this.dyn_ltree = new utils.Buf16(HEAP_SIZE * 2); + this.dyn_dtree = new utils.Buf16((2 * D_CODES + 1) * 2); + this.bl_tree = new utils.Buf16((2 * BL_CODES + 1) * 2); + zero(this.dyn_ltree); + zero(this.dyn_dtree); + zero(this.bl_tree); + + this.l_desc = null; /* desc. for literal tree */ + this.d_desc = null; /* desc. for distance tree */ + this.bl_desc = null; /* desc. for bit length tree */ + + //ush bl_count[MAX_BITS+1]; + this.bl_count = new utils.Buf16(MAX_BITS + 1); + /* number of codes at each bit length for an optimal tree */ + + //int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */ + this.heap = new utils.Buf16(2 * L_CODES + 1); /* heap used to build the Huffman trees */ + zero(this.heap); + + this.heap_len = 0; /* number of elements in the heap */ + this.heap_max = 0; /* element of largest frequency */ + /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used. + * The same heap array is used to build all trees. 
+ */ + + this.depth = new utils.Buf16(2 * L_CODES + 1); //uch depth[2*L_CODES+1]; + zero(this.depth); + /* Depth of each subtree used as tie breaker for trees of equal frequency + */ + + this.l_buf = 0; /* buffer index for literals or lengths */ + + this.lit_bufsize = 0; + /* Size of match buffer for literals/lengths. There are 4 reasons for + * limiting lit_bufsize to 64K: + * - frequencies can be kept in 16 bit counters + * - if compression is not successful for the first block, all input + * data is still in the window so we can still emit a stored block even + * when input comes from standard input. (This can also be done for + * all blocks if lit_bufsize is not greater than 32K.) + * - if compression is not successful for a file smaller than 64K, we can + * even emit a stored file instead of a stored block (saving 5 bytes). + * This is applicable only for zip (not gzip or zlib). + * - creating new Huffman trees less frequently may not provide fast + * adaptation to changes in the input data statistics. (Take for + * example a binary file with poorly compressible code followed by + * a highly compressible string table.) Smaller buffer sizes give + * fast adaptation but have of course the overhead of transmitting + * trees more frequently. + * - I can't count above 4 + */ + + this.last_lit = 0; /* running index in l_buf */ + + this.d_buf = 0; + /* Buffer index for distances. To simplify the code, d_buf and l_buf have + * the same number of elements. To use different lengths, an extra flag + * array would be necessary. + */ + + this.opt_len = 0; /* bit length of current block with optimal trees */ + this.static_len = 0; /* bit length of current block with static trees */ + this.matches = 0; /* number of string matches in current block */ + this.insert = 0; /* bytes at end of window left to insert */ + + + this.bi_buf = 0; + /* Output buffer. bits are inserted starting at the bottom (least + * significant bits). 
+ */ + this.bi_valid = 0; + /* Number of valid bits in bi_buf. All bits above the last valid bit + * are always zero. + */ + + // Used for window memory init. We safely ignore it for JS. That makes + // sense only for pointers and memory check tools. + //this.high_water = 0; + /* High water mark offset in window for initialized bytes -- bytes above + * this are set to zero in order to avoid memory check warnings when + * longest match routines access bytes past the input. This is then + * updated to the new high water mark. + */ +} + + +function deflateResetKeep(strm) { + var s; + + if (!strm || !strm.state) { + return err(strm, Z_STREAM_ERROR); + } + + strm.total_in = strm.total_out = 0; + strm.data_type = Z_UNKNOWN; + + s = strm.state; + s.pending = 0; + s.pending_out = 0; + + if (s.wrap < 0) { + s.wrap = -s.wrap; + /* was made negative by deflate(..., Z_FINISH); */ + } + s.status = (s.wrap ? INIT_STATE : BUSY_STATE); + strm.adler = (s.wrap === 2) ? + 0 // crc32(0, Z_NULL, 0) + : + 1; // adler32(0, Z_NULL, 0) + s.last_flush = Z_NO_FLUSH; + trees._tr_init(s); + return Z_OK; +} + + +function deflateReset(strm) { + var ret = deflateResetKeep(strm); + if (ret === Z_OK) { + lm_init(strm.state); + } + return ret; +} + + +function deflateSetHeader(strm, head) { + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + if (strm.state.wrap !== 2) { return Z_STREAM_ERROR; } + strm.state.gzhead = head; + return Z_OK; +} + + +function deflateInit2(strm, level, method, windowBits, memLevel, strategy) { + if (!strm) { // === Z_NULL + return Z_STREAM_ERROR; + } + var wrap = 1; + + if (level === Z_DEFAULT_COMPRESSION) { + level = 6; + } + + if (windowBits < 0) { /* suppress zlib wrapper */ + wrap = 0; + windowBits = -windowBits; + } + + else if (windowBits > 15) { + wrap = 2; /* write gzip wrapper instead */ + windowBits -= 16; + } + + + if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED || + windowBits < 8 || windowBits > 15 || level < 0 || level > 9 || + 
strategy < 0 || strategy > Z_FIXED) { + return err(strm, Z_STREAM_ERROR); + } + + + if (windowBits === 8) { + windowBits = 9; + } + /* until 256-byte window bug fixed */ + + var s = new DeflateState(); + + strm.state = s; + s.strm = strm; + + s.wrap = wrap; + s.gzhead = null; + s.w_bits = windowBits; + s.w_size = 1 << s.w_bits; + s.w_mask = s.w_size - 1; + + s.hash_bits = memLevel + 7; + s.hash_size = 1 << s.hash_bits; + s.hash_mask = s.hash_size - 1; + s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH); + + s.window = new utils.Buf8(s.w_size * 2); + s.head = new utils.Buf16(s.hash_size); + s.prev = new utils.Buf16(s.w_size); + + // Don't need mem init magic for JS. + //s.high_water = 0; /* nothing written to s->window yet */ + + s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */ + + s.pending_buf_size = s.lit_bufsize * 4; + + //overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2); + //s->pending_buf = (uchf *) overlay; + s.pending_buf = new utils.Buf8(s.pending_buf_size); + + // It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`) + //s->d_buf = overlay + s->lit_bufsize/sizeof(ush); + s.d_buf = 1 * s.lit_bufsize; + + //s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize; + s.l_buf = (1 + 2) * s.lit_bufsize; + + s.level = level; + s.strategy = strategy; + s.method = method; + + return deflateReset(strm); +} + +function deflateInit(strm, level) { + return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY); +} + + +function deflate(strm, flush) { + var old_flush, s; + var beg, val; // for gzip header write only + + if (!strm || !strm.state || + flush > Z_BLOCK || flush < 0) { + return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR; + } + + s = strm.state; + + if (!strm.output || + (!strm.input && strm.avail_in !== 0) || + (s.status === FINISH_STATE && flush !== Z_FINISH)) { + return err(strm, (strm.avail_out === 0) ? 
Z_BUF_ERROR : Z_STREAM_ERROR); + } + + s.strm = strm; /* just in case */ + old_flush = s.last_flush; + s.last_flush = flush; + + /* Write the header */ + if (s.status === INIT_STATE) { + + if (s.wrap === 2) { // GZIP header + strm.adler = 0; //crc32(0L, Z_NULL, 0); + put_byte(s, 31); + put_byte(s, 139); + put_byte(s, 8); + if (!s.gzhead) { // s->gzhead == Z_NULL + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, 0); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? + 4 : 0)); + put_byte(s, OS_CODE); + s.status = BUSY_STATE; + } + else { + put_byte(s, (s.gzhead.text ? 1 : 0) + + (s.gzhead.hcrc ? 2 : 0) + + (!s.gzhead.extra ? 0 : 4) + + (!s.gzhead.name ? 0 : 8) + + (!s.gzhead.comment ? 0 : 16) + ); + put_byte(s, s.gzhead.time & 0xff); + put_byte(s, (s.gzhead.time >> 8) & 0xff); + put_byte(s, (s.gzhead.time >> 16) & 0xff); + put_byte(s, (s.gzhead.time >> 24) & 0xff); + put_byte(s, s.level === 9 ? 2 : + (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ? 
+ 4 : 0)); + put_byte(s, s.gzhead.os & 0xff); + if (s.gzhead.extra && s.gzhead.extra.length) { + put_byte(s, s.gzhead.extra.length & 0xff); + put_byte(s, (s.gzhead.extra.length >> 8) & 0xff); + } + if (s.gzhead.hcrc) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0); + } + s.gzindex = 0; + s.status = EXTRA_STATE; + } + } + else // DEFLATE header + { + var header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8; + var level_flags = -1; + + if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) { + level_flags = 0; + } else if (s.level < 6) { + level_flags = 1; + } else if (s.level === 6) { + level_flags = 2; + } else { + level_flags = 3; + } + header |= (level_flags << 6); + if (s.strstart !== 0) { header |= PRESET_DICT; } + header += 31 - (header % 31); + + s.status = BUSY_STATE; + putShortMSB(s, header); + + /* Save the adler32 of the preset dictionary: */ + if (s.strstart !== 0) { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + strm.adler = 1; // adler32(0L, Z_NULL, 0); + } + } + +//#ifdef GZIP + if (s.status === EXTRA_STATE) { + if (s.gzhead.extra/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + + while (s.gzindex < (s.gzhead.extra.length & 0xffff)) { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + break; + } + } + put_byte(s, s.gzhead.extra[s.gzindex] & 0xff); + s.gzindex++; + } + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (s.gzindex === s.gzhead.extra.length) { + s.gzindex = 0; + s.status = NAME_STATE; + } + } + else { + s.status = NAME_STATE; + } + } + if (s.status === NAME_STATE) { + if (s.gzhead.name/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === 
s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.name.length) { + val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.gzindex = 0; + s.status = COMMENT_STATE; + } + } + else { + s.status = COMMENT_STATE; + } + } + if (s.status === COMMENT_STATE) { + if (s.gzhead.comment/* != Z_NULL*/) { + beg = s.pending; /* start of bytes to update crc */ + //int val; + + do { + if (s.pending === s.pending_buf_size) { + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + flush_pending(strm); + beg = s.pending; + if (s.pending === s.pending_buf_size) { + val = 1; + break; + } + } + // JS specific: little magic to add zero terminator to end of string + if (s.gzindex < s.gzhead.comment.length) { + val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff; + } else { + val = 0; + } + put_byte(s, val); + } while (val !== 0); + + if (s.gzhead.hcrc && s.pending > beg) { + strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg); + } + if (val === 0) { + s.status = HCRC_STATE; + } + } + else { + s.status = HCRC_STATE; + } + } + if (s.status === HCRC_STATE) { + if (s.gzhead.hcrc) { + if (s.pending + 2 > s.pending_buf_size) { + flush_pending(strm); + } + if (s.pending + 2 <= s.pending_buf_size) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + strm.adler = 0; //crc32(0L, Z_NULL, 0); + s.status = BUSY_STATE; + } + } + else { + s.status = BUSY_STATE; + } + } +//#endif + + /* Flush as much 
pending output as possible */ + if (s.pending !== 0) { + flush_pending(strm); + if (strm.avail_out === 0) { + /* Since avail_out is 0, deflate will be called again with + * more output space, but possibly with both pending and + * avail_in equal to zero. There won't be anything to do, + * but this is not an error situation so make sure we + * return OK instead of BUF_ERROR at next call of deflate: + */ + s.last_flush = -1; + return Z_OK; + } + + /* Make sure there is something to do and avoid duplicate consecutive + * flushes. For repeated and useless calls with Z_FINISH, we keep + * returning Z_STREAM_END instead of Z_BUF_ERROR. + */ + } else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) && + flush !== Z_FINISH) { + return err(strm, Z_BUF_ERROR); + } + + /* User must not provide more input after the first FINISH: */ + if (s.status === FINISH_STATE && strm.avail_in !== 0) { + return err(strm, Z_BUF_ERROR); + } + + /* Start a new block or continue the current one. + */ + if (strm.avail_in !== 0 || s.lookahead !== 0 || + (flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) { + var bstate = (s.strategy === Z_HUFFMAN_ONLY) ? deflate_huff(s, flush) : + (s.strategy === Z_RLE ? deflate_rle(s, flush) : + configuration_table[s.level].func(s, flush)); + + if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) { + s.status = FINISH_STATE; + } + if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) { + if (strm.avail_out === 0) { + s.last_flush = -1; + /* avoid BUF_ERROR next call, see above */ + } + return Z_OK; + /* If flush != Z_NO_FLUSH && avail_out == 0, the next call + * of deflate should use the same flush parameter to make sure + * that the flush is complete. So we don't have to output an + * empty block here, this will be done at next call. This also + * ensures that for a very small output buffer, we emit at most + * one empty block. 
+ */ + } + if (bstate === BS_BLOCK_DONE) { + if (flush === Z_PARTIAL_FLUSH) { + trees._tr_align(s); + } + else if (flush !== Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */ + + trees._tr_stored_block(s, 0, 0, false); + /* For a full flush, this empty block will be recognized + * as a special marker by inflate_sync(). + */ + if (flush === Z_FULL_FLUSH) { + /*** CLEAR_HASH(s); ***/ /* forget history */ + zero(s.head); // Fill with NIL (= 0); + + if (s.lookahead === 0) { + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + } + } + flush_pending(strm); + if (strm.avail_out === 0) { + s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */ + return Z_OK; + } + } + } + //Assert(strm->avail_out > 0, "bug2"); + //if (strm.avail_out <= 0) { throw new Error("bug2");} + + if (flush !== Z_FINISH) { return Z_OK; } + if (s.wrap <= 0) { return Z_STREAM_END; } + + /* Write the trailer */ + if (s.wrap === 2) { + put_byte(s, strm.adler & 0xff); + put_byte(s, (strm.adler >> 8) & 0xff); + put_byte(s, (strm.adler >> 16) & 0xff); + put_byte(s, (strm.adler >> 24) & 0xff); + put_byte(s, strm.total_in & 0xff); + put_byte(s, (strm.total_in >> 8) & 0xff); + put_byte(s, (strm.total_in >> 16) & 0xff); + put_byte(s, (strm.total_in >> 24) & 0xff); + } + else + { + putShortMSB(s, strm.adler >>> 16); + putShortMSB(s, strm.adler & 0xffff); + } + + flush_pending(strm); + /* If avail_out is zero, the application will call deflate again + * to flush the rest. + */ + if (s.wrap > 0) { s.wrap = -s.wrap; } + /* write the trailer only once! */ + return s.pending !== 0 ? 
Z_OK : Z_STREAM_END; +} + +function deflateEnd(strm) { + var status; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + status = strm.state.status; + if (status !== INIT_STATE && + status !== EXTRA_STATE && + status !== NAME_STATE && + status !== COMMENT_STATE && + status !== HCRC_STATE && + status !== BUSY_STATE && + status !== FINISH_STATE + ) { + return err(strm, Z_STREAM_ERROR); + } + + strm.state = null; + + return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK; +} + + +/* ========================================================================= + * Initializes the compression dictionary from the given byte + * sequence without producing any compressed output. + */ +function deflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var s; + var str, n; + var wrap; + var avail; + var next; + var input; + var tmpDict; + + if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) { + return Z_STREAM_ERROR; + } + + s = strm.state; + wrap = s.wrap; + + if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) { + return Z_STREAM_ERROR; + } + + /* when using zlib wrappers, compute Adler-32 for provided dictionary */ + if (wrap === 1) { + /* adler32(strm->adler, dictionary, dictLength); */ + strm.adler = adler32(strm.adler, dictionary, dictLength, 0); + } + + s.wrap = 0; /* avoid computing Adler-32 in read_buf */ + + /* if dictionary would fill window, just replace the history */ + if (dictLength >= s.w_size) { + if (wrap === 0) { /* already empty otherwise */ + /*** CLEAR_HASH(s); ***/ + zero(s.head); // Fill with NIL (= 0); + s.strstart = 0; + s.block_start = 0; + s.insert = 0; + } + /* use the tail */ + // dictionary = dictionary.slice(dictLength - s.w_size); + tmpDict = new utils.Buf8(s.w_size); + utils.arraySet(tmpDict, dictionary, dictLength - s.w_size, s.w_size, 0); + dictionary = tmpDict; + dictLength = s.w_size; + } + /* insert dictionary into window and hash */ + avail = 
strm.avail_in; + next = strm.next_in; + input = strm.input; + strm.avail_in = dictLength; + strm.next_in = 0; + strm.input = dictionary; + fill_window(s); + while (s.lookahead >= MIN_MATCH) { + str = s.strstart; + n = s.lookahead - (MIN_MATCH - 1); + do { + /* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */ + s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask; + + s.prev[str & s.w_mask] = s.head[s.ins_h]; + + s.head[s.ins_h] = str; + str++; + } while (--n); + s.strstart = str; + s.lookahead = MIN_MATCH - 1; + fill_window(s); + } + s.strstart += s.lookahead; + s.block_start = s.strstart; + s.insert = s.lookahead; + s.lookahead = 0; + s.match_length = s.prev_length = MIN_MATCH - 1; + s.match_available = 0; + strm.next_in = next; + strm.input = input; + strm.avail_in = avail; + s.wrap = wrap; + return Z_OK; +} + + +exports.deflateInit = deflateInit; +exports.deflateInit2 = deflateInit2; +exports.deflateReset = deflateReset; +exports.deflateResetKeep = deflateResetKeep; +exports.deflateSetHeader = deflateSetHeader; +exports.deflate = deflate; +exports.deflateEnd = deflateEnd; +exports.deflateSetDictionary = deflateSetDictionary; +exports.deflateInfo = 'pako deflate (from Nodeca project)'; + +/* Not implemented +exports.deflateBound = deflateBound; +exports.deflateCopy = deflateCopy; +exports.deflateParams = deflateParams; +exports.deflatePending = deflatePending; +exports.deflatePrime = deflatePrime; +exports.deflateTune = deflateTune; +*/ + + +/***/ }), + +/***/ 47293: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. 
+// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +// See state defs from inflate.js +var BAD = 30; /* got a data error -- remain here until reset */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ + +/* + Decode literal, length, and distance codes and write out the resulting + literal and match bytes until either not enough input or output is + available, an end-of-block is encountered, or a data error is encountered. + When large enough input and output buffers are supplied to inflate(), for + example, a 16K input buffer and a 64K output buffer, more than 95% of the + inflate execution time is spent in this routine. + + Entry assumptions: + + state.mode === LEN + strm.avail_in >= 6 + strm.avail_out >= 258 + start >= strm.avail_out + state.bits < 8 + + On return, state.mode is one of: + + LEN -- ran out of enough output space or enough available input + TYPE -- reached end of block code, inflate() to interpret next block + BAD -- error in block data + + Notes: + + - The maximum input bits used by a length/distance pair is 15 bits for the + length code, 5 bits for the length extra, 15 bits for the distance code, + and 13 bits for the distance extra. This totals 48 bits, or six bytes. + Therefore if strm.avail_in >= 6, then there is enough input to avoid + checking for available input while decoding. 
+ + - The maximum bytes that a single length/distance pair can output is 258 + bytes, which is the maximum length that can be coded. inflate_fast() + requires strm.avail_out >= 258 for each loop to avoid checking for + output space. + */ +module.exports = function inflate_fast(strm, start) { + var state; + var _in; /* local strm.input */ + var last; /* have enough input while in < last */ + var _out; /* local strm.output */ + var beg; /* inflate()'s initial strm.output */ + var end; /* while out < end, enough space available */ +//#ifdef INFLATE_STRICT + var dmax; /* maximum distance from zlib header */ +//#endif + var wsize; /* window size or zero if not using window */ + var whave; /* valid bytes in the window */ + var wnext; /* window write index */ + // Use `s_window` instead `window`, avoid conflict with instrumentation tools + var s_window; /* allocated sliding window, if wsize != 0 */ + var hold; /* local strm.hold */ + var bits; /* local strm.bits */ + var lcode; /* local strm.lencode */ + var dcode; /* local strm.distcode */ + var lmask; /* mask for first level of length codes */ + var dmask; /* mask for first level of distance codes */ + var here; /* retrieved table entry */ + var op; /* code bits, operation, extra bits, or */ + /* window position, window bytes to copy */ + var len; /* match length, unused bytes */ + var dist; /* match distance */ + var from; /* where to copy match from */ + var from_source; + + + var input, output; // JS specific, because we have no pointers + + /* copy state to local variables */ + state = strm.state; + //here = state.here; + _in = strm.next_in; + input = strm.input; + last = _in + (strm.avail_in - 5); + _out = strm.next_out; + output = strm.output; + beg = _out - (start - strm.avail_out); + end = _out + (strm.avail_out - 257); +//#ifdef INFLATE_STRICT + dmax = state.dmax; +//#endif + wsize = state.wsize; + whave = state.whave; + wnext = state.wnext; + s_window = state.window; + hold = state.hold; + bits = state.bits; + 
lcode = state.lencode; + dcode = state.distcode; + lmask = (1 << state.lenbits) - 1; + dmask = (1 << state.distbits) - 1; + + + /* decode literals and length/distances until end-of-block or not enough + input data or output space */ + + top: + do { + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + + here = lcode[hold & lmask]; + + dolen: + for (;;) { // Goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + if (op === 0) { /* literal */ + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? + // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + output[_out++] = here & 0xffff/*here.val*/; + } + else if (op & 16) { /* length base */ + len = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (op) { + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + len += hold & ((1 << op) - 1); + hold >>>= op; + bits -= op; + } + //Tracevv((stderr, "inflate: length %u\n", len)); + if (bits < 15) { + hold += input[_in++] << bits; + bits += 8; + hold += input[_in++] << bits; + bits += 8; + } + here = dcode[hold & dmask]; + + dodist: + for (;;) { // goto emulation + op = here >>> 24/*here.bits*/; + hold >>>= op; + bits -= op; + op = (here >>> 16) & 0xff/*here.op*/; + + if (op & 16) { /* distance base */ + dist = here & 0xffff/*here.val*/; + op &= 15; /* number of extra bits */ + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + if (bits < op) { + hold += input[_in++] << bits; + bits += 8; + } + } + dist += hold & ((1 << op) - 1); +//#ifdef INFLATE_STRICT + if (dist > dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } +//#endif + hold >>>= op; + bits -= op; + //Tracevv((stderr, "inflate: distance %u\n", dist)); + op = _out - beg; /* max distance in output */ + if (dist > op) { /* see if copy from window */ + op = dist - op; /* distance 
back in window */ + if (op > whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break top; + } + +// (!) This block is disabled in zlib defaults, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// if (len <= op - whave) { +// do { +// output[_out++] = 0; +// } while (--len); +// continue top; +// } +// len -= op - whave; +// do { +// output[_out++] = 0; +// } while (--op > whave); +// if (op === 0) { +// from = _out - dist; +// do { +// output[_out++] = output[from++]; +// } while (--len); +// continue top; +// } +//#endif + } + from = 0; // window index + from_source = s_window; + if (wnext === 0) { /* very common case */ + from += wsize - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + else if (wnext < op) { /* wrap around window */ + from += wsize + wnext - op; + op -= wnext; + if (op < len) { /* some from end of window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = 0; + if (wnext < len) { /* some from start of window */ + op = wnext; + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + } + else { /* contiguous in window */ + from += wnext - op; + if (op < len) { /* some from window */ + len -= op; + do { + output[_out++] = s_window[from++]; + } while (--op); + from = _out - dist; /* rest from output */ + from_source = output; + } + } + while (len > 2) { + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + output[_out++] = from_source[from++]; + len -= 3; + } + if (len) { + output[_out++] = from_source[from++]; + if (len > 1) { + output[_out++] = from_source[from++]; + } + } + } + else { + from = _out - dist; /* copy direct from output */ + do { /* 
minimum length is three */ + output[_out++] = output[from++]; + output[_out++] = output[from++]; + output[_out++] = output[from++]; + len -= 3; + } while (len > 2); + if (len) { + output[_out++] = output[from++]; + if (len > 1) { + output[_out++] = output[from++]; + } + } + } + } + else if ((op & 64) === 0) { /* 2nd level distance code */ + here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dodist; + } + else { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } + else if ((op & 64) === 0) { /* 2nd level length code */ + here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))]; + continue dolen; + } + else if (op & 32) { /* end-of-block */ + //Tracevv((stderr, "inflate: end of block\n")); + state.mode = TYPE; + break top; + } + else { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break top; + } + + break; // need to emulate goto via "continue" + } + } while (_in < last && _out < end); + + /* return unused bytes (on entry, bits < 8, so in won't go too far back) */ + len = bits >> 3; + _in -= len; + bits -= len << 3; + hold &= (1 << bits) - 1; + + /* update state and return */ + strm.next_in = _in; + strm.next_out = _out; + strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last)); + strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end)); + state.hold = hold; + state.bits = bits; + return; +}; + + +/***/ }), + +/***/ 71447: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. 
+// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +var utils = __webpack_require__(9805); +var adler32 = __webpack_require__(53269); +var crc32 = __webpack_require__(14823); +var inflate_fast = __webpack_require__(47293); +var inflate_table = __webpack_require__(21998); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +/* Allowed flush values; see deflate() and inflate() below for details */ +//var Z_NO_FLUSH = 0; +//var Z_PARTIAL_FLUSH = 1; +//var Z_SYNC_FLUSH = 2; +//var Z_FULL_FLUSH = 3; +var Z_FINISH = 4; +var Z_BLOCK = 5; +var Z_TREES = 6; + + +/* Return codes for the compression/decompression functions. Negative values + * are errors, positive values are used for special but normal events. 
+ */ +var Z_OK = 0; +var Z_STREAM_END = 1; +var Z_NEED_DICT = 2; +//var Z_ERRNO = -1; +var Z_STREAM_ERROR = -2; +var Z_DATA_ERROR = -3; +var Z_MEM_ERROR = -4; +var Z_BUF_ERROR = -5; +//var Z_VERSION_ERROR = -6; + +/* The deflate compression method */ +var Z_DEFLATED = 8; + + +/* STATES ====================================================================*/ +/* ===========================================================================*/ + + +var HEAD = 1; /* i: waiting for magic header */ +var FLAGS = 2; /* i: waiting for method and flags (gzip) */ +var TIME = 3; /* i: waiting for modification time (gzip) */ +var OS = 4; /* i: waiting for extra flags and operating system (gzip) */ +var EXLEN = 5; /* i: waiting for extra length (gzip) */ +var EXTRA = 6; /* i: waiting for extra bytes (gzip) */ +var NAME = 7; /* i: waiting for end of file name (gzip) */ +var COMMENT = 8; /* i: waiting for end of comment (gzip) */ +var HCRC = 9; /* i: waiting for header crc (gzip) */ +var DICTID = 10; /* i: waiting for dictionary check value */ +var DICT = 11; /* waiting for inflateSetDictionary() call */ +var TYPE = 12; /* i: waiting for type bits, including last-flag bit */ +var TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */ +var STORED = 14; /* i: waiting for stored size (length and complement) */ +var COPY_ = 15; /* i/o: same as COPY below, but only first time in */ +var COPY = 16; /* i/o: waiting for input or output to copy stored block */ +var TABLE = 17; /* i: waiting for dynamic block table lengths */ +var LENLENS = 18; /* i: waiting for code length code lengths */ +var CODELENS = 19; /* i: waiting for length/lit and distance code lengths */ +var LEN_ = 20; /* i: same as LEN below, but only first time in */ +var LEN = 21; /* i: waiting for length/lit/eob code */ +var LENEXT = 22; /* i: waiting for length extra bits */ +var DIST = 23; /* i: waiting for distance code */ +var DISTEXT = 24; /* i: waiting for distance extra bits */ +var MATCH = 25; /* o: 
waiting for output space to copy string */ +var LIT = 26; /* o: waiting for output space to write literal */ +var CHECK = 27; /* i: waiting for 32-bit check value */ +var LENGTH = 28; /* i: waiting for 32-bit length (gzip) */ +var DONE = 29; /* finished check, done -- remain here until reset */ +var BAD = 30; /* got a data error -- remain here until reset */ +var MEM = 31; /* got an inflate() memory error -- remain here until reset */ +var SYNC = 32; /* looking for synchronization bytes to restart inflate() */ + +/* ===========================================================================*/ + + + +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var MAX_WBITS = 15; +/* 32K LZ77 window */ +var DEF_WBITS = MAX_WBITS; + + +function zswap32(q) { + return (((q >>> 24) & 0xff) + + ((q >>> 8) & 0xff00) + + ((q & 0xff00) << 8) + + ((q & 0xff) << 24)); +} + + +function InflateState() { + this.mode = 0; /* current inflate mode */ + this.last = false; /* true if processing last block */ + this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */ + this.havedict = false; /* true if dictionary provided */ + this.flags = 0; /* gzip header method and flags (0 if zlib) */ + this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */ + this.check = 0; /* protected copy of check value */ + this.total = 0; /* protected copy of output count */ + // TODO: may be {} + this.head = null; /* where to save gzip header information */ + + /* sliding window */ + this.wbits = 0; /* log base 2 of requested window size */ + this.wsize = 0; /* window size or zero if not using window */ + this.whave = 0; /* valid bytes in the window */ + this.wnext = 0; /* window write index */ + this.window = null; /* allocated sliding window, if needed */ + + /* bit accumulator */ + this.hold = 0; /* input bit accumulator */ + this.bits = 0; /* number of bits in "in" */ + + /* for string and stored block copying */ + this.length = 0; /* literal or length of 
data to copy */ + this.offset = 0; /* distance back to copy string from */ + + /* for table and code decoding */ + this.extra = 0; /* extra bits needed */ + + /* fixed and dynamic code tables */ + this.lencode = null; /* starting table for length/literal codes */ + this.distcode = null; /* starting table for distance codes */ + this.lenbits = 0; /* index bits for lencode */ + this.distbits = 0; /* index bits for distcode */ + + /* dynamic table building */ + this.ncode = 0; /* number of code length code lengths */ + this.nlen = 0; /* number of length code lengths */ + this.ndist = 0; /* number of distance code lengths */ + this.have = 0; /* number of code lengths in lens[] */ + this.next = null; /* next available space in codes[] */ + + this.lens = new utils.Buf16(320); /* temporary storage for code lengths */ + this.work = new utils.Buf16(288); /* work area for code table building */ + + /* + because we don't have pointers in js, we use lencode and distcode directly + as buffers so we don't need codes + */ + //this.codes = new utils.Buf32(ENOUGH); /* space for code tables */ + this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */ + this.distdyn = null; /* dynamic table for distance codes (JS specific) */ + this.sane = 0; /* if false, allow invalid distance too far */ + this.back = 0; /* bits back of last unprocessed length/lit */ + this.was = 0; /* initial length of match */ +} + +function inflateResetKeep(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + strm.total_in = strm.total_out = state.total = 0; + strm.msg = ''; /*Z_NULL*/ + if (state.wrap) { /* to support ill-conceived Java test suite */ + strm.adler = state.wrap & 1; + } + state.mode = HEAD; + state.last = 0; + state.havedict = 0; + state.dmax = 32768; + state.head = null/*Z_NULL*/; + state.hold = 0; + state.bits = 0; + //state.lencode = state.distcode = state.next = state.codes; + state.lencode = state.lendyn = new 
utils.Buf32(ENOUGH_LENS); + state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS); + + state.sane = 1; + state.back = -1; + //Tracev((stderr, "inflate: reset\n")); + return Z_OK; +} + +function inflateReset(strm) { + var state; + + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + state.wsize = 0; + state.whave = 0; + state.wnext = 0; + return inflateResetKeep(strm); + +} + +function inflateReset2(strm, windowBits) { + var wrap; + var state; + + /* get the state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + + /* extract wrap request from windowBits parameter */ + if (windowBits < 0) { + wrap = 0; + windowBits = -windowBits; + } + else { + wrap = (windowBits >> 4) + 1; + if (windowBits < 48) { + windowBits &= 15; + } + } + + /* set number of window bits, free window if different */ + if (windowBits && (windowBits < 8 || windowBits > 15)) { + return Z_STREAM_ERROR; + } + if (state.window !== null && state.wbits !== windowBits) { + state.window = null; + } + + /* update state and reset the rest of it */ + state.wrap = wrap; + state.wbits = windowBits; + return inflateReset(strm); +} + +function inflateInit2(strm, windowBits) { + var ret; + var state; + + if (!strm) { return Z_STREAM_ERROR; } + //strm.msg = Z_NULL; /* in case we return an error */ + + state = new InflateState(); + + //if (state === Z_NULL) return Z_MEM_ERROR; + //Tracev((stderr, "inflate: allocated\n")); + strm.state = state; + state.window = null/*Z_NULL*/; + ret = inflateReset2(strm, windowBits); + if (ret !== Z_OK) { + strm.state = null/*Z_NULL*/; + } + return ret; +} + +function inflateInit(strm) { + return inflateInit2(strm, DEF_WBITS); +} + + +/* + Return state with length and distance decoding tables and index sizes set to + fixed code decoding. Normally this returns fixed tables from inffixed.h. 
+ If BUILDFIXED is defined, then instead this routine builds the tables the + first time it's called, and returns those tables the first time and + thereafter. This reduces the size of the code by about 2K bytes, in + exchange for a little execution time. However, BUILDFIXED should not be + used for threaded applications, since the rewriting of the tables and virgin + may not be thread-safe. + */ +var virgin = true; + +var lenfix, distfix; // We have no pointers in JS, so keep tables separate + +function fixedtables(state) { + /* build fixed huffman tables if first call (may not be thread safe) */ + if (virgin) { + var sym; + + lenfix = new utils.Buf32(512); + distfix = new utils.Buf32(32); + + /* literal/length table */ + sym = 0; + while (sym < 144) { state.lens[sym++] = 8; } + while (sym < 256) { state.lens[sym++] = 9; } + while (sym < 280) { state.lens[sym++] = 7; } + while (sym < 288) { state.lens[sym++] = 8; } + + inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 }); + + /* distance table */ + sym = 0; + while (sym < 32) { state.lens[sym++] = 5; } + + inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 }); + + /* do this just once */ + virgin = false; + } + + state.lencode = lenfix; + state.lenbits = 9; + state.distcode = distfix; + state.distbits = 5; +} + + +/* + Update the window with the last wsize (normally 32K) bytes written before + returning. If window does not exist yet, create it. This is only called + when a window is already in use, or when output has been written during this + inflate call, but the end of the deflate stream has not been reached yet. + It is also called to create a window for dictionary data when a dictionary + is loaded. 
+ + Providing output buffers larger than 32K to inflate() should provide a speed + advantage, since only the last 32K of output is copied to the sliding window + upon return from inflate(), and since all distances after the first 32K of + output will fall in the output data, making match copies simpler and faster. + The advantage may be dependent on the size of the processor's data caches. + */ +function updatewindow(strm, src, end, copy) { + var dist; + var state = strm.state; + + /* if it hasn't been done already, allocate space for the window */ + if (state.window === null) { + state.wsize = 1 << state.wbits; + state.wnext = 0; + state.whave = 0; + + state.window = new utils.Buf8(state.wsize); + } + + /* copy state->wsize or less output bytes into the circular window */ + if (copy >= state.wsize) { + utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0); + state.wnext = 0; + state.whave = state.wsize; + } + else { + dist = state.wsize - state.wnext; + if (dist > copy) { + dist = copy; + } + //zmemcpy(state->window + state->wnext, end - copy, dist); + utils.arraySet(state.window, src, end - copy, dist, state.wnext); + copy -= dist; + if (copy) { + //zmemcpy(state->window, end - copy, copy); + utils.arraySet(state.window, src, end - copy, copy, 0); + state.wnext = copy; + state.whave = state.wsize; + } + else { + state.wnext += dist; + if (state.wnext === state.wsize) { state.wnext = 0; } + if (state.whave < state.wsize) { state.whave += dist; } + } + } + return 0; +} + +function inflate(strm, flush) { + var state; + var input, output; // input/output buffers + var next; /* next input INDEX */ + var put; /* next output INDEX */ + var have, left; /* available input and output */ + var hold; /* bit buffer */ + var bits; /* bits in bit buffer */ + var _in, _out; /* save starting available input and output */ + var copy; /* number of stored or match bytes to copy */ + var from; /* where to copy match bytes from */ + var from_source; + var here = 0; /* 
current decoding table entry */ + var here_bits, here_op, here_val; // paked "here" denormalized (JS specific) + //var last; /* parent table entry */ + var last_bits, last_op, last_val; // paked "last" denormalized (JS specific) + var len; /* length to copy for repeats, bits to drop */ + var ret; /* return code */ + var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */ + var opts; + + var n; // temporary var for NEED_BITS + + var order = /* permutation of code lengths */ + [ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ]; + + + if (!strm || !strm.state || !strm.output || + (!strm.input && strm.avail_in !== 0)) { + return Z_STREAM_ERROR; + } + + state = strm.state; + if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */ + + + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + _in = have; + _out = left; + ret = Z_OK; + + inf_leave: // goto emulation + for (;;) { + switch (state.mode) { + case HEAD: + if (state.wrap === 0) { + state.mode = TYPEDO; + break; + } + //=== NEEDBITS(16); + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */ + state.check = 0/*crc32(0L, Z_NULL, 0)*/; + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = FLAGS; + break; + } + state.flags = 0; /* expect zlib header */ + if (state.head) { + state.head.done = false; + } + if (!(state.wrap & 1) || /* check if zlib header allowed */ + (((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) { + strm.msg = 'incorrect header check'; + state.mode = BAD; + break; + } + if ((hold & 
0x0f)/*BITS(4)*/ !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// + len = (hold & 0x0f)/*BITS(4)*/ + 8; + if (state.wbits === 0) { + state.wbits = len; + } + else if (len > state.wbits) { + strm.msg = 'invalid window size'; + state.mode = BAD; + break; + } + state.dmax = 1 << len; + //Tracev((stderr, "inflate: zlib header ok\n")); + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = hold & 0x200 ? DICTID : TYPE; + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + break; + case FLAGS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.flags = hold; + if ((state.flags & 0xff) !== Z_DEFLATED) { + strm.msg = 'unknown compression method'; + state.mode = BAD; + break; + } + if (state.flags & 0xe000) { + strm.msg = 'unknown header flags set'; + state.mode = BAD; + break; + } + if (state.head) { + state.head.text = ((hold >> 8) & 1); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = TIME; + /* falls through */ + case TIME: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.time = hold; + } + if (state.flags & 0x0200) { + //=== CRC4(state.check, hold) + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + hbuf[2] = (hold >>> 16) & 0xff; + hbuf[3] = (hold >>> 24) & 0xff; + state.check = crc32(state.check, hbuf, 4, 0); + //=== + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = OS; + /* falls through */ + case OS: + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have 
=== 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (state.head) { + state.head.xflags = (hold & 0xff); + state.head.os = (hold >> 8); + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = EXLEN; + /* falls through */ + case EXLEN: + if (state.flags & 0x0400) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length = hold; + if (state.head) { + state.head.extra_len = hold; + } + if (state.flags & 0x0200) { + //=== CRC2(state.check, hold); + hbuf[0] = hold & 0xff; + hbuf[1] = (hold >>> 8) & 0xff; + state.check = crc32(state.check, hbuf, 2, 0); + //===// + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + else if (state.head) { + state.head.extra = null/*Z_NULL*/; + } + state.mode = EXTRA; + /* falls through */ + case EXTRA: + if (state.flags & 0x0400) { + copy = state.length; + if (copy > have) { copy = have; } + if (copy) { + if (state.head) { + len = state.head.extra_len - state.length; + if (!state.head.extra) { + // Use untyped array for more convenient processing later + state.head.extra = new Array(state.head.extra_len); + } + utils.arraySet( + state.head.extra, + input, + next, + // extra field is limited to 65536 bytes + // - no need for additional size check + copy, + /*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/ + len + ); + //zmemcpy(state.head.extra + len, next, + // len + copy > state.head.extra_max ? 
+ // state.head.extra_max - len : copy); + } + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + state.length -= copy; + } + if (state.length) { break inf_leave; } + } + state.length = 0; + state.mode = NAME; + /* falls through */ + case NAME: + if (state.flags & 0x0800) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + // TODO: 2 or 1 bytes? + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.name_max*/)) { + state.head.name += String.fromCharCode(len); + } + } while (len && copy < have); + + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.name = null; + } + state.length = 0; + state.mode = COMMENT; + /* falls through */ + case COMMENT: + if (state.flags & 0x1000) { + if (have === 0) { break inf_leave; } + copy = 0; + do { + len = input[next + copy++]; + /* use constant limit because in js we should not preallocate memory */ + if (state.head && len && + (state.length < 65536 /*state.head.comm_max*/)) { + state.head.comment += String.fromCharCode(len); + } + } while (len && copy < have); + if (state.flags & 0x0200) { + state.check = crc32(state.check, input, copy, next); + } + have -= copy; + next += copy; + if (len) { break inf_leave; } + } + else if (state.head) { + state.head.comment = null; + } + state.mode = HCRC; + /* falls through */ + case HCRC: + if (state.flags & 0x0200) { + //=== NEEDBITS(16); */ + while (bits < 16) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.check & 0xffff)) { + strm.msg = 'header crc mismatch'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + } + if (state.head) { + 
state.head.hcrc = ((state.flags >> 9) & 1); + state.head.done = true; + } + strm.adler = state.check = 0; + state.mode = TYPE; + break; + case DICTID: + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + strm.adler = state.check = zswap32(hold); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = DICT; + /* falls through */ + case DICT: + if (state.havedict === 0) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + return Z_NEED_DICT; + } + strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/; + state.mode = TYPE; + /* falls through */ + case TYPE: + if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; } + /* falls through */ + case TYPEDO: + if (state.last) { + //--- BYTEBITS() ---// + hold >>>= bits & 7; + bits -= bits & 7; + //---// + state.mode = CHECK; + break; + } + //=== NEEDBITS(3); */ + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.last = (hold & 0x01)/*BITS(1)*/; + //--- DROPBITS(1) ---// + hold >>>= 1; + bits -= 1; + //---// + + switch ((hold & 0x03)/*BITS(2)*/) { + case 0: /* stored block */ + //Tracev((stderr, "inflate: stored block%s\n", + // state.last ? " (last)" : "")); + state.mode = STORED; + break; + case 1: /* fixed block */ + fixedtables(state); + //Tracev((stderr, "inflate: fixed codes block%s\n", + // state.last ? " (last)" : "")); + state.mode = LEN_; /* decode codes */ + if (flush === Z_TREES) { + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break inf_leave; + } + break; + case 2: /* dynamic block */ + //Tracev((stderr, "inflate: dynamic codes block%s\n", + // state.last ? 
" (last)" : "")); + state.mode = TABLE; + break; + case 3: + strm.msg = 'invalid block type'; + state.mode = BAD; + } + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + break; + case STORED: + //--- BYTEBITS() ---// /* go to byte boundary */ + hold >>>= bits & 7; + bits -= bits & 7; + //---// + //=== NEEDBITS(32); */ + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) { + strm.msg = 'invalid stored block lengths'; + state.mode = BAD; + break; + } + state.length = hold & 0xffff; + //Tracev((stderr, "inflate: stored length %u\n", + // state.length)); + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + state.mode = COPY_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case COPY_: + state.mode = COPY; + /* falls through */ + case COPY: + copy = state.length; + if (copy) { + if (copy > have) { copy = have; } + if (copy > left) { copy = left; } + if (copy === 0) { break inf_leave; } + //--- zmemcpy(put, next, copy); --- + utils.arraySet(output, input, next, copy, put); + //---// + have -= copy; + next += copy; + left -= copy; + put += copy; + state.length -= copy; + break; + } + //Tracev((stderr, "inflate: stored end\n")); + state.mode = TYPE; + break; + case TABLE: + //=== NEEDBITS(14); */ + while (bits < 14) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1; + //--- DROPBITS(5) ---// + hold >>>= 5; + bits -= 5; + //---// + state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4; + //--- DROPBITS(4) ---// + hold >>>= 4; + bits -= 4; + //---// +//#ifndef PKZIP_BUG_WORKAROUND + if (state.nlen > 286 || state.ndist > 30) { + strm.msg = 'too many length or distance symbols'; + state.mode = BAD; + break; + 
} +//#endif + //Tracev((stderr, "inflate: table sizes ok\n")); + state.have = 0; + state.mode = LENLENS; + /* falls through */ + case LENLENS: + while (state.have < state.ncode) { + //=== NEEDBITS(3); + while (bits < 3) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.lens[order[state.have++]] = (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + while (state.have < 19) { + state.lens[order[state.have++]] = 0; + } + // We have separate tables & no pointers. 2 commented lines below not needed. + //state.next = state.codes; + //state.lencode = state.next; + // Switch to use dynamic table + state.lencode = state.lendyn; + state.lenbits = 7; + + opts = { bits: state.lenbits }; + ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts); + state.lenbits = opts.bits; + + if (ret) { + strm.msg = 'invalid code lengths set'; + state.mode = BAD; + break; + } + //Tracev((stderr, "inflate: code lengths ok\n")); + state.have = 0; + state.mode = CODELENS; + /* falls through */ + case CODELENS: + while (state.have < state.nlen + state.ndist) { + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_val < 16) { + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.lens[state.have++] = here_val; + } + else { + if (here_val === 16) { + //=== NEEDBITS(here.bits + 2); + n = here_bits + 2; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// 
+ if (state.have === 0) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + len = state.lens[state.have - 1]; + copy = 3 + (hold & 0x03);//BITS(2); + //--- DROPBITS(2) ---// + hold >>>= 2; + bits -= 2; + //---// + } + else if (here_val === 17) { + //=== NEEDBITS(here.bits + 3); + n = here_bits + 3; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 3 + (hold & 0x07);//BITS(3); + //--- DROPBITS(3) ---// + hold >>>= 3; + bits -= 3; + //---// + } + else { + //=== NEEDBITS(here.bits + 7); + n = here_bits + 7; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + len = 0; + copy = 11 + (hold & 0x7f);//BITS(7); + //--- DROPBITS(7) ---// + hold >>>= 7; + bits -= 7; + //---// + } + if (state.have + copy > state.nlen + state.ndist) { + strm.msg = 'invalid bit length repeat'; + state.mode = BAD; + break; + } + while (copy--) { + state.lens[state.have++] = len; + } + } + } + + /* handle error breaks in while */ + if (state.mode === BAD) { break; } + + /* check for end-of-block code (better have one) */ + if (state.lens[256] === 0) { + strm.msg = 'invalid code -- missing end-of-block'; + state.mode = BAD; + break; + } + + /* build code tables -- note: do not change the lenbits or distbits + values here (9 and 6) without reading the comments in inftrees.h + concerning the ENOUGH constants, which depend on those values */ + state.lenbits = 9; + + opts = { bits: state.lenbits }; + ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. 
+ // state.next_index = opts.table_index; + state.lenbits = opts.bits; + // state.lencode = state.next; + + if (ret) { + strm.msg = 'invalid literal/lengths set'; + state.mode = BAD; + break; + } + + state.distbits = 6; + //state.distcode.copy(state.codes); + // Switch to use dynamic table + state.distcode = state.distdyn; + opts = { bits: state.distbits }; + ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts); + // We have separate tables & no pointers. 2 commented lines below not needed. + // state.next_index = opts.table_index; + state.distbits = opts.bits; + // state.distcode = state.next; + + if (ret) { + strm.msg = 'invalid distances set'; + state.mode = BAD; + break; + } + //Tracev((stderr, 'inflate: codes ok\n')); + state.mode = LEN_; + if (flush === Z_TREES) { break inf_leave; } + /* falls through */ + case LEN_: + state.mode = LEN; + /* falls through */ + case LEN: + if (have >= 6 && left >= 258) { + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + inflate_fast(strm, _out); + //--- LOAD() --- + put = strm.next_out; + output = strm.output; + left = strm.avail_out; + next = strm.next_in; + input = strm.input; + have = strm.avail_in; + hold = state.hold; + bits = state.bits; + //--- + + if (state.mode === TYPE) { + state.back = -1; + } + break; + } + state.back = 0; + for (;;) { + here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if (here_bits <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + if (here_op && (here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.lencode[last_val + + ((hold & ((1 << 
(last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + state.length = here_val; + if (here_op === 0) { + //Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ? + // "inflate: literal '%c'\n" : + // "inflate: literal 0x%02x\n", here.val)); + state.mode = LIT; + break; + } + if (here_op & 32) { + //Tracevv((stderr, "inflate: end of block\n")); + state.back = -1; + state.mode = TYPE; + break; + } + if (here_op & 64) { + strm.msg = 'invalid literal/length code'; + state.mode = BAD; + break; + } + state.extra = here_op & 15; + state.mode = LENEXT; + /* falls through */ + case LENEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } + //Tracevv((stderr, "inflate: length %u\n", state.length)); + state.was = state.length; + state.mode = DIST; + /* falls through */ + case DIST: + for (;;) { + here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/ + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + 
bits += 8; + //---// + } + if ((here_op & 0xf0) === 0) { + last_bits = here_bits; + last_op = here_op; + last_val = here_val; + for (;;) { + here = state.distcode[last_val + + ((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)]; + here_bits = here >>> 24; + here_op = (here >>> 16) & 0xff; + here_val = here & 0xffff; + + if ((last_bits + here_bits) <= bits) { break; } + //--- PULLBYTE() ---// + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + //---// + } + //--- DROPBITS(last.bits) ---// + hold >>>= last_bits; + bits -= last_bits; + //---// + state.back += last_bits; + } + //--- DROPBITS(here.bits) ---// + hold >>>= here_bits; + bits -= here_bits; + //---// + state.back += here_bits; + if (here_op & 64) { + strm.msg = 'invalid distance code'; + state.mode = BAD; + break; + } + state.offset = here_val; + state.extra = (here_op) & 15; + state.mode = DISTEXT; + /* falls through */ + case DISTEXT: + if (state.extra) { + //=== NEEDBITS(state.extra); + n = state.extra; + while (bits < n) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/; + //--- DROPBITS(state.extra) ---// + hold >>>= state.extra; + bits -= state.extra; + //---// + state.back += state.extra; + } +//#ifdef INFLATE_STRICT + if (state.offset > state.dmax) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +//#endif + //Tracevv((stderr, "inflate: distance %u\n", state.offset)); + state.mode = MATCH; + /* falls through */ + case MATCH: + if (left === 0) { break inf_leave; } + copy = _out - left; + if (state.offset > copy) { /* copy from window */ + copy = state.offset - copy; + if (copy > state.whave) { + if (state.sane) { + strm.msg = 'invalid distance too far back'; + state.mode = BAD; + break; + } +// (!) 
This block is disabled in zlib defaults, +// don't enable it for binary compatibility +//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR +// Trace((stderr, "inflate.c too far\n")); +// copy -= state.whave; +// if (copy > state.length) { copy = state.length; } +// if (copy > left) { copy = left; } +// left -= copy; +// state.length -= copy; +// do { +// output[put++] = 0; +// } while (--copy); +// if (state.length === 0) { state.mode = LEN; } +// break; +//#endif + } + if (copy > state.wnext) { + copy -= state.wnext; + from = state.wsize - copy; + } + else { + from = state.wnext - copy; + } + if (copy > state.length) { copy = state.length; } + from_source = state.window; + } + else { /* copy from output */ + from_source = output; + from = put - state.offset; + copy = state.length; + } + if (copy > left) { copy = left; } + left -= copy; + state.length -= copy; + do { + output[put++] = from_source[from++]; + } while (--copy); + if (state.length === 0) { state.mode = LEN; } + break; + case LIT: + if (left === 0) { break inf_leave; } + output[put++] = state.length; + left--; + state.mode = LEN; + break; + case CHECK: + if (state.wrap) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + // Use '|' instead of '+' to make sure that result is signed + hold |= input[next++] << bits; + bits += 8; + } + //===// + _out -= left; + strm.total_out += _out; + state.total += _out; + if (_out) { + strm.adler = state.check = + /*UPDATE(state.check, put - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out)); + + } + _out = left; + // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too + if ((state.flags ? 
hold : zswap32(hold)) !== state.check) { + strm.msg = 'incorrect data check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: check matches trailer\n")); + } + state.mode = LENGTH; + /* falls through */ + case LENGTH: + if (state.wrap && state.flags) { + //=== NEEDBITS(32); + while (bits < 32) { + if (have === 0) { break inf_leave; } + have--; + hold += input[next++] << bits; + bits += 8; + } + //===// + if (hold !== (state.total & 0xffffffff)) { + strm.msg = 'incorrect length check'; + state.mode = BAD; + break; + } + //=== INITBITS(); + hold = 0; + bits = 0; + //===// + //Tracev((stderr, "inflate: length matches trailer\n")); + } + state.mode = DONE; + /* falls through */ + case DONE: + ret = Z_STREAM_END; + break inf_leave; + case BAD: + ret = Z_DATA_ERROR; + break inf_leave; + case MEM: + return Z_MEM_ERROR; + case SYNC: + /* falls through */ + default: + return Z_STREAM_ERROR; + } + } + + // inf_leave <- here is real place for "goto inf_leave", emulated via "break inf_leave" + + /* + Return from inflate(), updating the total counts and the check value. + If there was no progress during the inflate() call, return a buffer + error. Call updatewindow() to create and/or update the window state. + Note: a memory error from inflate() is non-recoverable. 
+ */ + + //--- RESTORE() --- + strm.next_out = put; + strm.avail_out = left; + strm.next_in = next; + strm.avail_in = have; + state.hold = hold; + state.bits = bits; + //--- + + if (state.wsize || (_out !== strm.avail_out && state.mode < BAD && + (state.mode < CHECK || flush !== Z_FINISH))) { + if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) { + state.mode = MEM; + return Z_MEM_ERROR; + } + } + _in -= strm.avail_in; + _out -= strm.avail_out; + strm.total_in += _in; + strm.total_out += _out; + state.total += _out; + if (state.wrap && _out) { + strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/ + (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out)); + } + strm.data_type = state.bits + (state.last ? 64 : 0) + + (state.mode === TYPE ? 128 : 0) + + (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0); + if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) { + ret = Z_BUF_ERROR; + } + return ret; +} + +function inflateEnd(strm) { + + if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) { + return Z_STREAM_ERROR; + } + + var state = strm.state; + if (state.window) { + state.window = null; + } + strm.state = null; + return Z_OK; +} + +function inflateGetHeader(strm, head) { + var state; + + /* check state */ + if (!strm || !strm.state) { return Z_STREAM_ERROR; } + state = strm.state; + if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; } + + /* save header structure */ + state.head = head; + head.done = false; + return Z_OK; +} + +function inflateSetDictionary(strm, dictionary) { + var dictLength = dictionary.length; + + var state; + var dictid; + var ret; + + /* check state */ + if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; } + state = strm.state; + + if (state.wrap !== 0 && state.mode !== DICT) { + return Z_STREAM_ERROR; + } + + /* check for correct dictionary identifier */ 
+ if (state.mode === DICT) { + dictid = 1; /* adler32(0, null, 0)*/ + /* dictid = adler32(dictid, dictionary, dictLength); */ + dictid = adler32(dictid, dictionary, dictLength, 0); + if (dictid !== state.check) { + return Z_DATA_ERROR; + } + } + /* copy dictionary to window using updatewindow(), which will amend the + existing dictionary if appropriate */ + ret = updatewindow(strm, dictionary, dictLength, dictLength); + if (ret) { + state.mode = MEM; + return Z_MEM_ERROR; + } + state.havedict = 1; + // Tracev((stderr, "inflate: dictionary set\n")); + return Z_OK; +} + +exports.inflateReset = inflateReset; +exports.inflateReset2 = inflateReset2; +exports.inflateResetKeep = inflateResetKeep; +exports.inflateInit = inflateInit; +exports.inflateInit2 = inflateInit2; +exports.inflate = inflate; +exports.inflateEnd = inflateEnd; +exports.inflateGetHeader = inflateGetHeader; +exports.inflateSetDictionary = inflateSetDictionary; +exports.inflateInfo = 'pako inflate (from Nodeca project)'; + +/* Not implemented +exports.inflateCopy = inflateCopy; +exports.inflateGetDictionary = inflateGetDictionary; +exports.inflateMark = inflateMark; +exports.inflatePrime = inflatePrime; +exports.inflateSync = inflateSync; +exports.inflateSyncPoint = inflateSyncPoint; +exports.inflateUndermine = inflateUndermine; +*/ + + +/***/ }), + +/***/ 21998: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. 
The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +var utils = __webpack_require__(9805); + +var MAXBITS = 15; +var ENOUGH_LENS = 852; +var ENOUGH_DISTS = 592; +//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS); + +var CODES = 0; +var LENS = 1; +var DISTS = 2; + +var lbase = [ /* Length codes 257..285 base */ + 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, + 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0 +]; + +var lext = [ /* Length codes 257..285 extra */ + 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, + 19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78 +]; + +var dbase = [ /* Distance codes 0..29 base */ + 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, + 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, + 8193, 12289, 16385, 24577, 0, 0 +]; + +var dext = [ /* Distance codes 0..29 extra */ + 16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, + 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, + 28, 28, 29, 29, 64, 64 +]; + +module.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts) +{ + var bits = opts.bits; + //here = opts.here; /* table entry for duplication */ + + var len = 0; /* a code's length in bits */ + var sym = 0; /* index of code symbols */ + var min = 0, max = 0; /* minimum and maximum code lengths */ + var root = 0; /* number of index bits for root table */ + var curr = 0; /* number of index bits for current table */ + var drop = 0; /* code bits to drop for sub-table */ + var left = 0; /* number of prefix codes available */ + var used 
= 0; /* code entries in table used */ + var huff = 0; /* Huffman code */ + var incr; /* for incrementing code, index */ + var fill; /* index for replicating entries */ + var low; /* low bits for current root entry */ + var mask; /* mask for low root bits */ + var next; /* next available space in table */ + var base = null; /* base value table to use */ + var base_index = 0; +// var shoextra; /* extra bits table to use */ + var end; /* use base and extra for symbol > end */ + var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* number of codes of each length */ + var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1]; /* offsets in table for each length */ + var extra = null; + var extra_index = 0; + + var here_bits, here_op, here_val; + + /* + Process a set of code lengths to create a canonical Huffman code. The + code lengths are lens[0..codes-1]. Each length corresponds to the + symbols 0..codes-1. The Huffman code is generated by first sorting the + symbols by length from short to long, and retaining the symbol order + for codes with equal lengths. Then the code starts with all zero bits + for the first code of the shortest length, and the codes are integer + increments for the same length, and zeros are appended as the length + increases. For the deflate format, these bits are stored backwards + from their more natural integer increment ordering, and so when the + decoding tables are built in the large loop below, the integer codes + are incremented backwards. + + This routine assumes, but does not check, that all of the entries in + lens[] are in the range 0..MAXBITS. The caller must assure this. + 1..MAXBITS is interpreted as that code length. zero means that that + symbol does not occur in this code. + + The codes are sorted by computing a count of codes for each length, + creating from that a table of starting indices for each length in the + sorted table, and then entering the symbols in order in the sorted + table. 
The sorted table is work[], with that space being provided by + the caller. + + The length counts are used for other purposes as well, i.e. finding + the minimum and maximum length codes, determining if there are any + codes at all, checking for a valid set of lengths, and looking ahead + at length counts to determine sub-table sizes when building the + decoding tables. + */ + + /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */ + for (len = 0; len <= MAXBITS; len++) { + count[len] = 0; + } + for (sym = 0; sym < codes; sym++) { + count[lens[lens_index + sym]]++; + } + + /* bound code lengths, force root to be within code lengths */ + root = bits; + for (max = MAXBITS; max >= 1; max--) { + if (count[max] !== 0) { break; } + } + if (root > max) { + root = max; + } + if (max === 0) { /* no symbols to code at all */ + //table.op[opts.table_index] = 64; //here.op = (var char)64; /* invalid code marker */ + //table.bits[opts.table_index] = 1; //here.bits = (var char)1; + //table.val[opts.table_index++] = 0; //here.val = (var short)0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + + //table.op[opts.table_index] = 64; + //table.bits[opts.table_index] = 1; + //table.val[opts.table_index++] = 0; + table[table_index++] = (1 << 24) | (64 << 16) | 0; + + opts.bits = 1; + return 0; /* no symbols, but wait for decoding to report error */ + } + for (min = 1; min < max; min++) { + if (count[min] !== 0) { break; } + } + if (root < min) { + root = min; + } + + /* check for an over-subscribed or incomplete set of lengths */ + left = 1; + for (len = 1; len <= MAXBITS; len++) { + left <<= 1; + left -= count[len]; + if (left < 0) { + return -1; + } /* over-subscribed */ + } + if (left > 0 && (type === CODES || max !== 1)) { + return -1; /* incomplete set */ + } + + /* generate offsets into symbol table for each length for sorting */ + offs[1] = 0; + for (len = 1; len < MAXBITS; len++) { + offs[len + 1] = offs[len] + count[len]; + } + + /* sort symbols by 
length, by symbol order within each length */ + for (sym = 0; sym < codes; sym++) { + if (lens[lens_index + sym] !== 0) { + work[offs[lens[lens_index + sym]]++] = sym; + } + } + + /* + Create and fill in decoding tables. In this loop, the table being + filled is at next and has curr index bits. The code being used is huff + with length len. That code is converted to an index by dropping drop + bits off of the bottom. For codes where len is less than drop + curr, + those top drop + curr - len bits are incremented through all values to + fill the table with replicated entries. + + root is the number of index bits for the root table. When len exceeds + root, sub-tables are created pointed to by the root entry with an index + of the low root bits of huff. This is saved in low to check for when a + new sub-table should be started. drop is zero when the root table is + being filled, and drop is root when sub-tables are being filled. + + When a new sub-table is needed, it is necessary to look ahead in the + code lengths to determine what size sub-table is needed. The length + counts are used for this, and so count[] is decremented as codes are + entered in the tables. + + used keeps track of how many table entries have been allocated from the + provided *table space. It is checked for LENS and DIST tables against + the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in + the initial root table size constants. See the comments in inftrees.h + for more information. + + sym increments through all symbols, and the loop terminates when + all codes of length max, i.e. all codes, have been processed. This + routine permits incomplete codes, so another loop after this one fills + in the rest of the decoding tables with invalid code markers. 
+ */ + + /* set up for code type */ + // poor man optimization - use if-else instead of switch, + // to avoid deopts in old v8 + if (type === CODES) { + base = extra = work; /* dummy value--not used */ + end = 19; + + } else if (type === LENS) { + base = lbase; + base_index -= 257; + extra = lext; + extra_index -= 257; + end = 256; + + } else { /* DISTS */ + base = dbase; + extra = dext; + end = -1; + } + + /* initialize opts for loop */ + huff = 0; /* starting code */ + sym = 0; /* starting code symbol */ + len = min; /* starting code length */ + next = table_index; /* current table to fill in */ + curr = root; /* current table index bits */ + drop = 0; /* current bits to drop from code for index */ + low = -1; /* trigger new sub-table when len > root */ + used = 1 << root; /* use root table entries */ + mask = used - 1; /* mask for comparing low */ + + /* check available table space */ + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* process all codes and make table entries */ + for (;;) { + /* create table entry */ + here_bits = len - drop; + if (work[sym] < end) { + here_op = 0; + here_val = work[sym]; + } + else if (work[sym] > end) { + here_op = extra[extra_index + work[sym]]; + here_val = base[base_index + work[sym]]; + } + else { + here_op = 32 + 64; /* end of block */ + here_val = 0; + } + + /* replicate for those indices with low len bits equal to huff */ + incr = 1 << (len - drop); + fill = 1 << curr; + min = fill; /* save offset to next table */ + do { + fill -= incr; + table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0; + } while (fill !== 0); + + /* backwards increment the len-bit code huff */ + incr = 1 << (len - 1); + while (huff & incr) { + incr >>= 1; + } + if (incr !== 0) { + huff &= incr - 1; + huff += incr; + } else { + huff = 0; + } + + /* go to next symbol, update count, len */ + sym++; + if (--count[len] === 0) { + if (len === max) { break; 
} + len = lens[lens_index + work[sym]]; + } + + /* create new sub-table if needed */ + if (len > root && (huff & mask) !== low) { + /* if first time, transition to sub-tables */ + if (drop === 0) { + drop = root; + } + + /* increment past last table */ + next += min; /* here min is 1 << curr */ + + /* determine length of next table */ + curr = len - drop; + left = 1 << curr; + while (curr + drop < max) { + left -= count[curr + drop]; + if (left <= 0) { break; } + curr++; + left <<= 1; + } + + /* check for enough space */ + used += 1 << curr; + if ((type === LENS && used > ENOUGH_LENS) || + (type === DISTS && used > ENOUGH_DISTS)) { + return 1; + } + + /* point entry in root table to sub-table */ + low = huff & mask; + /*table.op[low] = curr; + table.bits[low] = root; + table.val[low] = next - opts.table_index;*/ + table[low] = (root << 24) | (curr << 16) | (next - table_index) |0; + } + } + + /* fill in remaining table entry if code is incomplete (guaranteed to have + at most one remaining entry, since if the code is incomplete, the + maximum code length that was allowed to get this far is one bit) */ + if (huff !== 0) { + //table.op[next + huff] = 64; /* invalid code marker */ + //table.bits[next + huff] = len - drop; + //table.val[next + huff] = 0; + table[next + huff] = ((len - drop) << 24) | (64 << 16) |0; + } + + /* set return parameters */ + //opts.table_index += used; + opts.bits = root; + return 0; +}; + + +/***/ }), + +/***/ 54674: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. 
+// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +module.exports = { + 2: 'need dictionary', /* Z_NEED_DICT 2 */ + 1: 'stream end', /* Z_STREAM_END 1 */ + 0: '', /* Z_OK 0 */ + '-1': 'file error', /* Z_ERRNO (-1) */ + '-2': 'stream error', /* Z_STREAM_ERROR (-2) */ + '-3': 'data error', /* Z_DATA_ERROR (-3) */ + '-4': 'insufficient memory', /* Z_MEM_ERROR (-4) */ + '-5': 'buffer error', /* Z_BUF_ERROR (-5) */ + '-6': 'incompatible version' /* Z_VERSION_ERROR (-6) */ +}; + + +/***/ }), + +/***/ 23665: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. 
Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. + +/* eslint-disable space-unary-ops */ + +var utils = __webpack_require__(9805); + +/* Public constants ==========================================================*/ +/* ===========================================================================*/ + + +//var Z_FILTERED = 1; +//var Z_HUFFMAN_ONLY = 2; +//var Z_RLE = 3; +var Z_FIXED = 4; +//var Z_DEFAULT_STRATEGY = 0; + +/* Possible values of the data_type field (though see inflate()) */ +var Z_BINARY = 0; +var Z_TEXT = 1; +//var Z_ASCII = 1; // = Z_TEXT +var Z_UNKNOWN = 2; + +/*============================================================================*/ + + +function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } } + +// From zutil.h + +var STORED_BLOCK = 0; +var STATIC_TREES = 1; +var DYN_TREES = 2; +/* The three kinds of block type */ + +var MIN_MATCH = 3; +var MAX_MATCH = 258; +/* The minimum and maximum match lengths */ + +// From deflate.h +/* =========================================================================== + * Internal compression state. 
+ */ + +var LENGTH_CODES = 29; +/* number of length codes, not counting the special END_BLOCK code */ + +var LITERALS = 256; +/* number of literal bytes 0..255 */ + +var L_CODES = LITERALS + 1 + LENGTH_CODES; +/* number of Literal or Length codes, including the END_BLOCK code */ + +var D_CODES = 30; +/* number of distance codes */ + +var BL_CODES = 19; +/* number of codes used to transfer the bit lengths */ + +var HEAP_SIZE = 2 * L_CODES + 1; +/* maximum heap size */ + +var MAX_BITS = 15; +/* All codes must not exceed MAX_BITS bits */ + +var Buf_size = 16; +/* size of bit buffer in bi_buf */ + + +/* =========================================================================== + * Constants + */ + +var MAX_BL_BITS = 7; +/* Bit length codes must not exceed MAX_BL_BITS bits */ + +var END_BLOCK = 256; +/* end of block literal code */ + +var REP_3_6 = 16; +/* repeat previous bit length 3-6 times (2 bits of repeat count) */ + +var REPZ_3_10 = 17; +/* repeat a zero length 3-10 times (3 bits of repeat count) */ + +var REPZ_11_138 = 18; +/* repeat a zero length 11-138 times (7 bits of repeat count) */ + +/* eslint-disable comma-spacing,array-bracket-spacing */ +var extra_lbits = /* extra bits for each length code */ + [0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0]; + +var extra_dbits = /* extra bits for each distance code */ + [0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13]; + +var extra_blbits = /* extra bits for each bit length code */ + [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7]; + +var bl_order = + [16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15]; +/* eslint-enable comma-spacing,array-bracket-spacing */ + +/* The lengths of the bit length codes are sent in order of decreasing + * probability, to avoid transmitting the lengths for unused bit length codes. + */ + +/* =========================================================================== + * Local data. These are initialized only once. 
+ */ + +// We pre-fill arrays with 0 to avoid uninitialized gaps + +var DIST_CODE_LEN = 512; /* see definition of array dist_code below */ + +// !!!! Use flat array instead of structure, Freq = i*2, Len = i*2+1 +var static_ltree = new Array((L_CODES + 2) * 2); +zero(static_ltree); +/* The static literal tree. Since the bit lengths are imposed, there is no + * need for the L_CODES extra codes used during heap construction. However + * The codes 286 and 287 are needed to build a canonical tree (see _tr_init + * below). + */ + +var static_dtree = new Array(D_CODES * 2); +zero(static_dtree); +/* The static distance tree. (Actually a trivial tree since all codes use + * 5 bits.) + */ + +var _dist_code = new Array(DIST_CODE_LEN); +zero(_dist_code); +/* Distance codes. The first 256 values correspond to the distances + * 3 .. 258, the last 256 values correspond to the top 8 bits of + * the 15 bit distances. + */ + +var _length_code = new Array(MAX_MATCH - MIN_MATCH + 1); +zero(_length_code); +/* length code for each normalized match length (0 == MIN_MATCH) */ + +var base_length = new Array(LENGTH_CODES); +zero(base_length); +/* First normalized length for each code (0 = MIN_MATCH) */ + +var base_dist = new Array(D_CODES); +zero(base_dist); +/* First normalized distance for each code (0 = distance of 1) */ + + +function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) { + + this.static_tree = static_tree; /* static tree or NULL */ + this.extra_bits = extra_bits; /* extra bits for each code or NULL */ + this.extra_base = extra_base; /* base index for extra_bits */ + this.elems = elems; /* max number of elements in the tree */ + this.max_length = max_length; /* max bit length for the codes */ + + // show if `static_tree` has data or dummy - needed for monomorphic objects + this.has_stree = static_tree && static_tree.length; +} + + +var static_l_desc; +var static_d_desc; +var static_bl_desc; + + +function TreeDesc(dyn_tree, stat_desc) { + this.dyn_tree = 
dyn_tree; /* the dynamic tree */ + this.max_code = 0; /* largest code with non zero frequency */ + this.stat_desc = stat_desc; /* the corresponding static tree */ +} + + + +function d_code(dist) { + return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)]; +} + + +/* =========================================================================== + * Output a short LSB first on the stream. + * IN assertion: there is enough room in pendingBuf. + */ +function put_short(s, w) { +// put_byte(s, (uch)((w) & 0xff)); +// put_byte(s, (uch)((ush)(w) >> 8)); + s.pending_buf[s.pending++] = (w) & 0xff; + s.pending_buf[s.pending++] = (w >>> 8) & 0xff; +} + + +/* =========================================================================== + * Send a value on a given number of bits. + * IN assertion: length <= 16 and value fits in length bits. + */ +function send_bits(s, value, length) { + if (s.bi_valid > (Buf_size - length)) { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + put_short(s, s.bi_buf); + s.bi_buf = value >> (Buf_size - s.bi_valid); + s.bi_valid += length - Buf_size; + } else { + s.bi_buf |= (value << s.bi_valid) & 0xffff; + s.bi_valid += length; + } +} + + +function send_code(s, c, tree) { + send_bits(s, tree[c * 2]/*.Code*/, tree[c * 2 + 1]/*.Len*/); +} + + +/* =========================================================================== + * Reverse the first len bits of a code, using straightforward code (a faster + * method would use a table) + * IN assertion: 1 <= len <= 15 + */ +function bi_reverse(code, len) { + var res = 0; + do { + res |= code & 1; + code >>>= 1; + res <<= 1; + } while (--len > 0); + return res >>> 1; +} + + +/* =========================================================================== + * Flush the bit buffer, keeping at most 7 bits in it. 
+ */ +function bi_flush(s) { + if (s.bi_valid === 16) { + put_short(s, s.bi_buf); + s.bi_buf = 0; + s.bi_valid = 0; + + } else if (s.bi_valid >= 8) { + s.pending_buf[s.pending++] = s.bi_buf & 0xff; + s.bi_buf >>= 8; + s.bi_valid -= 8; + } +} + + +/* =========================================================================== + * Compute the optimal bit lengths for a tree and update the total bit length + * for the current block. + * IN assertion: the fields freq and dad are set, heap[heap_max] and + * above are the tree nodes sorted by increasing frequency. + * OUT assertions: the field len is set to the optimal bit length, the + * array bl_count contains the frequencies for each bit length. + * The length opt_len is updated; static_len is also updated if stree is + * not null. + */ +function gen_bitlen(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var max_code = desc.max_code; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var extra = desc.stat_desc.extra_bits; + var base = desc.stat_desc.extra_base; + var max_length = desc.stat_desc.max_length; + var h; /* heap index */ + var n, m; /* iterate over the tree elements */ + var bits; /* bit length */ + var xbits; /* extra bits */ + var f; /* frequency */ + var overflow = 0; /* number of elements with bit length too large */ + + for (bits = 0; bits <= MAX_BITS; bits++) { + s.bl_count[bits] = 0; + } + + /* In a first pass, compute the optimal bit lengths (which may + * overflow in the case of the bit length tree). 
+ */ + tree[s.heap[s.heap_max] * 2 + 1]/*.Len*/ = 0; /* root of the heap */ + + for (h = s.heap_max + 1; h < HEAP_SIZE; h++) { + n = s.heap[h]; + bits = tree[tree[n * 2 + 1]/*.Dad*/ * 2 + 1]/*.Len*/ + 1; + if (bits > max_length) { + bits = max_length; + overflow++; + } + tree[n * 2 + 1]/*.Len*/ = bits; + /* We overwrite tree[n].Dad which is no longer needed */ + + if (n > max_code) { continue; } /* not a leaf node */ + + s.bl_count[bits]++; + xbits = 0; + if (n >= base) { + xbits = extra[n - base]; + } + f = tree[n * 2]/*.Freq*/; + s.opt_len += f * (bits + xbits); + if (has_stree) { + s.static_len += f * (stree[n * 2 + 1]/*.Len*/ + xbits); + } + } + if (overflow === 0) { return; } + + // Trace((stderr,"\nbit length overflow\n")); + /* This happens for example on obj2 and pic of the Calgary corpus */ + + /* Find the first bit length which could increase: */ + do { + bits = max_length - 1; + while (s.bl_count[bits] === 0) { bits--; } + s.bl_count[bits]--; /* move one leaf down the tree */ + s.bl_count[bits + 1] += 2; /* move one overflow item as its brother */ + s.bl_count[max_length]--; + /* The brother of the overflow item also moves one step up, + * but this does not affect bl_count[max_length] + */ + overflow -= 2; + } while (overflow > 0); + + /* Now recompute all bit lengths, scanning in increasing frequency. + * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all + * lengths instead of fixing only the wrong ones. This idea is taken + * from 'ar' written by Haruhiko Okumura.) 
+ */ + for (bits = max_length; bits !== 0; bits--) { + n = s.bl_count[bits]; + while (n !== 0) { + m = s.heap[--h]; + if (m > max_code) { continue; } + if (tree[m * 2 + 1]/*.Len*/ !== bits) { + // Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); + s.opt_len += (bits - tree[m * 2 + 1]/*.Len*/) * tree[m * 2]/*.Freq*/; + tree[m * 2 + 1]/*.Len*/ = bits; + } + n--; + } + } +} + + +/* =========================================================================== + * Generate the codes for a given tree and bit counts (which need not be + * optimal). + * IN assertion: the array bl_count contains the bit length statistics for + * the given tree and the field len is set for all tree elements. + * OUT assertion: the field code is set for all tree elements of non + * zero code length. + */ +function gen_codes(tree, max_code, bl_count) +// ct_data *tree; /* the tree to decorate */ +// int max_code; /* largest code with non zero frequency */ +// ushf *bl_count; /* number of codes at each bit length */ +{ + var next_code = new Array(MAX_BITS + 1); /* next code value for each bit length */ + var code = 0; /* running code value */ + var bits; /* bit index */ + var n; /* code index */ + + /* The distribution counts are first used to generate the code values + * without bit reversal. + */ + for (bits = 1; bits <= MAX_BITS; bits++) { + next_code[bits] = code = (code + bl_count[bits - 1]) << 1; + } + /* Check that the bit counts in bl_count are consistent. The last code + * must be all ones. 
+ */ + //Assert (code + bl_count[MAX_BITS]-1 == (1< length code (0..28) */ + length = 0; + for (code = 0; code < LENGTH_CODES - 1; code++) { + base_length[code] = length; + for (n = 0; n < (1 << extra_lbits[code]); n++) { + _length_code[length++] = code; + } + } + //Assert (length == 256, "tr_static_init: length != 256"); + /* Note that the length 255 (match length 258) can be represented + * in two different ways: code 284 + 5 bits or code 285, so we + * overwrite length_code[255] to use the best encoding: + */ + _length_code[length - 1] = code; + + /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ + dist = 0; + for (code = 0; code < 16; code++) { + base_dist[code] = dist; + for (n = 0; n < (1 << extra_dbits[code]); n++) { + _dist_code[dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: dist != 256"); + dist >>= 7; /* from now on, all distances are divided by 128 */ + for (; code < D_CODES; code++) { + base_dist[code] = dist << 7; + for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) { + _dist_code[256 + dist++] = code; + } + } + //Assert (dist == 256, "tr_static_init: 256+dist != 512"); + + /* Construct the codes of the static literal tree */ + for (bits = 0; bits <= MAX_BITS; bits++) { + bl_count[bits] = 0; + } + + n = 0; + while (n <= 143) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + while (n <= 255) { + static_ltree[n * 2 + 1]/*.Len*/ = 9; + n++; + bl_count[9]++; + } + while (n <= 279) { + static_ltree[n * 2 + 1]/*.Len*/ = 7; + n++; + bl_count[7]++; + } + while (n <= 287) { + static_ltree[n * 2 + 1]/*.Len*/ = 8; + n++; + bl_count[8]++; + } + /* Codes 286 and 287 do not exist, but we must include them in the + * tree construction to get a canonical Huffman tree (longest code + * all ones) + */ + gen_codes(static_ltree, L_CODES + 1, bl_count); + + /* The static distance tree is trivial: */ + for (n = 0; n < D_CODES; n++) { + static_dtree[n * 2 + 1]/*.Len*/ = 5; + static_dtree[n * 2]/*.Code*/ = bi_reverse(n, 
5); + } + + // Now data ready and we can init static trees + static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS); + static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS); + static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS); + + //static_init_done = true; +} + + +/* =========================================================================== + * Initialize a new block. + */ +function init_block(s) { + var n; /* iterates over tree elements */ + + /* Initialize the trees. */ + for (n = 0; n < L_CODES; n++) { s.dyn_ltree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < D_CODES; n++) { s.dyn_dtree[n * 2]/*.Freq*/ = 0; } + for (n = 0; n < BL_CODES; n++) { s.bl_tree[n * 2]/*.Freq*/ = 0; } + + s.dyn_ltree[END_BLOCK * 2]/*.Freq*/ = 1; + s.opt_len = s.static_len = 0; + s.last_lit = s.matches = 0; +} + + +/* =========================================================================== + * Flush the bit buffer and align the output on a byte boundary + */ +function bi_windup(s) +{ + if (s.bi_valid > 8) { + put_short(s, s.bi_buf); + } else if (s.bi_valid > 0) { + //put_byte(s, (Byte)s->bi_buf); + s.pending_buf[s.pending++] = s.bi_buf; + } + s.bi_buf = 0; + s.bi_valid = 0; +} + +/* =========================================================================== + * Copy a stored block, storing first the length and its + * one's complement if requested. 
+ */ +function copy_block(s, buf, len, header) +//DeflateState *s; +//charf *buf; /* the input data */ +//unsigned len; /* its length */ +//int header; /* true if block header must be written */ +{ + bi_windup(s); /* align on byte boundary */ + + if (header) { + put_short(s, len); + put_short(s, ~len); + } +// while (len--) { +// put_byte(s, *buf++); +// } + utils.arraySet(s.pending_buf, s.window, buf, len, s.pending); + s.pending += len; +} + +/* =========================================================================== + * Compares to subtrees, using the tree depth as tie breaker when + * the subtrees have equal frequency. This minimizes the worst case length. + */ +function smaller(tree, n, m, depth) { + var _n2 = n * 2; + var _m2 = m * 2; + return (tree[_n2]/*.Freq*/ < tree[_m2]/*.Freq*/ || + (tree[_n2]/*.Freq*/ === tree[_m2]/*.Freq*/ && depth[n] <= depth[m])); +} + +/* =========================================================================== + * Restore the heap property by moving down the tree starting at node k, + * exchanging a node with the smallest of its two sons if necessary, stopping + * when the heap property is re-established (each father smaller than its + * two sons). 
+ */ +function pqdownheap(s, tree, k) +// deflate_state *s; +// ct_data *tree; /* the tree to restore */ +// int k; /* node to move down */ +{ + var v = s.heap[k]; + var j = k << 1; /* left son of k */ + while (j <= s.heap_len) { + /* Set j to the smallest of the two sons: */ + if (j < s.heap_len && + smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) { + j++; + } + /* Exit if v is smaller than both sons */ + if (smaller(tree, v, s.heap[j], s.depth)) { break; } + + /* Exchange v with the smallest son */ + s.heap[k] = s.heap[j]; + k = j; + + /* And continue down the tree, setting j to the left son of k */ + j <<= 1; + } + s.heap[k] = v; +} + + +// inlined manually +// var SMALLEST = 1; + +/* =========================================================================== + * Send the block data compressed using the given Huffman trees + */ +function compress_block(s, ltree, dtree) +// deflate_state *s; +// const ct_data *ltree; /* literal tree */ +// const ct_data *dtree; /* distance tree */ +{ + var dist; /* distance of matched string */ + var lc; /* match length or unmatched char (if dist == 0) */ + var lx = 0; /* running index in l_buf */ + var code; /* the code to send */ + var extra; /* number of extra bits to send */ + + if (s.last_lit !== 0) { + do { + dist = (s.pending_buf[s.d_buf + lx * 2] << 8) | (s.pending_buf[s.d_buf + lx * 2 + 1]); + lc = s.pending_buf[s.l_buf + lx]; + lx++; + + if (dist === 0) { + send_code(s, lc, ltree); /* send a literal byte */ + //Tracecv(isgraph(lc), (stderr," '%c' ", lc)); + } else { + /* Here, lc is the match length - MIN_MATCH */ + code = _length_code[lc]; + send_code(s, code + LITERALS + 1, ltree); /* send the length code */ + extra = extra_lbits[code]; + if (extra !== 0) { + lc -= base_length[code]; + send_bits(s, lc, extra); /* send the extra length bits */ + } + dist--; /* dist is now the match distance - 1 */ + code = d_code(dist); + //Assert (code < D_CODES, "bad d_code"); + + send_code(s, code, dtree); /* send the distance 
code */ + extra = extra_dbits[code]; + if (extra !== 0) { + dist -= base_dist[code]; + send_bits(s, dist, extra); /* send the extra distance bits */ + } + } /* literal or match pair ? */ + + /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ + //Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx, + // "pendingBuf overflow"); + + } while (lx < s.last_lit); + } + + send_code(s, END_BLOCK, ltree); +} + + +/* =========================================================================== + * Construct one Huffman tree and assigns the code bit strings and lengths. + * Update the total bit length for the current block. + * IN assertion: the field freq is set for all tree elements. + * OUT assertions: the fields len and code are set to the optimal bit length + * and corresponding code. The length opt_len is updated; static_len is + * also updated if stree is not null. The field max_code is set. + */ +function build_tree(s, desc) +// deflate_state *s; +// tree_desc *desc; /* the tree descriptor */ +{ + var tree = desc.dyn_tree; + var stree = desc.stat_desc.static_tree; + var has_stree = desc.stat_desc.has_stree; + var elems = desc.stat_desc.elems; + var n, m; /* iterate over heap elements */ + var max_code = -1; /* largest code with non zero frequency */ + var node; /* new node being created */ + + /* Construct the initial heap, with least frequent element in + * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. + * heap[0] is not used. + */ + s.heap_len = 0; + s.heap_max = HEAP_SIZE; + + for (n = 0; n < elems; n++) { + if (tree[n * 2]/*.Freq*/ !== 0) { + s.heap[++s.heap_len] = max_code = n; + s.depth[n] = 0; + + } else { + tree[n * 2 + 1]/*.Len*/ = 0; + } + } + + /* The pkzip format requires that at least one distance code exists, + * and that at least one bit should be sent even if there is only one + * possible code. So to avoid special checks later on we force at least + * two codes of non zero frequency. 
+ */ + while (s.heap_len < 2) { + node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0); + tree[node * 2]/*.Freq*/ = 1; + s.depth[node] = 0; + s.opt_len--; + + if (has_stree) { + s.static_len -= stree[node * 2 + 1]/*.Len*/; + } + /* node is 0 or 1 so it does not have extra bits */ + } + desc.max_code = max_code; + + /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, + * establish sub-heaps of increasing lengths: + */ + for (n = (s.heap_len >> 1/*int /2*/); n >= 1; n--) { pqdownheap(s, tree, n); } + + /* Construct the Huffman tree by repeatedly combining the least two + * frequent nodes. + */ + node = elems; /* next internal node of the tree */ + do { + //pqremove(s, tree, n); /* n = node of least frequency */ + /*** pqremove ***/ + n = s.heap[1/*SMALLEST*/]; + s.heap[1/*SMALLEST*/] = s.heap[s.heap_len--]; + pqdownheap(s, tree, 1/*SMALLEST*/); + /***/ + + m = s.heap[1/*SMALLEST*/]; /* m = node of next least frequency */ + + s.heap[--s.heap_max] = n; /* keep the nodes sorted by frequency */ + s.heap[--s.heap_max] = m; + + /* Create a new node father of n and m */ + tree[node * 2]/*.Freq*/ = tree[n * 2]/*.Freq*/ + tree[m * 2]/*.Freq*/; + s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1; + tree[n * 2 + 1]/*.Dad*/ = tree[m * 2 + 1]/*.Dad*/ = node; + + /* and insert the new node in the heap */ + s.heap[1/*SMALLEST*/] = node++; + pqdownheap(s, tree, 1/*SMALLEST*/); + + } while (s.heap_len >= 2); + + s.heap[--s.heap_max] = s.heap[1/*SMALLEST*/]; + + /* At this point, the fields freq and dad are set. We can now + * generate the bit lengths. + */ + gen_bitlen(s, desc); + + /* The field len is now set, we can generate the bit codes */ + gen_codes(tree, max_code, s.bl_count); +} + + +/* =========================================================================== + * Scan a literal or distance tree to determine the frequencies of the codes + * in the bit length tree. 
+ */ +function scan_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + tree[(max_code + 1) * 2 + 1]/*.Len*/ = 0xffff; /* guard */ + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + s.bl_tree[curlen * 2]/*.Freq*/ += count; + + } else if (curlen !== 0) { + + if (curlen !== prevlen) { s.bl_tree[curlen * 2]/*.Freq*/++; } + s.bl_tree[REP_3_6 * 2]/*.Freq*/++; + + } else if (count <= 10) { + s.bl_tree[REPZ_3_10 * 2]/*.Freq*/++; + + } else { + s.bl_tree[REPZ_11_138 * 2]/*.Freq*/++; + } + + count = 0; + prevlen = curlen; + + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Send a literal or distance tree in compressed form, using the codes in + * bl_tree. 
+ */ +function send_tree(s, tree, max_code) +// deflate_state *s; +// ct_data *tree; /* the tree to be scanned */ +// int max_code; /* and its largest code of non zero frequency */ +{ + var n; /* iterates over all tree elements */ + var prevlen = -1; /* last emitted length */ + var curlen; /* length of current code */ + + var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */ + + var count = 0; /* repeat count of the current code */ + var max_count = 7; /* max repeat count */ + var min_count = 4; /* min repeat count */ + + /* tree[max_code+1].Len = -1; */ /* guard already set */ + if (nextlen === 0) { + max_count = 138; + min_count = 3; + } + + for (n = 0; n <= max_code; n++) { + curlen = nextlen; + nextlen = tree[(n + 1) * 2 + 1]/*.Len*/; + + if (++count < max_count && curlen === nextlen) { + continue; + + } else if (count < min_count) { + do { send_code(s, curlen, s.bl_tree); } while (--count !== 0); + + } else if (curlen !== 0) { + if (curlen !== prevlen) { + send_code(s, curlen, s.bl_tree); + count--; + } + //Assert(count >= 3 && count <= 6, " 3_6?"); + send_code(s, REP_3_6, s.bl_tree); + send_bits(s, count - 3, 2); + + } else if (count <= 10) { + send_code(s, REPZ_3_10, s.bl_tree); + send_bits(s, count - 3, 3); + + } else { + send_code(s, REPZ_11_138, s.bl_tree); + send_bits(s, count - 11, 7); + } + + count = 0; + prevlen = curlen; + if (nextlen === 0) { + max_count = 138; + min_count = 3; + + } else if (curlen === nextlen) { + max_count = 6; + min_count = 3; + + } else { + max_count = 7; + min_count = 4; + } + } +} + + +/* =========================================================================== + * Construct the Huffman tree for the bit lengths and return the index in + * bl_order of the last bit length code to send. 
+ */ +function build_bl_tree(s) { + var max_blindex; /* index of last bit length code of non zero freq */ + + /* Determine the bit length frequencies for literal and distance trees */ + scan_tree(s, s.dyn_ltree, s.l_desc.max_code); + scan_tree(s, s.dyn_dtree, s.d_desc.max_code); + + /* Build the bit length tree: */ + build_tree(s, s.bl_desc); + /* opt_len now includes the length of the tree representations, except + * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. + */ + + /* Determine the number of bit length codes to send. The pkzip format + * requires that at least 4 bit length codes be sent. (appnote.txt says + * 3 but the actual value used is 4.) + */ + for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { + if (s.bl_tree[bl_order[max_blindex] * 2 + 1]/*.Len*/ !== 0) { + break; + } + } + /* Update opt_len to include the bit length tree and counts */ + s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; + //Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", + // s->opt_len, s->static_len)); + + return max_blindex; +} + + +/* =========================================================================== + * Send the header for a block using dynamic Huffman trees: the counts, the + * lengths of the bit length codes, the literal tree and the distance tree. + * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. 
+ */ +function send_all_trees(s, lcodes, dcodes, blcodes) +// deflate_state *s; +// int lcodes, dcodes, blcodes; /* number of codes for each tree */ +{ + var rank; /* index in bl_order */ + + //Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); + //Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, + // "too many codes"); + //Tracev((stderr, "\nbl counts: ")); + send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */ + send_bits(s, dcodes - 1, 5); + send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */ + for (rank = 0; rank < blcodes; rank++) { + //Tracev((stderr, "\nbl code %2d ", bl_order[rank])); + send_bits(s, s.bl_tree[bl_order[rank] * 2 + 1]/*.Len*/, 3); + } + //Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_ltree, lcodes - 1); /* literal tree */ + //Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); + + send_tree(s, s.dyn_dtree, dcodes - 1); /* distance tree */ + //Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); +} + + +/* =========================================================================== + * Check if the data type is TEXT or BINARY, using the following algorithm: + * - TEXT if the two conditions below are satisfied: + * a) There are no non-portable control characters belonging to the + * "black list" (0..6, 14..25, 28..31). + * b) There is at least one printable character belonging to the + * "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255). + * - BINARY otherwise. + * - The following partially-portable control characters form a + * "gray list" that is ignored in this detection algorithm: + * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}). + * IN assertion: the fields Freq of dyn_ltree are set. 
+ */ +function detect_data_type(s) { + /* black_mask is the bit mask of black-listed bytes + * set bits 0..6, 14..25, and 28..31 + * 0xf3ffc07f = binary 11110011111111111100000001111111 + */ + var black_mask = 0xf3ffc07f; + var n; + + /* Check for non-textual ("black-listed") bytes. */ + for (n = 0; n <= 31; n++, black_mask >>>= 1) { + if ((black_mask & 1) && (s.dyn_ltree[n * 2]/*.Freq*/ !== 0)) { + return Z_BINARY; + } + } + + /* Check for textual ("white-listed") bytes. */ + if (s.dyn_ltree[9 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[10 * 2]/*.Freq*/ !== 0 || + s.dyn_ltree[13 * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + for (n = 32; n < LITERALS; n++) { + if (s.dyn_ltree[n * 2]/*.Freq*/ !== 0) { + return Z_TEXT; + } + } + + /* There are no "black-listed" or "white-listed" bytes: + * this stream either is empty or has tolerated ("gray-listed") bytes only. + */ + return Z_BINARY; +} + + +var static_init_done = false; + +/* =========================================================================== + * Initialize the tree data structures for a new zlib stream. + */ +function _tr_init(s) +{ + + if (!static_init_done) { + tr_static_init(); + static_init_done = true; + } + + s.l_desc = new TreeDesc(s.dyn_ltree, static_l_desc); + s.d_desc = new TreeDesc(s.dyn_dtree, static_d_desc); + s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc); + + s.bi_buf = 0; + s.bi_valid = 0; + + /* Initialize the first block of the first file: */ + init_block(s); +} + + +/* =========================================================================== + * Send a stored block + */ +function _tr_stored_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + send_bits(s, (STORED_BLOCK << 1) + (last ? 
1 : 0), 3); /* send block type */ + copy_block(s, buf, stored_len, true); /* with header */ +} + + +/* =========================================================================== + * Send one empty static block to give enough lookahead for inflate. + * This takes 10 bits, of which 7 may remain in the bit buffer. + */ +function _tr_align(s) { + send_bits(s, STATIC_TREES << 1, 3); + send_code(s, END_BLOCK, static_ltree); + bi_flush(s); +} + + +/* =========================================================================== + * Determine the best encoding for the current block: dynamic trees, static + * trees or store, and output the encoded block to the zip file. + */ +function _tr_flush_block(s, buf, stored_len, last) +//DeflateState *s; +//charf *buf; /* input block, or NULL if too old */ +//ulg stored_len; /* length of input block */ +//int last; /* one if this is the last block for a file */ +{ + var opt_lenb, static_lenb; /* opt_len and static_len in bytes */ + var max_blindex = 0; /* index of last bit length code of non zero freq */ + + /* Build the Huffman trees unless a stored block is forced */ + if (s.level > 0) { + + /* Check if the file is binary or text */ + if (s.strm.data_type === Z_UNKNOWN) { + s.strm.data_type = detect_data_type(s); + } + + /* Construct the literal and distance trees */ + build_tree(s, s.l_desc); + // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + + build_tree(s, s.d_desc); + // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, + // s->static_len)); + /* At this point, opt_len and static_len are the total bit lengths of + * the compressed block data, excluding the tree representations. + */ + + /* Build the bit length tree for the above two trees, and get the index + * in bl_order of the last bit length code to send. + */ + max_blindex = build_bl_tree(s); + + /* Determine the best encoding. Compute the block lengths in bytes. 
*/ + opt_lenb = (s.opt_len + 3 + 7) >>> 3; + static_lenb = (s.static_len + 3 + 7) >>> 3; + + // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", + // opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, + // s->last_lit)); + + if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; } + + } else { + // Assert(buf != (char*)0, "lost buf"); + opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ + } + + if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) { + /* 4: two words for the lengths */ + + /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. + * Otherwise we can't have processed more than WSIZE input bytes since + * the last block flush, because compression would have been + * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to + * transform a block into a stored block. + */ + _tr_stored_block(s, buf, stored_len, last); + + } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) { + + send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3); + compress_block(s, static_ltree, static_dtree); + + } else { + send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3); + send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1); + compress_block(s, s.dyn_ltree, s.dyn_dtree); + } + // Assert (s->compressed_len == s->bits_sent, "bad compressed size"); + /* The above check is made mod 2^32, for files larger than 512 MB + * and uLong implemented on 32 bits. + */ + init_block(s); + + if (last) { + bi_windup(s); + } + // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, + // s->compressed_len-7*last)); +} + +/* =========================================================================== + * Save the match info and tally the frequency counts. Return true if + * the current block must be flushed. 
+ */ +function _tr_tally(s, dist, lc) +// deflate_state *s; +// unsigned dist; /* distance of matched string */ +// unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ +{ + //var out_length, in_length, dcode; + + s.pending_buf[s.d_buf + s.last_lit * 2] = (dist >>> 8) & 0xff; + s.pending_buf[s.d_buf + s.last_lit * 2 + 1] = dist & 0xff; + + s.pending_buf[s.l_buf + s.last_lit] = lc & 0xff; + s.last_lit++; + + if (dist === 0) { + /* lc is the unmatched char */ + s.dyn_ltree[lc * 2]/*.Freq*/++; + } else { + s.matches++; + /* Here, lc is the match length - MIN_MATCH */ + dist--; /* dist = match distance - 1 */ + //Assert((ush)dist < (ush)MAX_DIST(s) && + // (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && + // (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); + + s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2]/*.Freq*/++; + s.dyn_dtree[d_code(dist) * 2]/*.Freq*/++; + } + +// (!) This block is disabled in zlib defaults, +// don't enable it for binary compatibility + +//#ifdef TRUNCATE_BLOCK +// /* Try to guess if it is profitable to stop the current block here */ +// if ((s.last_lit & 0x1fff) === 0 && s.level > 2) { +// /* Compute an upper bound for the compressed length */ +// out_length = s.last_lit*8; +// in_length = s.strstart - s.block_start; +// +// for (dcode = 0; dcode < D_CODES; dcode++) { +// out_length += s.dyn_dtree[dcode*2]/*.Freq*/ * (5 + extra_dbits[dcode]); +// } +// out_length >>>= 3; +// //Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", +// // s->last_lit, in_length, out_length, +// // 100L - out_length*100L/in_length)); +// if (s.matches < (s.last_lit>>1)/*int /2*/ && out_length < (in_length>>1)/*int /2*/) { +// return true; +// } +// } +//#endif + + return (s.last_lit === s.lit_bufsize - 1); + /* We avoid equality with lit_bufsize because of wraparound at 64K + * on 16 bit machines and because stored blocks are restricted to + * 64K-1 bytes. 
+ */ +} + +exports._tr_init = _tr_init; +exports._tr_stored_block = _tr_stored_block; +exports._tr_flush_block = _tr_flush_block; +exports._tr_tally = _tr_tally; +exports._tr_align = _tr_align; + + +/***/ }), + +/***/ 44442: +/***/ ((module) => { + +"use strict"; + + +// (C) 1995-2013 Jean-loup Gailly and Mark Adler +// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin +// +// This software is provided 'as-is', without any express or implied +// warranty. In no event will the authors be held liable for any damages +// arising from the use of this software. +// +// Permission is granted to anyone to use this software for any purpose, +// including commercial applications, and to alter it and redistribute it +// freely, subject to the following restrictions: +// +// 1. The origin of this software must not be misrepresented; you must not +// claim that you wrote the original software. If you use this software +// in a product, an acknowledgment in the product documentation would be +// appreciated but is not required. +// 2. Altered source versions must be plainly marked as such, and must not be +// misrepresented as being the original software. +// 3. This notice may not be removed or altered from any source distribution. 
+ +function ZStream() { + /* next input byte */ + this.input = null; // JS specific, because we have no pointers + this.next_in = 0; + /* number of bytes available at input */ + this.avail_in = 0; + /* total number of input bytes read so far */ + this.total_in = 0; + /* next output byte should be put there */ + this.output = null; // JS specific, because we have no pointers + this.next_out = 0; + /* remaining free space at output */ + this.avail_out = 0; + /* total number of bytes output so far */ + this.total_out = 0; + /* last error message, NULL if no error */ + this.msg = ''/*Z_NULL*/; + /* not visible by applications */ + this.state = null; + /* best guess about the data type: binary or text */ + this.data_type = 2/*Z_UNKNOWN*/; + /* adler32 value of the uncompressed data */ + this.adler = 0; +} + +module.exports = ZStream; + + /***/ }), /***/ 21137: @@ -77913,6 +97659,216 @@ module.exports = function (thing, encoding, name) { } +/***/ }), + +/***/ 71843: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + + +const { ErrorWithCause } = __webpack_require__(75832); // linemod-replace-with: export { ErrorWithCause } from './lib/error-with-cause.mjs'; + +const { // linemod-replace-with: export { + findCauseByReference, + getErrorCause, + messageWithCauses, + stackWithCauses, +} = __webpack_require__(94306); // linemod-replace-with: } from './lib/helpers.mjs'; + +module.exports = { // linemod-remove + ErrorWithCause, // linemod-remove + findCauseByReference, // linemod-remove + getErrorCause, // linemod-remove + stackWithCauses, // linemod-remove + messageWithCauses, // linemod-remove +}; // linemod-remove + + +/***/ }), + +/***/ 75832: +/***/ ((module) => { + +"use strict"; + + +/** @template [T=undefined] */ +class ErrorWithCause extends Error { // linemod-prefix-with: export + /** + * @param {string} message + * @param {{ cause?: T }} options + */ + constructor (message, { cause } = {}) { + super(message); + + /** @type 
{string} */ + this.name = ErrorWithCause.name; + if (cause) { + /** @type {T} */ + this.cause = cause; + } + /** @type {string} */ + this.message = message; + } +} + +module.exports = { // linemod-remove + ErrorWithCause, // linemod-remove +}; // linemod-remove + + +/***/ }), + +/***/ 94306: +/***/ ((module) => { + +"use strict"; + + +/** + * @template {Error} T + * @param {unknown} err + * @param {new(...args: any[]) => T} reference + * @returns {T|undefined} + */ +const findCauseByReference = (err, reference) => { // linemod-prefix-with: export + if (!err || !reference) return; + if (!(err instanceof Error)) return; + if ( + !(reference.prototype instanceof Error) && + // @ts-ignore + reference !== Error + ) return; + + /** + * Ensures we don't go circular + * + * @type {Set} + */ + const seen = new Set(); + + /** @type {Error|undefined} */ + let currentErr = err; + + while (currentErr && !seen.has(currentErr)) { + seen.add(currentErr); + + if (currentErr instanceof reference) { + return currentErr; + } + + currentErr = getErrorCause(currentErr); + } +}; + +/** + * @param {Error|{ cause?: unknown|(()=>err)}} err + * @returns {Error|undefined} + */ +const getErrorCause = (err) => { // linemod-prefix-with: export + if (!err || typeof err !== 'object' || !('cause' in err)) { + return; + } + + // VError / NError style causes + if (typeof err.cause === 'function') { + const causeResult = err.cause(); + + return causeResult instanceof Error + ? causeResult + : undefined; + } else { + return err.cause instanceof Error + ? 
err.cause + : undefined; + } +}; + +/** + * Internal method that keeps a track of which error we have already added, to avoid circular recursion + * + * @private + * @param {Error} err + * @param {Set} seen + * @returns {string} + */ +const _stackWithCauses = (err, seen) => { + if (!(err instanceof Error)) return ''; + + const stack = err.stack || ''; + + // Ensure we don't go circular or crazily deep + if (seen.has(err)) { + return stack + '\ncauses have become circular...'; + } + + const cause = getErrorCause(err); + + // TODO: Follow up in https://github.com/nodejs/node/issues/38725#issuecomment-920309092 on how to log stuff + + if (cause) { + seen.add(err); + return (stack + '\ncaused by: ' + _stackWithCauses(cause, seen)); + } else { + return stack; + } +}; + +/** + * @param {Error} err + * @returns {string} + */ +const stackWithCauses = (err) => _stackWithCauses(err, new Set()); // linemod-prefix-with: export + +/** + * Internal method that keeps a track of which error we have already added, to avoid circular recursion + * + * @private + * @param {Error} err + * @param {Set} seen + * @param {boolean} [skip] + * @returns {string} + */ +const _messageWithCauses = (err, seen, skip) => { + if (!(err instanceof Error)) return ''; + + const message = skip ? '' : (err.message || ''); + + // Ensure we don't go circular or crazily deep + if (seen.has(err)) { + return message + ': ...'; + } + + const cause = getErrorCause(err); + + if (cause) { + seen.add(err); + + const skipIfVErrorStyleCause = 'cause' in err && typeof err.cause === 'function'; + + return (message + + (skipIfVErrorStyleCause ? 
'' : ': ') + + _messageWithCauses(cause, seen, skipIfVErrorStyleCause)); + } else { + return message; + } +}; + +/** + * @param {Error} err + * @returns {string} + */ +const messageWithCauses = (err) => _messageWithCauses(err, new Set()); // linemod-prefix-with: export + +module.exports = { // linemod-remove + findCauseByReference, // linemod-remove + getErrorCause, // linemod-remove + stackWithCauses, // linemod-remove + messageWithCauses, // linemod-remove +}; // linemod-remove + + /***/ }), /***/ 76578: @@ -85367,6 +105323,2495 @@ SafeBuffer.allocUnsafeSlow = function (size) { } +/***/ }), + +/***/ 93904: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + options = parseOptions(options) + + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } + + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + this.operator.startsWith('<') && 
comp.operator.startsWith('>')) { + return true + } + return false + } +} + +module.exports = Comparator + +const parseOptions = __webpack_require__(98587) +const { safeRe: re, t } = __webpack_require__(99718) +const cmp = __webpack_require__(72111) +const debug = __webpack_require__(57272) +const SemVer = __webpack_require__(53908) +const Range = __webpack_require__(78311) + + +/***/ }), + +/***/ 78311: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.format() + return this + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } + + // if we have any that are not the null set, throw out null sets. 
+ if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.format() + } + + format () { + this.range = this.set + .map((comps) => comps.join(' ').trim()) + .join('||') + .trim() + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE) + const memoKey = memoOpts + ':' + range + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + debug('tilde trim', range) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + debug('caret trim', range) + + // At this point, the range is completely trimmed and + // ready to be split into comparators. 
+ + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} + +module.exports = Range + +const 
LRU = __webpack_require__(69749) +const cache = new LRU({ max: 1000 }) + +const parseOptions = __webpack_require__(98587) +const Comparator = __webpack_require__(93904) +const debug = __webpack_require__(57272) +const SemVer = __webpack_require__(53908) +const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = __webpack_require__(99718) +const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = __webpack_require__(16874) + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+const parseComparator = (comp, options) => { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} + +const replaceCaret = (comp, options) => { + debug('caret', comp, 
options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? 
'-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? 
'-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return `${from} ${to}`.trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. 
+ return false + } + + return true +} + + +/***/ }), + +/***/ 53908: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const debug = __webpack_require__(57272) +const { MAX_LENGTH, MAX_SAFE_INTEGER } = __webpack_require__(16874) +const { safeRe: re, t } = __webpack_require__(99718) + +const parseOptions = __webpack_require__(98587) +const { compareIdentifiers } = __webpack_require__(61123) +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? 
re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if 
(!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier, identifierBase) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier, identifierBase) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier, identifierBase) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier, identifierBase) + this.inc('pre', identifier, identifierBase) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. 
+ case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase) + } + this.inc('pre', identifier, identifierBase) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 
1 : 0 + + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } + + if (this.prerelease.length === 0) { + this.prerelease = [base] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base] + if (identifierBase === false) { + prerelease = [identifier] + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease + } + } else { + this.prerelease = prerelease + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } + return this + } +} + +module.exports = SemVer + + +/***/ }), + +/***/ 57414: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const parse = __webpack_require__(30144) +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean + + +/***/ }), + +/***/ 72111: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const eq = __webpack_require__(94641) +const neq = __webpack_require__(13999) +const gt = __webpack_require__(35580) +const gte = __webpack_require__(54089) +const lt = __webpack_require__(7059) +const lte = __webpack_require__(25200) + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp + + +/***/ }), + +/***/ 46170: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const parse = __webpack_require__(30144) +const { safeRe: re, t } = __webpack_require__(99718) + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. 
+ // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL] + let next + while ((next = coerceRtlRegex.exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + coerceRtlRegex.lastIndex = -1 + } + + if (match === null) { + return null + } + + const major = match[2] + const minor = match[3] || '0' + const patch = match[4] || '0' + const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : '' + const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : '' + + return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options) +} +module.exports = coerce + + +/***/ }), + +/***/ 40909: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild + + +/***/ }), + +/***/ 11763: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose + + +/***/ }), + +/***/ 50560: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare + + +/***/ }), + +/***/ 51832: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const parse = __webpack_require__(30144) + +const diff = (version1, version2) => { + const v1 = parse(version1, null, true) + const v2 = parse(version2, null, true) + const comparison = v1.compare(v2) + + if (comparison === 0) { + return null + } + + const v1Higher = comparison > 0 + const highVersion = v1Higher ? v1 : v2 + const lowVersion = v1Higher ? 
v2 : v1 + const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // Otherwise it can be determined by checking the high version + + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' + } + + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 'pre' : '' + + if (v1.major !== v2.major) { + return prefix + 'major' + } + + if (v1.minor !== v2.minor) { + return prefix + 'minor' + } + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' +} + +module.exports = diff + + +/***/ }), + +/***/ 94641: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq + + +/***/ }), + +/***/ 35580: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt + + +/***/ }), + +/***/ 54089: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte + + +/***/ }), + +/***/ 93007: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const 
SemVer = __webpack_require__(53908) + +const inc = (version, release, options, identifier, identifierBase) => { + if (typeof (options) === 'string') { + identifierBase = identifier + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null + } +} +module.exports = inc + + +/***/ }), + +/***/ 7059: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt + + +/***/ }), + +/***/ 25200: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte + + +/***/ }), + +/***/ 32938: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major + + +/***/ }), + +/***/ 46254: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor + + +/***/ }), + +/***/ 13999: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq + + +/***/ }), + +/***/ 30144: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const parse = (version, options, throwErrors = false) => { + if (version instanceof SemVer) { + return version + } + try { + return new SemVer(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er + } +} + +module.exports = parse + + +/***/ }), + +/***/ 24493: 
+/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch + + +/***/ }), + +/***/ 31729: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const parse = __webpack_require__(30144) +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} +module.exports = prerelease + + +/***/ }), + +/***/ 9970: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compare = __webpack_require__(50560) +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare + + +/***/ }), + +/***/ 74277: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compareBuild = __webpack_require__(40909) +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort + + +/***/ }), + +/***/ 97638: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const Range = __webpack_require__(78311) +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies + + +/***/ }), + +/***/ 43927: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const compareBuild = __webpack_require__(40909) +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort + + +/***/ }), + +/***/ 56953: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const parse = __webpack_require__(30144) +const valid = (version, options) => { + const v = parse(version, options) + return v ? 
v.version : null +} +module.exports = valid + + +/***/ }), + +/***/ 99589: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +// just pre-load all the stuff that index.js lazily exports +const internalRe = __webpack_require__(99718) +const constants = __webpack_require__(16874) +const SemVer = __webpack_require__(53908) +const identifiers = __webpack_require__(61123) +const parse = __webpack_require__(30144) +const valid = __webpack_require__(56953) +const clean = __webpack_require__(57414) +const inc = __webpack_require__(93007) +const diff = __webpack_require__(51832) +const major = __webpack_require__(32938) +const minor = __webpack_require__(46254) +const patch = __webpack_require__(24493) +const prerelease = __webpack_require__(31729) +const compare = __webpack_require__(50560) +const rcompare = __webpack_require__(9970) +const compareLoose = __webpack_require__(11763) +const compareBuild = __webpack_require__(40909) +const sort = __webpack_require__(43927) +const rsort = __webpack_require__(74277) +const gt = __webpack_require__(35580) +const lt = __webpack_require__(7059) +const eq = __webpack_require__(94641) +const neq = __webpack_require__(13999) +const gte = __webpack_require__(54089) +const lte = __webpack_require__(25200) +const cmp = __webpack_require__(72111) +const coerce = __webpack_require__(46170) +const Comparator = __webpack_require__(93904) +const Range = __webpack_require__(78311) +const satisfies = __webpack_require__(97638) +const toComparators = __webpack_require__(77631) +const maxSatisfying = __webpack_require__(19628) +const minSatisfying = __webpack_require__(270) +const minVersion = __webpack_require__(41261) +const validRange = __webpack_require__(13874) +const outside = __webpack_require__(97075) +const gtr = __webpack_require__(75571) +const ltr = __webpack_require__(5342) +const intersects = __webpack_require__(76780) +const simplifyRange = __webpack_require__(72525) +const subset = __webpack_require__(75032) 
+module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +} + + +/***/ }), + +/***/ 16874: +/***/ ((module) => { + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16 + +// Max safe length for a build identifier. The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. 
+const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +] + +module.exports = { + MAX_LENGTH, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +} + + +/***/ }), + +/***/ 57272: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +/* provided dependency */ var process = __webpack_require__(65606); +/* provided dependency */ var console = __webpack_require__(96763); +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug + + +/***/ }), + +/***/ 61123: +/***/ ((module) => { + +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} + + +/***/ }), + +/***/ 98587: +/***/ ((module) => { + +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }) +const emptyOpts = Object.freeze({ }) +const parseOptions = options => { + if (!options) { + return emptyOpts + } + + if (typeof options !== 'object') { + return looseOption + } + + return options +} +module.exports = parseOptions + + +/***/ }), + +/***/ 99718: +/***/ ((module, exports, __webpack_require__) => { + +const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, +} = __webpack_require__(16874) +const debug = __webpack_require__(57272) +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const safeRe = exports.safeRe = [] +const src = exports.src = [] +const t = exports.t = {} +let R = 0 + +const LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. 
+const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`) + } + return value +} + +const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value) + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '\\d+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. 
+ +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. 
+createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCEPLAIN', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`) +createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`) +createToken('COERCEFULL', src[t.COERCEPLAIN] + + `(?:${src[t.PRERELEASE]})?` + + `(?:${src[t.BUILD]})?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) +createToken('COERCERTLFULL', src[t.COERCEFULL], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. +createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') + + +/***/ }), + +/***/ 75571: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +// Determine if version is greater than all the versions possible in the range. 
+const outside = __webpack_require__(97075) +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr + + +/***/ }), + +/***/ 76780: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const Range = __webpack_require__(78311) +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2, options) +} +module.exports = intersects + + +/***/ }), + +/***/ 5342: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const outside = __webpack_require__(97075) +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr + + +/***/ }), + +/***/ 19628: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const Range = __webpack_require__(78311) + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying + + +/***/ }), + +/***/ 270: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const Range = __webpack_require__(78311) +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new 
SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying + + +/***/ }), + +/***/ 41261: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const Range = __webpack_require__(78311) +const gt = __webpack_require__(35580) + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. + const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion + + +/***/ }), + +/***/ 97075: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const SemVer = __webpack_require__(53908) +const Comparator = __webpack_require__(93904) +const { ANY } = Comparator +const Range = __webpack_require__(78311) +const satisfies = __webpack_require__(97638) +const gt = __webpack_require__(35580) +const lt = __webpack_require__(7059) +const lte = __webpack_require__(25200) +const gte = __webpack_require__(54089) + +const outside = (version, range, 
hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside + + +/***/ }), + +/***/ 72525: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. 
+const satisfies = __webpack_require__(97638) +const compare = __webpack_require__(50560) +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? 
simplified : range +} + + +/***/ }), + +/***/ 75032: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const Range = __webpack_require__(78311) +const Comparator = __webpack_require__(93904) +const { ANY } = Comparator +const satisfies = __webpack_require__(97638) +const compare = __webpack_require__(50560) + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has 
a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. + if (sawNonNull) { + return false + } + } + return true +} + +const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] +const minimumVersion = [new Comparator('>=0.0.0')] + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease + } else { + sub = minimumVersion + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = minimumVersion + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt 
&& !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false 
+ } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. + // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? b + : a +} + +module.exports = subset + + +/***/ }), + +/***/ 77631: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const Range = __webpack_require__(78311) + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators + + +/***/ }), + +/***/ 13874: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +const Range = __webpack_require__(78311) +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange + + /***/ }), /***/ 96897: @@ -86403,6 +108848,744 @@ Stream.prototype.pipe = function(dest, options) { }; +/***/ }), + +/***/ 11568: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +var ClientRequest = __webpack_require__(55537) +var response = __webpack_require__(6917) +var extend = __webpack_require__(57510) +var statusCodes = __webpack_require__(86866) +var url = __webpack_require__(59817) + +var http = exports + +http.request = function (opts, cb) { + if (typeof opts === 'string') + opts = url.parse(opts) + else + opts = extend(opts) + + // Normally, the page is loaded from http or https, so not specifying a protocol + // will result in a (valid) protocol-relative url. However, this won't work if + // the protocol is something else, like 'file:' + var defaultProtocol = __webpack_require__.g.location.protocol.search(/^https?:$/) === -1 ? 'http:' : '' + + var protocol = opts.protocol || defaultProtocol + var host = opts.hostname || opts.host + var port = opts.port + var path = opts.path || '/' + + // Necessary for IPv6 addresses + if (host && host.indexOf(':') !== -1) + host = '[' + host + ']' + + // This may be a relative url. The browser should always be able to interpret it correctly. + opts.url = (host ? (protocol + '//' + host) : '') + (port ? 
':' + port : '') + path + opts.method = (opts.method || 'GET').toUpperCase() + opts.headers = opts.headers || {} + + // Also valid opts.auth, opts.mode + + var req = new ClientRequest(opts) + if (cb) + req.on('response', cb) + return req +} + +http.get = function get (opts, cb) { + var req = http.request(opts, cb) + req.end() + return req +} + +http.ClientRequest = ClientRequest +http.IncomingMessage = response.IncomingMessage + +http.Agent = function () {} +http.Agent.defaultMaxSockets = 4 + +http.globalAgent = new http.Agent() + +http.STATUS_CODES = statusCodes + +http.METHODS = [ + 'CHECKOUT', + 'CONNECT', + 'COPY', + 'DELETE', + 'GET', + 'HEAD', + 'LOCK', + 'M-SEARCH', + 'MERGE', + 'MKACTIVITY', + 'MKCOL', + 'MOVE', + 'NOTIFY', + 'OPTIONS', + 'PATCH', + 'POST', + 'PROPFIND', + 'PROPPATCH', + 'PURGE', + 'PUT', + 'REPORT', + 'SEARCH', + 'SUBSCRIBE', + 'TRACE', + 'UNLOCK', + 'UNSUBSCRIBE' +] + +/***/ }), + +/***/ 6688: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +exports.fetch = isFunction(__webpack_require__.g.fetch) && isFunction(__webpack_require__.g.ReadableStream) + +exports.writableStream = isFunction(__webpack_require__.g.WritableStream) + +exports.abortController = isFunction(__webpack_require__.g.AbortController) + +// The xhr request to example.com may violate some restrictive CSP configurations, +// so if we're running in a browser that supports `fetch`, avoid calling getXHR() +// and assume support for certain features below. +var xhr +function getXHR () { + // Cache the xhr value + if (xhr !== undefined) return xhr + + if (__webpack_require__.g.XMLHttpRequest) { + xhr = new __webpack_require__.g.XMLHttpRequest() + // If XDomainRequest is available (ie only, where xhr might not work + // cross domain), use the page location. Otherwise use example.com + // Note: this doesn't actually make an http request. + try { + xhr.open('GET', __webpack_require__.g.XDomainRequest ? 
'/' : 'https://example.com') + } catch(e) { + xhr = null + } + } else { + // Service workers don't have XHR + xhr = null + } + return xhr +} + +function checkTypeSupport (type) { + var xhr = getXHR() + if (!xhr) return false + try { + xhr.responseType = type + return xhr.responseType === type + } catch (e) {} + return false +} + +// If fetch is supported, then arraybuffer will be supported too. Skip calling +// checkTypeSupport(), since that calls getXHR(). +exports.arraybuffer = exports.fetch || checkTypeSupport('arraybuffer') + +// These next two tests unavoidably show warnings in Chrome. Since fetch will always +// be used if it's available, just return false for these to avoid the warnings. +exports.msstream = !exports.fetch && checkTypeSupport('ms-stream') +exports.mozchunkedarraybuffer = !exports.fetch && checkTypeSupport('moz-chunked-arraybuffer') + +// If fetch is supported, then overrideMimeType will be supported too. Skip calling +// getXHR(). +exports.overrideMimeType = exports.fetch || (getXHR() ? 
isFunction(getXHR().overrideMimeType) : false) + +function isFunction (value) { + return typeof value === 'function' +} + +xhr = null // Help gc + + +/***/ }), + +/***/ 55537: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +/* provided dependency */ var process = __webpack_require__(65606); +var capability = __webpack_require__(6688) +var inherits = __webpack_require__(56698) +var response = __webpack_require__(6917) +var stream = __webpack_require__(28399) + +var IncomingMessage = response.IncomingMessage +var rStates = response.readyStates + +function decideMode (preferBinary, useFetch) { + if (capability.fetch && useFetch) { + return 'fetch' + } else if (capability.mozchunkedarraybuffer) { + return 'moz-chunked-arraybuffer' + } else if (capability.msstream) { + return 'ms-stream' + } else if (capability.arraybuffer && preferBinary) { + return 'arraybuffer' + } else { + return 'text' + } +} + +var ClientRequest = module.exports = function (opts) { + var self = this + stream.Writable.call(self) + + self._opts = opts + self._body = [] + self._headers = {} + if (opts.auth) + self.setHeader('Authorization', 'Basic ' + Buffer.from(opts.auth).toString('base64')) + Object.keys(opts.headers).forEach(function (name) { + self.setHeader(name, opts.headers[name]) + }) + + var preferBinary + var useFetch = true + if (opts.mode === 'disable-fetch' || ('requestTimeout' in opts && !capability.abortController)) { + // If the use of XHR should be preferred. Not typically needed. 
+ useFetch = false + preferBinary = true + } else if (opts.mode === 'prefer-streaming') { + // If streaming is a high priority but binary compatibility and + // the accuracy of the 'content-type' header aren't + preferBinary = false + } else if (opts.mode === 'allow-wrong-content-type') { + // If streaming is more important than preserving the 'content-type' header + preferBinary = !capability.overrideMimeType + } else if (!opts.mode || opts.mode === 'default' || opts.mode === 'prefer-fast') { + // Use binary if text streaming may corrupt data or the content-type header, or for speed + preferBinary = true + } else { + throw new Error('Invalid value for opts.mode') + } + self._mode = decideMode(preferBinary, useFetch) + self._fetchTimer = null + self._socketTimeout = null + self._socketTimer = null + + self.on('finish', function () { + self._onFinish() + }) +} + +inherits(ClientRequest, stream.Writable) + +ClientRequest.prototype.setHeader = function (name, value) { + var self = this + var lowerName = name.toLowerCase() + // This check is not necessary, but it prevents warnings from browsers about setting unsafe + // headers. To be honest I'm not entirely sure hiding these warnings is a good thing, but + // http-browserify did it, so I will too. 
+ if (unsafeHeaders.indexOf(lowerName) !== -1) + return + + self._headers[lowerName] = { + name: name, + value: value + } +} + +ClientRequest.prototype.getHeader = function (name) { + var header = this._headers[name.toLowerCase()] + if (header) + return header.value + return null +} + +ClientRequest.prototype.removeHeader = function (name) { + var self = this + delete self._headers[name.toLowerCase()] +} + +ClientRequest.prototype._onFinish = function () { + var self = this + + if (self._destroyed) + return + var opts = self._opts + + if ('timeout' in opts && opts.timeout !== 0) { + self.setTimeout(opts.timeout) + } + + var headersObj = self._headers + var body = null + if (opts.method !== 'GET' && opts.method !== 'HEAD') { + body = new Blob(self._body, { + type: (headersObj['content-type'] || {}).value || '' + }); + } + + // create flattened list of headers + var headersList = [] + Object.keys(headersObj).forEach(function (keyName) { + var name = headersObj[keyName].name + var value = headersObj[keyName].value + if (Array.isArray(value)) { + value.forEach(function (v) { + headersList.push([name, v]) + }) + } else { + headersList.push([name, value]) + } + }) + + if (self._mode === 'fetch') { + var signal = null + if (capability.abortController) { + var controller = new AbortController() + signal = controller.signal + self._fetchAbortController = controller + + if ('requestTimeout' in opts && opts.requestTimeout !== 0) { + self._fetchTimer = __webpack_require__.g.setTimeout(function () { + self.emit('requestTimeout') + if (self._fetchAbortController) + self._fetchAbortController.abort() + }, opts.requestTimeout) + } + } + + __webpack_require__.g.fetch(self._opts.url, { + method: self._opts.method, + headers: headersList, + body: body || undefined, + mode: 'cors', + credentials: opts.withCredentials ? 
'include' : 'same-origin', + signal: signal + }).then(function (response) { + self._fetchResponse = response + self._resetTimers(false) + self._connect() + }, function (reason) { + self._resetTimers(true) + if (!self._destroyed) + self.emit('error', reason) + }) + } else { + var xhr = self._xhr = new __webpack_require__.g.XMLHttpRequest() + try { + xhr.open(self._opts.method, self._opts.url, true) + } catch (err) { + process.nextTick(function () { + self.emit('error', err) + }) + return + } + + // Can't set responseType on really old browsers + if ('responseType' in xhr) + xhr.responseType = self._mode + + if ('withCredentials' in xhr) + xhr.withCredentials = !!opts.withCredentials + + if (self._mode === 'text' && 'overrideMimeType' in xhr) + xhr.overrideMimeType('text/plain; charset=x-user-defined') + + if ('requestTimeout' in opts) { + xhr.timeout = opts.requestTimeout + xhr.ontimeout = function () { + self.emit('requestTimeout') + } + } + + headersList.forEach(function (header) { + xhr.setRequestHeader(header[0], header[1]) + }) + + self._response = null + xhr.onreadystatechange = function () { + switch (xhr.readyState) { + case rStates.LOADING: + case rStates.DONE: + self._onXHRProgress() + break + } + } + // Necessary for streaming in Firefox, since xhr.response is ONLY defined + // in onprogress, not in onreadystatechange with xhr.readyState = 3 + if (self._mode === 'moz-chunked-arraybuffer') { + xhr.onprogress = function () { + self._onXHRProgress() + } + } + + xhr.onerror = function () { + if (self._destroyed) + return + self._resetTimers(true) + self.emit('error', new Error('XHR error')) + } + + try { + xhr.send(body) + } catch (err) { + process.nextTick(function () { + self.emit('error', err) + }) + return + } + } +} + +/** + * Checks if xhr.status is readable and non-zero, indicating no error. 
+ * Even though the spec says it should be available in readyState 3, + * accessing it throws an exception in IE8 + */ +function statusValid (xhr) { + try { + var status = xhr.status + return (status !== null && status !== 0) + } catch (e) { + return false + } +} + +ClientRequest.prototype._onXHRProgress = function () { + var self = this + + self._resetTimers(false) + + if (!statusValid(self._xhr) || self._destroyed) + return + + if (!self._response) + self._connect() + + self._response._onXHRProgress(self._resetTimers.bind(self)) +} + +ClientRequest.prototype._connect = function () { + var self = this + + if (self._destroyed) + return + + self._response = new IncomingMessage(self._xhr, self._fetchResponse, self._mode, self._resetTimers.bind(self)) + self._response.on('error', function(err) { + self.emit('error', err) + }) + + self.emit('response', self._response) +} + +ClientRequest.prototype._write = function (chunk, encoding, cb) { + var self = this + + self._body.push(chunk) + cb() +} + +ClientRequest.prototype._resetTimers = function (done) { + var self = this + + __webpack_require__.g.clearTimeout(self._socketTimer) + self._socketTimer = null + + if (done) { + __webpack_require__.g.clearTimeout(self._fetchTimer) + self._fetchTimer = null + } else if (self._socketTimeout) { + self._socketTimer = __webpack_require__.g.setTimeout(function () { + self.emit('timeout') + }, self._socketTimeout) + } +} + +ClientRequest.prototype.abort = ClientRequest.prototype.destroy = function (err) { + var self = this + self._destroyed = true + self._resetTimers(true) + if (self._response) + self._response._destroyed = true + if (self._xhr) + self._xhr.abort() + else if (self._fetchAbortController) + self._fetchAbortController.abort() + + if (err) + self.emit('error', err) +} + +ClientRequest.prototype.end = function (data, encoding, cb) { + var self = this + if (typeof data === 'function') { + cb = data + data = undefined + } + + stream.Writable.prototype.end.call(self, data, 
encoding, cb) +} + +ClientRequest.prototype.setTimeout = function (timeout, cb) { + var self = this + + if (cb) + self.once('timeout', cb) + + self._socketTimeout = timeout + self._resetTimers(false) +} + +ClientRequest.prototype.flushHeaders = function () {} +ClientRequest.prototype.setNoDelay = function () {} +ClientRequest.prototype.setSocketKeepAlive = function () {} + +// Taken from http://www.w3.org/TR/XMLHttpRequest/#the-setrequestheader%28%29-method +var unsafeHeaders = [ + 'accept-charset', + 'accept-encoding', + 'access-control-request-headers', + 'access-control-request-method', + 'connection', + 'content-length', + 'cookie', + 'cookie2', + 'date', + 'dnt', + 'expect', + 'host', + 'keep-alive', + 'origin', + 'referer', + 'te', + 'trailer', + 'transfer-encoding', + 'upgrade', + 'via' +] + + +/***/ }), + +/***/ 6917: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +/* provided dependency */ var process = __webpack_require__(65606); +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +var capability = __webpack_require__(6688) +var inherits = __webpack_require__(56698) +var stream = __webpack_require__(28399) + +var rStates = exports.readyStates = { + UNSENT: 0, + OPENED: 1, + HEADERS_RECEIVED: 2, + LOADING: 3, + DONE: 4 +} + +var IncomingMessage = exports.IncomingMessage = function (xhr, response, mode, resetTimers) { + var self = this + stream.Readable.call(self) + + self._mode = mode + self.headers = {} + self.rawHeaders = [] + self.trailers = {} + self.rawTrailers = [] + + // Fake the 'close' event, but only once 'end' fires + self.on('end', function () { + // The nextTick is necessary to prevent the 'request' module from causing an infinite loop + process.nextTick(function () { + self.emit('close') + }) + }) + + if (mode === 'fetch') { + self._fetchResponse = response + + self.url = response.url + self.statusCode = response.status + self.statusMessage = response.statusText + + 
response.headers.forEach(function (header, key){ + self.headers[key.toLowerCase()] = header + self.rawHeaders.push(key, header) + }) + + if (capability.writableStream) { + var writable = new WritableStream({ + write: function (chunk) { + resetTimers(false) + return new Promise(function (resolve, reject) { + if (self._destroyed) { + reject() + } else if(self.push(Buffer.from(chunk))) { + resolve() + } else { + self._resumeFetch = resolve + } + }) + }, + close: function () { + resetTimers(true) + if (!self._destroyed) + self.push(null) + }, + abort: function (err) { + resetTimers(true) + if (!self._destroyed) + self.emit('error', err) + } + }) + + try { + response.body.pipeTo(writable).catch(function (err) { + resetTimers(true) + if (!self._destroyed) + self.emit('error', err) + }) + return + } catch (e) {} // pipeTo method isn't defined. Can't find a better way to feature test this + } + // fallback for when writableStream or pipeTo aren't available + var reader = response.body.getReader() + function read () { + reader.read().then(function (result) { + if (self._destroyed) + return + resetTimers(result.done) + if (result.done) { + self.push(null) + return + } + self.push(Buffer.from(result.value)) + read() + }).catch(function (err) { + resetTimers(true) + if (!self._destroyed) + self.emit('error', err) + }) + } + read() + } else { + self._xhr = xhr + self._pos = 0 + + self.url = xhr.responseURL + self.statusCode = xhr.status + self.statusMessage = xhr.statusText + var headers = xhr.getAllResponseHeaders().split(/\r?\n/) + headers.forEach(function (header) { + var matches = header.match(/^([^:]+):\s*(.*)/) + if (matches) { + var key = matches[1].toLowerCase() + if (key === 'set-cookie') { + if (self.headers[key] === undefined) { + self.headers[key] = [] + } + self.headers[key].push(matches[2]) + } else if (self.headers[key] !== undefined) { + self.headers[key] += ', ' + matches[2] + } else { + self.headers[key] = matches[2] + } + self.rawHeaders.push(matches[1], 
matches[2]) + } + }) + + self._charset = 'x-user-defined' + if (!capability.overrideMimeType) { + var mimeType = self.rawHeaders['mime-type'] + if (mimeType) { + var charsetMatch = mimeType.match(/;\s*charset=([^;])(;|$)/) + if (charsetMatch) { + self._charset = charsetMatch[1].toLowerCase() + } + } + if (!self._charset) + self._charset = 'utf-8' // best guess + } + } +} + +inherits(IncomingMessage, stream.Readable) + +IncomingMessage.prototype._read = function () { + var self = this + + var resolve = self._resumeFetch + if (resolve) { + self._resumeFetch = null + resolve() + } +} + +IncomingMessage.prototype._onXHRProgress = function (resetTimers) { + var self = this + + var xhr = self._xhr + + var response = null + switch (self._mode) { + case 'text': + response = xhr.responseText + if (response.length > self._pos) { + var newData = response.substr(self._pos) + if (self._charset === 'x-user-defined') { + var buffer = Buffer.alloc(newData.length) + for (var i = 0; i < newData.length; i++) + buffer[i] = newData.charCodeAt(i) & 0xff + + self.push(buffer) + } else { + self.push(newData, self._charset) + } + self._pos = response.length + } + break + case 'arraybuffer': + if (xhr.readyState !== rStates.DONE || !xhr.response) + break + response = xhr.response + self.push(Buffer.from(new Uint8Array(response))) + break + case 'moz-chunked-arraybuffer': // take whole + response = xhr.response + if (xhr.readyState !== rStates.LOADING || !response) + break + self.push(Buffer.from(new Uint8Array(response))) + break + case 'ms-stream': + response = xhr.response + if (xhr.readyState !== rStates.LOADING) + break + var reader = new __webpack_require__.g.MSStreamReader() + reader.onprogress = function () { + if (reader.result.byteLength > self._pos) { + self.push(Buffer.from(new Uint8Array(reader.result.slice(self._pos)))) + self._pos = reader.result.byteLength + } + } + reader.onload = function () { + resetTimers(true) + self.push(null) + } + // reader.onerror = ??? 
// TODO: this + reader.readAsArrayBuffer(response) + break + } + + // The ms-stream case handles end separately in reader.onload() + if (self._xhr.readyState === rStates.DONE && self._mode !== 'ms-stream') { + resetTimers(true) + self.push(null) + } +} + + /***/ }), /***/ 83141: @@ -86706,6 +109889,2493 @@ function simpleEnd(buf) { return buf && buf.length ? this.write(buf) : ''; } +/***/ }), + +/***/ 76386: +/***/ (function(module, __unused_webpack_exports, __webpack_require__) { + +/* provided dependency */ var Buffer = __webpack_require__(48287)["Buffer"]; +// Written in 2014-2016 by Dmitry Chestnykh and Devi Mandiri. +// Public domain. +(function(root, f) { + 'use strict'; + if ( true && module.exports) module.exports = f(); + else if (root.nacl) root.nacl.util = f(); + else { + root.nacl = {}; + root.nacl.util = f(); + } +}(this, function() { + 'use strict'; + + var util = {}; + + function validateBase64(s) { + if (!(/^(?:[A-Za-z0-9+\/]{2}[A-Za-z0-9+\/]{2})*(?:[A-Za-z0-9+\/]{2}==|[A-Za-z0-9+\/]{3}=)?$/.test(s))) { + throw new TypeError('invalid encoding'); + } + } + + util.decodeUTF8 = function(s) { + if (typeof s !== 'string') throw new TypeError('expected string'); + var i, d = unescape(encodeURIComponent(s)), b = new Uint8Array(d.length); + for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i); + return b; + }; + + util.encodeUTF8 = function(arr) { + var i, s = []; + for (i = 0; i < arr.length; i++) s.push(String.fromCharCode(arr[i])); + return decodeURIComponent(escape(s.join(''))); + }; + + if (typeof atob === 'undefined') { + // Node.js + + if (typeof Buffer.from !== 'undefined') { + // Node v6 and later + util.encodeBase64 = function (arr) { // v6 and later + return Buffer.from(arr).toString('base64'); + }; + + util.decodeBase64 = function (s) { + validateBase64(s); + return new Uint8Array(Array.prototype.slice.call(Buffer.from(s, 'base64'), 0)); + }; + + } else { + // Node earlier than v6 + util.encodeBase64 = function (arr) { // v6 and later + return 
(new Buffer(arr)).toString('base64'); + }; + + util.decodeBase64 = function(s) { + validateBase64(s); + return new Uint8Array(Array.prototype.slice.call(new Buffer(s, 'base64'), 0)); + }; + } + + } else { + // Browsers + + util.encodeBase64 = function(arr) { + var i, s = [], len = arr.length; + for (i = 0; i < len; i++) s.push(String.fromCharCode(arr[i])); + return btoa(s.join('')); + }; + + util.decodeBase64 = function(s) { + validateBase64(s); + var i, d = atob(s), b = new Uint8Array(d.length); + for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i); + return b; + }; + + } + + return util; + +})); + + +/***/ }), + +/***/ 88947: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +(function(nacl) { +'use strict'; + +// Ported in 2014 by Dmitry Chestnykh and Devi Mandiri. +// Public domain. +// +// Implementation derived from TweetNaCl version 20140427. +// See for details: http://tweetnacl.cr.yp.to/ + +var gf = function(init) { + var i, r = new Float64Array(16); + if (init) for (i = 0; i < init.length; i++) r[i] = init[i]; + return r; +}; + +// Pluggable, initialized in high-level API below. 
+var randombytes = function(/* x, n */) { throw new Error('no PRNG'); }; + +var _0 = new Uint8Array(16); +var _9 = new Uint8Array(32); _9[0] = 9; + +var gf0 = gf(), + gf1 = gf([1]), + _121665 = gf([0xdb41, 1]), + D = gf([0x78a3, 0x1359, 0x4dca, 0x75eb, 0xd8ab, 0x4141, 0x0a4d, 0x0070, 0xe898, 0x7779, 0x4079, 0x8cc7, 0xfe73, 0x2b6f, 0x6cee, 0x5203]), + D2 = gf([0xf159, 0x26b2, 0x9b94, 0xebd6, 0xb156, 0x8283, 0x149a, 0x00e0, 0xd130, 0xeef3, 0x80f2, 0x198e, 0xfce7, 0x56df, 0xd9dc, 0x2406]), + X = gf([0xd51a, 0x8f25, 0x2d60, 0xc956, 0xa7b2, 0x9525, 0xc760, 0x692c, 0xdc5c, 0xfdd6, 0xe231, 0xc0a4, 0x53fe, 0xcd6e, 0x36d3, 0x2169]), + Y = gf([0x6658, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666, 0x6666]), + I = gf([0xa0b0, 0x4a0e, 0x1b27, 0xc4ee, 0xe478, 0xad2f, 0x1806, 0x2f43, 0xd7a7, 0x3dfb, 0x0099, 0x2b4d, 0xdf0b, 0x4fc1, 0x2480, 0x2b83]); + +function ts64(x, i, h, l) { + x[i] = (h >> 24) & 0xff; + x[i+1] = (h >> 16) & 0xff; + x[i+2] = (h >> 8) & 0xff; + x[i+3] = h & 0xff; + x[i+4] = (l >> 24) & 0xff; + x[i+5] = (l >> 16) & 0xff; + x[i+6] = (l >> 8) & 0xff; + x[i+7] = l & 0xff; +} + +function vn(x, xi, y, yi, n) { + var i,d = 0; + for (i = 0; i < n; i++) d |= x[xi+i]^y[yi+i]; + return (1 & ((d - 1) >>> 8)) - 1; +} + +function crypto_verify_16(x, xi, y, yi) { + return vn(x,xi,y,yi,16); +} + +function crypto_verify_32(x, xi, y, yi) { + return vn(x,xi,y,yi,32); +} + +function core_salsa20(o, p, k, c) { + var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24, + j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24, + j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24, + j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24, + j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24, + j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24, + j6 = p[ 0] & 
0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24, + j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24, + j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24, + j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24, + j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24, + j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24, + j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24, + j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24, + j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | (k[30] & 0xff)<<16 | (k[31] & 0xff)<<24, + j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24; + + var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7, + x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14, + x15 = j15, u; + + for (var i = 0; i < 20; i += 2) { + u = x0 + x12 | 0; + x4 ^= u<<7 | u>>>(32-7); + u = x4 + x0 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x4 | 0; + x12 ^= u<<13 | u>>>(32-13); + u = x12 + x8 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x1 | 0; + x9 ^= u<<7 | u>>>(32-7); + u = x9 + x5 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x9 | 0; + x1 ^= u<<13 | u>>>(32-13); + u = x1 + x13 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x6 | 0; + x14 ^= u<<7 | u>>>(32-7); + u = x14 + x10 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x14 | 0; + x6 ^= u<<13 | u>>>(32-13); + u = x6 + x2 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x11 | 0; + x3 ^= u<<7 | u>>>(32-7); + u = x3 + x15 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x3 | 0; + x11 ^= u<<13 | u>>>(32-13); + u = x11 + x7 | 0; + x15 ^= u<<18 | u>>>(32-18); + + u = x0 + x3 | 0; + x1 ^= u<<7 | u>>>(32-7); + u = x1 + x0 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x1 | 0; + x3 ^= u<<13 | u>>>(32-13); + u = x3 + x2 | 0; + x0 ^= u<<18 | u>>>(32-18); + + 
u = x5 + x4 | 0; + x6 ^= u<<7 | u>>>(32-7); + u = x6 + x5 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x6 | 0; + x4 ^= u<<13 | u>>>(32-13); + u = x4 + x7 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x9 | 0; + x11 ^= u<<7 | u>>>(32-7); + u = x11 + x10 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x11 | 0; + x9 ^= u<<13 | u>>>(32-13); + u = x9 + x8 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x14 | 0; + x12 ^= u<<7 | u>>>(32-7); + u = x12 + x15 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x12 | 0; + x14 ^= u<<13 | u>>>(32-13); + u = x14 + x13 | 0; + x15 ^= u<<18 | u>>>(32-18); + } + x0 = x0 + j0 | 0; + x1 = x1 + j1 | 0; + x2 = x2 + j2 | 0; + x3 = x3 + j3 | 0; + x4 = x4 + j4 | 0; + x5 = x5 + j5 | 0; + x6 = x6 + j6 | 0; + x7 = x7 + j7 | 0; + x8 = x8 + j8 | 0; + x9 = x9 + j9 | 0; + x10 = x10 + j10 | 0; + x11 = x11 + j11 | 0; + x12 = x12 + j12 | 0; + x13 = x13 + j13 | 0; + x14 = x14 + j14 | 0; + x15 = x15 + j15 | 0; + + o[ 0] = x0 >>> 0 & 0xff; + o[ 1] = x0 >>> 8 & 0xff; + o[ 2] = x0 >>> 16 & 0xff; + o[ 3] = x0 >>> 24 & 0xff; + + o[ 4] = x1 >>> 0 & 0xff; + o[ 5] = x1 >>> 8 & 0xff; + o[ 6] = x1 >>> 16 & 0xff; + o[ 7] = x1 >>> 24 & 0xff; + + o[ 8] = x2 >>> 0 & 0xff; + o[ 9] = x2 >>> 8 & 0xff; + o[10] = x2 >>> 16 & 0xff; + o[11] = x2 >>> 24 & 0xff; + + o[12] = x3 >>> 0 & 0xff; + o[13] = x3 >>> 8 & 0xff; + o[14] = x3 >>> 16 & 0xff; + o[15] = x3 >>> 24 & 0xff; + + o[16] = x4 >>> 0 & 0xff; + o[17] = x4 >>> 8 & 0xff; + o[18] = x4 >>> 16 & 0xff; + o[19] = x4 >>> 24 & 0xff; + + o[20] = x5 >>> 0 & 0xff; + o[21] = x5 >>> 8 & 0xff; + o[22] = x5 >>> 16 & 0xff; + o[23] = x5 >>> 24 & 0xff; + + o[24] = x6 >>> 0 & 0xff; + o[25] = x6 >>> 8 & 0xff; + o[26] = x6 >>> 16 & 0xff; + o[27] = x6 >>> 24 & 0xff; + + o[28] = x7 >>> 0 & 0xff; + o[29] = x7 >>> 8 & 0xff; + o[30] = x7 >>> 16 & 0xff; + o[31] = x7 >>> 24 & 0xff; + + o[32] = x8 >>> 0 & 0xff; + o[33] = x8 >>> 8 & 0xff; + o[34] = x8 >>> 16 & 0xff; + o[35] = x8 >>> 24 & 0xff; + + o[36] = x9 >>> 0 & 0xff; + o[37] = x9 >>> 8 & 0xff; + o[38] = 
x9 >>> 16 & 0xff; + o[39] = x9 >>> 24 & 0xff; + + o[40] = x10 >>> 0 & 0xff; + o[41] = x10 >>> 8 & 0xff; + o[42] = x10 >>> 16 & 0xff; + o[43] = x10 >>> 24 & 0xff; + + o[44] = x11 >>> 0 & 0xff; + o[45] = x11 >>> 8 & 0xff; + o[46] = x11 >>> 16 & 0xff; + o[47] = x11 >>> 24 & 0xff; + + o[48] = x12 >>> 0 & 0xff; + o[49] = x12 >>> 8 & 0xff; + o[50] = x12 >>> 16 & 0xff; + o[51] = x12 >>> 24 & 0xff; + + o[52] = x13 >>> 0 & 0xff; + o[53] = x13 >>> 8 & 0xff; + o[54] = x13 >>> 16 & 0xff; + o[55] = x13 >>> 24 & 0xff; + + o[56] = x14 >>> 0 & 0xff; + o[57] = x14 >>> 8 & 0xff; + o[58] = x14 >>> 16 & 0xff; + o[59] = x14 >>> 24 & 0xff; + + o[60] = x15 >>> 0 & 0xff; + o[61] = x15 >>> 8 & 0xff; + o[62] = x15 >>> 16 & 0xff; + o[63] = x15 >>> 24 & 0xff; +} + +function core_hsalsa20(o,p,k,c) { + var j0 = c[ 0] & 0xff | (c[ 1] & 0xff)<<8 | (c[ 2] & 0xff)<<16 | (c[ 3] & 0xff)<<24, + j1 = k[ 0] & 0xff | (k[ 1] & 0xff)<<8 | (k[ 2] & 0xff)<<16 | (k[ 3] & 0xff)<<24, + j2 = k[ 4] & 0xff | (k[ 5] & 0xff)<<8 | (k[ 6] & 0xff)<<16 | (k[ 7] & 0xff)<<24, + j3 = k[ 8] & 0xff | (k[ 9] & 0xff)<<8 | (k[10] & 0xff)<<16 | (k[11] & 0xff)<<24, + j4 = k[12] & 0xff | (k[13] & 0xff)<<8 | (k[14] & 0xff)<<16 | (k[15] & 0xff)<<24, + j5 = c[ 4] & 0xff | (c[ 5] & 0xff)<<8 | (c[ 6] & 0xff)<<16 | (c[ 7] & 0xff)<<24, + j6 = p[ 0] & 0xff | (p[ 1] & 0xff)<<8 | (p[ 2] & 0xff)<<16 | (p[ 3] & 0xff)<<24, + j7 = p[ 4] & 0xff | (p[ 5] & 0xff)<<8 | (p[ 6] & 0xff)<<16 | (p[ 7] & 0xff)<<24, + j8 = p[ 8] & 0xff | (p[ 9] & 0xff)<<8 | (p[10] & 0xff)<<16 | (p[11] & 0xff)<<24, + j9 = p[12] & 0xff | (p[13] & 0xff)<<8 | (p[14] & 0xff)<<16 | (p[15] & 0xff)<<24, + j10 = c[ 8] & 0xff | (c[ 9] & 0xff)<<8 | (c[10] & 0xff)<<16 | (c[11] & 0xff)<<24, + j11 = k[16] & 0xff | (k[17] & 0xff)<<8 | (k[18] & 0xff)<<16 | (k[19] & 0xff)<<24, + j12 = k[20] & 0xff | (k[21] & 0xff)<<8 | (k[22] & 0xff)<<16 | (k[23] & 0xff)<<24, + j13 = k[24] & 0xff | (k[25] & 0xff)<<8 | (k[26] & 0xff)<<16 | (k[27] & 0xff)<<24, + j14 = k[28] & 0xff | (k[29] & 0xff)<<8 | 
(k[30] & 0xff)<<16 | (k[31] & 0xff)<<24, + j15 = c[12] & 0xff | (c[13] & 0xff)<<8 | (c[14] & 0xff)<<16 | (c[15] & 0xff)<<24; + + var x0 = j0, x1 = j1, x2 = j2, x3 = j3, x4 = j4, x5 = j5, x6 = j6, x7 = j7, + x8 = j8, x9 = j9, x10 = j10, x11 = j11, x12 = j12, x13 = j13, x14 = j14, + x15 = j15, u; + + for (var i = 0; i < 20; i += 2) { + u = x0 + x12 | 0; + x4 ^= u<<7 | u>>>(32-7); + u = x4 + x0 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x4 | 0; + x12 ^= u<<13 | u>>>(32-13); + u = x12 + x8 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x1 | 0; + x9 ^= u<<7 | u>>>(32-7); + u = x9 + x5 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x9 | 0; + x1 ^= u<<13 | u>>>(32-13); + u = x1 + x13 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x6 | 0; + x14 ^= u<<7 | u>>>(32-7); + u = x14 + x10 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x14 | 0; + x6 ^= u<<13 | u>>>(32-13); + u = x6 + x2 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x11 | 0; + x3 ^= u<<7 | u>>>(32-7); + u = x3 + x15 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x3 | 0; + x11 ^= u<<13 | u>>>(32-13); + u = x11 + x7 | 0; + x15 ^= u<<18 | u>>>(32-18); + + u = x0 + x3 | 0; + x1 ^= u<<7 | u>>>(32-7); + u = x1 + x0 | 0; + x2 ^= u<<9 | u>>>(32-9); + u = x2 + x1 | 0; + x3 ^= u<<13 | u>>>(32-13); + u = x3 + x2 | 0; + x0 ^= u<<18 | u>>>(32-18); + + u = x5 + x4 | 0; + x6 ^= u<<7 | u>>>(32-7); + u = x6 + x5 | 0; + x7 ^= u<<9 | u>>>(32-9); + u = x7 + x6 | 0; + x4 ^= u<<13 | u>>>(32-13); + u = x4 + x7 | 0; + x5 ^= u<<18 | u>>>(32-18); + + u = x10 + x9 | 0; + x11 ^= u<<7 | u>>>(32-7); + u = x11 + x10 | 0; + x8 ^= u<<9 | u>>>(32-9); + u = x8 + x11 | 0; + x9 ^= u<<13 | u>>>(32-13); + u = x9 + x8 | 0; + x10 ^= u<<18 | u>>>(32-18); + + u = x15 + x14 | 0; + x12 ^= u<<7 | u>>>(32-7); + u = x12 + x15 | 0; + x13 ^= u<<9 | u>>>(32-9); + u = x13 + x12 | 0; + x14 ^= u<<13 | u>>>(32-13); + u = x14 + x13 | 0; + x15 ^= u<<18 | u>>>(32-18); + } + + o[ 0] = x0 >>> 0 & 0xff; + o[ 1] = x0 >>> 8 & 0xff; + o[ 2] = x0 >>> 16 & 0xff; + o[ 3] = x0 >>> 24 & 
0xff; + + o[ 4] = x5 >>> 0 & 0xff; + o[ 5] = x5 >>> 8 & 0xff; + o[ 6] = x5 >>> 16 & 0xff; + o[ 7] = x5 >>> 24 & 0xff; + + o[ 8] = x10 >>> 0 & 0xff; + o[ 9] = x10 >>> 8 & 0xff; + o[10] = x10 >>> 16 & 0xff; + o[11] = x10 >>> 24 & 0xff; + + o[12] = x15 >>> 0 & 0xff; + o[13] = x15 >>> 8 & 0xff; + o[14] = x15 >>> 16 & 0xff; + o[15] = x15 >>> 24 & 0xff; + + o[16] = x6 >>> 0 & 0xff; + o[17] = x6 >>> 8 & 0xff; + o[18] = x6 >>> 16 & 0xff; + o[19] = x6 >>> 24 & 0xff; + + o[20] = x7 >>> 0 & 0xff; + o[21] = x7 >>> 8 & 0xff; + o[22] = x7 >>> 16 & 0xff; + o[23] = x7 >>> 24 & 0xff; + + o[24] = x8 >>> 0 & 0xff; + o[25] = x8 >>> 8 & 0xff; + o[26] = x8 >>> 16 & 0xff; + o[27] = x8 >>> 24 & 0xff; + + o[28] = x9 >>> 0 & 0xff; + o[29] = x9 >>> 8 & 0xff; + o[30] = x9 >>> 16 & 0xff; + o[31] = x9 >>> 24 & 0xff; +} + +function crypto_core_salsa20(out,inp,k,c) { + core_salsa20(out,inp,k,c); +} + +function crypto_core_hsalsa20(out,inp,k,c) { + core_hsalsa20(out,inp,k,c); +} + +var sigma = new Uint8Array([101, 120, 112, 97, 110, 100, 32, 51, 50, 45, 98, 121, 116, 101, 32, 107]); + // "expand 32-byte k" + +function crypto_stream_salsa20_xor(c,cpos,m,mpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = m[mpos+i] ^ x[i]; + u = 1; + for (i = 8; i < 16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + mpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = m[mpos+i] ^ x[i]; + } + return 0; +} + +function crypto_stream_salsa20(c,cpos,b,n,k) { + var z = new Uint8Array(16), x = new Uint8Array(64); + var u, i; + for (i = 0; i < 16; i++) z[i] = 0; + for (i = 0; i < 8; i++) z[i] = n[i]; + while (b >= 64) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < 64; i++) c[cpos+i] = x[i]; + u = 1; + for (i = 8; i < 
16; i++) { + u = u + (z[i] & 0xff) | 0; + z[i] = u & 0xff; + u >>>= 8; + } + b -= 64; + cpos += 64; + } + if (b > 0) { + crypto_core_salsa20(x,z,k,sigma); + for (i = 0; i < b; i++) c[cpos+i] = x[i]; + } + return 0; +} + +function crypto_stream(c,cpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + var sn = new Uint8Array(8); + for (var i = 0; i < 8; i++) sn[i] = n[i+16]; + return crypto_stream_salsa20(c,cpos,d,sn,s); +} + +function crypto_stream_xor(c,cpos,m,mpos,d,n,k) { + var s = new Uint8Array(32); + crypto_core_hsalsa20(s,n,k,sigma); + var sn = new Uint8Array(8); + for (var i = 0; i < 8; i++) sn[i] = n[i+16]; + return crypto_stream_salsa20_xor(c,cpos,m,mpos,d,sn,s); +} + +/* +* Port of Andrew Moon's Poly1305-donna-16. Public domain. +* https://github.com/floodyberry/poly1305-donna +*/ + +var poly1305 = function(key) { + this.buffer = new Uint8Array(16); + this.r = new Uint16Array(10); + this.h = new Uint16Array(10); + this.pad = new Uint16Array(8); + this.leftover = 0; + this.fin = 0; + + var t0, t1, t2, t3, t4, t5, t6, t7; + + t0 = key[ 0] & 0xff | (key[ 1] & 0xff) << 8; this.r[0] = ( t0 ) & 0x1fff; + t1 = key[ 2] & 0xff | (key[ 3] & 0xff) << 8; this.r[1] = ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + t2 = key[ 4] & 0xff | (key[ 5] & 0xff) << 8; this.r[2] = ((t1 >>> 10) | (t2 << 6)) & 0x1f03; + t3 = key[ 6] & 0xff | (key[ 7] & 0xff) << 8; this.r[3] = ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + t4 = key[ 8] & 0xff | (key[ 9] & 0xff) << 8; this.r[4] = ((t3 >>> 4) | (t4 << 12)) & 0x00ff; + this.r[5] = ((t4 >>> 1)) & 0x1ffe; + t5 = key[10] & 0xff | (key[11] & 0xff) << 8; this.r[6] = ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + t6 = key[12] & 0xff | (key[13] & 0xff) << 8; this.r[7] = ((t5 >>> 11) | (t6 << 5)) & 0x1f81; + t7 = key[14] & 0xff | (key[15] & 0xff) << 8; this.r[8] = ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + this.r[9] = ((t7 >>> 5)) & 0x007f; + + this.pad[0] = key[16] & 0xff | (key[17] & 0xff) << 8; + this.pad[1] = key[18] & 0xff | (key[19] & 0xff) << 
8; + this.pad[2] = key[20] & 0xff | (key[21] & 0xff) << 8; + this.pad[3] = key[22] & 0xff | (key[23] & 0xff) << 8; + this.pad[4] = key[24] & 0xff | (key[25] & 0xff) << 8; + this.pad[5] = key[26] & 0xff | (key[27] & 0xff) << 8; + this.pad[6] = key[28] & 0xff | (key[29] & 0xff) << 8; + this.pad[7] = key[30] & 0xff | (key[31] & 0xff) << 8; +}; + +poly1305.prototype.blocks = function(m, mpos, bytes) { + var hibit = this.fin ? 0 : (1 << 11); + var t0, t1, t2, t3, t4, t5, t6, t7, c; + var d0, d1, d2, d3, d4, d5, d6, d7, d8, d9; + + var h0 = this.h[0], + h1 = this.h[1], + h2 = this.h[2], + h3 = this.h[3], + h4 = this.h[4], + h5 = this.h[5], + h6 = this.h[6], + h7 = this.h[7], + h8 = this.h[8], + h9 = this.h[9]; + + var r0 = this.r[0], + r1 = this.r[1], + r2 = this.r[2], + r3 = this.r[3], + r4 = this.r[4], + r5 = this.r[5], + r6 = this.r[6], + r7 = this.r[7], + r8 = this.r[8], + r9 = this.r[9]; + + while (bytes >= 16) { + t0 = m[mpos+ 0] & 0xff | (m[mpos+ 1] & 0xff) << 8; h0 += ( t0 ) & 0x1fff; + t1 = m[mpos+ 2] & 0xff | (m[mpos+ 3] & 0xff) << 8; h1 += ((t0 >>> 13) | (t1 << 3)) & 0x1fff; + t2 = m[mpos+ 4] & 0xff | (m[mpos+ 5] & 0xff) << 8; h2 += ((t1 >>> 10) | (t2 << 6)) & 0x1fff; + t3 = m[mpos+ 6] & 0xff | (m[mpos+ 7] & 0xff) << 8; h3 += ((t2 >>> 7) | (t3 << 9)) & 0x1fff; + t4 = m[mpos+ 8] & 0xff | (m[mpos+ 9] & 0xff) << 8; h4 += ((t3 >>> 4) | (t4 << 12)) & 0x1fff; + h5 += ((t4 >>> 1)) & 0x1fff; + t5 = m[mpos+10] & 0xff | (m[mpos+11] & 0xff) << 8; h6 += ((t4 >>> 14) | (t5 << 2)) & 0x1fff; + t6 = m[mpos+12] & 0xff | (m[mpos+13] & 0xff) << 8; h7 += ((t5 >>> 11) | (t6 << 5)) & 0x1fff; + t7 = m[mpos+14] & 0xff | (m[mpos+15] & 0xff) << 8; h8 += ((t6 >>> 8) | (t7 << 8)) & 0x1fff; + h9 += ((t7 >>> 5)) | hibit; + + c = 0; + + d0 = c; + d0 += h0 * r0; + d0 += h1 * (5 * r9); + d0 += h2 * (5 * r8); + d0 += h3 * (5 * r7); + d0 += h4 * (5 * r6); + c = (d0 >>> 13); d0 &= 0x1fff; + d0 += h5 * (5 * r5); + d0 += h6 * (5 * r4); + d0 += h7 * (5 * r3); + d0 += h8 * (5 * r2); + d0 += h9 * (5 
* r1); + c += (d0 >>> 13); d0 &= 0x1fff; + + d1 = c; + d1 += h0 * r1; + d1 += h1 * r0; + d1 += h2 * (5 * r9); + d1 += h3 * (5 * r8); + d1 += h4 * (5 * r7); + c = (d1 >>> 13); d1 &= 0x1fff; + d1 += h5 * (5 * r6); + d1 += h6 * (5 * r5); + d1 += h7 * (5 * r4); + d1 += h8 * (5 * r3); + d1 += h9 * (5 * r2); + c += (d1 >>> 13); d1 &= 0x1fff; + + d2 = c; + d2 += h0 * r2; + d2 += h1 * r1; + d2 += h2 * r0; + d2 += h3 * (5 * r9); + d2 += h4 * (5 * r8); + c = (d2 >>> 13); d2 &= 0x1fff; + d2 += h5 * (5 * r7); + d2 += h6 * (5 * r6); + d2 += h7 * (5 * r5); + d2 += h8 * (5 * r4); + d2 += h9 * (5 * r3); + c += (d2 >>> 13); d2 &= 0x1fff; + + d3 = c; + d3 += h0 * r3; + d3 += h1 * r2; + d3 += h2 * r1; + d3 += h3 * r0; + d3 += h4 * (5 * r9); + c = (d3 >>> 13); d3 &= 0x1fff; + d3 += h5 * (5 * r8); + d3 += h6 * (5 * r7); + d3 += h7 * (5 * r6); + d3 += h8 * (5 * r5); + d3 += h9 * (5 * r4); + c += (d3 >>> 13); d3 &= 0x1fff; + + d4 = c; + d4 += h0 * r4; + d4 += h1 * r3; + d4 += h2 * r2; + d4 += h3 * r1; + d4 += h4 * r0; + c = (d4 >>> 13); d4 &= 0x1fff; + d4 += h5 * (5 * r9); + d4 += h6 * (5 * r8); + d4 += h7 * (5 * r7); + d4 += h8 * (5 * r6); + d4 += h9 * (5 * r5); + c += (d4 >>> 13); d4 &= 0x1fff; + + d5 = c; + d5 += h0 * r5; + d5 += h1 * r4; + d5 += h2 * r3; + d5 += h3 * r2; + d5 += h4 * r1; + c = (d5 >>> 13); d5 &= 0x1fff; + d5 += h5 * r0; + d5 += h6 * (5 * r9); + d5 += h7 * (5 * r8); + d5 += h8 * (5 * r7); + d5 += h9 * (5 * r6); + c += (d5 >>> 13); d5 &= 0x1fff; + + d6 = c; + d6 += h0 * r6; + d6 += h1 * r5; + d6 += h2 * r4; + d6 += h3 * r3; + d6 += h4 * r2; + c = (d6 >>> 13); d6 &= 0x1fff; + d6 += h5 * r1; + d6 += h6 * r0; + d6 += h7 * (5 * r9); + d6 += h8 * (5 * r8); + d6 += h9 * (5 * r7); + c += (d6 >>> 13); d6 &= 0x1fff; + + d7 = c; + d7 += h0 * r7; + d7 += h1 * r6; + d7 += h2 * r5; + d7 += h3 * r4; + d7 += h4 * r3; + c = (d7 >>> 13); d7 &= 0x1fff; + d7 += h5 * r2; + d7 += h6 * r1; + d7 += h7 * r0; + d7 += h8 * (5 * r9); + d7 += h9 * (5 * r8); + c += (d7 >>> 13); d7 &= 0x1fff; + + 
d8 = c; + d8 += h0 * r8; + d8 += h1 * r7; + d8 += h2 * r6; + d8 += h3 * r5; + d8 += h4 * r4; + c = (d8 >>> 13); d8 &= 0x1fff; + d8 += h5 * r3; + d8 += h6 * r2; + d8 += h7 * r1; + d8 += h8 * r0; + d8 += h9 * (5 * r9); + c += (d8 >>> 13); d8 &= 0x1fff; + + d9 = c; + d9 += h0 * r9; + d9 += h1 * r8; + d9 += h2 * r7; + d9 += h3 * r6; + d9 += h4 * r5; + c = (d9 >>> 13); d9 &= 0x1fff; + d9 += h5 * r4; + d9 += h6 * r3; + d9 += h7 * r2; + d9 += h8 * r1; + d9 += h9 * r0; + c += (d9 >>> 13); d9 &= 0x1fff; + + c = (((c << 2) + c)) | 0; + c = (c + d0) | 0; + d0 = c & 0x1fff; + c = (c >>> 13); + d1 += c; + + h0 = d0; + h1 = d1; + h2 = d2; + h3 = d3; + h4 = d4; + h5 = d5; + h6 = d6; + h7 = d7; + h8 = d8; + h9 = d9; + + mpos += 16; + bytes -= 16; + } + this.h[0] = h0; + this.h[1] = h1; + this.h[2] = h2; + this.h[3] = h3; + this.h[4] = h4; + this.h[5] = h5; + this.h[6] = h6; + this.h[7] = h7; + this.h[8] = h8; + this.h[9] = h9; +}; + +poly1305.prototype.finish = function(mac, macpos) { + var g = new Uint16Array(10); + var c, mask, f, i; + + if (this.leftover) { + i = this.leftover; + this.buffer[i++] = 1; + for (; i < 16; i++) this.buffer[i] = 0; + this.fin = 1; + this.blocks(this.buffer, 0, 16); + } + + c = this.h[1] >>> 13; + this.h[1] &= 0x1fff; + for (i = 2; i < 10; i++) { + this.h[i] += c; + c = this.h[i] >>> 13; + this.h[i] &= 0x1fff; + } + this.h[0] += (c * 5); + c = this.h[0] >>> 13; + this.h[0] &= 0x1fff; + this.h[1] += c; + c = this.h[1] >>> 13; + this.h[1] &= 0x1fff; + this.h[2] += c; + + g[0] = this.h[0] + 5; + c = g[0] >>> 13; + g[0] &= 0x1fff; + for (i = 1; i < 10; i++) { + g[i] = this.h[i] + c; + c = g[i] >>> 13; + g[i] &= 0x1fff; + } + g[9] -= (1 << 13); + + mask = (c ^ 1) - 1; + for (i = 0; i < 10; i++) g[i] &= mask; + mask = ~mask; + for (i = 0; i < 10; i++) this.h[i] = (this.h[i] & mask) | g[i]; + + this.h[0] = ((this.h[0] ) | (this.h[1] << 13) ) & 0xffff; + this.h[1] = ((this.h[1] >>> 3) | (this.h[2] << 10) ) & 0xffff; + this.h[2] = ((this.h[2] >>> 6) | 
(this.h[3] << 7) ) & 0xffff; + this.h[3] = ((this.h[3] >>> 9) | (this.h[4] << 4) ) & 0xffff; + this.h[4] = ((this.h[4] >>> 12) | (this.h[5] << 1) | (this.h[6] << 14)) & 0xffff; + this.h[5] = ((this.h[6] >>> 2) | (this.h[7] << 11) ) & 0xffff; + this.h[6] = ((this.h[7] >>> 5) | (this.h[8] << 8) ) & 0xffff; + this.h[7] = ((this.h[8] >>> 8) | (this.h[9] << 5) ) & 0xffff; + + f = this.h[0] + this.pad[0]; + this.h[0] = f & 0xffff; + for (i = 1; i < 8; i++) { + f = (((this.h[i] + this.pad[i]) | 0) + (f >>> 16)) | 0; + this.h[i] = f & 0xffff; + } + + mac[macpos+ 0] = (this.h[0] >>> 0) & 0xff; + mac[macpos+ 1] = (this.h[0] >>> 8) & 0xff; + mac[macpos+ 2] = (this.h[1] >>> 0) & 0xff; + mac[macpos+ 3] = (this.h[1] >>> 8) & 0xff; + mac[macpos+ 4] = (this.h[2] >>> 0) & 0xff; + mac[macpos+ 5] = (this.h[2] >>> 8) & 0xff; + mac[macpos+ 6] = (this.h[3] >>> 0) & 0xff; + mac[macpos+ 7] = (this.h[3] >>> 8) & 0xff; + mac[macpos+ 8] = (this.h[4] >>> 0) & 0xff; + mac[macpos+ 9] = (this.h[4] >>> 8) & 0xff; + mac[macpos+10] = (this.h[5] >>> 0) & 0xff; + mac[macpos+11] = (this.h[5] >>> 8) & 0xff; + mac[macpos+12] = (this.h[6] >>> 0) & 0xff; + mac[macpos+13] = (this.h[6] >>> 8) & 0xff; + mac[macpos+14] = (this.h[7] >>> 0) & 0xff; + mac[macpos+15] = (this.h[7] >>> 8) & 0xff; +}; + +poly1305.prototype.update = function(m, mpos, bytes) { + var i, want; + + if (this.leftover) { + want = (16 - this.leftover); + if (want > bytes) + want = bytes; + for (i = 0; i < want; i++) + this.buffer[this.leftover + i] = m[mpos+i]; + bytes -= want; + mpos += want; + this.leftover += want; + if (this.leftover < 16) + return; + this.blocks(this.buffer, 0, 16); + this.leftover = 0; + } + + if (bytes >= 16) { + want = bytes - (bytes % 16); + this.blocks(m, mpos, want); + mpos += want; + bytes -= want; + } + + if (bytes) { + for (i = 0; i < bytes; i++) + this.buffer[this.leftover + i] = m[mpos+i]; + this.leftover += bytes; + } +}; + +function crypto_onetimeauth(out, outpos, m, mpos, n, k) { + var s = new 
poly1305(k); + s.update(m, mpos, n); + s.finish(out, outpos); + return 0; +} + +function crypto_onetimeauth_verify(h, hpos, m, mpos, n, k) { + var x = new Uint8Array(16); + crypto_onetimeauth(x,0,m,mpos,n,k); + return crypto_verify_16(h,hpos,x,0); +} + +function crypto_secretbox(c,m,d,n,k) { + var i; + if (d < 32) return -1; + crypto_stream_xor(c,0,m,0,d,n,k); + crypto_onetimeauth(c, 16, c, 32, d - 32, c); + for (i = 0; i < 16; i++) c[i] = 0; + return 0; +} + +function crypto_secretbox_open(m,c,d,n,k) { + var i; + var x = new Uint8Array(32); + if (d < 32) return -1; + crypto_stream(x,0,32,n,k); + if (crypto_onetimeauth_verify(c, 16,c, 32,d - 32,x) !== 0) return -1; + crypto_stream_xor(m,0,c,0,d,n,k); + for (i = 0; i < 32; i++) m[i] = 0; + return 0; +} + +function set25519(r, a) { + var i; + for (i = 0; i < 16; i++) r[i] = a[i]|0; +} + +function car25519(o) { + var i, v, c = 1; + for (i = 0; i < 16; i++) { + v = o[i] + c + 65535; + c = Math.floor(v / 65536); + o[i] = v - c * 65536; + } + o[0] += c-1 + 37 * (c-1); +} + +function sel25519(p, q, b) { + var t, c = ~(b-1); + for (var i = 0; i < 16; i++) { + t = c & (p[i] ^ q[i]); + p[i] ^= t; + q[i] ^= t; + } +} + +function pack25519(o, n) { + var i, j, b; + var m = gf(), t = gf(); + for (i = 0; i < 16; i++) t[i] = n[i]; + car25519(t); + car25519(t); + car25519(t); + for (j = 0; j < 2; j++) { + m[0] = t[0] - 0xffed; + for (i = 1; i < 15; i++) { + m[i] = t[i] - 0xffff - ((m[i-1]>>16) & 1); + m[i-1] &= 0xffff; + } + m[15] = t[15] - 0x7fff - ((m[14]>>16) & 1); + b = (m[15]>>16) & 1; + m[14] &= 0xffff; + sel25519(t, m, 1-b); + } + for (i = 0; i < 16; i++) { + o[2*i] = t[i] & 0xff; + o[2*i+1] = t[i]>>8; + } +} + +function neq25519(a, b) { + var c = new Uint8Array(32), d = new Uint8Array(32); + pack25519(c, a); + pack25519(d, b); + return crypto_verify_32(c, 0, d, 0); +} + +function par25519(a) { + var d = new Uint8Array(32); + pack25519(d, a); + return d[0] & 1; +} + +function unpack25519(o, n) { + var i; + for (i = 0; i < 
16; i++) o[i] = n[2*i] + (n[2*i+1] << 8); + o[15] &= 0x7fff; +} + +function A(o, a, b) { + for (var i = 0; i < 16; i++) o[i] = a[i] + b[i]; +} + +function Z(o, a, b) { + for (var i = 0; i < 16; i++) o[i] = a[i] - b[i]; +} + +function M(o, a, b) { + var v, c, + t0 = 0, t1 = 0, t2 = 0, t3 = 0, t4 = 0, t5 = 0, t6 = 0, t7 = 0, + t8 = 0, t9 = 0, t10 = 0, t11 = 0, t12 = 0, t13 = 0, t14 = 0, t15 = 0, + t16 = 0, t17 = 0, t18 = 0, t19 = 0, t20 = 0, t21 = 0, t22 = 0, t23 = 0, + t24 = 0, t25 = 0, t26 = 0, t27 = 0, t28 = 0, t29 = 0, t30 = 0, + b0 = b[0], + b1 = b[1], + b2 = b[2], + b3 = b[3], + b4 = b[4], + b5 = b[5], + b6 = b[6], + b7 = b[7], + b8 = b[8], + b9 = b[9], + b10 = b[10], + b11 = b[11], + b12 = b[12], + b13 = b[13], + b14 = b[14], + b15 = b[15]; + + v = a[0]; + t0 += v * b0; + t1 += v * b1; + t2 += v * b2; + t3 += v * b3; + t4 += v * b4; + t5 += v * b5; + t6 += v * b6; + t7 += v * b7; + t8 += v * b8; + t9 += v * b9; + t10 += v * b10; + t11 += v * b11; + t12 += v * b12; + t13 += v * b13; + t14 += v * b14; + t15 += v * b15; + v = a[1]; + t1 += v * b0; + t2 += v * b1; + t3 += v * b2; + t4 += v * b3; + t5 += v * b4; + t6 += v * b5; + t7 += v * b6; + t8 += v * b7; + t9 += v * b8; + t10 += v * b9; + t11 += v * b10; + t12 += v * b11; + t13 += v * b12; + t14 += v * b13; + t15 += v * b14; + t16 += v * b15; + v = a[2]; + t2 += v * b0; + t3 += v * b1; + t4 += v * b2; + t5 += v * b3; + t6 += v * b4; + t7 += v * b5; + t8 += v * b6; + t9 += v * b7; + t10 += v * b8; + t11 += v * b9; + t12 += v * b10; + t13 += v * b11; + t14 += v * b12; + t15 += v * b13; + t16 += v * b14; + t17 += v * b15; + v = a[3]; + t3 += v * b0; + t4 += v * b1; + t5 += v * b2; + t6 += v * b3; + t7 += v * b4; + t8 += v * b5; + t9 += v * b6; + t10 += v * b7; + t11 += v * b8; + t12 += v * b9; + t13 += v * b10; + t14 += v * b11; + t15 += v * b12; + t16 += v * b13; + t17 += v * b14; + t18 += v * b15; + v = a[4]; + t4 += v * b0; + t5 += v * b1; + t6 += v * b2; + t7 += v * b3; + t8 += v * b4; + t9 += v * b5; + t10 
+= v * b6; + t11 += v * b7; + t12 += v * b8; + t13 += v * b9; + t14 += v * b10; + t15 += v * b11; + t16 += v * b12; + t17 += v * b13; + t18 += v * b14; + t19 += v * b15; + v = a[5]; + t5 += v * b0; + t6 += v * b1; + t7 += v * b2; + t8 += v * b3; + t9 += v * b4; + t10 += v * b5; + t11 += v * b6; + t12 += v * b7; + t13 += v * b8; + t14 += v * b9; + t15 += v * b10; + t16 += v * b11; + t17 += v * b12; + t18 += v * b13; + t19 += v * b14; + t20 += v * b15; + v = a[6]; + t6 += v * b0; + t7 += v * b1; + t8 += v * b2; + t9 += v * b3; + t10 += v * b4; + t11 += v * b5; + t12 += v * b6; + t13 += v * b7; + t14 += v * b8; + t15 += v * b9; + t16 += v * b10; + t17 += v * b11; + t18 += v * b12; + t19 += v * b13; + t20 += v * b14; + t21 += v * b15; + v = a[7]; + t7 += v * b0; + t8 += v * b1; + t9 += v * b2; + t10 += v * b3; + t11 += v * b4; + t12 += v * b5; + t13 += v * b6; + t14 += v * b7; + t15 += v * b8; + t16 += v * b9; + t17 += v * b10; + t18 += v * b11; + t19 += v * b12; + t20 += v * b13; + t21 += v * b14; + t22 += v * b15; + v = a[8]; + t8 += v * b0; + t9 += v * b1; + t10 += v * b2; + t11 += v * b3; + t12 += v * b4; + t13 += v * b5; + t14 += v * b6; + t15 += v * b7; + t16 += v * b8; + t17 += v * b9; + t18 += v * b10; + t19 += v * b11; + t20 += v * b12; + t21 += v * b13; + t22 += v * b14; + t23 += v * b15; + v = a[9]; + t9 += v * b0; + t10 += v * b1; + t11 += v * b2; + t12 += v * b3; + t13 += v * b4; + t14 += v * b5; + t15 += v * b6; + t16 += v * b7; + t17 += v * b8; + t18 += v * b9; + t19 += v * b10; + t20 += v * b11; + t21 += v * b12; + t22 += v * b13; + t23 += v * b14; + t24 += v * b15; + v = a[10]; + t10 += v * b0; + t11 += v * b1; + t12 += v * b2; + t13 += v * b3; + t14 += v * b4; + t15 += v * b5; + t16 += v * b6; + t17 += v * b7; + t18 += v * b8; + t19 += v * b9; + t20 += v * b10; + t21 += v * b11; + t22 += v * b12; + t23 += v * b13; + t24 += v * b14; + t25 += v * b15; + v = a[11]; + t11 += v * b0; + t12 += v * b1; + t13 += v * b2; + t14 += v * b3; + t15 += v * b4; + t16 
+= v * b5; + t17 += v * b6; + t18 += v * b7; + t19 += v * b8; + t20 += v * b9; + t21 += v * b10; + t22 += v * b11; + t23 += v * b12; + t24 += v * b13; + t25 += v * b14; + t26 += v * b15; + v = a[12]; + t12 += v * b0; + t13 += v * b1; + t14 += v * b2; + t15 += v * b3; + t16 += v * b4; + t17 += v * b5; + t18 += v * b6; + t19 += v * b7; + t20 += v * b8; + t21 += v * b9; + t22 += v * b10; + t23 += v * b11; + t24 += v * b12; + t25 += v * b13; + t26 += v * b14; + t27 += v * b15; + v = a[13]; + t13 += v * b0; + t14 += v * b1; + t15 += v * b2; + t16 += v * b3; + t17 += v * b4; + t18 += v * b5; + t19 += v * b6; + t20 += v * b7; + t21 += v * b8; + t22 += v * b9; + t23 += v * b10; + t24 += v * b11; + t25 += v * b12; + t26 += v * b13; + t27 += v * b14; + t28 += v * b15; + v = a[14]; + t14 += v * b0; + t15 += v * b1; + t16 += v * b2; + t17 += v * b3; + t18 += v * b4; + t19 += v * b5; + t20 += v * b6; + t21 += v * b7; + t22 += v * b8; + t23 += v * b9; + t24 += v * b10; + t25 += v * b11; + t26 += v * b12; + t27 += v * b13; + t28 += v * b14; + t29 += v * b15; + v = a[15]; + t15 += v * b0; + t16 += v * b1; + t17 += v * b2; + t18 += v * b3; + t19 += v * b4; + t20 += v * b5; + t21 += v * b6; + t22 += v * b7; + t23 += v * b8; + t24 += v * b9; + t25 += v * b10; + t26 += v * b11; + t27 += v * b12; + t28 += v * b13; + t29 += v * b14; + t30 += v * b15; + + t0 += 38 * t16; + t1 += 38 * t17; + t2 += 38 * t18; + t3 += 38 * t19; + t4 += 38 * t20; + t5 += 38 * t21; + t6 += 38 * t22; + t7 += 38 * t23; + t8 += 38 * t24; + t9 += 38 * t25; + t10 += 38 * t26; + t11 += 38 * t27; + t12 += 38 * t28; + t13 += 38 * t29; + t14 += 38 * t30; + // t15 left as is + + // first car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); 
t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + // second car + c = 1; + v = t0 + c + 65535; c = Math.floor(v / 65536); t0 = v - c * 65536; + v = t1 + c + 65535; c = Math.floor(v / 65536); t1 = v - c * 65536; + v = t2 + c + 65535; c = Math.floor(v / 65536); t2 = v - c * 65536; + v = t3 + c + 65535; c = Math.floor(v / 65536); t3 = v - c * 65536; + v = t4 + c + 65535; c = Math.floor(v / 65536); t4 = v - c * 65536; + v = t5 + c + 65535; c = Math.floor(v / 65536); t5 = v - c * 65536; + v = t6 + c + 65535; c = Math.floor(v / 65536); t6 = v - c * 65536; + v = t7 + c + 65535; c = Math.floor(v / 65536); t7 = v - c * 65536; + v = t8 + c + 65535; c = Math.floor(v / 65536); t8 = v - c * 65536; + v = t9 + c + 65535; c = Math.floor(v / 65536); t9 = v - c * 65536; + v = t10 + c + 65535; c = Math.floor(v / 65536); t10 = v - c * 65536; + v = t11 + c + 65535; c = Math.floor(v / 65536); t11 = v - c * 65536; + v = t12 + c + 65535; c = Math.floor(v / 65536); t12 = v - c * 65536; + v = t13 + c + 65535; c = Math.floor(v / 65536); t13 = v - c * 65536; + v = t14 + c + 65535; c = Math.floor(v / 65536); t14 = v - c * 65536; + v = t15 + c + 65535; c = Math.floor(v / 65536); t15 = v - c * 65536; + t0 += c-1 + 37 * (c-1); + + o[ 0] = 
t0; + o[ 1] = t1; + o[ 2] = t2; + o[ 3] = t3; + o[ 4] = t4; + o[ 5] = t5; + o[ 6] = t6; + o[ 7] = t7; + o[ 8] = t8; + o[ 9] = t9; + o[10] = t10; + o[11] = t11; + o[12] = t12; + o[13] = t13; + o[14] = t14; + o[15] = t15; +} + +function S(o, a) { + M(o, a, a); +} + +function inv25519(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 253; a >= 0; a--) { + S(c, c); + if(a !== 2 && a !== 4) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function pow2523(o, i) { + var c = gf(); + var a; + for (a = 0; a < 16; a++) c[a] = i[a]; + for (a = 250; a >= 0; a--) { + S(c, c); + if(a !== 1) M(c, c, i); + } + for (a = 0; a < 16; a++) o[a] = c[a]; +} + +function crypto_scalarmult(q, n, p) { + var z = new Uint8Array(32); + var x = new Float64Array(80), r, i; + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(); + for (i = 0; i < 31; i++) z[i] = n[i]; + z[31]=(n[31]&127)|64; + z[0]&=248; + unpack25519(x,p); + for (i = 0; i < 16; i++) { + b[i]=x[i]; + d[i]=a[i]=c[i]=0; + } + a[0]=d[0]=1; + for (i=254; i>=0; --i) { + r=(z[i>>>3]>>>(i&7))&1; + sel25519(a,b,r); + sel25519(c,d,r); + A(e,a,c); + Z(a,a,c); + A(c,b,d); + Z(b,b,d); + S(d,e); + S(f,a); + M(a,c,a); + M(c,b,e); + A(e,a,c); + Z(a,a,c); + S(b,a); + Z(c,d,f); + M(a,c,_121665); + A(a,a,d); + M(c,c,a); + M(a,d,f); + M(d,b,x); + S(b,e); + sel25519(a,b,r); + sel25519(c,d,r); + } + for (i = 0; i < 16; i++) { + x[i+16]=a[i]; + x[i+32]=c[i]; + x[i+48]=b[i]; + x[i+64]=d[i]; + } + var x32 = x.subarray(32); + var x16 = x.subarray(16); + inv25519(x32,x32); + M(x16,x16,x32); + pack25519(q,x16); + return 0; +} + +function crypto_scalarmult_base(q, n) { + return crypto_scalarmult(q, n, _9); +} + +function crypto_box_keypair(y, x) { + randombytes(x, 32); + return crypto_scalarmult_base(y, x); +} + +function crypto_box_beforenm(k, y, x) { + var s = new Uint8Array(32); + crypto_scalarmult(s, x, y); + return crypto_core_hsalsa20(k, _0, s, sigma); +} + +var crypto_box_afternm = 
crypto_secretbox; +var crypto_box_open_afternm = crypto_secretbox_open; + +function crypto_box(c, m, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_afternm(c, m, d, n, k); +} + +function crypto_box_open(m, c, d, n, y, x) { + var k = new Uint8Array(32); + crypto_box_beforenm(k, y, x); + return crypto_box_open_afternm(m, c, d, n, k); +} + +var K = [ + 0x428a2f98, 0xd728ae22, 0x71374491, 0x23ef65cd, + 0xb5c0fbcf, 0xec4d3b2f, 0xe9b5dba5, 0x8189dbbc, + 0x3956c25b, 0xf348b538, 0x59f111f1, 0xb605d019, + 0x923f82a4, 0xaf194f9b, 0xab1c5ed5, 0xda6d8118, + 0xd807aa98, 0xa3030242, 0x12835b01, 0x45706fbe, + 0x243185be, 0x4ee4b28c, 0x550c7dc3, 0xd5ffb4e2, + 0x72be5d74, 0xf27b896f, 0x80deb1fe, 0x3b1696b1, + 0x9bdc06a7, 0x25c71235, 0xc19bf174, 0xcf692694, + 0xe49b69c1, 0x9ef14ad2, 0xefbe4786, 0x384f25e3, + 0x0fc19dc6, 0x8b8cd5b5, 0x240ca1cc, 0x77ac9c65, + 0x2de92c6f, 0x592b0275, 0x4a7484aa, 0x6ea6e483, + 0x5cb0a9dc, 0xbd41fbd4, 0x76f988da, 0x831153b5, + 0x983e5152, 0xee66dfab, 0xa831c66d, 0x2db43210, + 0xb00327c8, 0x98fb213f, 0xbf597fc7, 0xbeef0ee4, + 0xc6e00bf3, 0x3da88fc2, 0xd5a79147, 0x930aa725, + 0x06ca6351, 0xe003826f, 0x14292967, 0x0a0e6e70, + 0x27b70a85, 0x46d22ffc, 0x2e1b2138, 0x5c26c926, + 0x4d2c6dfc, 0x5ac42aed, 0x53380d13, 0x9d95b3df, + 0x650a7354, 0x8baf63de, 0x766a0abb, 0x3c77b2a8, + 0x81c2c92e, 0x47edaee6, 0x92722c85, 0x1482353b, + 0xa2bfe8a1, 0x4cf10364, 0xa81a664b, 0xbc423001, + 0xc24b8b70, 0xd0f89791, 0xc76c51a3, 0x0654be30, + 0xd192e819, 0xd6ef5218, 0xd6990624, 0x5565a910, + 0xf40e3585, 0x5771202a, 0x106aa070, 0x32bbd1b8, + 0x19a4c116, 0xb8d2d0c8, 0x1e376c08, 0x5141ab53, + 0x2748774c, 0xdf8eeb99, 0x34b0bcb5, 0xe19b48a8, + 0x391c0cb3, 0xc5c95a63, 0x4ed8aa4a, 0xe3418acb, + 0x5b9cca4f, 0x7763e373, 0x682e6ff3, 0xd6b2b8a3, + 0x748f82ee, 0x5defb2fc, 0x78a5636f, 0x43172f60, + 0x84c87814, 0xa1f0ab72, 0x8cc70208, 0x1a6439ec, + 0x90befffa, 0x23631e28, 0xa4506ceb, 0xde82bde9, + 0xbef9a3f7, 0xb2c67915, 0xc67178f2, 0xe372532b, 
+ 0xca273ece, 0xea26619c, 0xd186b8c7, 0x21c0c207, + 0xeada7dd6, 0xcde0eb1e, 0xf57d4f7f, 0xee6ed178, + 0x06f067aa, 0x72176fba, 0x0a637dc5, 0xa2c898a6, + 0x113f9804, 0xbef90dae, 0x1b710b35, 0x131c471b, + 0x28db77f5, 0x23047d84, 0x32caab7b, 0x40c72493, + 0x3c9ebe0a, 0x15c9bebc, 0x431d67c4, 0x9c100d4c, + 0x4cc5d4be, 0xcb3e42b6, 0x597f299c, 0xfc657e2a, + 0x5fcb6fab, 0x3ad6faec, 0x6c44198c, 0x4a475817 +]; + +function crypto_hashblocks_hl(hh, hl, m, n) { + var wh = new Int32Array(16), wl = new Int32Array(16), + bh0, bh1, bh2, bh3, bh4, bh5, bh6, bh7, + bl0, bl1, bl2, bl3, bl4, bl5, bl6, bl7, + th, tl, i, j, h, l, a, b, c, d; + + var ah0 = hh[0], + ah1 = hh[1], + ah2 = hh[2], + ah3 = hh[3], + ah4 = hh[4], + ah5 = hh[5], + ah6 = hh[6], + ah7 = hh[7], + + al0 = hl[0], + al1 = hl[1], + al2 = hl[2], + al3 = hl[3], + al4 = hl[4], + al5 = hl[5], + al6 = hl[6], + al7 = hl[7]; + + var pos = 0; + while (n >= 128) { + for (i = 0; i < 16; i++) { + j = 8 * i + pos; + wh[i] = (m[j+0] << 24) | (m[j+1] << 16) | (m[j+2] << 8) | m[j+3]; + wl[i] = (m[j+4] << 24) | (m[j+5] << 16) | (m[j+6] << 8) | m[j+7]; + } + for (i = 0; i < 80; i++) { + bh0 = ah0; + bh1 = ah1; + bh2 = ah2; + bh3 = ah3; + bh4 = ah4; + bh5 = ah5; + bh6 = ah6; + bh7 = ah7; + + bl0 = al0; + bl1 = al1; + bl2 = al2; + bl3 = al3; + bl4 = al4; + bl5 = al5; + bl6 = al6; + bl7 = al7; + + // add + h = ah7; + l = al7; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + // Sigma1 + h = ((ah4 >>> 14) | (al4 << (32-14))) ^ ((ah4 >>> 18) | (al4 << (32-18))) ^ ((al4 >>> (41-32)) | (ah4 << (32-(41-32)))); + l = ((al4 >>> 14) | (ah4 << (32-14))) ^ ((al4 >>> 18) | (ah4 << (32-18))) ^ ((ah4 >>> (41-32)) | (al4 << (32-(41-32)))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // Ch + h = (ah4 & ah5) ^ (~ah4 & ah6); + l = (al4 & al5) ^ (~al4 & al6); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // K + h = K[i*2]; + l = K[i*2+1]; + + a += l & 0xffff; b += l >>> 16; + c += h 
& 0xffff; d += h >>> 16; + + // w + h = wh[i%16]; + l = wl[i%16]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + th = c & 0xffff | d << 16; + tl = a & 0xffff | b << 16; + + // add + h = th; + l = tl; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + // Sigma0 + h = ((ah0 >>> 28) | (al0 << (32-28))) ^ ((al0 >>> (34-32)) | (ah0 << (32-(34-32)))) ^ ((al0 >>> (39-32)) | (ah0 << (32-(39-32)))); + l = ((al0 >>> 28) | (ah0 << (32-28))) ^ ((ah0 >>> (34-32)) | (al0 << (32-(34-32)))) ^ ((ah0 >>> (39-32)) | (al0 << (32-(39-32)))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // Maj + h = (ah0 & ah1) ^ (ah0 & ah2) ^ (ah1 & ah2); + l = (al0 & al1) ^ (al0 & al2) ^ (al1 & al2); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + bh7 = (c & 0xffff) | (d << 16); + bl7 = (a & 0xffff) | (b << 16); + + // add + h = bh3; + l = bl3; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = th; + l = tl; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + bh3 = (c & 0xffff) | (d << 16); + bl3 = (a & 0xffff) | (b << 16); + + ah1 = bh0; + ah2 = bh1; + ah3 = bh2; + ah4 = bh3; + ah5 = bh4; + ah6 = bh5; + ah7 = bh6; + ah0 = bh7; + + al1 = bl0; + al2 = bl1; + al3 = bl2; + al4 = bl3; + al5 = bl4; + al6 = bl5; + al7 = bl6; + al0 = bl7; + + if (i%16 === 15) { + for (j = 0; j < 16; j++) { + // add + h = wh[j]; + l = wl[j]; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = wh[(j+9)%16]; + l = wl[(j+9)%16]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // sigma0 + th = wh[(j+1)%16]; + tl = wl[(j+1)%16]; + h = ((th >>> 1) | (tl << (32-1))) ^ ((th >>> 8) | (tl << (32-8))) ^ (th >>> 7); + l = ((tl >>> 1) | (th << (32-1))) ^ ((tl >>> 8) | (th << (32-8))) ^ ((tl 
>>> 7) | (th << (32-7))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + // sigma1 + th = wh[(j+14)%16]; + tl = wl[(j+14)%16]; + h = ((th >>> 19) | (tl << (32-19))) ^ ((tl >>> (61-32)) | (th << (32-(61-32)))) ^ (th >>> 6); + l = ((tl >>> 19) | (th << (32-19))) ^ ((th >>> (61-32)) | (tl << (32-(61-32)))) ^ ((tl >>> 6) | (th << (32-6))); + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + wh[j] = (c & 0xffff) | (d << 16); + wl[j] = (a & 0xffff) | (b << 16); + } + } + } + + // add + h = ah0; + l = al0; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[0]; + l = hl[0]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[0] = ah0 = (c & 0xffff) | (d << 16); + hl[0] = al0 = (a & 0xffff) | (b << 16); + + h = ah1; + l = al1; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[1]; + l = hl[1]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[1] = ah1 = (c & 0xffff) | (d << 16); + hl[1] = al1 = (a & 0xffff) | (b << 16); + + h = ah2; + l = al2; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[2]; + l = hl[2]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[2] = ah2 = (c & 0xffff) | (d << 16); + hl[2] = al2 = (a & 0xffff) | (b << 16); + + h = ah3; + l = al3; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[3]; + l = hl[3]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[3] = ah3 = (c & 0xffff) | (d << 16); + hl[3] = al3 = (a & 0xffff) | (b << 16); + + h = ah4; + l = al4; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[4]; + l 
= hl[4]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[4] = ah4 = (c & 0xffff) | (d << 16); + hl[4] = al4 = (a & 0xffff) | (b << 16); + + h = ah5; + l = al5; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[5]; + l = hl[5]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[5] = ah5 = (c & 0xffff) | (d << 16); + hl[5] = al5 = (a & 0xffff) | (b << 16); + + h = ah6; + l = al6; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[6]; + l = hl[6]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[6] = ah6 = (c & 0xffff) | (d << 16); + hl[6] = al6 = (a & 0xffff) | (b << 16); + + h = ah7; + l = al7; + + a = l & 0xffff; b = l >>> 16; + c = h & 0xffff; d = h >>> 16; + + h = hh[7]; + l = hl[7]; + + a += l & 0xffff; b += l >>> 16; + c += h & 0xffff; d += h >>> 16; + + b += a >>> 16; + c += b >>> 16; + d += c >>> 16; + + hh[7] = ah7 = (c & 0xffff) | (d << 16); + hl[7] = al7 = (a & 0xffff) | (b << 16); + + pos += 128; + n -= 128; + } + + return n; +} + +function crypto_hash(out, m, n) { + var hh = new Int32Array(8), + hl = new Int32Array(8), + x = new Uint8Array(256), + i, b = n; + + hh[0] = 0x6a09e667; + hh[1] = 0xbb67ae85; + hh[2] = 0x3c6ef372; + hh[3] = 0xa54ff53a; + hh[4] = 0x510e527f; + hh[5] = 0x9b05688c; + hh[6] = 0x1f83d9ab; + hh[7] = 0x5be0cd19; + + hl[0] = 0xf3bcc908; + hl[1] = 0x84caa73b; + hl[2] = 0xfe94f82b; + hl[3] = 0x5f1d36f1; + hl[4] = 0xade682d1; + hl[5] = 0x2b3e6c1f; + hl[6] = 0xfb41bd6b; + hl[7] = 0x137e2179; + + crypto_hashblocks_hl(hh, hl, m, n); + n %= 128; + + for (i = 0; i < n; i++) x[i] = m[b-n+i]; + x[n] = 128; + + n = 256-128*(n<112?1:0); + x[n-9] = 0; + ts64(x, n-8, (b / 0x20000000) | 0, b << 3); + crypto_hashblocks_hl(hh, hl, x, n); + + for (i = 0; i < 8; 
i++) ts64(out, 8*i, hh[i], hl[i]); + + return 0; +} + +function add(p, q) { + var a = gf(), b = gf(), c = gf(), + d = gf(), e = gf(), f = gf(), + g = gf(), h = gf(), t = gf(); + + Z(a, p[1], p[0]); + Z(t, q[1], q[0]); + M(a, a, t); + A(b, p[0], p[1]); + A(t, q[0], q[1]); + M(b, b, t); + M(c, p[3], q[3]); + M(c, c, D2); + M(d, p[2], q[2]); + A(d, d, d); + Z(e, b, a); + Z(f, d, c); + A(g, d, c); + A(h, b, a); + + M(p[0], e, f); + M(p[1], h, g); + M(p[2], g, f); + M(p[3], e, h); +} + +function cswap(p, q, b) { + var i; + for (i = 0; i < 4; i++) { + sel25519(p[i], q[i], b); + } +} + +function pack(r, p) { + var tx = gf(), ty = gf(), zi = gf(); + inv25519(zi, p[2]); + M(tx, p[0], zi); + M(ty, p[1], zi); + pack25519(r, ty); + r[31] ^= par25519(tx) << 7; +} + +function scalarmult(p, q, s) { + var b, i; + set25519(p[0], gf0); + set25519(p[1], gf1); + set25519(p[2], gf1); + set25519(p[3], gf0); + for (i = 255; i >= 0; --i) { + b = (s[(i/8)|0] >> (i&7)) & 1; + cswap(p, q, b); + add(q, p); + add(p, p); + cswap(p, q, b); + } +} + +function scalarbase(p, s) { + var q = [gf(), gf(), gf(), gf()]; + set25519(q[0], X); + set25519(q[1], Y); + set25519(q[2], gf1); + M(q[3], X, Y); + scalarmult(p, q, s); +} + +function crypto_sign_keypair(pk, sk, seeded) { + var d = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()]; + var i; + + if (!seeded) randombytes(sk, 32); + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + scalarbase(p, d); + pack(pk, p); + + for (i = 0; i < 32; i++) sk[i+32] = pk[i]; + return 0; +} + +var L = new Float64Array([0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde, 0x14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x10]); + +function modL(r, x) { + var carry, i, j, k; + for (i = 63; i >= 32; --i) { + carry = 0; + for (j = i - 32, k = i - 12; j < k; ++j) { + x[j] += carry - 16 * x[i] * L[j - (i - 32)]; + carry = Math.floor((x[j] + 128) / 256); + x[j] -= carry * 256; + } + x[j] += carry; + x[i] = 0; 
+ } + carry = 0; + for (j = 0; j < 32; j++) { + x[j] += carry - (x[31] >> 4) * L[j]; + carry = x[j] >> 8; + x[j] &= 255; + } + for (j = 0; j < 32; j++) x[j] -= carry * L[j]; + for (i = 0; i < 32; i++) { + x[i+1] += x[i] >> 8; + r[i] = x[i] & 255; + } +} + +function reduce(r) { + var x = new Float64Array(64), i; + for (i = 0; i < 64; i++) x[i] = r[i]; + for (i = 0; i < 64; i++) r[i] = 0; + modL(r, x); +} + +// Note: difference from C - smlen returned, not passed as argument. +function crypto_sign(sm, m, n, sk) { + var d = new Uint8Array(64), h = new Uint8Array(64), r = new Uint8Array(64); + var i, j, x = new Float64Array(64); + var p = [gf(), gf(), gf(), gf()]; + + crypto_hash(d, sk, 32); + d[0] &= 248; + d[31] &= 127; + d[31] |= 64; + + var smlen = n + 64; + for (i = 0; i < n; i++) sm[64 + i] = m[i]; + for (i = 0; i < 32; i++) sm[32 + i] = d[32 + i]; + + crypto_hash(r, sm.subarray(32), n+32); + reduce(r); + scalarbase(p, r); + pack(sm, p); + + for (i = 32; i < 64; i++) sm[i] = sk[i]; + crypto_hash(h, sm, n + 64); + reduce(h); + + for (i = 0; i < 64; i++) x[i] = 0; + for (i = 0; i < 32; i++) x[i] = r[i]; + for (i = 0; i < 32; i++) { + for (j = 0; j < 32; j++) { + x[i+j] += h[i] * d[j]; + } + } + + modL(sm.subarray(32), x); + return smlen; +} + +function unpackneg(r, p) { + var t = gf(), chk = gf(), num = gf(), + den = gf(), den2 = gf(), den4 = gf(), + den6 = gf(); + + set25519(r[2], gf1); + unpack25519(r[1], p); + S(num, r[1]); + M(den, num, D); + Z(num, num, r[2]); + A(den, r[2], den); + + S(den2, den); + S(den4, den2); + M(den6, den4, den2); + M(t, den6, num); + M(t, t, den); + + pow2523(t, t); + M(t, t, num); + M(t, t, den); + M(t, t, den); + M(r[0], t, den); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) M(r[0], r[0], I); + + S(chk, r[0]); + M(chk, chk, den); + if (neq25519(chk, num)) return -1; + + if (par25519(r[0]) === (p[31]>>7)) Z(r[0], gf0, r[0]); + + M(r[3], r[0], r[1]); + return 0; +} + +function crypto_sign_open(m, sm, n, pk) { + var i; 
+ var t = new Uint8Array(32), h = new Uint8Array(64); + var p = [gf(), gf(), gf(), gf()], + q = [gf(), gf(), gf(), gf()]; + + if (n < 64) return -1; + + if (unpackneg(q, pk)) return -1; + + for (i = 0; i < n; i++) m[i] = sm[i]; + for (i = 0; i < 32; i++) m[i+32] = pk[i]; + crypto_hash(h, m, n); + reduce(h); + scalarmult(p, q, h); + + scalarbase(q, sm.subarray(32)); + add(p, q); + pack(t, p); + + n -= 64; + if (crypto_verify_32(sm, 0, t, 0)) { + for (i = 0; i < n; i++) m[i] = 0; + return -1; + } + + for (i = 0; i < n; i++) m[i] = sm[i + 64]; + return n; +} + +var crypto_secretbox_KEYBYTES = 32, + crypto_secretbox_NONCEBYTES = 24, + crypto_secretbox_ZEROBYTES = 32, + crypto_secretbox_BOXZEROBYTES = 16, + crypto_scalarmult_BYTES = 32, + crypto_scalarmult_SCALARBYTES = 32, + crypto_box_PUBLICKEYBYTES = 32, + crypto_box_SECRETKEYBYTES = 32, + crypto_box_BEFORENMBYTES = 32, + crypto_box_NONCEBYTES = crypto_secretbox_NONCEBYTES, + crypto_box_ZEROBYTES = crypto_secretbox_ZEROBYTES, + crypto_box_BOXZEROBYTES = crypto_secretbox_BOXZEROBYTES, + crypto_sign_BYTES = 64, + crypto_sign_PUBLICKEYBYTES = 32, + crypto_sign_SECRETKEYBYTES = 64, + crypto_sign_SEEDBYTES = 32, + crypto_hash_BYTES = 64; + +nacl.lowlevel = { + crypto_core_hsalsa20: crypto_core_hsalsa20, + crypto_stream_xor: crypto_stream_xor, + crypto_stream: crypto_stream, + crypto_stream_salsa20_xor: crypto_stream_salsa20_xor, + crypto_stream_salsa20: crypto_stream_salsa20, + crypto_onetimeauth: crypto_onetimeauth, + crypto_onetimeauth_verify: crypto_onetimeauth_verify, + crypto_verify_16: crypto_verify_16, + crypto_verify_32: crypto_verify_32, + crypto_secretbox: crypto_secretbox, + crypto_secretbox_open: crypto_secretbox_open, + crypto_scalarmult: crypto_scalarmult, + crypto_scalarmult_base: crypto_scalarmult_base, + crypto_box_beforenm: crypto_box_beforenm, + crypto_box_afternm: crypto_box_afternm, + crypto_box: crypto_box, + crypto_box_open: crypto_box_open, + crypto_box_keypair: crypto_box_keypair, + crypto_hash: 
crypto_hash, + crypto_sign: crypto_sign, + crypto_sign_keypair: crypto_sign_keypair, + crypto_sign_open: crypto_sign_open, + + crypto_secretbox_KEYBYTES: crypto_secretbox_KEYBYTES, + crypto_secretbox_NONCEBYTES: crypto_secretbox_NONCEBYTES, + crypto_secretbox_ZEROBYTES: crypto_secretbox_ZEROBYTES, + crypto_secretbox_BOXZEROBYTES: crypto_secretbox_BOXZEROBYTES, + crypto_scalarmult_BYTES: crypto_scalarmult_BYTES, + crypto_scalarmult_SCALARBYTES: crypto_scalarmult_SCALARBYTES, + crypto_box_PUBLICKEYBYTES: crypto_box_PUBLICKEYBYTES, + crypto_box_SECRETKEYBYTES: crypto_box_SECRETKEYBYTES, + crypto_box_BEFORENMBYTES: crypto_box_BEFORENMBYTES, + crypto_box_NONCEBYTES: crypto_box_NONCEBYTES, + crypto_box_ZEROBYTES: crypto_box_ZEROBYTES, + crypto_box_BOXZEROBYTES: crypto_box_BOXZEROBYTES, + crypto_sign_BYTES: crypto_sign_BYTES, + crypto_sign_PUBLICKEYBYTES: crypto_sign_PUBLICKEYBYTES, + crypto_sign_SECRETKEYBYTES: crypto_sign_SECRETKEYBYTES, + crypto_sign_SEEDBYTES: crypto_sign_SEEDBYTES, + crypto_hash_BYTES: crypto_hash_BYTES, + + gf: gf, + D: D, + L: L, + pack25519: pack25519, + unpack25519: unpack25519, + M: M, + A: A, + S: S, + Z: Z, + pow2523: pow2523, + add: add, + set25519: set25519, + modL: modL, + scalarmult: scalarmult, + scalarbase: scalarbase, +}; + +/* High-level API */ + +function checkLengths(k, n) { + if (k.length !== crypto_secretbox_KEYBYTES) throw new Error('bad key size'); + if (n.length !== crypto_secretbox_NONCEBYTES) throw new Error('bad nonce size'); +} + +function checkBoxLengths(pk, sk) { + if (pk.length !== crypto_box_PUBLICKEYBYTES) throw new Error('bad public key size'); + if (sk.length !== crypto_box_SECRETKEYBYTES) throw new Error('bad secret key size'); +} + +function checkArrayTypes() { + for (var i = 0; i < arguments.length; i++) { + if (!(arguments[i] instanceof Uint8Array)) + throw new TypeError('unexpected type, use Uint8Array'); + } +} + +function cleanup(arr) { + for (var i = 0; i < arr.length; i++) arr[i] = 0; +} + +nacl.randomBytes = 
function(n) { + var b = new Uint8Array(n); + randombytes(b, n); + return b; +}; + +nacl.secretbox = function(msg, nonce, key) { + checkArrayTypes(msg, nonce, key); + checkLengths(key, nonce); + var m = new Uint8Array(crypto_secretbox_ZEROBYTES + msg.length); + var c = new Uint8Array(m.length); + for (var i = 0; i < msg.length; i++) m[i+crypto_secretbox_ZEROBYTES] = msg[i]; + crypto_secretbox(c, m, m.length, nonce, key); + return c.subarray(crypto_secretbox_BOXZEROBYTES); +}; + +nacl.secretbox.open = function(box, nonce, key) { + checkArrayTypes(box, nonce, key); + checkLengths(key, nonce); + var c = new Uint8Array(crypto_secretbox_BOXZEROBYTES + box.length); + var m = new Uint8Array(c.length); + for (var i = 0; i < box.length; i++) c[i+crypto_secretbox_BOXZEROBYTES] = box[i]; + if (c.length < 32) return null; + if (crypto_secretbox_open(m, c, c.length, nonce, key) !== 0) return null; + return m.subarray(crypto_secretbox_ZEROBYTES); +}; + +nacl.secretbox.keyLength = crypto_secretbox_KEYBYTES; +nacl.secretbox.nonceLength = crypto_secretbox_NONCEBYTES; +nacl.secretbox.overheadLength = crypto_secretbox_BOXZEROBYTES; + +nacl.scalarMult = function(n, p) { + checkArrayTypes(n, p); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + if (p.length !== crypto_scalarmult_BYTES) throw new Error('bad p size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult(q, n, p); + return q; +}; + +nacl.scalarMult.base = function(n) { + checkArrayTypes(n); + if (n.length !== crypto_scalarmult_SCALARBYTES) throw new Error('bad n size'); + var q = new Uint8Array(crypto_scalarmult_BYTES); + crypto_scalarmult_base(q, n); + return q; +}; + +nacl.scalarMult.scalarLength = crypto_scalarmult_SCALARBYTES; +nacl.scalarMult.groupElementLength = crypto_scalarmult_BYTES; + +nacl.box = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox(msg, nonce, k); +}; + +nacl.box.before = 
function(publicKey, secretKey) { + checkArrayTypes(publicKey, secretKey); + checkBoxLengths(publicKey, secretKey); + var k = new Uint8Array(crypto_box_BEFORENMBYTES); + crypto_box_beforenm(k, publicKey, secretKey); + return k; +}; + +nacl.box.after = nacl.secretbox; + +nacl.box.open = function(msg, nonce, publicKey, secretKey) { + var k = nacl.box.before(publicKey, secretKey); + return nacl.secretbox.open(msg, nonce, k); +}; + +nacl.box.open.after = nacl.secretbox.open; + +nacl.box.keyPair = function() { + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_box_SECRETKEYBYTES); + crypto_box_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.box.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_box_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_box_PUBLICKEYBYTES); + crypto_scalarmult_base(pk, secretKey); + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.box.publicKeyLength = crypto_box_PUBLICKEYBYTES; +nacl.box.secretKeyLength = crypto_box_SECRETKEYBYTES; +nacl.box.sharedKeyLength = crypto_box_BEFORENMBYTES; +nacl.box.nonceLength = crypto_box_NONCEBYTES; +nacl.box.overheadLength = nacl.secretbox.overheadLength; + +nacl.sign = function(msg, secretKey) { + checkArrayTypes(msg, secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var signedMsg = new Uint8Array(crypto_sign_BYTES+msg.length); + crypto_sign(signedMsg, msg, msg.length, secretKey); + return signedMsg; +}; + +nacl.sign.open = function(signedMsg, publicKey) { + checkArrayTypes(signedMsg, publicKey); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var tmp = new Uint8Array(signedMsg.length); + var mlen = crypto_sign_open(tmp, signedMsg, signedMsg.length, publicKey); + if (mlen < 0) return null; + var m = new 
Uint8Array(mlen); + for (var i = 0; i < m.length; i++) m[i] = tmp[i]; + return m; +}; + +nacl.sign.detached = function(msg, secretKey) { + var signedMsg = nacl.sign(msg, secretKey); + var sig = new Uint8Array(crypto_sign_BYTES); + for (var i = 0; i < sig.length; i++) sig[i] = signedMsg[i]; + return sig; +}; + +nacl.sign.detached.verify = function(msg, sig, publicKey) { + checkArrayTypes(msg, sig, publicKey); + if (sig.length !== crypto_sign_BYTES) + throw new Error('bad signature size'); + if (publicKey.length !== crypto_sign_PUBLICKEYBYTES) + throw new Error('bad public key size'); + var sm = new Uint8Array(crypto_sign_BYTES + msg.length); + var m = new Uint8Array(crypto_sign_BYTES + msg.length); + var i; + for (i = 0; i < crypto_sign_BYTES; i++) sm[i] = sig[i]; + for (i = 0; i < msg.length; i++) sm[i+crypto_sign_BYTES] = msg[i]; + return (crypto_sign_open(m, sm, sm.length, publicKey) >= 0); +}; + +nacl.sign.keyPair = function() { + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + crypto_sign_keypair(pk, sk); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.keyPair.fromSecretKey = function(secretKey) { + checkArrayTypes(secretKey); + if (secretKey.length !== crypto_sign_SECRETKEYBYTES) + throw new Error('bad secret key size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + for (var i = 0; i < pk.length; i++) pk[i] = secretKey[32+i]; + return {publicKey: pk, secretKey: new Uint8Array(secretKey)}; +}; + +nacl.sign.keyPair.fromSeed = function(seed) { + checkArrayTypes(seed); + if (seed.length !== crypto_sign_SEEDBYTES) + throw new Error('bad seed size'); + var pk = new Uint8Array(crypto_sign_PUBLICKEYBYTES); + var sk = new Uint8Array(crypto_sign_SECRETKEYBYTES); + for (var i = 0; i < 32; i++) sk[i] = seed[i]; + crypto_sign_keypair(pk, sk, true); + return {publicKey: pk, secretKey: sk}; +}; + +nacl.sign.publicKeyLength = crypto_sign_PUBLICKEYBYTES; +nacl.sign.secretKeyLength = 
crypto_sign_SECRETKEYBYTES; +nacl.sign.seedLength = crypto_sign_SEEDBYTES; +nacl.sign.signatureLength = crypto_sign_BYTES; + +nacl.hash = function(msg) { + checkArrayTypes(msg); + var h = new Uint8Array(crypto_hash_BYTES); + crypto_hash(h, msg, msg.length); + return h; +}; + +nacl.hash.hashLength = crypto_hash_BYTES; + +nacl.verify = function(x, y) { + checkArrayTypes(x, y); + // Zero length arguments are considered not equal. + if (x.length === 0 || y.length === 0) return false; + if (x.length !== y.length) return false; + return (vn(x, 0, y, 0, x.length) === 0) ? true : false; +}; + +nacl.setPRNG = function(fn) { + randombytes = fn; +}; + +(function() { + // Initialize PRNG if environment provides CSPRNG. + // If not, methods calling randombytes will throw. + var crypto = typeof self !== 'undefined' ? (self.crypto || self.msCrypto) : null; + if (crypto && crypto.getRandomValues) { + // Browsers. + var QUOTA = 65536; + nacl.setPRNG(function(x, n) { + var i, v = new Uint8Array(n); + for (i = 0; i < n; i += QUOTA) { + crypto.getRandomValues(v.subarray(i, i + Math.min(n - i, QUOTA))); + } + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } else if (true) { + // Node.js. + crypto = __webpack_require__(71281); + if (crypto && crypto.randomBytes) { + nacl.setPRNG(function(x, n) { + var i, v = crypto.randomBytes(n); + for (i = 0; i < n; i++) x[i] = v[i]; + cleanup(v); + }); + } + } +})(); + +})( true && module.exports ? 
module.exports : (self.nacl = self.nacl || {})); + + /***/ }), /***/ 46579: @@ -102855,6 +128525,482 @@ module.exports = function whichTypedArray(value) { }; +/***/ }), + +/***/ 57510: +/***/ ((module) => { + +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend() { + var target = {} + + for (var i = 0; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} + + +/***/ }), + +/***/ 40259: +/***/ ((module) => { + +"use strict"; + +module.exports = function (Yallist) { + Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } + } +} + + +/***/ }), + +/***/ 28799: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + +module.exports = Yallist + +Yallist.Node = Node +Yallist.create = Yallist + +function Yallist (list) { + var self = this + if (!(self instanceof Yallist)) { + self = new Yallist() + } + + self.tail = null + self.head = null + self.length = 0 + + if (list && typeof list.forEach === 'function') { + list.forEach(function (item) { + self.push(item) + }) + } else if (arguments.length > 0) { + for (var i = 0, l = arguments.length; i < l; i++) { + self.push(arguments[i]) + } + } + + return self +} + +Yallist.prototype.removeNode = function (node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list') + } + + var next = node.next + var prev = node.prev + + if (next) { + next.prev = prev + } + + if (prev) { + prev.next = next + } + + if (node === this.head) { + this.head = next + } + if (node === this.tail) { + this.tail = prev + } + + node.list.length-- + node.next = null + node.prev = null + node.list = null + + return next +} + +Yallist.prototype.unshiftNode = function (node) { + if (node === this.head) { + return + } + + 
if (node.list) { + node.list.removeNode(node) + } + + var head = this.head + node.list = this + node.next = head + if (head) { + head.prev = node + } + + this.head = node + if (!this.tail) { + this.tail = node + } + this.length++ +} + +Yallist.prototype.pushNode = function (node) { + if (node === this.tail) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var tail = this.tail + node.list = this + node.prev = tail + if (tail) { + tail.next = node + } + + this.tail = node + if (!this.head) { + this.head = node + } + this.length++ +} + +Yallist.prototype.push = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + push(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.unshift = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + unshift(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.pop = function () { + if (!this.tail) { + return undefined + } + + var res = this.tail.value + this.tail = this.tail.prev + if (this.tail) { + this.tail.next = null + } else { + this.head = null + } + this.length-- + return res +} + +Yallist.prototype.shift = function () { + if (!this.head) { + return undefined + } + + var res = this.head.value + this.head = this.head.next + if (this.head) { + this.head.prev = null + } else { + this.tail = null + } + this.length-- + return res +} + +Yallist.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.head, i = 0; walker !== null; i++) { + fn.call(thisp, walker.value, i, this) + walker = walker.next + } +} + +Yallist.prototype.forEachReverse = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { + fn.call(thisp, walker.value, i, this) + walker = walker.prev + } +} + +Yallist.prototype.get = function (n) { + for (var i = 0, walker = this.head; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = 
walker.next + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.getReverse = function (n) { + for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.prev + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.map = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.head; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.next + } + return res +} + +Yallist.prototype.mapReverse = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.tail; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.prev + } + return res +} + +Yallist.prototype.reduce = function (fn, initial) { + var acc + var walker = this.head + if (arguments.length > 1) { + acc = initial + } else if (this.head) { + walker = this.head.next + acc = this.head.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = 0; walker !== null; i++) { + acc = fn(acc, walker.value, i) + walker = walker.next + } + + return acc +} + +Yallist.prototype.reduceReverse = function (fn, initial) { + var acc + var walker = this.tail + if (arguments.length > 1) { + acc = initial + } else if (this.tail) { + walker = this.tail.prev + acc = this.tail.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = this.length - 1; walker !== null; i--) { + acc = fn(acc, walker.value, i) + walker = walker.prev + } + + return acc +} + +Yallist.prototype.toArray = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.head; walker !== null; i++) { + arr[i] = walker.value + walker = walker.next + } + return arr +} + +Yallist.prototype.toArrayReverse = function () { + var arr = new Array(this.length) + for (var i = 
0, walker = this.tail; walker !== null; i++) { + arr[i] = walker.value + walker = walker.prev + } + return arr +} + +Yallist.prototype.slice = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = 0, walker = this.head; walker !== null && i < from; i++) { + walker = walker.next + } + for (; walker !== null && i < to; i++, walker = walker.next) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.sliceReverse = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { + walker = walker.prev + } + for (; walker !== null && i > from; i--, walker = walker.prev) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.splice = function (start, deleteCount, ...nodes) { + if (start > this.length) { + start = this.length - 1 + } + if (start < 0) { + start = this.length + start; + } + + for (var i = 0, walker = this.head; walker !== null && i < start; i++) { + walker = walker.next + } + + var ret = [] + for (var i = 0; walker && i < deleteCount; i++) { + ret.push(walker.value) + walker = this.removeNode(walker) + } + if (walker === null) { + walker = this.tail + } + + if (walker !== this.head && walker !== this.tail) { + walker = walker.prev + } + + for (var i = 0; i < nodes.length; i++) { + walker = insert(this, walker, nodes[i]) + } + return ret; +} + +Yallist.prototype.reverse = function () { + var head = this.head + var tail = this.tail + for (var walker = head; 
walker !== null; walker = walker.prev) { + var p = walker.prev + walker.prev = walker.next + walker.next = p + } + this.head = tail + this.tail = head + return this +} + +function insert (self, node, value) { + var inserted = node === self.head ? + new Node(value, null, node, self) : + new Node(value, node, node.next, self) + + if (inserted.next === null) { + self.tail = inserted + } + if (inserted.prev === null) { + self.head = inserted + } + + self.length++ + + return inserted +} + +function push (self, item) { + self.tail = new Node(item, self.tail, null, self) + if (!self.head) { + self.head = self.tail + } + self.length++ +} + +function unshift (self, item) { + self.head = new Node(item, null, self.head, self) + if (!self.tail) { + self.tail = self.head + } + self.length++ +} + +function Node (value, prev, next, list) { + if (!(this instanceof Node)) { + return new Node(value, prev, next, list) + } + + this.list = list + this.value = value + + if (prev) { + prev.next = this + this.prev = prev + } else { + this.prev = null + } + + if (next) { + next.prev = this + this.next = next + } else { + this.next = null + } +} + +try { + // add if support for Symbol.iterator is present + __webpack_require__(40259)(Yallist) +} catch (er) {} + + /***/ }), /***/ 5183: @@ -102920,6 +129066,20 @@ module.exports = function whichTypedArray(value) { /***/ }), +/***/ 23276: +/***/ (() => { + +/* (ignored) */ + +/***/ }), + +/***/ 59676: +/***/ (() => { + +/* (ignored) */ + +/***/ }), + /***/ 64688: /***/ (() => { @@ -102948,6 +129108,20 @@ module.exports = function whichTypedArray(value) { /***/ }), +/***/ 59817: +/***/ (() => { + +/* (ignored) */ + +/***/ }), + +/***/ 71281: +/***/ (() => { + +/* (ignored) */ + +/***/ }), + /***/ 20933: /***/ (() => { @@ -102997,13 +129171,6 @@ module.exports = function whichTypedArray(value) { /***/ }), -/***/ 40262: -/***/ (() => { - -/* (ignored) */ - -/***/ }), - /***/ 40607: /***/ (() => { @@ -103041,6 +129208,1072 @@ module.exports = 
function availableTypedArrays() { }; +/***/ }), + +/***/ 2150: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => { + +"use strict"; +__webpack_require__.r(__webpack_exports__); +/* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ Struct: () => (/* binding */ Struct), +/* harmony export */ StructError: () => (/* binding */ StructError), +/* harmony export */ any: () => (/* binding */ any), +/* harmony export */ array: () => (/* binding */ array), +/* harmony export */ assert: () => (/* binding */ assert), +/* harmony export */ assign: () => (/* binding */ assign), +/* harmony export */ bigint: () => (/* binding */ bigint), +/* harmony export */ boolean: () => (/* binding */ boolean), +/* harmony export */ coerce: () => (/* binding */ coerce), +/* harmony export */ create: () => (/* binding */ create), +/* harmony export */ date: () => (/* binding */ date), +/* harmony export */ defaulted: () => (/* binding */ defaulted), +/* harmony export */ define: () => (/* binding */ define), +/* harmony export */ deprecated: () => (/* binding */ deprecated), +/* harmony export */ dynamic: () => (/* binding */ dynamic), +/* harmony export */ empty: () => (/* binding */ empty), +/* harmony export */ enums: () => (/* binding */ enums), +/* harmony export */ func: () => (/* binding */ func), +/* harmony export */ instance: () => (/* binding */ instance), +/* harmony export */ integer: () => (/* binding */ integer), +/* harmony export */ intersection: () => (/* binding */ intersection), +/* harmony export */ is: () => (/* binding */ is), +/* harmony export */ lazy: () => (/* binding */ lazy), +/* harmony export */ literal: () => (/* binding */ literal), +/* harmony export */ map: () => (/* binding */ map), +/* harmony export */ mask: () => (/* binding */ mask), +/* harmony export */ max: () => (/* binding */ max), +/* harmony export */ min: () => (/* binding */ min), +/* harmony export */ never: () => (/* 
binding */ never), +/* harmony export */ nonempty: () => (/* binding */ nonempty), +/* harmony export */ nullable: () => (/* binding */ nullable), +/* harmony export */ number: () => (/* binding */ number), +/* harmony export */ object: () => (/* binding */ object), +/* harmony export */ omit: () => (/* binding */ omit), +/* harmony export */ optional: () => (/* binding */ optional), +/* harmony export */ partial: () => (/* binding */ partial), +/* harmony export */ pattern: () => (/* binding */ pattern), +/* harmony export */ pick: () => (/* binding */ pick), +/* harmony export */ record: () => (/* binding */ record), +/* harmony export */ refine: () => (/* binding */ refine), +/* harmony export */ regexp: () => (/* binding */ regexp), +/* harmony export */ set: () => (/* binding */ set), +/* harmony export */ size: () => (/* binding */ size), +/* harmony export */ string: () => (/* binding */ string), +/* harmony export */ struct: () => (/* binding */ struct), +/* harmony export */ trimmed: () => (/* binding */ trimmed), +/* harmony export */ tuple: () => (/* binding */ tuple), +/* harmony export */ type: () => (/* binding */ type), +/* harmony export */ union: () => (/* binding */ union), +/* harmony export */ unknown: () => (/* binding */ unknown), +/* harmony export */ validate: () => (/* binding */ validate) +/* harmony export */ }); +/* provided dependency */ var console = __webpack_require__(96763); +/** + * A `StructFailure` represents a single specific failure in validation. + */ +/** + * `StructError` objects are thrown (or returned) when validation fails. + * + * Validation logic is design to exit early for maximum performance. The error + * represents the first error encountered during validation. For more detail, + * the `error.failures` property is a generator function that can be run to + * continue validation and receive all the failures in the data. 
+ */ +class StructError extends TypeError { + constructor(failure, failures) { + let cached; + const { message, explanation, ...rest } = failure; + const { path } = failure; + const msg = path.length === 0 ? message : `At path: ${path.join('.')} -- ${message}`; + super(explanation ?? msg); + if (explanation != null) + this.cause = msg; + Object.assign(this, rest); + this.name = this.constructor.name; + this.failures = () => { + return (cached ?? (cached = [failure, ...failures()])); + }; + } +} + +/** + * Check if a value is an iterator. + */ +function isIterable(x) { + return isObject(x) && typeof x[Symbol.iterator] === 'function'; +} +/** + * Check if a value is a plain object. + */ +function isObject(x) { + return typeof x === 'object' && x != null; +} +/** + * Check if a value is a plain object. + */ +function isPlainObject(x) { + if (Object.prototype.toString.call(x) !== '[object Object]') { + return false; + } + const prototype = Object.getPrototypeOf(x); + return prototype === null || prototype === Object.prototype; +} +/** + * Return a value as a printable string. + */ +function print(value) { + if (typeof value === 'symbol') { + return value.toString(); + } + return typeof value === 'string' ? JSON.stringify(value) : `${value}`; +} +/** + * Shifts (removes and returns) the first value from the `input` iterator. + * Like `Array.prototype.shift()` but for an `Iterator`. + */ +function shiftIterator(input) { + const { done, value } = input.next(); + return done ? undefined : value; +} +/** + * Convert a single validation result to a failure. + */ +function toFailure(result, context, struct, value) { + if (result === true) { + return; + } + else if (result === false) { + result = {}; + } + else if (typeof result === 'string') { + result = { message: result }; + } + const { path, branch } = context; + const { type } = struct; + const { refinement, message = `Expected a value of type \`${type}\`${refinement ? 
` with refinement \`${refinement}\`` : ''}, but received: \`${print(value)}\``, } = result; + return { + value, + type, + refinement, + key: path[path.length - 1], + path, + branch, + ...result, + message, + }; +} +/** + * Convert a validation result to an iterable of failures. + */ +function* toFailures(result, context, struct, value) { + if (!isIterable(result)) { + result = [result]; + } + for (const r of result) { + const failure = toFailure(r, context, struct, value); + if (failure) { + yield failure; + } + } +} +/** + * Check a value against a struct, traversing deeply into nested values, and + * returning an iterator of failures or success. + */ +function* run(value, struct, options = {}) { + const { path = [], branch = [value], coerce = false, mask = false } = options; + const ctx = { path, branch }; + if (coerce) { + value = struct.coercer(value, ctx); + if (mask && + struct.type !== 'type' && + isObject(struct.schema) && + isObject(value) && + !Array.isArray(value)) { + for (const key in value) { + if (struct.schema[key] === undefined) { + delete value[key]; + } + } + } + } + let status = 'valid'; + for (const failure of struct.validator(value, ctx)) { + failure.explanation = options.message; + status = 'not_valid'; + yield [failure, undefined]; + } + for (let [k, v, s] of struct.entries(value, ctx)) { + const ts = run(v, s, { + path: k === undefined ? path : [...path, k], + branch: k === undefined ? branch : [...branch, v], + coerce, + mask, + message: options.message, + }); + for (const t of ts) { + if (t[0]) { + status = t[0].refinement != null ? 
'not_refined' : 'not_valid'; + yield [t[0], undefined]; + } + else if (coerce) { + v = t[1]; + if (k === undefined) { + value = v; + } + else if (value instanceof Map) { + value.set(k, v); + } + else if (value instanceof Set) { + value.add(v); + } + else if (isObject(value)) { + if (v !== undefined || k in value) + value[k] = v; + } + } + } + } + if (status !== 'not_valid') { + for (const failure of struct.refiner(value, ctx)) { + failure.explanation = options.message; + status = 'not_refined'; + yield [failure, undefined]; + } + } + if (status === 'valid') { + yield [undefined, value]; + } +} + +/** + * `Struct` objects encapsulate the validation logic for a specific type of + * values. Once constructed, you use the `assert`, `is` or `validate` helpers to + * validate unknown input data against the struct. + */ +class Struct { + constructor(props) { + const { type, schema, validator, refiner, coercer = (value) => value, entries = function* () { }, } = props; + this.type = type; + this.schema = schema; + this.entries = entries; + this.coercer = coercer; + if (validator) { + this.validator = (value, context) => { + const result = validator(value, context); + return toFailures(result, context, this, value); + }; + } + else { + this.validator = () => []; + } + if (refiner) { + this.refiner = (value, context) => { + const result = refiner(value, context); + return toFailures(result, context, this, value); + }; + } + else { + this.refiner = () => []; + } + } + /** + * Assert that a value passes the struct's validation, throwing if it doesn't. + */ + assert(value, message) { + return assert(value, this, message); + } + /** + * Create a value with the struct's coercion logic, then validate it. + */ + create(value, message) { + return create(value, this, message); + } + /** + * Check if a value passes the struct's validation. 
+ */ + is(value) { + return is(value, this); + } + /** + * Mask a value, coercing and validating it, but returning only the subset of + * properties defined by the struct's schema. + */ + mask(value, message) { + return mask(value, this, message); + } + /** + * Validate a value with the struct's validation logic, returning a tuple + * representing the result. + * + * You may optionally pass `true` for the `withCoercion` argument to coerce + * the value before attempting to validate it. If you do, the result will + * contain the coerced result when successful. + */ + validate(value, options = {}) { + return validate(value, this, options); + } +} +/** + * Assert that a value passes a struct, throwing if it doesn't. + */ +function assert(value, struct, message) { + const result = validate(value, struct, { message }); + if (result[0]) { + throw result[0]; + } +} +/** + * Create a value with the coercion logic of struct and validate it. + */ +function create(value, struct, message) { + const result = validate(value, struct, { coerce: true, message }); + if (result[0]) { + throw result[0]; + } + else { + return result[1]; + } +} +/** + * Mask a value, returning only the subset of properties defined by a struct. + */ +function mask(value, struct, message) { + const result = validate(value, struct, { coerce: true, mask: true, message }); + if (result[0]) { + throw result[0]; + } + else { + return result[1]; + } +} +/** + * Check if a value passes a struct. + */ +function is(value, struct) { + const result = validate(value, struct); + return !result[0]; +} +/** + * Validate a value against a struct, returning an error if invalid, or the + * value (with potential coercion) if valid. 
+ */ +function validate(value, struct, options = {}) { + const tuples = run(value, struct, options); + const tuple = shiftIterator(tuples); + if (tuple[0]) { + const error = new StructError(tuple[0], function* () { + for (const t of tuples) { + if (t[0]) { + yield t[0]; + } + } + }); + return [error, undefined]; + } + else { + const v = tuple[1]; + return [undefined, v]; + } +} + +function assign(...Structs) { + const isType = Structs[0].type === 'type'; + const schemas = Structs.map((s) => s.schema); + const schema = Object.assign({}, ...schemas); + return isType ? type(schema) : object(schema); +} +/** + * Define a new struct type with a custom validation function. + */ +function define(name, validator) { + return new Struct({ type: name, schema: null, validator }); +} +/** + * Create a new struct based on an existing struct, but the value is allowed to + * be `undefined`. `log` will be called if the value is not `undefined`. + */ +function deprecated(struct, log) { + return new Struct({ + ...struct, + refiner: (value, ctx) => value === undefined || struct.refiner(value, ctx), + validator(value, ctx) { + if (value === undefined) { + return true; + } + else { + log(value, ctx); + return struct.validator(value, ctx); + } + }, + }); +} +/** + * Create a struct with dynamic validation logic. + * + * The callback will receive the value currently being validated, and must + * return a struct object to validate it with. This can be useful to model + * validation logic that changes based on its input. 
+ */ +function dynamic(fn) { + return new Struct({ + type: 'dynamic', + schema: null, + *entries(value, ctx) { + const struct = fn(value, ctx); + yield* struct.entries(value, ctx); + }, + validator(value, ctx) { + const struct = fn(value, ctx); + return struct.validator(value, ctx); + }, + coercer(value, ctx) { + const struct = fn(value, ctx); + return struct.coercer(value, ctx); + }, + refiner(value, ctx) { + const struct = fn(value, ctx); + return struct.refiner(value, ctx); + }, + }); +} +/** + * Create a struct with lazily evaluated validation logic. + * + * The first time validation is run with the struct, the callback will be called + * and must return a struct object to use. This is useful for cases where you + * want to have self-referential structs for nested data structures to avoid a + * circular definition problem. + */ +function lazy(fn) { + let struct; + return new Struct({ + type: 'lazy', + schema: null, + *entries(value, ctx) { + struct ?? (struct = fn()); + yield* struct.entries(value, ctx); + }, + validator(value, ctx) { + struct ?? (struct = fn()); + return struct.validator(value, ctx); + }, + coercer(value, ctx) { + struct ?? (struct = fn()); + return struct.coercer(value, ctx); + }, + refiner(value, ctx) { + struct ?? (struct = fn()); + return struct.refiner(value, ctx); + }, + }); +} +/** + * Create a new struct based on an existing object struct, but excluding + * specific properties. + * + * Like TypeScript's `Omit` utility. + */ +function omit(struct, keys) { + const { schema } = struct; + const subschema = { ...schema }; + for (const key of keys) { + delete subschema[key]; + } + switch (struct.type) { + case 'type': + return type(subschema); + default: + return object(subschema); + } +} +/** + * Create a new struct based on an existing object struct, but with all of its + * properties allowed to be `undefined`. + * + * Like TypeScript's `Partial` utility. 
+ */ +function partial(struct) { + const isStruct = struct instanceof Struct; + const schema = isStruct ? { ...struct.schema } : { ...struct }; + for (const key in schema) { + schema[key] = optional(schema[key]); + } + if (isStruct && struct.type === 'type') { + return type(schema); + } + return object(schema); +} +/** + * Create a new struct based on an existing object struct, but only including + * specific properties. + * + * Like TypeScript's `Pick` utility. + */ +function pick(struct, keys) { + const { schema } = struct; + const subschema = {}; + for (const key of keys) { + subschema[key] = schema[key]; + } + switch (struct.type) { + case 'type': + return type(subschema); + default: + return object(subschema); + } +} +/** + * Define a new struct type with a custom validation function. + * + * @deprecated This function has been renamed to `define`. + */ +function struct(name, validator) { + console.warn('superstruct@0.11 - The `struct` helper has been renamed to `define`.'); + return define(name, validator); +} + +/** + * Ensure that any value passes validation. + */ +function any() { + return define('any', () => true); +} +function array(Element) { + return new Struct({ + type: 'array', + schema: Element, + *entries(value) { + if (Element && Array.isArray(value)) { + for (const [i, v] of value.entries()) { + yield [i, v, Element]; + } + } + }, + coercer(value) { + return Array.isArray(value) ? value.slice() : value; + }, + validator(value) { + return (Array.isArray(value) || + `Expected an array value, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a bigint. + */ +function bigint() { + return define('bigint', (value) => { + return typeof value === 'bigint'; + }); +} +/** + * Ensure that a value is a boolean. + */ +function boolean() { + return define('boolean', (value) => { + return typeof value === 'boolean'; + }); +} +/** + * Ensure that a value is a valid `Date`. 
+ * + * Note: this also ensures that the value is *not* an invalid `Date` object, + * which can occur when parsing a date fails but still returns a `Date`. + */ +function date() { + return define('date', (value) => { + return ((value instanceof Date && !isNaN(value.getTime())) || + `Expected a valid \`Date\` object, but received: ${print(value)}`); + }); +} +function enums(values) { + const schema = {}; + const description = values.map((v) => print(v)).join(); + for (const key of values) { + schema[key] = key; + } + return new Struct({ + type: 'enums', + schema, + validator(value) { + return (values.includes(value) || + `Expected one of \`${description}\`, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a function. + */ +function func() { + return define('func', (value) => { + return (typeof value === 'function' || + `Expected a function, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value is an instance of a specific class. + */ +function instance(Class) { + return define('instance', (value) => { + return (value instanceof Class || + `Expected a \`${Class.name}\` instance, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value is an integer. + */ +function integer() { + return define('integer', (value) => { + return ((typeof value === 'number' && !isNaN(value) && Number.isInteger(value)) || + `Expected an integer, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value matches all of a set of types. 
+ */ +function intersection(Structs) { + return new Struct({ + type: 'intersection', + schema: null, + *entries(value, ctx) { + for (const S of Structs) { + yield* S.entries(value, ctx); + } + }, + *validator(value, ctx) { + for (const S of Structs) { + yield* S.validator(value, ctx); + } + }, + *refiner(value, ctx) { + for (const S of Structs) { + yield* S.refiner(value, ctx); + } + }, + }); +} +function literal(constant) { + const description = print(constant); + const t = typeof constant; + return new Struct({ + type: 'literal', + schema: t === 'string' || t === 'number' || t === 'boolean' ? constant : null, + validator(value) { + return (value === constant || + `Expected the literal \`${description}\`, but received: ${print(value)}`); + }, + }); +} +function map(Key, Value) { + return new Struct({ + type: 'map', + schema: null, + *entries(value) { + if (Key && Value && value instanceof Map) { + for (const [k, v] of value.entries()) { + yield [k, k, Key]; + yield [k, v, Value]; + } + } + }, + coercer(value) { + return value instanceof Map ? new Map(value) : value; + }, + validator(value) { + return (value instanceof Map || + `Expected a \`Map\` object, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that no value ever passes validation. + */ +function never() { + return define('never', () => false); +} +/** + * Augment an existing struct to allow `null` values. + */ +function nullable(struct) { + return new Struct({ + ...struct, + validator: (value, ctx) => value === null || struct.validator(value, ctx), + refiner: (value, ctx) => value === null || struct.refiner(value, ctx), + }); +} +/** + * Ensure that a value is a number. + */ +function number() { + return define('number', (value) => { + return ((typeof value === 'number' && !isNaN(value)) || + `Expected a number, but received: ${print(value)}`); + }); +} +function object(schema) { + const knowns = schema ? 
Object.keys(schema) : []; + const Never = never(); + return new Struct({ + type: 'object', + schema: schema ? schema : null, + *entries(value) { + if (schema && isObject(value)) { + const unknowns = new Set(Object.keys(value)); + for (const key of knowns) { + unknowns.delete(key); + yield [key, value[key], schema[key]]; + } + for (const key of unknowns) { + yield [key, value[key], Never]; + } + } + }, + validator(value) { + return (isObject(value) || `Expected an object, but received: ${print(value)}`); + }, + coercer(value) { + return isObject(value) ? { ...value } : value; + }, + }); +} +/** + * Augment a struct to allow `undefined` values. + */ +function optional(struct) { + return new Struct({ + ...struct, + validator: (value, ctx) => value === undefined || struct.validator(value, ctx), + refiner: (value, ctx) => value === undefined || struct.refiner(value, ctx), + }); +} +/** + * Ensure that a value is an object with keys and values of specific types, but + * without ensuring any specific shape of properties. + * + * Like TypeScript's `Record` utility. + */ +function record(Key, Value) { + return new Struct({ + type: 'record', + schema: null, + *entries(value) { + if (isObject(value)) { + for (const k in value) { + const v = value[k]; + yield [k, k, Key]; + yield [k, v, Value]; + } + } + }, + validator(value) { + return (isObject(value) || `Expected an object, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a `RegExp`. + * + * Note: this does not test the value against the regular expression! For that + * you need to use the `pattern()` refinement. + */ +function regexp() { + return define('regexp', (value) => { + return value instanceof RegExp; + }); +} +function set(Element) { + return new Struct({ + type: 'set', + schema: null, + *entries(value) { + if (Element && value instanceof Set) { + for (const v of value) { + yield [v, v, Element]; + } + } + }, + coercer(value) { + return value instanceof Set ? 
new Set(value) : value; + }, + validator(value) { + return (value instanceof Set || + `Expected a \`Set\` object, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value is a string. + */ +function string() { + return define('string', (value) => { + return (typeof value === 'string' || + `Expected a string, but received: ${print(value)}`); + }); +} +/** + * Ensure that a value is a tuple of a specific length, and that each of its + * elements is of a specific type. + */ +function tuple(Structs) { + const Never = never(); + return new Struct({ + type: 'tuple', + schema: null, + *entries(value) { + if (Array.isArray(value)) { + const length = Math.max(Structs.length, value.length); + for (let i = 0; i < length; i++) { + yield [i, value[i], Structs[i] || Never]; + } + } + }, + validator(value) { + return (Array.isArray(value) || + `Expected an array, but received: ${print(value)}`); + }, + }); +} +/** + * Ensure that a value has a set of known properties of specific types. + * + * Note: Unrecognized properties are allowed and untouched. This is similar to + * how TypeScript's structural typing works. + */ +function type(schema) { + const keys = Object.keys(schema); + return new Struct({ + type: 'type', + schema, + *entries(value) { + if (isObject(value)) { + for (const k of keys) { + yield [k, value[k], schema[k]]; + } + } + }, + validator(value) { + return (isObject(value) || `Expected an object, but received: ${print(value)}`); + }, + coercer(value) { + return isObject(value) ? { ...value } : value; + }, + }); +} +/** + * Ensure that a value matches one of a set of types. 
+ */ +function union(Structs) { + const description = Structs.map((s) => s.type).join(' | '); + return new Struct({ + type: 'union', + schema: null, + coercer(value) { + for (const S of Structs) { + const [error, coerced] = S.validate(value, { coerce: true }); + if (!error) { + return coerced; + } + } + return value; + }, + validator(value, ctx) { + const failures = []; + for (const S of Structs) { + const [...tuples] = run(value, S, ctx); + const [first] = tuples; + if (!first[0]) { + return []; + } + else { + for (const [failure] of tuples) { + if (failure) { + failures.push(failure); + } + } + } + } + return [ + `Expected the value to satisfy a union of \`${description}\`, but received: ${print(value)}`, + ...failures, + ]; + }, + }); +} +/** + * Ensure that any value passes validation, without widening its type to `any`. + */ +function unknown() { + return define('unknown', () => true); +} + +/** + * Augment a `Struct` to add an additional coercion step to its input. + * + * This allows you to transform input data before validating it, to increase the + * likelihood that it passes validation—for example for default values, parsing + * different formats, etc. + * + * Note: You must use `create(value, Struct)` on the value to have the coercion + * take effect! Using simply `assert()` or `is()` will not use coercion. + */ +function coerce(struct, condition, coercer) { + return new Struct({ + ...struct, + coercer: (value, ctx) => { + return is(value, condition) + ? struct.coercer(coercer(value, ctx), ctx) + : struct.coercer(value, ctx); + }, + }); +} +/** + * Augment a struct to replace `undefined` values with a default. + * + * Note: You must use `create(value, Struct)` on the value to have the coercion + * take effect! Using simply `assert()` or `is()` will not use coercion. + */ +function defaulted(struct, fallback, options = {}) { + return coerce(struct, unknown(), (x) => { + const f = typeof fallback === 'function' ? 
fallback() : fallback; + if (x === undefined) { + return f; + } + if (!options.strict && isPlainObject(x) && isPlainObject(f)) { + const ret = { ...x }; + let changed = false; + for (const key in f) { + if (ret[key] === undefined) { + ret[key] = f[key]; + changed = true; + } + } + if (changed) { + return ret; + } + } + return x; + }); +} +/** + * Augment a struct to trim string inputs. + * + * Note: You must use `create(value, Struct)` on the value to have the coercion + * take effect! Using simply `assert()` or `is()` will not use coercion. + */ +function trimmed(struct) { + return coerce(struct, string(), (x) => x.trim()); +} + +/** + * Ensure that a string, array, map, or set is empty. + */ +function empty(struct) { + return refine(struct, 'empty', (value) => { + const size = getSize(value); + return (size === 0 || + `Expected an empty ${struct.type} but received one with a size of \`${size}\``); + }); +} +function getSize(value) { + if (value instanceof Map || value instanceof Set) { + return value.size; + } + else { + return value.length; + } +} +/** + * Ensure that a number or date is below a threshold. + */ +function max(struct, threshold, options = {}) { + const { exclusive } = options; + return refine(struct, 'max', (value) => { + return exclusive + ? value < threshold + : value <= threshold || + `Expected a ${struct.type} less than ${exclusive ? '' : 'or equal to '}${threshold} but received \`${value}\``; + }); +} +/** + * Ensure that a number or date is above a threshold. + */ +function min(struct, threshold, options = {}) { + const { exclusive } = options; + return refine(struct, 'min', (value) => { + return exclusive + ? value > threshold + : value >= threshold || + `Expected a ${struct.type} greater than ${exclusive ? '' : 'or equal to '}${threshold} but received \`${value}\``; + }); +} +/** + * Ensure that a string, array, map or set is not empty. 
+ */ +function nonempty(struct) { + return refine(struct, 'nonempty', (value) => { + const size = getSize(value); + return (size > 0 || `Expected a nonempty ${struct.type} but received an empty one`); + }); +} +/** + * Ensure that a string matches a regular expression. + */ +function pattern(struct, regexp) { + return refine(struct, 'pattern', (value) => { + return (regexp.test(value) || + `Expected a ${struct.type} matching \`/${regexp.source}/\` but received "${value}"`); + }); +} +/** + * Ensure that a string, array, number, date, map, or set has a size (or length, or time) between `min` and `max`. + */ +function size(struct, min, max = min) { + const expected = `Expected a ${struct.type}`; + const of = min === max ? `of \`${min}\`` : `between \`${min}\` and \`${max}\``; + return refine(struct, 'size', (value) => { + if (typeof value === 'number' || value instanceof Date) { + return ((min <= value && value <= max) || + `${expected} ${of} but received \`${value}\``); + } + else if (value instanceof Map || value instanceof Set) { + const { size } = value; + return ((min <= size && size <= max) || + `${expected} with a size ${of} but received one with a size of \`${size}\``); + } + else { + const { length } = value; + return ((min <= length && length <= max) || + `${expected} with a length ${of} but received one with a length of \`${length}\``); + } + }); +} +/** + * Augment a `Struct` to add an additional refinement to the validation. + * + * The refiner function is guaranteed to receive a value of the struct's type, + * because the struct's existing validation will already have passed. This + * allows you to layer additional validation on top of existing structs. 
+ */ +function refine(struct, name, refiner) { + return new Struct({ + ...struct, + *refiner(value, ctx) { + yield* struct.refiner(value, ctx); + const result = refiner(value, ctx); + const failures = toFailures(result, ctx, struct, value); + for (const failure of failures) { + yield { ...failure, refinement: name }; + } + }, + }); +} + + +//# sourceMappingURL=index.mjs.map + + /***/ }), /***/ 63837: @@ -103211,18 +130444,18 @@ var __webpack_exports__ = {}; // ESM COMPAT FLAG __webpack_require__.r(__webpack_exports__); -// NAMESPACE OBJECT: ./node_modules/@noble/curves/esm/abstract/utils.js +// NAMESPACE OBJECT: ./node_modules/ethers/node_modules/@noble/curves/esm/abstract/utils.js var abstract_utils_namespaceObject = {}; __webpack_require__.r(abstract_utils_namespaceObject); __webpack_require__.d(abstract_utils_namespaceObject, { OG: () => (bitMask), - My: () => (utils_bytesToHex), + My: () => (abstract_utils_bytesToHex), bytesToNumberBE: () => (utils_bytesToNumberBE), lX: () => (utils_bytesToNumberLE), Id: () => (abstract_utils_concatBytes), fg: () => (createHmacDrbg), qj: () => (utils_ensureBytes), - hexToBytes: () => (utils_hexToBytes), + hexToBytes: () => (abstract_utils_hexToBytes), lq: () => (utils_numberToBytesBE), z: () => (numberToBytesLE), Q5: () => (validateObject) @@ -103233,7 +130466,10 @@ var graphql_namespaceObject = {}; __webpack_require__.r(graphql_namespaceObject); __webpack_require__.d(graphql_namespaceObject, { GET_DEPOSITS: () => (GET_DEPOSITS), + GET_ECHO_EVENTS: () => (GET_ECHO_EVENTS), GET_ENCRYPTED_NOTES: () => (GET_ENCRYPTED_NOTES), + GET_GOVERNANCE_APY: () => (GET_GOVERNANCE_APY), + GET_GOVERNANCE_EVENTS: () => (GET_GOVERNANCE_EVENTS), GET_NOTE_ACCOUNTS: () => (GET_NOTE_ACCOUNTS), GET_REGISTERED: () => (GET_REGISTERED), GET_STATISTIC: () => (GET_STATISTIC), @@ -103241,10 +130477,14 @@ __webpack_require__.d(graphql_namespaceObject, { _META: () => (_META), getAllDeposits: () => (getAllDeposits), getAllEncryptedNotes: () => 
(getAllEncryptedNotes), + getAllGovernanceEvents: () => (getAllGovernanceEvents), + getAllGraphEchoEvents: () => (getAllGraphEchoEvents), getAllRegisters: () => (getAllRegisters), getAllWithdrawals: () => (getAllWithdrawals), getDeposits: () => (getDeposits), getEncryptedNotes: () => (getEncryptedNotes), + getGovernanceEvents: () => (getGovernanceEvents), + getGraphEchoEvents: () => (getGraphEchoEvents), getMeta: () => (getMeta), getNoteAccounts: () => (getNoteAccounts), getRegisters: () => (getRegisters), @@ -103442,7 +130682,7 @@ var worker_threads_ignored_default = /*#__PURE__*/__webpack_require__.n(worker_t var lib = __webpack_require__(41217); // EXTERNAL MODULE: ./src/services/events/types.ts var types = __webpack_require__(98477); -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/_assert.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/_assert.js function number(n) { if (!Number.isSafeInteger(n) || n < 0) throw new Error(`Wrong positive integer: ${n}`); @@ -103480,7 +130720,7 @@ function output(out, instance) { const _assert_assert = { number, bool, bytes, hash: _assert_hash, exists, output }; /* harmony default export */ const _assert = ((/* unused pure expression or super */ null && (_assert_assert))); //# sourceMappingURL=_assert.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/_u64.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/_u64.js const U32_MASK64 = /* @__PURE__ */ BigInt(2 ** 32 - 1); const _32n = /* @__PURE__ */ BigInt(32); // We are not using BigUint64Array, because they are extremely slow as per 2022 @@ -103543,10 +130783,10 @@ const u64 = { }; /* harmony default export */ const _u64 = (u64); //# sourceMappingURL=_u64.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/crypto.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/crypto.js const crypto_crypto = typeof globalThis === 'object' && 'crypto' in 
globalThis ? globalThis.crypto : undefined; //# sourceMappingURL=crypto.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/utils.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/utils.js /*! noble-hashes - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // We use WebCrypto aka globalThis.crypto, which exists in browsers and node.js 16+. // node.js versions earlier than v19 don't declare it in global scope. @@ -103572,7 +130812,7 @@ const hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toString(1 /** * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' */ -function bytesToHex(bytes) { +function utils_bytesToHex(bytes) { if (!u8a(bytes)) throw new Error('Uint8Array expected'); // pre-caching improves the speed 6x @@ -103585,7 +130825,7 @@ function bytesToHex(bytes) { /** * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) */ -function hexToBytes(hex) { +function utils_hexToBytes(hex) { if (typeof hex !== 'string') throw new Error('hex string expected, got ' + typeof hex); const len = hex.length; @@ -103701,7 +130941,7 @@ function utils_randomBytes(bytesLength = 32) { throw new Error('crypto.getRandomValues must be defined'); } //# sourceMappingURL=utils.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/sha3.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/sha3.js @@ -106142,7 +133382,7 @@ function createRemovedLogFilter(log) { } }; } //# sourceMappingURL=provider.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/_sha2.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/_sha2.js // Polyfill for Safari 14 @@ -106257,7 +133497,7 @@ class SHA2 extends Hash { } } //# sourceMappingURL=_sha2.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/sha256.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/sha256.js // 
SHA2-256 need to try 2^128 hashes to execute birthday attack. @@ -106384,7 +133624,7 @@ class SHA224 extends SHA256 { const sha256_sha256 = /* @__PURE__ */ utils_wrapConstructor(() => new SHA256()); const sha224 = /* @__PURE__ */ (/* unused pure expression or super */ null && (wrapConstructor(() => new SHA224()))); //# sourceMappingURL=sha256.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/abstract/utils.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/abstract/utils.js /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // 100 lines of code in the file are duplicated from noble-hashes (utils). // This is OK: `abstract` directory does not use noble-hashes. @@ -106398,7 +133638,7 @@ const utils_hexes = /* @__PURE__ */ Array.from({ length: 256 }, (_, i) => i.toSt /** * @example bytesToHex(Uint8Array.from([0xca, 0xfe, 0x01, 0x23])) // 'cafe0123' */ -function utils_bytesToHex(bytes) { +function abstract_utils_bytesToHex(bytes) { if (!utils_u8a(bytes)) throw new Error('Uint8Array expected'); // pre-caching improves the speed 6x @@ -106421,7 +133661,7 @@ function hexToNumber(hex) { /** * @example hexToBytes('cafe0123') // Uint8Array.from([0xca, 0xfe, 0x01, 0x23]) */ -function utils_hexToBytes(hex) { +function abstract_utils_hexToBytes(hex) { if (typeof hex !== 'string') throw new Error('hex string expected, got ' + typeof hex); const len = hex.length; @@ -106440,22 +133680,22 @@ function utils_hexToBytes(hex) { } // BE: Big Endian, LE: Little Endian function utils_bytesToNumberBE(bytes) { - return hexToNumber(utils_bytesToHex(bytes)); + return hexToNumber(abstract_utils_bytesToHex(bytes)); } function utils_bytesToNumberLE(bytes) { if (!utils_u8a(bytes)) throw new Error('Uint8Array expected'); - return hexToNumber(utils_bytesToHex(Uint8Array.from(bytes).reverse())); + return hexToNumber(abstract_utils_bytesToHex(Uint8Array.from(bytes).reverse())); } function utils_numberToBytesBE(n, len) { - return 
utils_hexToBytes(n.toString(16).padStart(len * 2, '0')); + return abstract_utils_hexToBytes(n.toString(16).padStart(len * 2, '0')); } function numberToBytesLE(n, len) { return utils_numberToBytesBE(n, len).reverse(); } // Unpadded, rarely used function numberToVarBytesBE(n) { - return utils_hexToBytes(numberToHexUnpadded(n)); + return abstract_utils_hexToBytes(numberToHexUnpadded(n)); } /** * Takes hex string or Uint8Array, converts to Uint8Array. @@ -106470,7 +133710,7 @@ function utils_ensureBytes(title, hex, expectedLength) { let res; if (typeof hex === 'string') { try { - res = utils_hexToBytes(hex); + res = abstract_utils_hexToBytes(hex); } catch (e) { throw new Error(`${title} must be valid hex string, got "${hex}". Cause: ${e}`); @@ -106651,7 +133891,7 @@ function validateObject(object, validators, optValidators = {}) { // const z3 = validateObject(o, { test: 'boolean', z: 'bug' }); // const z4 = validateObject(o, { a: 'boolean', z: 'bug' }); //# sourceMappingURL=utils.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/abstract/modular.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/abstract/modular.js /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // Utilities for modular arithmetics and finite fields @@ -107068,7 +134308,7 @@ function mapHashToField(key, fieldOrder, isLE = false) { return isLE ? 
numberToBytesLE(reduced, fieldLen) : utils_numberToBytesBE(reduced, fieldLen); } //# sourceMappingURL=modular.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/hmac.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/hmac.js // HMAC (RFC 2104) @@ -107146,7 +134386,7 @@ class HMAC extends Hash { const hmac = (hash, key, message) => new HMAC(hash, key).update(message).digest(); hmac.create = (hash, key) => new HMAC(hash, key); //# sourceMappingURL=hmac.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/abstract/curve.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/abstract/curve.js /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // Abelian group utilities @@ -107303,7 +134543,7 @@ function validateBasic(curve) { }); } //# sourceMappingURL=curve.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/abstract/weierstrass.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/abstract/weierstrass.js /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ // Short Weierstrass curve. The formula is: y² = x³ + ax + b @@ -107447,7 +134687,7 @@ function weierstrassPoints(opts) { const { allowedPrivateKeyLengths: lengths, nByteLength, wrapPrivateKey, n } = CURVE; if (lengths && typeof key !== 'bigint') { if (key instanceof Uint8Array) - key = utils_bytesToHex(key); + key = abstract_utils_bytesToHex(key); // Normalize to hex string, pad. E.g. 
P521 would norm 130-132 char hex to 132-char bytes if (typeof key !== 'string' || !lengths.includes(key.length)) throw new Error('Invalid key'); @@ -107811,7 +135051,7 @@ function weierstrassPoints(opts) { return toBytes(Point, this, isCompressed); } toHex(isCompressed = true) { - return utils_bytesToHex(this.toRawBytes(isCompressed)); + return abstract_utils_bytesToHex(this.toRawBytes(isCompressed)); } } Point.BASE = new Point(CURVE.Gx, CURVE.Gy, Fp.ONE); @@ -107895,7 +135135,7 @@ function weierstrass(curveDef) { } }, }); - const numToNByteStr = (num) => utils_bytesToHex(utils_numberToBytesBE(num, CURVE.nByteLength)); + const numToNByteStr = (num) => abstract_utils_bytesToHex(utils_numberToBytesBE(num, CURVE.nByteLength)); function isBiggerThanHalfOrder(number) { const HALF = CURVE_ORDER >> weierstrass_1n; return number > HALF; @@ -107965,14 +135205,14 @@ function weierstrass(curveDef) { } // DER-encoded toDERRawBytes() { - return utils_hexToBytes(this.toDERHex()); + return abstract_utils_hexToBytes(this.toDERHex()); } toDERHex() { return DER.hexFromSig({ r: this.r, s: this.s }); } // padded bytes of r, then padded bytes of s toCompactRawBytes() { - return utils_hexToBytes(this.toCompactHex()); + return abstract_utils_hexToBytes(this.toCompactHex()); } toCompactHex() { return numToNByteStr(this.r) + numToNByteStr(this.s); @@ -108360,7 +135600,7 @@ function weierstrass_mapToCurveSimpleSWU(Fp, opts) { }; } //# sourceMappingURL=weierstrass.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/_shortw_utils.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/_shortw_utils.js /*! 
noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ @@ -108378,7 +135618,7 @@ function createCurve(curveDef, defHash) { return Object.freeze({ ...create(defHash), create }); } //# sourceMappingURL=_shortw_utils.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/curves/esm/secp256k1.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/curves/esm/secp256k1.js /*! noble-curves - MIT License (c) 2022 Paul Miller (paulmillr.com) */ @@ -110060,7 +137300,7 @@ function verifyTypedData(domain, types, value, signature) { */ const addresses_ZeroAddress = "0x0000000000000000000000000000000000000000"; //# sourceMappingURL=addresses.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/pbkdf2.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/pbkdf2.js @@ -110147,7 +137387,7 @@ async function pbkdf2Async(hash, password, salt, opts) { return pbkdf2Output(PRF, PRFSalt, DK, prfW, u); } //# sourceMappingURL=pbkdf2.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/sha512.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/sha512.js @@ -112012,7 +139252,7 @@ computeHmac.register = function (func) { }; Object.freeze(computeHmac); //# sourceMappingURL=hmac.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/ripemd160.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/ripemd160.js // https://homes.esat.kuleuven.be/~bosselae/ripemd160.html @@ -113230,7 +140470,7 @@ function pkcs7Strip(data) { //# sourceMappingURL=index.js.map -;// CONCATENATED MODULE: ./node_modules/@noble/hashes/esm/scrypt.js +;// CONCATENATED MODULE: ./node_modules/ethers/node_modules/@noble/hashes/esm/scrypt.js @@ -113756,7 +140996,7 @@ function isKeystoreJson(json) { catch (error) { } return false; } -function decrypt(data, key, ciphertext) { +function json_keystore_decrypt(data, key, ciphertext) { const cipher = spelunk(data, 
"crypto.cipher:string"); if (cipher === "aes-128-ctr") { const iv = spelunk(data, "crypto.cipherparams.iv:data!"); @@ -113772,7 +141012,7 @@ function getAccount(data, _key) { const ciphertext = spelunk(data, "crypto.ciphertext:data!"); const computedMAC = hexlify(keccak_keccak256(data_concat([key.slice(16, 32), ciphertext]))).substring(2); errors_assertArgument(computedMAC === spelunk(data, "crypto.mac:string!").toLowerCase(), "incorrect password", "password", "[ REDACTED ]"); - const privateKey = decrypt(data, key.slice(0, 16), ciphertext); + const privateKey = json_keystore_decrypt(data, key.slice(0, 16), ciphertext); const address = address_computeAddress(privateKey); if (data.address) { let check = data.address.toLowerCase(); @@ -129148,8 +156388,8 @@ ReverseRecords__factory.abi = ReverseRecords_factory_abi; -// EXTERNAL MODULE: url (ignored) -var url_ignored_ = __webpack_require__(40262); +// EXTERNAL MODULE: ./node_modules/crypto-browserify/index.js +var crypto_browserify = __webpack_require__(91565); // EXTERNAL MODULE: ./node_modules/bn.js/lib/bn.js var bn = __webpack_require__(39404); ;// CONCATENATED MODULE: ./src/services/utils.ts @@ -129161,6 +156401,7 @@ BigInt.prototype.toJSON = function() { return this.toString(); }; const utils_isNode = !process.browser && typeof globalThis.window === "undefined"; +const utils_crypto = utils_isNode ? 
crypto_browserify.webcrypto : globalThis.crypto; const utils_chunk = (arr, size) => [...Array(Math.ceil(arr.length / size))].map((_, i) => arr.slice(size * i, size + size * i)); function utils_sleep(ms) { return new Promise((resolve) => setTimeout(resolve, ms)); @@ -129176,28 +156417,36 @@ function utils_validateUrl(url, protocols) { return false; } } +function services_utils_concatBytes(...arrays) { + const totalSize = arrays.reduce((acc, e) => acc + e.length, 0); + const merged = new Uint8Array(totalSize); + arrays.forEach((array, i, arrays2) => { + const offset = arrays2.slice(0, i).reduce((acc, e) => acc + e.length, 0); + merged.set(array, offset); + }); + return merged; +} function bufferToBytes(b) { return new Uint8Array(b.buffer); } -function bytesToBase64(bytes) { - let binary = ""; - const len = bytes.byteLength; - for (let i = 0; i < len; ++i) { - binary += String.fromCharCode(bytes[i]); - } - return btoa(binary); +function utils_bytesToBase64(bytes) { + return btoa(String.fromCharCode.apply(null, Array.from(bytes))); } -function base64ToBytes(base64) { - const binaryString = atob(base64); - const bytes = new Uint8Array(binaryString.length); - for (let i = 0; i < binaryString.length; i++) { - bytes[i] = binaryString.charCodeAt(i); - } - return bytes; +function utils_base64ToBytes(base64) { + return Uint8Array.from(atob(base64), (c) => c.charCodeAt(0)); } function services_utils_bytesToHex(bytes) { return "0x" + Array.from(bytes).map((b) => b.toString(16).padStart(2, "0")).join(""); } +function services_utils_hexToBytes(hexString) { + if (hexString.slice(0, 2) === "0x") { + hexString = hexString.replace("0x", ""); + } + if (hexString.length % 2 !== 0) { + hexString = "0" + hexString; + } + return Uint8Array.from(hexString.match(/.{1,2}/g).map((byte) => parseInt(byte, 16))); +} function utils_bytesToBN(bytes) { return BigInt(services_utils_bytesToHex(bytes)); } @@ -129225,7 +156474,7 @@ function toFixedLength(string, length = 32) { return "0x" + 
string.padStart(length * 2, "0"); } function utils_rBigInt(nbytes = 31) { - return utils_bytesToBN(crypto.getRandomValues(new Uint8Array(nbytes))); + return utils_bytesToBN(utils_crypto.getRandomValues(new Uint8Array(nbytes))); } function bigIntReplacer(key, value) { return typeof value === "bigint" ? value.toString() : value; @@ -129808,6 +157057,22 @@ const GET_NOTE_ACCOUNTS = ` } } `; +const GET_ECHO_EVENTS = ` + query getNoteAccounts($first: Int, $fromBlock: Int) { + noteAccounts(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + id + blockNumber + address + encryptedAccount + } + _meta { + block { + number + } + hasIndexingErrors + } + } +`; const GET_ENCRYPTED_NOTES = ` query getEncryptedNotes($first: Int, $fromBlock: Int) { encryptedNotes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { @@ -129824,6 +157089,59 @@ const GET_ENCRYPTED_NOTES = ` } } `; +const GET_GOVERNANCE_EVENTS = ` + query getGovernanceEvents($first: Int, $fromBlock: Int) { + proposals(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + proposalId + proposer + target + startTime + endTime + description + } + votes(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + proposalId + voter + support + votes + from + input + } + delegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + account + delegateTo + } + undelegates(first: $first, orderBy: blockNumber, orderDirection: asc, where: { blockNumber_gte: $fromBlock }) { + blockNumber + logIndex + transactionHash + account + delegateFrom + } + _meta { + block { + number + } + hasIndexingErrors + } + } +`; +const GET_GOVERNANCE_APY = ` + stakeDailyBurns(first: 30, 
orderBy: date, orderDirection: desc) { + id + date + dailyAmountBurned + } +`; ;// CONCATENATED MODULE: ./src/services/graphql/index.ts /* provided dependency */ var graphql_console = __webpack_require__(96763); @@ -130274,7 +157592,7 @@ function getNoteAccounts(_0) { subgraphName, query: GET_NOTE_ACCOUNTS, variables: { - address + address: address.toLowerCase() }, fetchDataOptions: fetchDataOptions2 }); @@ -130292,6 +157610,95 @@ function getNoteAccounts(_0) { } }); } +function getGraphEchoEvents({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2 +}) { + return queryGraph({ + graphApi, + subgraphName, + query: GET_ECHO_EVENTS, + variables: { + first, + fromBlock + }, + fetchDataOptions: fetchDataOptions2 + }); +} +function getAllGraphEchoEvents(_0) { + return graphql_async(this, arguments, function* ({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2, + onProgress + }) { + try { + const events = []; + let lastSyncBlock = fromBlock; + while (true) { + let { + noteAccounts: result2, + _meta: { + // eslint-disable-next-line prefer-const + block: { number: currentBlock } + } + } = yield getGraphEchoEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions: fetchDataOptions2 }); + lastSyncBlock = currentBlock; + if (isEmptyArray(result2)) { + break; + } + const [firstEvent] = result2; + const [lastEvent2] = result2.slice(-1); + if (typeof onProgress === "function") { + onProgress({ + type: "EchoEvents", + fromBlock: Number(firstEvent.blockNumber), + toBlock: Number(lastEvent2.blockNumber), + count: result2.length + }); + } + if (result2.length < 900) { + events.push(...result2); + break; + } + result2 = result2.filter(({ blockNumber }) => blockNumber !== lastEvent2.blockNumber); + fromBlock = Number(lastEvent2.blockNumber); + events.push(...result2); + } + if (!events.length) { + return { + events: [], + lastSyncBlock + }; + } + const result = events.map((e) => { + const [transactionHash, logIndex] = 
e.id.split("-"); + return { + blockNumber: Number(e.blockNumber), + logIndex: Number(logIndex), + transactionHash, + address: address_getAddress(e.address), + encryptedAccount: e.encryptedAccount + }; + }); + const [lastEvent] = result.slice(-1); + return { + events: result, + lastSyncBlock: lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock + }; + } catch (err) { + graphql_console.log("Error from getAllGraphEchoEvents query"); + graphql_console.log(err); + return { + events: [], + lastSyncBlock: fromBlock + }; + } + }); +} function getEncryptedNotes({ graphApi, subgraphName, @@ -130377,6 +157784,152 @@ function getAllEncryptedNotes(_0) { } }); } +function getGovernanceEvents({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2 +}) { + return queryGraph({ + graphApi, + subgraphName, + query: GET_GOVERNANCE_EVENTS, + variables: { + first, + fromBlock + }, + fetchDataOptions: fetchDataOptions2 + }); +} +function getAllGovernanceEvents(_0) { + return graphql_async(this, arguments, function* ({ + graphApi, + subgraphName, + fromBlock, + fetchDataOptions: fetchDataOptions2, + onProgress + }) { + try { + const result = []; + let lastSyncBlock = fromBlock; + while (true) { + const { + proposals, + votes, + delegates, + undelegates, + _meta: { + block: { number: currentBlock } + } + } = yield getGovernanceEvents({ graphApi, subgraphName, fromBlock, fetchDataOptions: fetchDataOptions2 }); + lastSyncBlock = currentBlock; + const eventsLength = proposals.length + votes.length + delegates.length + undelegates.length; + if (eventsLength === 0) { + break; + } + const formattedProposals = proposals.map( + ({ blockNumber, logIndex, transactionHash, proposalId, proposer, target, startTime, endTime, description }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "ProposalCreated", + id: Number(proposalId), + proposer: address_getAddress(proposer), + 
target: address_getAddress(target), + startTime: Number(startTime), + endTime: Number(endTime), + description + }; + } + ); + const formattedVotes = votes.map( + ({ blockNumber, logIndex, transactionHash, proposalId, voter, support, votes: votes2, from, input }) => { + if (!input || input.length > 2048) { + input = ""; + } + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Voted", + proposalId: Number(proposalId), + voter: address_getAddress(voter), + support, + votes: votes2, + from: address_getAddress(from), + input + }; + } + ); + const formattedDelegates = delegates.map( + ({ blockNumber, logIndex, transactionHash, account, delegateTo }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Delegated", + account: address_getAddress(account), + delegateTo: address_getAddress(delegateTo) + }; + } + ); + const formattedUndelegates = undelegates.map( + ({ blockNumber, logIndex, transactionHash, account, delegateFrom }) => { + return { + blockNumber: Number(blockNumber), + logIndex: Number(logIndex), + transactionHash, + event: "Undelegated", + account: address_getAddress(account), + delegateFrom: address_getAddress(delegateFrom) + }; + } + ); + let formattedEvents = [ + ...formattedProposals, + ...formattedVotes, + ...formattedDelegates, + ...formattedUndelegates + ].sort((a, b) => { + if (a.blockNumber === b.blockNumber) { + return a.logIndex - b.logIndex; + } + return a.blockNumber - b.blockNumber; + }); + if (eventsLength < 900) { + result.push(...formattedEvents); + break; + } + const [firstEvent] = formattedEvents; + const [lastEvent2] = formattedEvents.slice(-1); + if (typeof onProgress === "function") { + onProgress({ + type: "Governance Events", + fromBlock: Number(firstEvent.blockNumber), + toBlock: Number(lastEvent2.blockNumber), + count: eventsLength + }); + } + formattedEvents = formattedEvents.filter(({ blockNumber }) => blockNumber !== 
lastEvent2.blockNumber); + fromBlock = Number(lastEvent2.blockNumber); + result.push(...formattedEvents); + } + const [lastEvent] = result.slice(-1); + return { + events: result, + lastSyncBlock: lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock + }; + } catch (err) { + graphql_console.log("Error from getAllGovernance query"); + graphql_console.log(err); + return { + events: [], + lastSyncBlock: fromBlock + }; + } + }); +} ;// CONCATENATED MODULE: ./src/services/batch.ts @@ -130966,6 +158519,57 @@ class BaseDepositsService extends BaseEventsService { } } } +class BaseEchoService extends BaseEventsService { + constructor({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions: fetchDataOptions2 + }) { + super({ netId, provider, graphApi, subgraphName, contract: Echoer, deployedBlock, fetchDataOptions: fetchDataOptions2 }); + } + getInstanceName() { + return `echo_${this.netId}`; + } + getType() { + return "Echo"; + } + getGraphMethod() { + return "getAllGraphEchoEvents"; + } + formatEvents(events) { + return base_async(this, null, function* () { + return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { + const { who, data } = args; + if (who && data) { + const eventObjects = { + blockNumber, + logIndex, + transactionHash + }; + return base_spreadProps(base_spreadValues({}, eventObjects), { + address: who, + encryptedAccount: data + }); + } + }).filter((e) => e); + }); + } + getEventsFromGraph(_0) { + return base_async(this, arguments, function* ({ fromBlock }) { + if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { + return { + events: [], + lastBlock: fromBlock + }; + } + return base_superGet(BaseEchoService.prototype, this, "getEventsFromGraph").call(this, { fromBlock }); + }); + } +} class BaseEncryptedNotesService extends BaseEventsService { constructor({ netId, @@ -131028,11 +158632,15 @@ class BaseGovernanceService extends 
BaseEventsService { return "*"; } getGraphMethod() { - return "governanceEvents"; + return "getAllGovernanceEvents"; } formatEvents(events) { return base_async(this, null, function* () { - const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args, eventName: event }) => { + const proposalEvents = []; + const votedEvents = []; + const delegatedEvents = []; + const undelegatedEvents = []; + events.forEach(({ blockNumber, index: logIndex, transactionHash, args, eventName: event }) => { const eventObjects = { blockNumber, logIndex, @@ -131041,60 +158649,61 @@ class BaseGovernanceService extends BaseEventsService { }; if (event === "ProposalCreated") { const { id, proposer, target, startTime, endTime, description } = args; - return base_spreadProps(base_spreadValues({}, eventObjects), { - id, + proposalEvents.push(base_spreadProps(base_spreadValues({}, eventObjects), { + id: Number(id), proposer, target, - startTime, - endTime, + startTime: Number(startTime), + endTime: Number(endTime), description - }); + })); } if (event === "Voted") { const { proposalId, voter, support, votes } = args; - return base_spreadProps(base_spreadValues({}, eventObjects), { - proposalId, + votedEvents.push(base_spreadProps(base_spreadValues({}, eventObjects), { + proposalId: Number(proposalId), voter, support, - votes - }); + votes, + from: "", + input: "" + })); } if (event === "Delegated") { const { account, to: delegateTo } = args; - return base_spreadProps(base_spreadValues({}, eventObjects), { + delegatedEvents.push(base_spreadProps(base_spreadValues({}, eventObjects), { account, delegateTo - }); + })); } if (event === "Undelegated") { const { account, from: delegateFrom } = args; - return base_spreadProps(base_spreadValues({}, eventObjects), { + undelegatedEvents.push(base_spreadProps(base_spreadValues({}, eventObjects), { account, delegateFrom - }); + })); } - }).filter((e) => e); - const votedEvents = formattedEvents.map((event, index) => 
base_spreadProps(base_spreadValues({}, event), { index })).filter(({ event }) => event === "Voted"); + }); if (votedEvents.length) { this.updateTransactionProgress({ percentage: 0 }); const txs = yield this.batchTransactionService.getBatchTransactions([ ...new Set(votedEvents.map(({ transactionHash }) => transactionHash)) ]); - votedEvents.forEach((event) => { + votedEvents.forEach((event, index) => { let { data: input, from } = txs.find((t) => t.hash === event.transactionHash); if (!input || input.length > 2048) { input = ""; } - formattedEvents[event.index].from = from; - formattedEvents[event.index].input = input; + votedEvents[index].from = from; + votedEvents[index].input = input; }); } - return formattedEvents; + return [...proposalEvents, ...votedEvents, ...delegatedEvents, ...undelegatedEvents]; }); } getEventsFromGraph(_0) { return base_async(this, arguments, function* ({ fromBlock }) { - if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { + if (!this.graphApi || !this.subgraphName || this.graphApi.includes("api.thegraph.com")) { return { events: [], lastBlock: fromBlock @@ -133885,23 +161494,22 @@ function unzipAsync(data) { }); }); } -function saveEvents(_0) { +function data_saveUserFile(_0) { return data_async(this, arguments, function* ({ - name, + fileName, userDirectory, - events + dataString }) { - const fileName = `${name}.json`.toLowerCase(); + fileName = fileName.toLowerCase(); const filePath = path_ignored_default().join(userDirectory, fileName); - const stringEvents = JSON.stringify(events, null, 2) + "\n"; const payload = yield zipAsync({ - [fileName]: new TextEncoder().encode(stringEvents) + [fileName]: new TextEncoder().encode(dataString) }); if (!(yield existsAsync(userDirectory))) { yield (0,promises_ignored_.mkdir)(userDirectory, { recursive: true }); } yield (0,promises_ignored_.writeFile)(filePath + ".zip", payload); - yield (0,promises_ignored_.writeFile)(filePath, stringEvents); + yield 
(0,promises_ignored_.writeFile)(filePath, dataString); }); } function loadSavedEvents(_0) { @@ -134140,10 +161748,136 @@ class NodeDepositsService extends BaseDepositsService { ); node_console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield data_saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" + }); + } + }); + } +} +class NodeEchoService extends BaseEchoService { + constructor({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions, + cacheDirectory, + userDirectory + }) { + super({ + netId, + provider, + graphApi, + subgraphName, + Echoer, + deployedBlock, + fetchDataOptions + }); + this.cacheDirectory = cacheDirectory; + this.userDirectory = userDirectory; + } + updateEventProgress({ type, fromBlock, toBlock, count }) { + if (toBlock) { + node_console.log(`fromBlock - ${fromBlock}`); + node_console.log(`toBlock - ${toBlock}`); + if (count) { + node_console.log(`downloaded ${type} events count - ${count}`); + node_console.log("____________________________________________"); + node_console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock} +`); + } + } + } + updateGraphProgress({ type, fromBlock, toBlock, count }) { + if (toBlock) { + node_console.log(`fromBlock - ${fromBlock}`); + node_console.log(`toBlock - ${toBlock}`); + if (count) { + node_console.log(`downloaded ${type} events from graph node count - ${count}`); + node_console.log("____________________________________________"); + node_console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock} +`); + } + } + } + getEventsFromDB() { + return node_async(this, null, function* () { + if (!this.userDirectory) { + node_console.log(`Updating events for ${this.netId} chain echo events +`); + node_console.log(`savedEvents count - ${0}`); + node_console.log(`savedEvents lastBlock - 
${this.deployedBlock} +`); + return { + events: [], + lastBlock: this.deployedBlock + }; + } + const savedEvents = yield loadSavedEvents({ + name: this.getInstanceName(), + userDirectory: this.userDirectory, + deployedBlock: this.deployedBlock + }); + node_console.log(`Updating events for ${this.netId} chain echo events +`); + node_console.log(`savedEvents count - ${savedEvents.events.length}`); + node_console.log(`savedEvents lastBlock - ${savedEvents.lastBlock} +`); + return savedEvents; + }); + } + getEventsFromCache() { + return node_async(this, null, function* () { + if (!this.cacheDirectory) { + node_console.log(`cachedEvents count - ${0}`); + node_console.log(`cachedEvents lastBlock - ${this.deployedBlock} +`); + return { + events: [], + lastBlock: this.deployedBlock + }; + } + const cachedEvents = yield loadCachedEvents({ + name: this.getInstanceName(), + cacheDirectory: this.cacheDirectory, + deployedBlock: this.deployedBlock + }); + node_console.log(`cachedEvents count - ${cachedEvents.events.length}`); + node_console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock} +`); + return cachedEvents; + }); + } + saveEvents(_0) { + return node_async(this, arguments, function* ({ events, lastBlock }) { + const instanceName = this.getInstanceName(); + node_console.log("\ntotalEvents count - ", events.length); + node_console.log( + `totalEvents lastBlock - ${events[events.length - 1] ? 
events[events.length - 1].blockNumber : lastBlock} +` + ); + const eventTable = new (cli_table3_ignored_default())(); + eventTable.push( + [{ colSpan: 2, content: "Echo Accounts", hAlign: "center" }], + ["Network", `${this.netId} chain`], + ["Events", `${events.length} events`], + [{ colSpan: 2, content: "Latest events" }], + ...events.slice(events.length - 10).reverse().map(({ blockNumber }, index) => { + const eventIndex = events.length - index; + return [eventIndex, blockNumber]; + }) + ); + node_console.log(eventTable.toString() + "\n"); + if (this.userDirectory) { + yield data_saveUserFile({ + fileName: instanceName + ".json", + userDirectory: this.userDirectory, + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -134266,10 +162000,10 @@ class NodeEncryptedNotesService extends BaseEncryptedNotesService { ); node_console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield data_saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -134397,10 +162131,10 @@ class NodeGovernanceService extends BaseGovernanceService { ); node_console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield data_saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -134523,10 +162257,10 @@ class NodeRegistryService extends BaseRegistryService { ); node_console.log(eventTable.toString() + "\n"); if (this.userDirectory) { - yield saveEvents({ - name: instanceName, + yield data_saveUserFile({ + fileName: instanceName + ".json", userDirectory: this.userDirectory, - events + dataString: JSON.stringify(events, null, 2) + "\n" }); } }); @@ -134542,8 +162276,688 @@ class NodeRegistryService extends BaseRegistryService { // EXTERNAL MODULE: 
./node_modules/ajv/dist/ajv.js var dist_ajv = __webpack_require__(63282); var ajv_default = /*#__PURE__*/__webpack_require__.n(dist_ajv); +;// CONCATENATED MODULE: ./src/services/networkConfig.ts + +var networkConfig_defProp = Object.defineProperty; +var networkConfig_getOwnPropSymbols = Object.getOwnPropertySymbols; +var networkConfig_hasOwnProp = Object.prototype.hasOwnProperty; +var networkConfig_propIsEnum = Object.prototype.propertyIsEnumerable; +var networkConfig_defNormalProp = (obj, key, value) => key in obj ? networkConfig_defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var networkConfig_spreadValues = (a, b) => { + for (var prop in b || (b = {})) + if (networkConfig_hasOwnProp.call(b, prop)) + networkConfig_defNormalProp(a, prop, b[prop]); + if (networkConfig_getOwnPropSymbols) + for (var prop of networkConfig_getOwnPropSymbols(b)) { + if (networkConfig_propIsEnum.call(b, prop)) + networkConfig_defNormalProp(a, prop, b[prop]); + } + return a; +}; +var networkConfig_NetId = /* @__PURE__ */ ((NetId2) => { + NetId2[NetId2["MAINNET"] = 1] = "MAINNET"; + NetId2[NetId2["BSC"] = 56] = "BSC"; + NetId2[NetId2["POLYGON"] = 137] = "POLYGON"; + NetId2[NetId2["OPTIMISM"] = 10] = "OPTIMISM"; + NetId2[NetId2["ARBITRUM"] = 42161] = "ARBITRUM"; + NetId2[NetId2["GNOSIS"] = 100] = "GNOSIS"; + NetId2[NetId2["AVALANCHE"] = 43114] = "AVALANCHE"; + NetId2[NetId2["SEPOLIA"] = 11155111] = "SEPOLIA"; + return NetId2; +})(networkConfig_NetId || {}); +const theGraph = { + name: "Hosted Graph", + url: "https://api.thegraph.com" +}; +const tornado = { + name: "Tornado Subgraphs", + url: "https://tornadocash-rpc.com" +}; +const defaultConfig = { + [1 /* MAINNET */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 80, + fast: 50, + standard: 25, + low: 8 + }, + nativeCurrency: "eth", + currencyName: "ETH", + explorerUrl: "https://etherscan.io", + merkleTreeHeight: 20, + emptyElement: 
"21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Ethereum Mainnet", + deployedBlock: 9116966, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + mevblockerRPC: { + name: "MevblockerRPC", + url: "https://rpc.mevblocker.io" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/ethereum-mainnet" + }, + noderealRPC: { + name: "NodeReal RPC", + url: "https://eth-mainnet.nodereal.io/v1/1659dfb40aa24bbb8153a677b98064d7" + }, + notadegenRPC: { + name: "NotADegen RPC", + url: "https://rpc.notadegen.com/eth" + }, + keydonixRPC: { + name: "Keydonix RPC", + url: "https://ethereum.keydonix.com/v1/mainnet" + }, + oneRPC: { + name: "1RPC", + url: "https://1rpc.io/eth" + } + }, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", + echoContract: "0x9B27DD5Bb15d42DC224FCD0B7caEbBe16161Df42", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornContract: "0x77777FeDdddFfC19Ff86DB637967013e6C6A116C", + governanceContract: "0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce", + stakingRewardsContract: "0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29", + registryContract: "0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2", + aggregatorContract: "0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49", + reverseRecordsContract: "0x3671aE578E63FdF66ad4F3E12CC0c0d71Ac7510C", + tornadoSubgraph: "tornadocash/mainnet-tornado-subgraph", + registrySubgraph: "tornadocash/tornado-relayer-registry", + governanceSubgraph: "tornadocash/tornado-governance", + subgraphs: { + tornado, + theGraph + }, + tokens: { + eth: { + instanceAddress: { + "0.1": "0x12D66f87A04A9E220743712cE6d9bB1B5616B8Fc", + "1": "0x47CE0C6eD5B0Ce3d3A51fdb1C52DC66a7c3c2936", + "10": 
"0x910Cbd523D972eb0a6f4cAe4618aD62622b39DbF", + "100": "0xA160cdAB225685dA1d56aa342Ad8841c3b53f291" + }, + symbol: "ETH", + decimals: 18 + }, + dai: { + instanceAddress: { + "100": "0xD4B88Df4D29F5CedD6857912842cff3b20C8Cfa3", + "1000": "0xFD8610d20aA15b7B2E3Be39B396a1bC3516c7144", + "10000": "0x07687e702b410Fa43f4cB4Af7FA097918ffD2730", + "100000": "0x23773E65ed146A459791799d01336DB287f25334" + }, + tokenAddress: "0x6B175474E89094C44Da98b954EedeAC495271d0F", + tokenGasLimit: 7e4, + symbol: "DAI", + decimals: 18, + gasLimit: 7e5 + }, + cdai: { + instanceAddress: { + "5000": "0x22aaA7720ddd5388A3c0A3333430953C68f1849b", + "50000": "0x03893a7c7463AE47D46bc7f091665f1893656003", + "500000": "0x2717c5e28cf931547B621a5dddb772Ab6A35B701", + "5000000": "0xD21be7248e0197Ee08E0c20D4a96DEBdaC3D20Af" + }, + tokenAddress: "0x5d3a536E4D6DbD6114cc1Ead35777bAB948E3643", + tokenGasLimit: 2e5, + symbol: "cDAI", + decimals: 8, + gasLimit: 7e5 + }, + usdc: { + instanceAddress: { + "100": "0xd96f2B1c14Db8458374d9Aca76E26c3D18364307", + "1000": "0x4736dCf1b7A3d580672CcE6E7c65cd5cc9cFBa9D" + }, + tokenAddress: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", + tokenGasLimit: 7e4, + symbol: "USDC", + decimals: 6, + gasLimit: 7e5 + }, + usdt: { + instanceAddress: { + "100": "0x169AD27A470D064DEDE56a2D3ff727986b15D52B", + "1000": "0x0836222F2B2B24A3F36f98668Ed8F0B38D1a872f" + }, + tokenAddress: "0xdAC17F958D2ee523a2206206994597C13D831ec7", + tokenGasLimit: 7e4, + symbol: "USDT", + decimals: 6, + gasLimit: 7e5 + }, + wbtc: { + instanceAddress: { + "0.1": "0x178169B423a011fff22B9e3F3abeA13414dDD0F1", + "1": "0x610B717796ad172B316836AC95a2ffad065CeaB4", + "10": "0xbB93e510BbCD0B7beb5A853875f9eC60275CF498" + }, + tokenAddress: "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", + tokenGasLimit: 7e4, + symbol: "WBTC", + decimals: 8, + gasLimit: 7e5 + } + }, + ensSubdomainKey: "mainnet-tornado", + pollInterval: 15, + constants: { + GOVERNANCE_BLOCK: 11474695, + NOTE_ACCOUNT_BLOCK: 11842486, + 
ENCRYPTED_NOTES_BLOCK: 12143762, + REGISTRY_BLOCK: 14173129, + MINING_BLOCK_TIME: 15 + } + }, + [56 /* BSC */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 5, + fast: 5, + standard: 5, + low: 5 + }, + nativeCurrency: "bnb", + currencyName: "BNB", + explorerUrl: "https://bscscan.com", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Binance Smart Chain", + deployedBlock: 8158799, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornadoSubgraph: "tornadocash/bsc-tornado-subgraph", + subgraphs: { + tornado, + theGraph + }, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com/bsc" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://bsc-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/bsc-mainnet" + }, + noderealRPC: { + name: "NodeReal RPC", + url: "https://bsc-mainnet.nodereal.io/v1/64a9df0874fb4a93b9d0a3849de012d3" + }, + oneRPC: { + name: "1RPC", + url: "https://1rpc.io/bnb" + } + }, + tokens: { + bnb: { + instanceAddress: { + "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", + "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", + "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" + }, + symbol: "BNB", + decimals: 18 + } + }, + ensSubdomainKey: "bsc-tornado", + pollInterval: 10, + constants: { + NOTE_ACCOUNT_BLOCK: 8159269, + ENCRYPTED_NOTES_BLOCK: 8159269 + } + }, + [137 /* POLYGON */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 100, + fast: 75, + standard: 50, + low: 30 + }, + nativeCurrency: "matic", + currencyName: "MATIC", + explorerUrl: 
"https://polygonscan.com", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Polygon (Matic) Network", + deployedBlock: 16257962, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + gasPriceOracleContract: "0xF81A8D8D3581985D3969fe53bFA67074aDFa8F3C", + tornadoSubgraph: "tornadocash/matic-tornado-subgraph", + subgraphs: { + tornado, + theGraph + }, + rpcUrls: { + chainnodes: { + name: "Tornado RPC", + url: "https://polygon-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/polygon-mainnet" + }, + oneRpc: { + name: "1RPC", + url: "https://1rpc.io/matic" + } + }, + tokens: { + matic: { + instanceAddress: { + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", + "1000": "0xdf231d99Ff8b6c6CBF4E9B9a945CBAcEF9339178", + "10000": "0xaf4c0B70B2Ea9FB7487C7CbB37aDa259579fe040", + "100000": "0xa5C2254e4253490C54cef0a4347fddb8f75A4998" + }, + symbol: "MATIC", + decimals: 18 + } + }, + ensSubdomainKey: "polygon-tornado", + pollInterval: 10, + constants: { + NOTE_ACCOUNT_BLOCK: 16257996, + ENCRYPTED_NOTES_BLOCK: 16257996 + } + }, + [10 /* OPTIMISM */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 1e-3, + fast: 1e-3, + standard: 1e-3, + low: 1e-3 + }, + nativeCurrency: "eth", + currencyName: "ETH", + explorerUrl: "https://optimistic.etherscan.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Optimism", + deployedBlock: 2243689, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: 
"0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + ovmGasPriceOracleContract: "0x420000000000000000000000000000000000000F", + tornadoSubgraph: "tornadocash/optimism-tornado-subgraph", + subgraphs: { + tornado, + theGraph + }, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com/op" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://optimism-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + optimism: { + name: "Optimism RPC", + url: "https://mainnet.optimism.io" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/optimism-mainnet" + }, + oneRpc: { + name: "1RPC", + url: "https://1rpc.io/op" + } + }, + tokens: { + eth: { + instanceAddress: { + "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", + "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", + "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" + }, + symbol: "ETH", + decimals: 18 + } + }, + ensSubdomainKey: "optimism-tornado", + pollInterval: 15, + constants: { + NOTE_ACCOUNT_BLOCK: 2243694, + ENCRYPTED_NOTES_BLOCK: 2243694 + } + }, + [42161 /* ARBITRUM */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 4, + fast: 3, + standard: 2.52, + low: 2.29 + }, + nativeCurrency: "eth", + currencyName: "ETH", + explorerUrl: "https://arbiscan.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Arbitrum One", + deployedBlock: 3430648, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornadoSubgraph: "tornadocash/arbitrum-tornado-subgraph", + subgraphs: { + tornado, + theGraph + }, + rpcUrls: { + 
tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com/arbitrum" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://arbitrum-one.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + arbitrum: { + name: "Arbitrum RPC", + url: "https://arb1.arbitrum.io/rpc" + }, + stackup: { + name: "Stackup RPC", + url: "https://public.stackup.sh/api/v1/node/arbitrum-one" + }, + oneRpc: { + name: "1rpc", + url: "https://1rpc.io/arb" + } + }, + tokens: { + eth: { + instanceAddress: { + "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", + "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", + "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" + }, + symbol: "ETH", + decimals: 18 + } + }, + ensSubdomainKey: "arbitrum-tornado", + pollInterval: 15, + constants: { + NOTE_ACCOUNT_BLOCK: 3430605, + ENCRYPTED_NOTES_BLOCK: 3430605 + } + }, + [100 /* GNOSIS */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 6, + fast: 5, + standard: 4, + low: 1 + }, + nativeCurrency: "xdai", + currencyName: "xDAI", + explorerUrl: "https://gnosisscan.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Gnosis Chain", + deployedBlock: 17754561, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornadoSubgraph: "tornadocash/xdai-tornado-subgraph", + subgraphs: { + tornado, + theGraph + }, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com/gnosis" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://gnosis-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + }, + gnosis: { + name: "Gnosis RPC", + url: "https://rpc.gnosischain.com" + }, + stackup: { + name: "Stackup 
RPC", + url: "https://public.stackup.sh/api/v1/node/arbitrum-one" + }, + blockPi: { + name: "BlockPi", + url: "https://gnosis.blockpi.network/v1/rpc/public" + } + }, + tokens: { + xdai: { + instanceAddress: { + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", + "1000": "0xdf231d99Ff8b6c6CBF4E9B9a945CBAcEF9339178", + "10000": "0xaf4c0B70B2Ea9FB7487C7CbB37aDa259579fe040", + "100000": "0xa5C2254e4253490C54cef0a4347fddb8f75A4998" + }, + symbol: "xDAI", + decimals: 18 + } + }, + ensSubdomainKey: "gnosis-tornado", + pollInterval: 15, + constants: { + NOTE_ACCOUNT_BLOCK: 17754564, + ENCRYPTED_NOTES_BLOCK: 17754564 + } + }, + [43114 /* AVALANCHE */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 225, + fast: 35, + standard: 25, + low: 25 + }, + nativeCurrency: "avax", + currencyName: "AVAX", + explorerUrl: "https://snowtrace.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Avalanche Mainnet", + deployedBlock: 4429818, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + offchainOracleContract: "0x0AdDd25a91563696D8567Df78D5A01C9a991F9B8", + tornadoSubgraph: "tornadocash/avalanche-tornado-subgraph", + subgraphs: { + theGraph + }, + rpcUrls: { + oneRPC: { + name: "OneRPC", + url: "https://1rpc.io/avax/c" + }, + avalancheRPC: { + name: "Avalanche RPC", + url: "https://api.avax.network/ext/bc/C/rpc" + }, + meowRPC: { + name: "Meow RPC", + url: "https://avax.meowrpc.com" + } + }, + tokens: { + avax: { + instanceAddress: { + "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", + "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", + "500": "0xaf8d1839c3c67cf571aa74B5c12398d4901147B3" + }, + symbol: "AVAX", + decimals: 18 + } + }, + ensSubdomainKey: "avalanche-tornado", + pollInterval: 10, + constants: { + NOTE_ACCOUNT_BLOCK: 4429813, + 
ENCRYPTED_NOTES_BLOCK: 4429813 + } + }, + [11155111 /* SEPOLIA */]: { + rpcCallRetryAttempt: 15, + gasPrices: { + instant: 2, + fast: 2, + standard: 2, + low: 2 + }, + nativeCurrency: "eth", + currencyName: "SepoliaETH", + explorerUrl: "https://sepolia.etherscan.io", + merkleTreeHeight: 20, + emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", + networkName: "Ethereum Sepolia", + deployedBlock: 5594395, + multicallContract: "0xcA11bde05977b3631167028862bE2a173976CA11", + routerContract: "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee", + echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", + tornContract: "0x3AE6667167C0f44394106E197904519D808323cA", + governanceContract: "0xe5324cD7602eeb387418e594B87aCADee08aeCAD", + stakingRewardsContract: "0x6d0018890751Efd31feb8166711B16732E2b496b", + registryContract: "0x1428e5d2356b13778A13108b10c440C83011dfB8", + aggregatorContract: "0x4088712AC9fad39ea133cdb9130E465d235e9642", + reverseRecordsContract: "0xEc29700C0283e5Be64AcdFe8077d6cC95dE23C23", + tornadoSubgraph: "tornadocash/sepolia-tornado-subgraph", + subgraphs: { + tornado + }, + rpcUrls: { + tornado: { + name: "Tornado RPC", + url: "https://tornadocash-rpc.com/sepolia" + }, + sepolia: { + name: "Sepolia RPC", + url: "https://rpc.sepolia.org" + }, + chainnodes: { + name: "Chainnodes RPC", + url: "https://sepolia.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" + } + }, + tokens: { + eth: { + instanceAddress: { + "0.1": "0x8C4A04d872a6C1BE37964A21ba3a138525dFF50b", + "1": "0x8cc930096B4Df705A007c4A039BDFA1320Ed2508", + "10": "0x8D10d506D29Fc62ABb8A290B99F66dB27Fc43585", + "100": "0x44c5C92ed73dB43888210264f0C8b36Fd68D8379" + }, + symbol: "ETH", + decimals: 18 + }, + dai: { + instanceAddress: { + "100": "0x6921fd1a97441dd603a997ED6DDF388658daf754", + "1000": "0x50a637770F5d161999420F7d70d888DE47207145", + "10000": "0xecD649870407cD43923A816Cc6334a5bdf113621", + "100000": "0x73B4BD04bF83206B6e979BE2507098F92EDf4F90" + 
}, + tokenAddress: "0xFF34B3d4Aee8ddCd6F9AFFFB6Fe49bD371b8a357", + tokenGasLimit: 7e4, + symbol: "DAI", + decimals: 18, + gasLimit: 7e5 + } + }, + ensSubdomainKey: "sepolia-tornado", + pollInterval: 15, + constants: { + GOVERNANCE_BLOCK: 5594395, + NOTE_ACCOUNT_BLOCK: 5594395, + ENCRYPTED_NOTES_BLOCK: 5594395, + MINING_BLOCK_TIME: 15 + } + } +}; +const enabledChains = Object.values(networkConfig_NetId); +let customConfig = {}; +function addNetwork(newConfig) { + enabledChains.push( + ...Object.keys(newConfig).map((netId) => Number(netId)).filter((netId) => !enabledChains.includes(netId)) + ); + customConfig = networkConfig_spreadValues(networkConfig_spreadValues({}, customConfig), newConfig); +} +function getNetworkConfig() { + const allConfig = networkConfig_spreadValues(networkConfig_spreadValues({}, defaultConfig), customConfig); + return enabledChains.reduce((acc, curr) => { + acc[curr] = allConfig[curr]; + return acc; + }, {}); +} +function getConfig(netId) { + const allConfig = getNetworkConfig(); + const chainConfig = allConfig[netId]; + if (!chainConfig) { + const errMsg = `No config found for network ${netId}!`; + throw new Error(errMsg); + } + return chainConfig; +} +function getInstanceByAddress({ netId, address }) { + const { tokens } = getConfig(netId); + for (const [currency, { instanceAddress }] of Object.entries(tokens)) { + for (const [amount, instance] of Object.entries(instanceAddress)) { + if (instance === address) { + return { + amount, + currency + }; + } + } + } +} +function getSubdomains() { + const allConfig = getNetworkConfig(); + return enabledChains.map((chain) => allConfig[chain].ensSubdomainKey); +} + ;// CONCATENATED MODULE: ./src/services/schemas/status.ts + const addressType = { type: "string", pattern: "^0x[a-fA-F0-9]{40}$" }; const bnType = { type: "string", BN: true }; const statusSchema = { @@ -134618,7 +163032,7 @@ function status_getStatusSchema(netId, config) { } ); schema.properties.instances = instances; - if (Number(netId) 
=== 1) { + if (netId === NetId.MAINNET) { const _tokens = Object.keys(tokens).filter((t) => t !== nativeCurrency); const ethPrices = { type: "object", @@ -136157,8 +164571,6 @@ class ChaCha { } } -// EXTERNAL MODULE: ./node_modules/crypto-browserify/index.js -var crypto_browserify = __webpack_require__(91565); ;// CONCATENATED MODULE: ./node_modules/ffjavascript/src/random.js /* provided dependency */ var random_process = __webpack_require__(65606); @@ -169907,7 +198319,7 @@ class Deposit { const newDeposit = new Deposit({ currency: currency.toLowerCase(), amount, - netId: Number(netId), + netId, note: `tornado-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.noteHex}`, noteHex: depositObject.noteHex, invoice: `tornadoInvoice-${currency.toLowerCase()}-${amount}-${netId}-${depositObject.commitmentHex}`, @@ -169983,6 +198395,135 @@ class Invoice { } } +// EXTERNAL MODULE: ./node_modules/@metamask/eth-sig-util/dist/index.js +var dist = __webpack_require__(51594); +;// CONCATENATED MODULE: ./src/services/encryptedNotes.ts + + + + +function packEncryptedMessage({ nonce, ephemPublicKey, ciphertext }) { + const nonceBuf = toFixedHex(bytesToHex(base64ToBytes(nonce)), 24); + const ephemPublicKeyBuf = toFixedHex(bytesToHex(base64ToBytes(ephemPublicKey)), 32); + const ciphertextBuf = bytesToHex(base64ToBytes(ciphertext)); + const messageBuff = concatBytes(hexToBytes(nonceBuf), hexToBytes(ephemPublicKeyBuf), hexToBytes(ciphertextBuf)); + return bytesToHex(messageBuff); +} +function unpackEncryptedMessage(encryptedMessage) { + const messageBuff = hexToBytes(encryptedMessage); + const nonceBuf = bytesToBase64(messageBuff.slice(0, 24)); + const ephemPublicKeyBuf = bytesToBase64(messageBuff.slice(24, 56)); + const ciphertextBuf = bytesToBase64(messageBuff.slice(56)); + return { + messageBuff: bytesToHex(messageBuff), + version: "x25519-xsalsa20-poly1305", + nonce: nonceBuf, + ephemPublicKey: ephemPublicKeyBuf, + ciphertext: ciphertextBuf + }; +} +class NoteAccount { + 
constructor({ netId, blockNumber, recoveryKey, Echoer: Echoer2 }) { + if (!recoveryKey) { + recoveryKey = bytesToHex(crypto.getRandomValues(new Uint8Array(32))).slice(2); + } + this.netId = Math.floor(Number(netId)); + this.blockNumber = blockNumber; + this.recoveryKey = recoveryKey; + this.recoveryAddress = computeAddress("0x" + recoveryKey); + this.recoveryPublicKey = getEncryptionPublicKey(recoveryKey); + this.Echoer = Echoer2; + } + /** + * Intends to mock eth_getEncryptionPublicKey behavior from MetaMask + * In order to make the recoveryKey retrival from Echoer possible from the bare private key + */ + static getWalletPublicKey(wallet) { + let { privateKey } = wallet; + if (privateKey.startsWith("0x")) { + privateKey = privateKey.replace("0x", ""); + } + return getEncryptionPublicKey(privateKey); + } + // This function intends to provide an encrypted value of recoveryKey for an on-chain Echoer backup purpose + // Thus, the pubKey should be derived by a Wallet instance or from Web3 wallets + // pubKey: base64 encoded 32 bytes key from https://docs.metamask.io/wallet/reference/eth_getencryptionpublickey/ + getEncryptedAccount(walletPublicKey) { + const encryptedData = encrypt({ + publicKey: walletPublicKey, + data: this.recoveryKey, + version: "x25519-xsalsa20-poly1305" + }); + const data = packEncryptedMessage(encryptedData); + return { + // Use this later to save hexPrivateKey generated with + // Buffer.from(JSON.stringify(encryptedData)).toString('hex') + // As we don't use buffer with this library we should leave UI to do the rest + encryptedData, + // Data that could be used as an echo(data) params + data + }; + } + /** + * Decrypt Echoer backuped note encryption account with private keys + */ + decryptAccountsWithWallet(wallet, events) { + let { privateKey } = wallet; + if (privateKey.startsWith("0x")) { + privateKey = privateKey.replace("0x", ""); + } + const decryptedEvents = []; + for (const event of events) { + try { + const unpackedMessage = 
unpackEncryptedMessage(event.encryptedAccount); + const recoveryKey = decrypt({ + encryptedData: unpackedMessage, + privateKey + }); + decryptedEvents.push( + new NoteAccount({ + netId: this.netId, + blockNumber: event.blockNumber, + recoveryKey, + Echoer: this.Echoer + }) + ); + } catch (e) { + continue; + } + } + return decryptedEvents; + } + decryptNotes(events) { + const decryptedEvents = []; + for (const event of events) { + try { + const unpackedMessage = unpackEncryptedMessage(event.encryptedNote); + const [address, noteHex] = decrypt({ + encryptedData: unpackedMessage, + privateKey: this.recoveryKey + }).split("-"); + decryptedEvents.push({ + blockNumber: event.blockNumber, + address: getAddress(address), + noteHex + }); + } catch (e) { + continue; + } + } + return decryptedEvents; + } + encryptNote({ address, noteHex }) { + const encryptedData = encrypt({ + publicKey: this.recoveryPublicKey, + data: `${address}-${noteHex}`, + version: "x25519-xsalsa20-poly1305" + }); + return packEncryptedMessage(encryptedData); + } +} + // EXTERNAL MODULE: worker_threads (ignored) var worker_threads_ignored_0 = __webpack_require__(40607); ;// CONCATENATED MODULE: ./src/services/mimc.ts @@ -170066,7 +198607,7 @@ class MerkleTreeService { amount, currency, Tornado, - commitment, + commitmentHex, merkleTreeHeight = 20, emptyElement = "21663839004416932945382355908790599225266501822907911457504978515578255421292", merkleWorkerPath @@ -170077,13 +198618,13 @@ class MerkleTreeService { this.netId = Number(netId); this.Tornado = Tornado; this.instanceName = instanceName; - this.commitment = commitment; + this.commitmentHex = commitmentHex; this.merkleTreeHeight = merkleTreeHeight; this.emptyElement = emptyElement; this.merkleWorkerPath = merkleWorkerPath; } - createTree(_0) { - return merkleTree_async(this, arguments, function* ({ events }) { + createTree(events) { + return merkleTree_async(this, null, function* () { const { hash: hashFunction } = yield mimc.getHash(); if 
(this.merkleWorkerPath) { merkleTree_console.log("Using merkleWorker\n"); @@ -170134,15 +198675,69 @@ class MerkleTreeService { }); }); } - verifyTree(_0) { - return merkleTree_async(this, arguments, function* ({ events }) { + createPartialTree(_0) { + return merkleTree_async(this, arguments, function* ({ edge, elements }) { + const { hash: hashFunction } = yield mimc.getHash(); + if (this.merkleWorkerPath) { + merkleTree_console.log("Using merkleWorker\n"); + try { + if (isNode) { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new NodeWorker(this.merkleWorkerPath, { + workerData: { + merkleTreeHeight: this.merkleTreeHeight, + edge, + elements, + zeroElement: this.emptyElement + } + }); + worker.on("message", resolve); + worker.on("error", reject); + worker.on("exit", (code) => { + if (code !== 0) { + reject(new Error(`Worker stopped with exit code ${code}`)); + } + }); + }); + return PartialMerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } else { + const merkleWorkerPromise = new Promise((resolve, reject) => { + const worker = new Worker(this.merkleWorkerPath); + worker.onmessage = (e) => { + resolve(e.data); + }; + worker.onerror = (e) => { + reject(e); + }; + worker.postMessage({ + merkleTreeHeight: this.merkleTreeHeight, + edge, + elements, + zeroElement: this.emptyElement + }); + }); + return PartialMerkleTree.deserialize(JSON.parse(yield merkleWorkerPromise), hashFunction); + } + } catch (err) { + merkleTree_console.log("merkleWorker failed, falling back to synchronous merkle tree"); + merkleTree_console.log(err); + } + } + return new PartialMerkleTree(this.merkleTreeHeight, edge, elements, { + zeroElement: this.emptyElement, + hashFunction + }); + }); + } + verifyTree(events) { + return merkleTree_async(this, null, function* () { merkleTree_console.log( ` Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCase()} would take a while ` ); 
merkleTree_console.time("Created tree in"); - const tree = yield this.createTree({ events: events.map(({ commitment }) => BigInt(commitment).toString()) }); + const tree = yield this.createTree(events.map(({ commitment }) => commitment)); merkleTree_console.timeEnd("Created tree in"); merkleTree_console.log(""); const isKnownRoot = yield this.Tornado.isKnownRoot(toFixedHex(BigInt(tree.root))); @@ -170155,652 +198750,6 @@ Creating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCa } } -;// CONCATENATED MODULE: ./src/services/networkConfig.ts - -const blockSyncInterval = 1e4; -const enabledChains = ["1", "10", "56", "100", "137", "42161", "43114", "11155111"]; -const theGraph = { - name: "Hosted Graph", - url: "https://api.thegraph.com" -}; -const tornado = { - name: "Tornado Subgraphs", - url: "https://tornadocash-rpc.com" -}; -const networkConfig = { - netId1: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 80, - fast: 50, - standard: 25, - low: 8 - }, - nativeCurrency: "eth", - currencyName: "ETH", - explorerUrl: { - tx: "https://etherscan.io/tx/", - address: "https://etherscan.io/address/", - block: "https://etherscan.io/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Ethereum Mainnet", - deployedBlock: 9116966, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com" - }, - chainnodes: { - name: "Tornado RPC", - url: "https://mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - mevblockerRPC: { - name: "MevblockerRPC", - url: "https://rpc.mevblocker.io" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/ethereum-mainnet" - }, - noderealRPC: { - name: "NodeReal RPC", - url: "https://eth-mainnet.nodereal.io/v1/1659dfb40aa24bbb8153a677b98064d7" - }, - notadegenRPC: { - name: "NotADegen RPC", - url: "https://rpc.notadegen.com/eth" - }, - keydonixRPC: { - name: 
"Keydonix RPC", - url: "https://ethereum.keydonix.com/v1/mainnet" - }, - oneRPC: { - name: "1RPC", - url: "https://1rpc.io/eth" - } - }, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - routerContract: "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", - registryContract: "0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2", - echoContract: "0x9B27DD5Bb15d42DC224FCD0B7caEbBe16161Df42", - aggregatorContract: "0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49", - reverseRecordsContract: "0x3671aE578E63FdF66ad4F3E12CC0c0d71Ac7510C", - tornadoSubgraph: "tornadocash/mainnet-tornado-subgraph", - registrySubgraph: "tornadocash/tornado-relayer-registry", - subgraphs: { - tornado, - theGraph - }, - tokens: { - eth: { - instanceAddress: { - "0.1": "0x12D66f87A04A9E220743712cE6d9bB1B5616B8Fc", - "1": "0x47CE0C6eD5B0Ce3d3A51fdb1C52DC66a7c3c2936", - "10": "0x910Cbd523D972eb0a6f4cAe4618aD62622b39DbF", - "100": "0xA160cdAB225685dA1d56aa342Ad8841c3b53f291" - }, - symbol: "ETH", - decimals: 18 - }, - dai: { - instanceAddress: { - "100": "0xD4B88Df4D29F5CedD6857912842cff3b20C8Cfa3", - "1000": "0xFD8610d20aA15b7B2E3Be39B396a1bC3516c7144", - "10000": "0x07687e702b410Fa43f4cB4Af7FA097918ffD2730", - "100000": "0x23773E65ed146A459791799d01336DB287f25334" - }, - tokenAddress: "0x6B175474E89094C44Da98b954EedeAC495271d0F", - tokenGasLimit: 7e4, - symbol: "DAI", - decimals: 18, - gasLimit: 7e5 - }, - cdai: { - instanceAddress: { - "5000": "0x22aaA7720ddd5388A3c0A3333430953C68f1849b", - "50000": "0x03893a7c7463AE47D46bc7f091665f1893656003", - "500000": "0x2717c5e28cf931547B621a5dddb772Ab6A35B701", - "5000000": "0xD21be7248e0197Ee08E0c20D4a96DEBdaC3D20Af" - }, - tokenAddress: "0x5d3a536E4D6DbD6114cc1Ead35777bAB948E3643", - tokenGasLimit: 2e5, - symbol: "cDAI", - decimals: 8, - gasLimit: 7e5 - }, - usdc: { - instanceAddress: { - "100": "0xd96f2B1c14Db8458374d9Aca76E26c3D18364307", - "1000": "0x4736dCf1b7A3d580672CcE6E7c65cd5cc9cFBa9D" - }, - tokenAddress: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", - 
tokenGasLimit: 7e4, - symbol: "USDC", - decimals: 6, - gasLimit: 7e5 - }, - usdt: { - instanceAddress: { - "100": "0x169AD27A470D064DEDE56a2D3ff727986b15D52B", - "1000": "0x0836222F2B2B24A3F36f98668Ed8F0B38D1a872f" - }, - tokenAddress: "0xdAC17F958D2ee523a2206206994597C13D831ec7", - tokenGasLimit: 7e4, - symbol: "USDT", - decimals: 6, - gasLimit: 7e5 - }, - wbtc: { - instanceAddress: { - "0.1": "0x178169B423a011fff22B9e3F3abeA13414dDD0F1", - "1": "0x610B717796ad172B316836AC95a2ffad065CeaB4", - "10": "0xbB93e510BbCD0B7beb5A853875f9eC60275CF498" - }, - tokenAddress: "0x2260FAC5E5542a773Aa44fBCfeDf7C193bc2C599", - tokenGasLimit: 7e4, - symbol: "WBTC", - decimals: 8, - gasLimit: 7e5 - } - }, - ensSubdomainKey: "mainnet-tornado", - pollInterval: 15, - constants: { - GOVERNANCE_BLOCK: 11474695, - NOTE_ACCOUNT_BLOCK: 11842486, - ENCRYPTED_NOTES_BLOCK: 14248730, - REGISTRY_BLOCK: 14173129, - MINING_BLOCK_TIME: 15 - }, - "torn.contract.tornadocash.eth": "0x77777FeDdddFfC19Ff86DB637967013e6C6A116C", - "governance.contract.tornadocash.eth": "0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce", - "tornado-router.contract.tornadocash.eth": "0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b", - "staking-rewards.contract.tornadocash.eth": "0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29" - }, - netId56: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 5, - fast: 5, - standard: 5, - low: 5 - }, - nativeCurrency: "bnb", - currencyName: "BNB", - explorerUrl: { - tx: "https://bscscan.com/tx/", - address: "https://bscscan.com/address/", - block: "https://bscscan.com/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Binance Smart Chain", - deployedBlock: 8158799, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - tornadoSubgraph: "tornadocash/bsc-tornado-subgraph", - 
subgraphs: { - tornado, - theGraph - }, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com/bsc" - }, - chainnodes: { - name: "Tornado RPC", - url: "https://bsc-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/bsc-mainnet" - }, - noderealRPC: { - name: "NodeReal RPC", - url: "https://bsc-mainnet.nodereal.io/v1/64a9df0874fb4a93b9d0a3849de012d3" - }, - oneRPC: { - name: "1RPC", - url: "https://1rpc.io/bnb" - } - }, - tokens: { - bnb: { - instanceAddress: { - "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", - "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", - "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" - }, - symbol: "BNB", - decimals: 18 - } - }, - ensSubdomainKey: "bsc-tornado", - pollInterval: 10, - constants: { - NOTE_ACCOUNT_BLOCK: 8159269, - ENCRYPTED_NOTES_BLOCK: 8159269 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId137: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 100, - fast: 75, - standard: 50, - low: 30 - }, - nativeCurrency: "matic", - currencyName: "MATIC", - explorerUrl: { - tx: "https://polygonscan.com/tx/", - address: "https://polygonscan.com/address/", - block: "https://polygonscan.com/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Polygon (Matic) Network", - deployedBlock: 16257962, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - gasPriceOracleContract: "0xF81A8D8D3581985D3969fe53bFA67074aDFa8F3C", - tornadoSubgraph: "tornadocash/matic-tornado-subgraph", - subgraphs: { - tornado, - theGraph - }, - rpcUrls: { - chainnodes: { - name: "Tornado 
RPC", - url: "https://polygon-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/polygon-mainnet" - }, - oneRpc: { - name: "1RPC", - url: "https://1rpc.io/matic" - } - }, - tokens: { - matic: { - instanceAddress: { - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", - "1000": "0xdf231d99Ff8b6c6CBF4E9B9a945CBAcEF9339178", - "10000": "0xaf4c0B70B2Ea9FB7487C7CbB37aDa259579fe040", - "100000": "0xa5C2254e4253490C54cef0a4347fddb8f75A4998" - }, - symbol: "MATIC", - decimals: 18 - } - }, - ensSubdomainKey: "polygon-tornado", - pollInterval: 10, - constants: { - NOTE_ACCOUNT_BLOCK: 16257996, - ENCRYPTED_NOTES_BLOCK: 16257996 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId10: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 1e-3, - fast: 1e-3, - standard: 1e-3, - low: 1e-3 - }, - nativeCurrency: "eth", - currencyName: "ETH", - explorerUrl: { - tx: "https://optimistic.etherscan.io/tx/", - address: "https://optimistic.etherscan.io/address/", - block: "https://optimistic.etherscan.io/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Optimism", - deployedBlock: 2243689, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - ovmGasPriceOracleContract: "0x420000000000000000000000000000000000000F", - tornadoSubgraph: "tornadocash/optimism-tornado-subgraph", - subgraphs: { - tornado, - theGraph - }, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com/op" - }, - chainnodes: { - name: "Tornado RPC", - url: "https://optimism-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - optimism: { - name: "Optimism RPC", - url: 
"https://mainnet.optimism.io" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/optimism-mainnet" - }, - oneRpc: { - name: "1RPC", - url: "https://1rpc.io/op" - } - }, - tokens: { - eth: { - instanceAddress: { - "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", - "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", - "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" - }, - symbol: "ETH", - decimals: 18 - } - }, - ensSubdomainKey: "optimism-tornado", - pollInterval: 15, - constants: { - NOTE_ACCOUNT_BLOCK: 2243694, - ENCRYPTED_NOTES_BLOCK: 2243694 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId42161: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 4, - fast: 3, - standard: 2.52, - low: 2.29 - }, - nativeCurrency: "eth", - currencyName: "ETH", - explorerUrl: { - tx: "https://arbiscan.io/tx/", - address: "https://arbiscan.io/address/", - block: "https://arbiscan.io/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Arbitrum One", - deployedBlock: 3430648, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - tornadoSubgraph: "tornadocash/arbitrum-tornado-subgraph", - subgraphs: { - tornado, - theGraph - }, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com/arbitrum" - }, - chainnodes: { - name: "Tornado RPC", - url: "https://arbitrum-one.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - arbitrum: { - name: "Arbitrum RPC", - url: "https://arb1.arbitrum.io/rpc" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/arbitrum-one" - }, - oneRpc: { - name: "1rpc", - url: "https://1rpc.io/arb" - } - }, - 
tokens: { - eth: { - instanceAddress: { - "0.1": "0x84443CFd09A48AF6eF360C6976C5392aC5023a1F", - "1": "0xd47438C816c9E7f2E2888E060936a499Af9582b3", - "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD" - }, - symbol: "ETH", - decimals: 18 - } - }, - ensSubdomainKey: "arbitrum-tornado", - pollInterval: 15, - constants: { - NOTE_ACCOUNT_BLOCK: 3430605, - ENCRYPTED_NOTES_BLOCK: 3430605 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId100: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 6, - fast: 5, - standard: 4, - low: 1 - }, - nativeCurrency: "xdai", - currencyName: "xDAI", - explorerUrl: { - tx: "https://blockscout.com/xdai/mainnet/tx/", - address: "https://blockscout.com/xdai/mainnet/address/", - block: "https://blockscout.com/xdai/mainnet/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Gnosis Chain", - deployedBlock: 17754561, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - tornadoSubgraph: "tornadocash/xdai-tornado-subgraph", - subgraphs: { - tornado, - theGraph - }, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com/gnosis" - }, - chainnodes: { - name: "Tornado RPC", - url: "https://gnosis-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - }, - gnosis: { - name: "Gnosis RPC", - url: "https://rpc.gnosischain.com" - }, - stackup: { - name: "Stackup RPC", - url: "https://public.stackup.sh/api/v1/node/arbitrum-one" - }, - blockPi: { - name: "BlockPi", - url: "https://gnosis.blockpi.network/v1/rpc/public" - } - }, - tokens: { - xdai: { - instanceAddress: { - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", - "1000": 
"0xdf231d99Ff8b6c6CBF4E9B9a945CBAcEF9339178", - "10000": "0xaf4c0B70B2Ea9FB7487C7CbB37aDa259579fe040", - "100000": "0xa5C2254e4253490C54cef0a4347fddb8f75A4998" - }, - symbol: "xDAI", - decimals: 18 - } - }, - ensSubdomainKey: "gnosis-tornado", - pollInterval: 15, - constants: { - NOTE_ACCOUNT_BLOCK: 17754564, - ENCRYPTED_NOTES_BLOCK: 17754564 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId43114: { - rpcCallRetryAttempt: 15, - gasPrices: { - instant: 225, - fast: 35, - standard: 25, - low: 25 - }, - nativeCurrency: "avax", - currencyName: "AVAX", - explorerUrl: { - tx: "https://snowtrace.io/tx/", - address: "https://snowtrace.io/address/", - block: "https://snowtrace.io/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Avalanche Mainnet", - deployedBlock: 4429818, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - echoContract: "0xa75BF2815618872f155b7C4B0C81bF990f5245E4", - routerContract: "0x0D5550d52428E7e3175bfc9550207e4ad3859b17", - tornadoSubgraph: "tornadocash/avalanche-tornado-subgraph", - subgraphs: { - theGraph - }, - rpcUrls: { - oneRPC: { - name: "OneRPC", - url: "https://1rpc.io/avax/c" - }, - avalancheRPC: { - name: "Avalanche RPC", - url: "https://api.avax.network/ext/bc/C/rpc" - }, - meowRPC: { - name: "Meow RPC", - url: "https://avax.meowrpc.com" - } - }, - tokens: { - avax: { - instanceAddress: { - "10": "0x330bdFADE01eE9bF63C209Ee33102DD334618e0a", - "100": "0x1E34A77868E19A6647b1f2F47B51ed72dEDE95DD", - "500": "0xaf8d1839c3c67cf571aa74B5c12398d4901147B3" - }, - symbol: "AVAX", - decimals: 18 - } - }, - ensSubdomainKey: "avalanche-tornado", - pollInterval: 10, - constants: { - NOTE_ACCOUNT_BLOCK: 4429813, - ENCRYPTED_NOTES_BLOCK: 4429813 - }, - "tornado-proxy-light.contract.tornadocash.eth": "0x0D5550d52428E7e3175bfc9550207e4ad3859b17" - }, - netId11155111: { - 
rpcCallRetryAttempt: 15, - gasPrices: { - instant: 2, - fast: 2, - standard: 2, - low: 2 - }, - nativeCurrency: "eth", - currencyName: "SepoliaETH", - explorerUrl: { - tx: "https://sepolia.etherscan.io/tx/", - address: "https://sepolia.etherscan.io/address/", - block: "https://sepolia.etherscan.io/block/" - }, - merkleTreeHeight: 20, - emptyElement: "21663839004416932945382355908790599225266501822907911457504978515578255421292", - networkName: "Ethereum Sepolia", - deployedBlock: 5594395, - multicall: "0xcA11bde05977b3631167028862bE2a173976CA11", - routerContract: "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee", - registryContract: "0x1428e5d2356b13778A13108b10c440C83011dfB8", - echoContract: "0xcDD1fc3F5ac2782D83449d3AbE80D6b7B273B0e5", - aggregatorContract: "0x4088712AC9fad39ea133cdb9130E465d235e9642", - reverseRecordsContract: "0xEc29700C0283e5Be64AcdFe8077d6cC95dE23C23", - tornadoSubgraph: "tornadocash/sepolia-tornado-subgraph", - subgraphs: { - tornado - }, - rpcUrls: { - tornado: { - name: "Tornado RPC", - url: "https://tornadocash-rpc.com/sepolia" - }, - sepolia: { - name: "Sepolia RPC", - url: "https://rpc.sepolia.org" - }, - chainnodes: { - name: "Chainnodes RPC", - url: "https://sepolia.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607" - } - }, - tokens: { - eth: { - instanceAddress: { - "0.1": "0x8C4A04d872a6C1BE37964A21ba3a138525dFF50b", - "1": "0x8cc930096B4Df705A007c4A039BDFA1320Ed2508", - "10": "0x8D10d506D29Fc62ABb8A290B99F66dB27Fc43585", - "100": "0x44c5C92ed73dB43888210264f0C8b36Fd68D8379" - }, - symbol: "ETH", - decimals: 18 - }, - dai: { - instanceAddress: { - "100": "0x6921fd1a97441dd603a997ED6DDF388658daf754", - "1000": "0x50a637770F5d161999420F7d70d888DE47207145", - "10000": "0xecD649870407cD43923A816Cc6334a5bdf113621", - "100000": "0x73B4BD04bF83206B6e979BE2507098F92EDf4F90" - }, - tokenAddress: "0xFF34B3d4Aee8ddCd6F9AFFFB6Fe49bD371b8a357", - tokenGasLimit: 7e4, - symbol: "DAI", - decimals: 18, - gasLimit: 7e5 - } - }, - ensSubdomainKey: 
"sepolia-tornado", - pollInterval: 15, - constants: { - GOVERNANCE_BLOCK: 5594395, - NOTE_ACCOUNT_BLOCK: 5594395, - ENCRYPTED_NOTES_BLOCK: 5594395, - MINING_BLOCK_TIME: 15 - }, - "torn.contract.tornadocash.eth": "0x3AE6667167C0f44394106E197904519D808323cA", - "governance.contract.tornadocash.eth": "0xe5324cD7602eeb387418e594B87aCADee08aeCAD", - "tornado-router.contract.tornadocash.eth": "0x1572AFE6949fdF51Cb3E0856216670ae9Ee160Ee" - } -}; -const subdomains = enabledChains.map((chain) => networkConfig[`netId${chain}`].ensSubdomainKey); -/* harmony default export */ const services_networkConfig = ((/* unused pure expression or super */ null && (networkConfig))); - // EXTERNAL MODULE: commander (ignored) var commander_ignored_ = __webpack_require__(66955); ;// CONCATENATED MODULE: ./src/services/parser.ts @@ -170861,6 +198810,17 @@ function parseKey(value) { } return value; } +function parseRecoveryKey(value) { + if (!value) { + throw new InvalidArgumentError("Invalid Recovery Key"); + } + try { + computeAddress("0x" + value); + } catch (e) { + throw new InvalidArgumentError("Invalid Recovery Key"); + } + return value; +} ;// CONCATENATED MODULE: ./node_modules/ethers/lib.esm/utils/fixednumber.js /** @@ -171519,6 +199479,7 @@ var relayerClient_async = (__this, __arguments, generator) => { + const MIN_STAKE_BALANCE = parseEther("500"); const semVerRegex = (/* unused pure expression or super */ null && (new RegExp("^(?0|[1-9]\\d*)\\.(?0|[1-9]\\d*)\\.(?0|[1-9]\\d*)(?:-(?(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+(?[0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$"))); function parseSemanticVersion(version) { @@ -171527,11 +199488,11 @@ function parseSemanticVersion(version) { } function isRelayerUpdated(relayerVersion, netId) { const { major, patch, prerelease } = parseSemanticVersion(relayerVersion); - const requiredMajor = netId === 1 ? "4" : "5"; + const requiredMajor = netId === NetId.MAINNET ? 
"4" : "5"; const isUpdatedMajor = major === requiredMajor; if (prerelease) return false; - return isUpdatedMajor && (Number(patch) >= 5 || Number(netId) !== 1); + return isUpdatedMajor && (Number(patch) >= 5 || netId !== NetId.MAINNET); } function calculateScore({ stakeBalance, tornadoServiceFee }, minFee = 0.33, maxFee = 0.53) { if (tornadoServiceFee < minFee) { @@ -171553,9 +199514,15 @@ function getWeightRandom(weightsScores, random) { } return Math.floor(Math.random() * weightsScores.length); } +function getSupportedInstances(instanceList) { + const rawList = Object.values(instanceList).map(({ instanceAddress }) => { + return Object.values(instanceAddress); + }).flat(); + return rawList.map((l) => getAddress(l)); +} function pickWeightedRandomRelayer(relayers, netId) { let minFee, maxFee; - if (Number(netId) !== 1) { + if (netId !== NetId.MAINNET) { minFee = 0.01; maxFee = 0.3; } @@ -171569,7 +199536,7 @@ function pickWeightedRandomRelayer(relayers, netId) { } class RelayerClient { constructor({ netId, config, Aggregator, fetchDataOptions: fetchDataOptions2 }) { - this.netId = Number(netId); + this.netId = netId; this.config = config; this.Aggregator = Aggregator; this.fetchDataOptions = fetchDataOptions2; @@ -171601,7 +199568,7 @@ class RelayerClient { if (status.netId !== this.netId) { throw new Error("This relayer serves a different network"); } - if (relayerAddress && this.netId === 1 && status.rewardAccount !== relayerAddress) { + if (relayerAddress && this.netId === NetId.MAINNET && status.rewardAccount !== relayerAddress) { throw new Error("The Relayer reward address must match registered address"); } if (!isRelayerUpdated(status.version, this.netId)) { @@ -171612,6 +199579,7 @@ class RelayerClient { } filterRelayer(curr, relayer, subdomains, debugRelayer = false) { return relayerClient_async(this, null, function* () { + var _a; const { ensSubdomainKey } = this.config; const subdomainIndex = subdomains.indexOf(ensSubdomainKey); const mainnetSubdomain = 
curr.records[0]; @@ -171632,7 +199600,9 @@ class RelayerClient { ensName, stakeBalance, relayerAddress, - rewardAccount: status.rewardAccount, + rewardAccount: getAddress(status.rewardAccount), + instances: getSupportedInstances(status.instances), + gasPrice: (_a = status.gasPrices) == null ? void 0 : _a.fast, ethPrices: status.ethPrices, currentQueue: status.currentQueue, tornadoServiceFee: status.tornadoServiceFee @@ -171844,6 +199814,127 @@ function getTokenBalances(_0) { }); } +// EXTERNAL MODULE: ./node_modules/bloomfilter.js/index.js +var bloomfilter_js = __webpack_require__(65403); +;// CONCATENATED MODULE: ./src/services/treeCache.ts +/* provided dependency */ var treeCache_console = __webpack_require__(96763); + +var treeCache_defProp = Object.defineProperty; +var treeCache_defProps = Object.defineProperties; +var treeCache_getOwnPropDescs = Object.getOwnPropertyDescriptors; +var treeCache_getOwnPropSymbols = Object.getOwnPropertySymbols; +var treeCache_hasOwnProp = Object.prototype.hasOwnProperty; +var treeCache_propIsEnum = Object.prototype.propertyIsEnumerable; +var treeCache_defNormalProp = (obj, key, value) => key in obj ? 
treeCache_defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var treeCache_spreadValues = (a, b) => { + for (var prop in b || (b = {})) + if (treeCache_hasOwnProp.call(b, prop)) + treeCache_defNormalProp(a, prop, b[prop]); + if (treeCache_getOwnPropSymbols) + for (var prop of treeCache_getOwnPropSymbols(b)) { + if (treeCache_propIsEnum.call(b, prop)) + treeCache_defNormalProp(a, prop, b[prop]); + } + return a; +}; +var treeCache_spreadProps = (a, b) => treeCache_defProps(a, treeCache_getOwnPropDescs(b)); +var __objRest = (source, exclude) => { + var target = {}; + for (var prop in source) + if (treeCache_hasOwnProp.call(source, prop) && exclude.indexOf(prop) < 0) + target[prop] = source[prop]; + if (source != null && treeCache_getOwnPropSymbols) + for (var prop of treeCache_getOwnPropSymbols(source)) { + if (exclude.indexOf(prop) < 0 && treeCache_propIsEnum.call(source, prop)) + target[prop] = source[prop]; + } + return target; +}; +var treeCache_async = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; + + +class TreeCache { + constructor({ netId, amount, currency, userDirectory, PARTS_COUNT = 4 }) { + this.netId = netId; + this.amount = amount; + this.currency = currency; + this.userDirectory = userDirectory; + this.PARTS_COUNT = PARTS_COUNT; + } + getInstanceName() { + return `deposits_${this.netId}_${this.currency}_${this.amount}`; + } + createTree(events, tree) { + return treeCache_async(this, null, function* () { + const bloom = new BloomFilter(events.length); + treeCache_console.log(`Creating cached tree for ${this.getInstanceName()} +`); + const eventsData = events.reduce( + (acc, _a, i) => { + var _b = _a, { leafIndex, commitment } = _b, rest = __objRest(_b, ["leafIndex", "commitment"]); + if (leafIndex !== i) { + throw new Error(`leafIndex (${leafIndex}) !== i (${i})`); + } + acc[commitment] = treeCache_spreadProps(treeCache_spreadValues({}, rest), { leafIndex }); + return acc; + }, + {} + ); + const slices = tree.getTreeSlices(this.PARTS_COUNT); + yield Promise.all( + slices.map((slice, index) => treeCache_async(this, null, function* () { + const metadata = slice.elements.reduce((acc, curr) => { + if (index < this.PARTS_COUNT - 1) { + bloom.add(curr); + } + acc.push(eventsData[curr]); + return acc; + }, []); + const dataString2 = JSON.stringify( + treeCache_spreadProps(treeCache_spreadValues({}, slice), { + metadata + }), + null, + 2 + ) + "\n"; + const fileName2 = `${this.getInstanceName()}_slice${index + 1}.json`; + yield saveUserFile({ + fileName: fileName2, + userDirectory: this.userDirectory, + dataString: dataString2 + }); + })) + ); + const dataString = bloom.serialize() + "\n"; + const fileName = `${this.getInstanceName()}_bloom.json`; + yield saveUserFile({ + fileName, + userDirectory: this.userDirectory, + dataString + }); + }); + } +} + // EXTERNAL MODULE: 
./node_modules/@tornado/websnark/src/utils.js var src_utils = __webpack_require__(84276); // EXTERNAL MODULE: ./node_modules/@tornado/websnark/src/groth16.js @@ -171932,6 +200023,8 @@ function calculateSnarkProof(input, circuit, provingKey) { + + @@ -172006,7 +200099,7 @@ if (utils_isNode && (worker_threads_ignored_default())) { postMessage(merkleTree.toString()); })); } else { - throw new Error("This browser / environment doesn't support workers!"); + throw new Error("This browser / environment does not support workers!"); } })();