From 42db44ca3b43c5751637adaeea45470ee755cb01 Mon Sep 17 00:00:00 2001
From: tornadocontrib
Date: Fri, 4 Oct 2024 12:58:47 +0000
Subject: [PATCH] IndexedDB

---
 dist/events/db.d.ts    |   30 +
 dist/events/index.d.ts |    1 +
 dist/idb.d.ts          |   84 +
 dist/index.d.ts        |    2 +
 dist/index.js          |  941 ++++++++--
 dist/index.mjs         |  933 ++++++++--
 dist/tornado.umd.js    | 3768 +++++++++++++++++++++++++++++++++++++++-
 dist/zip.d.ts          |    9 +
 package.json           |    3 +-
 src/events/db.ts       |  150 ++
 src/events/index.ts    |    1 +
 src/idb.ts             |  395 +++++
 src/index.ts           |    2 +
 src/zip.ts             |   66 +
 yarn.lock              |    5 +
 15 files changed, 5937 insertions(+), 453 deletions(-)
 create mode 100644 dist/events/db.d.ts
 create mode 100644 dist/idb.d.ts
 create mode 100644 dist/zip.d.ts
 create mode 100644 src/events/db.ts
 create mode 100644 src/idb.ts
 create mode 100644 src/zip.ts

diff --git a/dist/events/db.d.ts b/dist/events/db.d.ts
new file mode 100644
index 0000000..e83174c
--- /dev/null
+++ b/dist/events/db.d.ts
@@ -0,0 +1,30 @@
+import { IndexedDB } from '../idb';
+import { BaseTornadoService, BaseTornadoServiceConstructor } from './base';
+import { BaseEvents, MinimalEvents, DepositsEvents, WithdrawalsEvents, CachedEvents } from './types';
+export declare function saveDBEvents<T extends MinimalEvents>({ idb, instanceName, events, lastBlock, }: {
+    idb: IndexedDB;
+    instanceName: string;
+    events: T[];
+    lastBlock: number;
+}): Promise<void>;
+export declare function loadDBEvents<T extends MinimalEvents>({ idb, instanceName, }: {
+    idb: IndexedDB;
+    instanceName: string;
+}): Promise<BaseEvents<T>>;
+export declare function loadRemoteEvents<T extends MinimalEvents>({ staticUrl, instanceName, deployedBlock, }: {
+    staticUrl: string;
+    instanceName: string;
+    deployedBlock: number;
+}): Promise<CachedEvents<T>>;
+export interface DBTornadoServiceConstructor extends BaseTornadoServiceConstructor {
+    staticUrl: string;
+    idb: IndexedDB;
+}
+export declare class DBTornadoService extends BaseTornadoService {
+    staticUrl: string;
+    idb: IndexedDB;
+    constructor(params: DBTornadoServiceConstructor);
+    getEventsFromDB(): Promise<BaseEvents<DepositsEvents | WithdrawalsEvents>>;
+    getEventsFromCache(): Promise<CachedEvents<DepositsEvents | WithdrawalsEvents>>;
+    saveEvents({ events, lastBlock }: BaseEvents<DepositsEvents | WithdrawalsEvents>): Promise<void>;
+}
diff --git a/dist/events/index.d.ts b/dist/events/index.d.ts
index 63fa69b..77219eb 100644
--- a/dist/events/index.d.ts
+++ b/dist/events/index.d.ts
@@ -1,2 +1,3 @@
 export * from './types';
 export * from './base';
+export * from './db';
diff --git a/dist/idb.d.ts b/dist/idb.d.ts
new file mode 100644
index 0000000..57662b6
--- /dev/null
+++ b/dist/idb.d.ts
@@ -0,0 +1,84 @@
+import { OpenDBCallbacks, IDBPDatabase } from 'idb';
+import { NetIdType } from './networkConfig';
+export declare const INDEX_DB_ERROR = "A mutation operation was attempted on a database that did not allow mutations.";
+export interface IDBIndex {
+    name: string;
+    unique?: boolean;
+}
+export interface IDBStores {
+    name: string;
+    keyPath?: string;
+    indexes?: IDBIndex[];
+}
+export interface IDBConstructor {
+    dbName: string;
+    stores?: IDBStores[];
+}
+export declare class IndexedDB {
+    dbExists: boolean;
+    isBlocked: boolean;
+    options: OpenDBCallbacks<any>;
+    dbName: string;
+    dbVersion: number;
+    db?: IDBPDatabase<any>;
+    constructor({ dbName, stores }: IDBConstructor);
+    initDB(): Promise<void>;
+    _removeExist(): Promise<void>;
+    getFromIndex<T>({ storeName, indexName, key, }: {
+        storeName: string;
+        indexName: string;
+        key?: string;
+    }): Promise<T | undefined>;
+    getAllFromIndex<T>({ storeName, indexName, key, count, }: {
+        storeName: string;
+        indexName: string;
+        key?: string;
+        count?: number;
+    }): Promise<T>;
+    getItem<T>({ storeName, key }: {
+        storeName: string;
+        key: string;
+    }): Promise<T | undefined>;
+    addItem({ storeName, data, 
key }: { + storeName: string; + data: any; + key: string; + }): Promise; + putItem({ storeName, data, key }: { + storeName: string; + data: any; + key?: string; + }): Promise; + deleteItem({ storeName, key }: { + storeName: string; + key: string; + }): Promise; + getAll({ storeName }: { + storeName: string; + }): Promise; + /** + * Simple key-value store inspired by idb-keyval package + */ + getValue(key: string): Promise; + setValue(key: string, data: any): Promise; + delValue(key: string): Promise; + clearStore({ storeName, mode }: { + storeName: string; + mode: IDBTransactionMode; + }): Promise; + createTransactions({ storeName, data, mode, }: { + storeName: string; + data: any; + mode: IDBTransactionMode; + }): Promise; + createMultipleTransactions({ storeName, data, index, mode, }: { + storeName: string; + data: any[]; + index?: any; + mode?: IDBTransactionMode; + }): Promise; +} +/** + * Should check if DB is initialized well + */ +export declare function getIndexedDB(netId?: NetIdType): Promise; diff --git a/dist/index.d.ts b/dist/index.d.ts index 26de59f..4a79754 100644 --- a/dist/index.d.ts +++ b/dist/index.d.ts @@ -6,6 +6,7 @@ export * from './batch'; export * from './deposits'; export * from './encryptedNotes'; export * from './fees'; +export * from './idb'; export * from './merkleTree'; export * from './mimc'; export * from './multicall'; @@ -18,3 +19,4 @@ export * from './tokens'; export * from './tovarishClient'; export * from './utils'; export * from './websnark'; +export * from './zip'; diff --git a/dist/index.js b/dist/index.js index 6ad382d..57d579c 100644 --- a/dist/index.js +++ b/dist/index.js @@ -5,8 +5,10 @@ var crossFetch = require('cross-fetch'); var crypto$1 = require('crypto'); var BN = require('bn.js'); var Ajv = require('ajv'); +var fflate = require('fflate'); var circomlibjs = require('circomlibjs'); var ethSigUtil = require('@metamask/eth-sig-util'); +var idb = require('idb'); var worker_threads = require('worker_threads'); var fixedMerkleTree = require('@tornado/fixed-merkle-tree'); var websnarkUtils = require('@tornado/websnark/src/utils'); @@ -31,7 +33,7 @@ function _interopNamespaceDefault(e) { var websnarkUtils__namespace = /*#__PURE__*/_interopNamespaceDefault(websnarkUtils); -var __async$e = (__this, __arguments, generator) => { +var __async$h = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -140,34 +142,34 @@ function substring(str, length = 10) { return `${str.substring(0, length)}...${str.substring(str.length - length)}`; } function digest(bytes, algo = "SHA-384") { - return __async$e(this, null, function* () { + return __async$h(this, null, function* () { return new Uint8Array(yield crypto.subtle.digest(algo, bytes)); }); } -var __defProp$7 = Object.defineProperty; +var __defProp$8 = Object.defineProperty; var __defProps$6 = Object.defineProperties; var __getOwnPropDescs$6 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$7 = Object.getOwnPropertySymbols; +var __getOwnPropSymbols$8 = Object.getOwnPropertySymbols; var __getProtoOf$2 = Object.getPrototypeOf; -var __hasOwnProp$7 = Object.prototype.hasOwnProperty; -var __propIsEnum$7 = Object.prototype.propertyIsEnumerable; +var __hasOwnProp$8 = Object.prototype.hasOwnProperty; +var __propIsEnum$8 = Object.prototype.propertyIsEnumerable; var __reflectGet$2 = Reflect.get; -var __defNormalProp$7 = (obj, key, value) => key in obj ? 
__defProp$7(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$7 = (a, b) => { +var __defNormalProp$8 = (obj, key, value) => key in obj ? __defProp$8(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$8 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$7.call(b, prop)) - __defNormalProp$7(a, prop, b[prop]); - if (__getOwnPropSymbols$7) - for (var prop of __getOwnPropSymbols$7(b)) { - if (__propIsEnum$7.call(b, prop)) - __defNormalProp$7(a, prop, b[prop]); + if (__hasOwnProp$8.call(b, prop)) + __defNormalProp$8(a, prop, b[prop]); + if (__getOwnPropSymbols$8) + for (var prop of __getOwnPropSymbols$8(b)) { + if (__propIsEnum$8.call(b, prop)) + __defNormalProp$8(a, prop, b[prop]); } return a; }; var __spreadProps$6 = (a, b) => __defProps$6(a, __getOwnPropDescs$6(b)); var __superGet$2 = (cls, obj, key) => __reflectGet$2(__getProtoOf$2(cls), key, obj); -var __async$d = (__this, __arguments, generator) => { +var __async$g = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -216,7 +218,7 @@ function getHttpAgent({ } } function fetchData(_0) { - return __async$d(this, arguments, function* (url, options = {}) { + return __async$g(this, arguments, function* (url, options = {}) { var _a, _b, _c; const MAX_RETRY = (_a = options.maxRetry) != null ? _a : 3; const RETRY_ON = (_b = options.retryOn) != null ? _b : 500; @@ -308,7 +310,7 @@ function fetchData(_0) { throw errorObject; }); } -const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$d(void 0, null, function* () { +const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$g(void 0, null, function* () { let signal; if (_signal) { const controller = new AbortController(); @@ -317,7 +319,7 @@ const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$d(void 0, nu controller.abort(); }); } - const init = __spreadProps$6(__spreadValues$7({}, options), { + const init = __spreadProps$6(__spreadValues$8({}, options), { method: req.method || "POST", headers: req.headers, body: req.body || void 0, @@ -339,7 +341,7 @@ const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$d(void 0, nu }; }); function getProvider(rpcUrl, fetchOptions) { - return __async$d(this, null, function* () { + return __async$g(this, null, function* () { const fetchReq = new ethers.FetchRequest(rpcUrl); fetchReq.getUrlFunc = fetchGetUrlFunc(fetchOptions); const staticNetwork = yield new ethers.JsonRpcProvider(fetchReq).getNetwork(); @@ -370,7 +372,7 @@ function getProviderWithNetId(netId, rpcUrl, config, fetchOptions) { }); return provider; } -const populateTransaction = (signer, tx) => __async$d(void 0, null, function* () { +const populateTransaction = (signer, tx) => __async$g(void 0, null, function* () { const provider = signer.provider; if (!tx.from) { tx.from = signer.address; @@ -431,7 +433,7 @@ class TornadoWallet extends ethers.Wallet { return new TornadoWallet(privateKey, provider, options); } populateTransaction(tx) { - return __async$d(this, null, function* () { + return __async$g(this, null, function* () { const txObject = yield populateTransaction(this, tx); this.nonce = Number(txObject.nonce); return __superGet$2(TornadoWallet.prototype, this, "populateTransaction").call(this, txObject); @@ -447,7 +449,7 @@ class TornadoVoidSigner extends ethers.VoidSigner { this.bumpNonce = bumpNonce != null ? 
bumpNonce : false; } populateTransaction(tx) { - return __async$d(this, null, function* () { + return __async$g(this, null, function* () { const txObject = yield populateTransaction(this, tx); this.nonce = Number(txObject.nonce); return __superGet$2(TornadoVoidSigner.prototype, this, "populateTransaction").call(this, txObject); @@ -463,7 +465,7 @@ class TornadoRpcSigner extends ethers.JsonRpcSigner { this.bumpNonce = bumpNonce != null ? bumpNonce : false; } sendUncheckedTransaction(tx) { - return __async$d(this, null, function* () { + return __async$g(this, null, function* () { return __superGet$2(TornadoRpcSigner.prototype, this, "sendUncheckedTransaction").call(this, yield populateTransaction(this, tx)); }); } @@ -474,7 +476,7 @@ class TornadoBrowserProvider extends ethers.BrowserProvider { this.options = options; } getSigner(address) { - return __async$d(this, null, function* () { + return __async$g(this, null, function* () { var _a, _b, _c, _d, _e, _f, _g, _h, _i; const signerAddress = (yield __superGet$2(TornadoBrowserProvider.prototype, this, "getSigner").call(this, address)).address; if (((_a = this.options) == null ? void 0 : _a.webChainId) && ((_b = this.options) == null ? void 0 : _b.connectWallet) && Number(yield __superGet$2(TornadoBrowserProvider.prototype, this, "send").call(this, "eth_chainId", [])) !== Number((_c = this.options) == null ? void 0 : _c.webChainId)) { @@ -683,26 +685,26 @@ const GET_GOVERNANCE_APY = ` } `; -var __defProp$6 = Object.defineProperty; +var __defProp$7 = Object.defineProperty; var __defProps$5 = Object.defineProperties; var __getOwnPropDescs$5 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$6 = Object.getOwnPropertySymbols; -var __hasOwnProp$6 = Object.prototype.hasOwnProperty; -var __propIsEnum$6 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$6 = (a, b) => { +var __getOwnPropSymbols$7 = Object.getOwnPropertySymbols; +var __hasOwnProp$7 = Object.prototype.hasOwnProperty; +var __propIsEnum$7 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$7 = (obj, key, value) => key in obj ? 
__defProp$7(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$7 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$6.call(b, prop)) - __defNormalProp$6(a, prop, b[prop]); - if (__getOwnPropSymbols$6) - for (var prop of __getOwnPropSymbols$6(b)) { - if (__propIsEnum$6.call(b, prop)) - __defNormalProp$6(a, prop, b[prop]); + if (__hasOwnProp$7.call(b, prop)) + __defNormalProp$7(a, prop, b[prop]); + if (__getOwnPropSymbols$7) + for (var prop of __getOwnPropSymbols$7(b)) { + if (__propIsEnum$7.call(b, prop)) + __defNormalProp$7(a, prop, b[prop]); } return a; }; var __spreadProps$5 = (a, b) => __defProps$5(a, __getOwnPropDescs$5(b)); -var __async$c = (__this, __arguments, generator) => { +var __async$f = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -725,7 +727,7 @@ var __async$c = (__this, __arguments, generator) => { const isEmptyArray = (arr) => !Array.isArray(arr) || !arr.length; const GRAPHQL_LIMIT = 1e3; function queryGraph(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, query, @@ -734,7 +736,7 @@ function queryGraph(_0) { }) { var _a; const graphUrl = `${graphApi}/subgraphs/name/${subgraphName}`; - const { data, errors } = yield fetchData(graphUrl, __spreadProps$5(__spreadValues$6({}, fetchDataOptions2), { + const { data, errors } = yield fetchData(graphUrl, __spreadProps$5(__spreadValues$7({}, fetchDataOptions2), { method: "POST", headers: { "Content-Type": "application/json" @@ -754,7 +756,7 @@ function queryGraph(_0) { }); } function getStatistic(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -801,7 +803,7 @@ function getStatistic(_0) { }); } function getMeta(_0) { - return __async$c(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) { + return __async$f(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) { try { const { _meta: { @@ -846,7 +848,7 @@ function getRegisters({ }); } function getAllRegisters(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -935,7 +937,7 @@ function getDeposits({ }); } function getAllDeposits(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -1032,7 +1034,7 @@ function getWithdrawals({ }); } function getAllWithdrawals(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -1108,7 +1110,7 @@ function getAllWithdrawals(_0) { }); } function getNoteAccounts(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, address, @@ -1161,7 +1163,7 @@ function getGraphEchoEvents({ }); } function getAllGraphEchoEvents(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -1250,7 +1252,7 @@ function getEncryptedNotes({ }); } function getAllEncryptedNotes(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -1335,7 +1337,7 @@ function getGovernanceEvents({ }); } 
function getAllGovernanceEvents(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -1494,7 +1496,7 @@ var graph = /*#__PURE__*/Object.freeze({ queryGraph: queryGraph }); -var __async$b = (__this, __arguments, generator) => { +var __async$e = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -1533,7 +1535,7 @@ class BatchBlockService { this.retryOn = retryOn; } getBlock(blockTag) { - return __async$b(this, null, function* () { + return __async$e(this, null, function* () { const blockObject = yield this.provider.getBlock(blockTag); if (!blockObject) { const errMsg = `No block for ${blockTag}`; @@ -1543,9 +1545,9 @@ class BatchBlockService { }); } createBatchRequest(batchArray) { - return batchArray.map((blocks, index) => __async$b(this, null, function* () { + return batchArray.map((blocks, index) => __async$e(this, null, function* () { yield sleep(20 * index); - return (() => __async$b(this, null, function* () { + return (() => __async$e(this, null, function* () { let retries = 0; let err; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -1562,7 +1564,7 @@ class BatchBlockService { })); } getBatchBlocks(blocks) { - return __async$b(this, null, function* () { + return __async$e(this, null, function* () { let blockCount = 0; const results = []; for (const chunks of chunk(blocks, this.concurrencySize * this.batchSize)) { @@ -1600,7 +1602,7 @@ class BatchTransactionService { this.retryOn = retryOn; } getTransaction(txHash) { - return __async$b(this, null, function* () { + return __async$e(this, null, function* () { const txObject = yield this.provider.getTransaction(txHash); if (!txObject) { const errMsg = `No transaction for ${txHash}`; @@ -1610,9 +1612,9 @@ class BatchTransactionService { }); } createBatchRequest(batchArray) { - return batchArray.map((txs, index) => __async$b(this, null, function* () { + return batchArray.map((txs, index) => __async$e(this, null, function* () { yield sleep(20 * index); - return (() => __async$b(this, null, function* () { + return (() => __async$e(this, null, function* () { let retries = 0; let err; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -1629,7 +1631,7 @@ class BatchTransactionService { })); } getBatchTransactions(txs) { - return __async$b(this, null, function* () { + return __async$e(this, null, function* () { let txCount = 0; const results = []; for (const chunks of chunk(txs, this.concurrencySize * this.batchSize)) { @@ -1665,7 +1667,7 @@ class BatchEventsService { this.retryOn = retryOn; } getPastEvents(_0) { - return __async$b(this, arguments, function* ({ fromBlock, toBlock, type }) { + return __async$e(this, arguments, function* ({ fromBlock, toBlock, type }) { let err; let retries = 0; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -1685,13 +1687,13 @@ class BatchEventsService { }); } createBatchRequest(batchArray) { - return batchArray.map((event, index) => __async$b(this, null, function* () { + return batchArray.map((event, index) => __async$e(this, null, function* () { yield sleep(20 * index); return this.getPastEvents(event); })); } getBatchEvents(_0) { - return __async$b(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) { + return __async$e(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) { if (!toBlock) { toBlock = 
yield this.provider.getBlockNumber(); } @@ -1722,19 +1724,19 @@ class BatchEventsService { } } -var __defProp$5 = Object.defineProperty; -var __getOwnPropSymbols$5 = Object.getOwnPropertySymbols; -var __hasOwnProp$5 = Object.prototype.hasOwnProperty; -var __propIsEnum$5 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$5 = (a, b) => { +var __defProp$6 = Object.defineProperty; +var __getOwnPropSymbols$6 = Object.getOwnPropertySymbols; +var __hasOwnProp$6 = Object.prototype.hasOwnProperty; +var __propIsEnum$6 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$6 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$5.call(b, prop)) - __defNormalProp$5(a, prop, b[prop]); - if (__getOwnPropSymbols$5) - for (var prop of __getOwnPropSymbols$5(b)) { - if (__propIsEnum$5.call(b, prop)) - __defNormalProp$5(a, prop, b[prop]); + if (__hasOwnProp$6.call(b, prop)) + __defNormalProp$6(a, prop, b[prop]); + if (__getOwnPropSymbols$6) + for (var prop of __getOwnPropSymbols$6(b)) { + if (__propIsEnum$6.call(b, prop)) + __defNormalProp$6(a, prop, b[prop]); } return a; }; @@ -2294,10 +2296,10 @@ function addNetwork(newConfig) { enabledChains.push( ...Object.keys(newConfig).map((netId) => Number(netId)).filter((netId) => !enabledChains.includes(netId)) ); - exports.customConfig = __spreadValues$5(__spreadValues$5({}, exports.customConfig), newConfig); + exports.customConfig = __spreadValues$6(__spreadValues$6({}, exports.customConfig), newConfig); } function getNetworkConfig() { - const allConfig = __spreadValues$5(__spreadValues$5({}, defaultConfig), exports.customConfig); + const allConfig = __spreadValues$6(__spreadValues$6({}, defaultConfig), exports.customConfig); return enabledChains.reduce((acc, curr) => { acc[curr] = allConfig[curr]; return acc; @@ -2376,21 +2378,21 @@ ajv.addKeyword({ errors: true }); -var __defProp$4 = Object.defineProperty; +var __defProp$5 = Object.defineProperty; var __defProps$4 = Object.defineProperties; var __getOwnPropDescs$4 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols; -var __hasOwnProp$4 = Object.prototype.hasOwnProperty; -var __propIsEnum$4 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$4 = (obj, key, value) => key in obj ? __defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$4 = (a, b) => { +var __getOwnPropSymbols$5 = Object.getOwnPropertySymbols; +var __hasOwnProp$5 = Object.prototype.hasOwnProperty; +var __propIsEnum$5 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$5 = (obj, key, value) => key in obj ? 
__defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$5 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$4.call(b, prop)) - __defNormalProp$4(a, prop, b[prop]); - if (__getOwnPropSymbols$4) - for (var prop of __getOwnPropSymbols$4(b)) { - if (__propIsEnum$4.call(b, prop)) - __defNormalProp$4(a, prop, b[prop]); + if (__hasOwnProp$5.call(b, prop)) + __defNormalProp$5(a, prop, b[prop]); + if (__getOwnPropSymbols$5) + for (var prop of __getOwnPropSymbols$5(b)) { + if (__propIsEnum$5.call(b, prop)) + __defNormalProp$5(a, prop, b[prop]); } return a; }; @@ -2403,23 +2405,23 @@ const addressSchemaType = { const bnSchemaType = { type: "string", BN: true }; const proofSchemaType = { type: "string", pattern: "^0x[a-fA-F0-9]{512}$" }; const bytes32SchemaType = { type: "string", pattern: "^0x[a-fA-F0-9]{64}$" }; -const bytes32BNSchemaType = __spreadProps$4(__spreadValues$4({}, bytes32SchemaType), { BN: true }); +const bytes32BNSchemaType = __spreadProps$4(__spreadValues$5({}, bytes32SchemaType), { BN: true }); -var __defProp$3 = Object.defineProperty; +var __defProp$4 = Object.defineProperty; var __defProps$3 = Object.defineProperties; var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols; -var __hasOwnProp$3 = Object.prototype.hasOwnProperty; -var __propIsEnum$3 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$3 = (a, b) => { +var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols; +var __hasOwnProp$4 = Object.prototype.hasOwnProperty; +var __propIsEnum$4 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$4 = (obj, key, value) => key in obj ? 
__defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$4 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$3.call(b, prop)) - __defNormalProp$3(a, prop, b[prop]); - if (__getOwnPropSymbols$3) - for (var prop of __getOwnPropSymbols$3(b)) { - if (__propIsEnum$3.call(b, prop)) - __defNormalProp$3(a, prop, b[prop]); + if (__hasOwnProp$4.call(b, prop)) + __defNormalProp$4(a, prop, b[prop]); + if (__getOwnPropSymbols$4) + for (var prop of __getOwnPropSymbols$4(b)) { + if (__propIsEnum$4.call(b, prop)) + __defNormalProp$4(a, prop, b[prop]); } return a; }; @@ -2440,7 +2442,7 @@ const governanceEventsSchema = { anyOf: [ { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { event: { type: "string" }, id: { type: "number" }, proposer: addressSchemaType, @@ -2463,7 +2465,7 @@ const governanceEventsSchema = { }, { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { event: { type: "string" }, proposalId: { type: "number" }, voter: addressSchemaType, @@ -2477,7 +2479,7 @@ const governanceEventsSchema = { }, { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { event: { type: "string" }, account: addressSchemaType, delegateTo: addressSchemaType @@ -2487,7 +2489,7 @@ const governanceEventsSchema = { }, { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { event: { type: "string" }, account: addressSchemaType, delegateFrom: addressSchemaType @@ -2502,7 +2504,7 @@ const registeredEventsSchema = { type: "array", items: { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { ensName: { type: "string" }, relayerAddress: addressSchemaType }), @@ -2514,7 +2516,7 @@ const depositsEventsSchema = { type: "array", items: { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { commitment: bytes32SchemaType, leafIndex: { type: "number" }, timestamp: { type: "number" }, @@ -2528,7 +2530,7 @@ const withdrawalsEventsSchema = { type: "array", items: { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { nullifierHash: bytes32SchemaType, to: addressSchemaType, fee: bnSchemaType, @@ -2542,7 +2544,7 @@ const echoEventsSchema = { type: "array", items: { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { address: addressSchemaType, encryptedAccount: { type: "string" } }), @@ -2554,7 +2556,7 @@ const encryptedNotesSchema = { type: "array", items: { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { encryptedNote: { type: "string" } }), required: 
[...baseEventsSchemaRequired, "encryptedNote"], @@ -2713,26 +2715,26 @@ const jobsSchema = { required: ["id", "status"] }; -var __defProp$2 = Object.defineProperty; +var __defProp$3 = Object.defineProperty; var __defProps$2 = Object.defineProperties; var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols; -var __hasOwnProp$2 = Object.prototype.hasOwnProperty; -var __propIsEnum$2 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$2 = (a, b) => { +var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols; +var __hasOwnProp$3 = Object.prototype.hasOwnProperty; +var __propIsEnum$3 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$3 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$2.call(b, prop)) - __defNormalProp$2(a, prop, b[prop]); - if (__getOwnPropSymbols$2) - for (var prop of __getOwnPropSymbols$2(b)) { - if (__propIsEnum$2.call(b, prop)) - __defNormalProp$2(a, prop, b[prop]); + if (__hasOwnProp$3.call(b, prop)) + __defNormalProp$3(a, prop, b[prop]); + if (__getOwnPropSymbols$3) + for (var prop of __getOwnPropSymbols$3(b)) { + if (__propIsEnum$3.call(b, prop)) + __defNormalProp$3(a, prop, b[prop]); } return a; }; var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b)); -var __async$a = (__this, __arguments, generator) => { +var __async$d = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -2798,7 +2800,7 @@ class RelayerClient { this.tovarish = false; } askRelayerStatus(_0) { - return __async$a(this, arguments, function* ({ + return __async$d(this, arguments, function* ({ hostname, url, relayerAddress @@ -2811,7 +2813,7 @@ class RelayerClient { } else { url = ""; } - const rawStatus = yield fetchData(`${url}status`, __spreadProps$2(__spreadValues$2({}, this.fetchDataOptions), { + const rawStatus = yield fetchData(`${url}status`, __spreadProps$2(__spreadValues$3({}, this.fetchDataOptions), { headers: { "Content-Type": "application/json, application/x-www-form-urlencoded" }, @@ -2822,7 +2824,7 @@ class RelayerClient { if (!statusValidator(rawStatus)) { throw new Error("Invalid status schema"); } - const status = __spreadProps$2(__spreadValues$2({}, rawStatus), { + const status = __spreadProps$2(__spreadValues$3({}, rawStatus), { url }); if (status.currentQueue > 5) { @@ -2838,7 +2840,7 @@ class RelayerClient { }); } filterRelayer(relayer) { - return __async$a(this, null, function* () { + return __async$d(this, null, function* () { var _a; const hostname = relayer.hostnames[this.netId]; const { ensName, relayerAddress } = relayer; @@ -2872,7 +2874,7 @@ class RelayerClient { }); } getValidRelayers(relayers) { - return __async$a(this, null, function* () { + return __async$d(this, null, function* () { const invalidRelayers = []; const validRelayers = (yield Promise.all(relayers.map((relayer) => this.filterRelayer(relayer)))).filter((r) => { if (!r) { @@ -2894,9 +2896,9 @@ class RelayerClient { return pickWeightedRandomRelayer(relayers); } tornadoWithdraw(_0, _1) { - return __async$a(this, arguments, function* ({ contract, proof, args }, callback) { + return __async$d(this, arguments, 
function* ({ contract, proof, args }, callback) { const { url } = this.selectedRelayer; - const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps$2(__spreadValues$2({}, this.fetchDataOptions), { + const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps$2(__spreadValues$3({}, this.fetchDataOptions), { method: "POST", headers: { "Content-Type": "application/json" @@ -2916,7 +2918,7 @@ class RelayerClient { console.log(`Job submitted: ${jobUrl} `); while (!relayerStatus || !["FAILED", "CONFIRMED"].includes(relayerStatus)) { - const jobResponse = yield fetchData(jobUrl, __spreadProps$2(__spreadValues$2({}, this.fetchDataOptions), { + const jobResponse = yield fetchData(jobUrl, __spreadProps$2(__spreadValues$3({}, this.fetchDataOptions), { method: "GET", headers: { "Content-Type": "application/json" @@ -2959,29 +2961,29 @@ class RelayerClient { } } -var __defProp$1 = Object.defineProperty; +var __defProp$2 = Object.defineProperty; var __defProps$1 = Object.defineProperties; var __getOwnPropDescs$1 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols; +var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols; var __getProtoOf$1 = Object.getPrototypeOf; -var __hasOwnProp$1 = Object.prototype.hasOwnProperty; -var __propIsEnum$1 = Object.prototype.propertyIsEnumerable; +var __hasOwnProp$2 = Object.prototype.hasOwnProperty; +var __propIsEnum$2 = Object.prototype.propertyIsEnumerable; var __reflectGet$1 = Reflect.get; -var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$1 = (a, b) => { +var __defNormalProp$2 = (obj, key, value) => key in obj ? 
__defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$2 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$1.call(b, prop)) - __defNormalProp$1(a, prop, b[prop]); - if (__getOwnPropSymbols$1) - for (var prop of __getOwnPropSymbols$1(b)) { - if (__propIsEnum$1.call(b, prop)) - __defNormalProp$1(a, prop, b[prop]); + if (__hasOwnProp$2.call(b, prop)) + __defNormalProp$2(a, prop, b[prop]); + if (__getOwnPropSymbols$2) + for (var prop of __getOwnPropSymbols$2(b)) { + if (__propIsEnum$2.call(b, prop)) + __defNormalProp$2(a, prop, b[prop]); } return a; }; var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b)); var __superGet$1 = (cls, obj, key) => __reflectGet$1(__getProtoOf$1(cls), key, obj); -var __async$9 = (__this, __arguments, generator) => { +var __async$c = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -3061,7 +3063,7 @@ class BaseEventsService { } /* eslint-enable @typescript-eslint/no-unused-vars */ formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return yield new Promise((resolve) => resolve(events)); }); } @@ -3069,7 +3071,7 @@ class BaseEventsService { * Get saved or cached events */ getEventsFromDB() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return { events: [], lastBlock: 0 @@ -3080,7 +3082,7 @@ class BaseEventsService { * Events from remote cache (Either from local cache, CDN, or from IPFS) */ getEventsFromCache() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return { events: [], lastBlock: 0, @@ -3089,7 +3091,7 @@ class BaseEventsService { }); } getSavedEvents() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { let dbEvents = yield this.getEventsFromDB(); if (!dbEvents.lastBlock) { dbEvents = yield this.getEventsFromCache(); @@ -3101,7 +3103,7 @@ class BaseEventsService { * Get latest events */ getEventsFromGraph(_0) { - return __async$9(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ fromBlock, methodName = "" }) { @@ -3111,7 +3113,7 @@ class BaseEventsService { lastBlock: fromBlock }; } - const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$1({ + const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$2({ fromBlock }, this.getGraphParams())); return { @@ -3121,7 +3123,7 @@ class BaseEventsService { }); } getEventsFromRpc(_0) { - return __async$9(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ fromBlock, toBlock }) { @@ -3153,7 +3155,7 @@ class BaseEventsService { }); } getLatestEvents(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { + return __async$c(this, arguments, function* ({ fromBlock }) { var _a; if (((_a = this.tovarishClient) == null ? 
void 0 : _a.selectedRelayer) && ![DEPOSIT, WITHDRAWAL].includes(this.type.toLowerCase())) { const { events, lastSyncBlock: lastBlock } = yield this.tovarishClient.getEvents({ @@ -3182,14 +3184,14 @@ class BaseEventsService { */ // eslint-disable-next-line @typescript-eslint/no-unused-vars saveEvents(_0) { - return __async$9(this, arguments, function* ({ events, lastBlock }) { + return __async$c(this, arguments, function* ({ events, lastBlock }) { }); } /** * Trigger saving and receiving latest events */ updateEvents() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { var _a; const savedEvents = yield this.getSavedEvents(); let fromBlock = this.deployedBlock; @@ -3274,7 +3276,7 @@ class BaseTornadoService extends BaseEventsService { }; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { const type = this.getType().toLowerCase(); if (type === DEPOSIT) { const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { @@ -3293,7 +3295,7 @@ class BaseTornadoService extends BaseEventsService { ]); return formattedEvents.map((event) => { const { from } = txs.find(({ hash }) => hash === event.transactionHash); - return __spreadProps$1(__spreadValues$1({}, event), { + return __spreadProps$1(__spreadValues$2({}, event), { from }); }); @@ -3314,7 +3316,7 @@ class BaseTornadoService extends BaseEventsService { ]); return formattedEvents.map((event) => { const { timestamp } = blocks.find(({ number }) => number === event.blockNumber); - return __spreadProps$1(__spreadValues$1({}, event), { + return __spreadProps$1(__spreadValues$2({}, event), { timestamp }); }); @@ -3331,7 +3333,7 @@ class BaseTornadoService extends BaseEventsService { } } getLatestEvents(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { + return __async$c(this, arguments, function* ({ fromBlock }) { var _a; if ((_a = this.tovarishClient) == null ? 
void 0 : _a.selectedRelayer) { const { events, lastSyncBlock: lastBlock } = yield this.tovarishClient.getEvents({ @@ -3381,7 +3383,7 @@ class BaseEchoService extends BaseEventsService { return "getAllGraphEchoEvents"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const { who, data } = args; if (who && data) { @@ -3390,7 +3392,7 @@ class BaseEchoService extends BaseEventsService { logIndex, transactionHash }; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + return __spreadProps$1(__spreadValues$2({}, eventObjects), { address: who, encryptedAccount: data }); @@ -3399,7 +3401,7 @@ class BaseEchoService extends BaseEventsService { }); } getEventsFromGraph(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { + return __async$c(this, arguments, function* ({ fromBlock }) { if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { return { events: [], @@ -3445,7 +3447,7 @@ class BaseEncryptedNotesService extends BaseEventsService { return "getAllEncryptedNotes"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const { encryptedNote } = args; if (encryptedNote && encryptedNote !== "0x") { @@ -3454,7 +3456,7 @@ class BaseEncryptedNotesService extends BaseEventsService { logIndex, transactionHash }; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + return __spreadProps$1(__spreadValues$2({}, eventObjects), { encryptedNote }); } @@ -3501,7 +3503,7 @@ class BaseGovernanceService extends BaseEventsService { return "getAllGovernanceEvents"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { const proposalEvents = []; const votedEvents = []; const delegatedEvents = []; @@ -3515,7 +3517,7 @@ class BaseGovernanceService extends BaseEventsService { }; if (event === "ProposalCreated") { const { id, proposer, target, startTime, endTime, description } = args; - proposalEvents.push(__spreadProps$1(__spreadValues$1({}, eventObjects), { + proposalEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), { id: Number(id), proposer, target, @@ -3526,7 +3528,7 @@ class BaseGovernanceService extends BaseEventsService { } if (event === "Voted") { const { proposalId, voter, support, votes } = args; - votedEvents.push(__spreadProps$1(__spreadValues$1({}, eventObjects), { + votedEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), { proposalId: Number(proposalId), voter, support, @@ -3537,14 +3539,14 @@ class BaseGovernanceService extends BaseEventsService { } if (event === "Delegated") { const { account, to: delegateTo } = args; - delegatedEvents.push(__spreadProps$1(__spreadValues$1({}, eventObjects), { + delegatedEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), { account, delegateTo })); } if (event === "Undelegated") { const { account, from: delegateFrom } = args; - undelegatedEvents.push(__spreadProps$1(__spreadValues$1({}, eventObjects), { + undelegatedEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), { account, delegateFrom })); @@ -3568,7 +3570,7 @@ class BaseGovernanceService extends BaseEventsService { }); } getEventsFromGraph(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { + return __async$c(this, arguments, function* ({ 
fromBlock }) { if (!this.graphApi || !this.subgraphName || this.graphApi.includes("api.thegraph.com")) { return { events: [], @@ -3580,12 +3582,12 @@ class BaseGovernanceService extends BaseEventsService { } } function getTovarishNetworks(registryService, relayers) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { yield Promise.all( - relayers.filter((r) => r.tovarishHost).map((relayer) => __async$9(this, null, function* () { + relayers.filter((r) => r.tovarishHost).map((relayer) => __async$c(this, null, function* () { var _a, _b; try { - relayer.tovarishNetworks = yield fetchData(relayer.tovarishHost, __spreadProps$1(__spreadValues$1({}, registryService.fetchDataOptions), { + relayer.tovarishNetworks = yield fetchData(relayer.tovarishHost, __spreadProps$1(__spreadValues$2({}, registryService.fetchDataOptions), { headers: { "Content-Type": "application/json" }, @@ -3650,14 +3652,14 @@ class BaseRegistryService extends BaseEventsService { return "getAllRegisters"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const eventObjects = { blockNumber, logIndex, transactionHash }; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + return __spreadProps$1(__spreadValues$2({}, eventObjects), { ensName: args.ensName, relayerAddress: args.relayerAddress }); @@ -3668,7 +3670,7 @@ class BaseRegistryService extends BaseEventsService { * Get saved or cached relayers */ getRelayersFromDB() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return { lastBlock: 0, timestamp: 0, @@ -3680,7 +3682,7 @@ class BaseRegistryService extends BaseEventsService { * Relayers from remote cache (Either from local cache, CDN, or from IPFS) */ getRelayersFromCache() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return { lastBlock: 0, timestamp: 0, @@ -3690,7 +3692,7 @@ class BaseRegistryService extends BaseEventsService { }); } getSavedRelayers() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { let cachedRelayers = yield this.getRelayersFromDB(); if (!cachedRelayers || !cachedRelayers.relayers.length) { cachedRelayers = yield this.getRelayersFromCache(); @@ -3699,7 +3701,7 @@ class BaseRegistryService extends BaseEventsService { }); } getLatestRelayers() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { const { events, lastBlock } = yield this.updateEvents(); const subdomains = Object.values(this.relayerEnsSubdomains); const registerSet = /* @__PURE__ */ new Set(); @@ -3756,14 +3758,14 @@ class BaseRegistryService extends BaseEventsService { */ // eslint-disable-next-line @typescript-eslint/no-unused-vars saveRelayers(_0) { - return __async$9(this, arguments, function* ({ lastBlock, timestamp, relayers }) { + return __async$c(this, arguments, function* ({ lastBlock, timestamp, relayers }) { }); } /** * Get cached or latest relayer and save to local */ updateRelayers() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { let { lastBlock, timestamp, relayers, fromCache } = yield this.getSavedRelayers(); let shouldSave = fromCache != null ? 
fromCache : false; if (!relayers.length || timestamp + this.updateInterval < Math.floor(Date.now() / 1e3)) { @@ -3779,6 +3781,219 @@ class BaseRegistryService extends BaseEventsService { } } +var __async$b = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +function zipAsync(file) { + return new Promise((res, rej) => { + fflate.zip(file, { mtime: /* @__PURE__ */ new Date("1/1/1980") }, (err, data) => { + if (err) { + rej(err); + return; + } + res(data); + }); + }); +} +function unzipAsync(data) { + return new Promise((res, rej) => { + fflate.unzip(data, {}, (err, data2) => { + if (err) { + rej(err); + return; + } + res(data2); + }); + }); +} +function downloadZip(_0) { + return __async$b(this, arguments, function* ({ + staticUrl = "", + zipName, + zipDigest, + parseJson = true + }) { + const url = `${staticUrl}/${zipName}.zip`; + const resp = yield fetchData(url, { + method: "GET", + returnResponse: true + }); + const data = new Uint8Array(yield resp.arrayBuffer()); + if (zipDigest) { + const hash = "sha384-" + bytesToBase64(yield digest(data)); + if (zipDigest !== hash) { + const errMsg = `Invalid digest hash for file ${url}, wants ${zipDigest} has ${hash}`; + throw new Error(errMsg); + } + } + const { [zipName]: content } = yield unzipAsync(data); + if (parseJson) { + return JSON.parse(new TextDecoder().decode(content)); + } + return content; + }); +} + +var __async$a = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +function saveDBEvents(_0) { + return __async$a(this, arguments, function* ({ + idb, + instanceName, + events, + lastBlock + }) { + try { + yield idb.createMultipleTransactions({ + data: events, + storeName: instanceName + }); + yield idb.putItem({ + data: { + blockNumber: lastBlock, + name: instanceName + }, + storeName: "lastEvents" + }); + } catch (err) { + console.log("Method saveDBEvents has error"); + console.log(err); + } + }); +} +function loadDBEvents(_0) { + return __async$a(this, arguments, function* ({ + idb, + instanceName + }) { + try { + const lastBlockStore = yield idb.getItem({ + storeName: "lastEvents", + key: instanceName + }); + if (!(lastBlockStore == null ? 
void 0 : lastBlockStore.blockNumber)) { + return { + events: [], + lastBlock: 0 + }; + } + return { + events: yield idb.getAll({ storeName: instanceName }), + lastBlock: lastBlockStore.blockNumber + }; + } catch (err) { + console.log("Method loadDBEvents has error"); + console.log(err); + return { + events: [], + lastBlock: 0 + }; + } + }); +} +function loadRemoteEvents(_0) { + return __async$a(this, arguments, function* ({ + staticUrl, + instanceName, + deployedBlock + }) { + var _a; + try { + const zipName = `${instanceName}.json`.toLowerCase(); + const events = yield downloadZip({ + staticUrl, + zipName + }); + if (!Array.isArray(events)) { + const errStr = `Invalid events from ${staticUrl}/${zipName}`; + throw new Error(errStr); + } + return { + events, + lastBlock: ((_a = events[events.length - 1]) == null ? void 0 : _a.blockNumber) || deployedBlock, + fromCache: true + }; + } catch (err) { + console.log("Method loadRemoteEvents has error"); + console.log(err); + return { + events: [], + lastBlock: deployedBlock, + fromCache: true + }; + } + }); +} +class DBTornadoService extends BaseTornadoService { + constructor(params) { + super(params); + this.staticUrl = params.staticUrl; + this.idb = params.idb; + } + getEventsFromDB() { + return __async$a(this, null, function* () { + return yield loadDBEvents({ + idb: this.idb, + instanceName: this.getInstanceName() + }); + }); + } + getEventsFromCache() { + return __async$a(this, null, function* () { + return yield loadRemoteEvents({ + staticUrl: this.staticUrl, + instanceName: this.getInstanceName(), + deployedBlock: this.deployedBlock + }); + }); + } + saveEvents(_0) { + return __async$a(this, arguments, function* ({ events, lastBlock }) { + yield saveDBEvents({ + idb: this.idb, + instanceName: this.getInstanceName(), + events, + lastBlock + }); + }); + } +} + const _abi$5 = [ { constant: true, @@ -6116,7 +6331,7 @@ var index = /*#__PURE__*/Object.freeze({ ReverseRecords__factory: ReverseRecords__factory }); -var __async$8 = (__this, __arguments, generator) => { +var __async$9 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -6141,13 +6356,13 @@ class Pedersen { this.pedersenPromise = this.initPedersen(); } initPedersen() { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { this.pedersenHash = yield circomlibjs.buildPedersenHash(); this.babyJub = this.pedersenHash.babyJub; }); } unpackPoint(buffer) { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { var _a, _b; yield this.pedersenPromise; return (_b = this.babyJub) == null ? void 0 : _b.unpackPoint((_a = this.pedersenHash) == null ? 
void 0 : _a.hash(buffer)); @@ -6160,13 +6375,13 @@ class Pedersen { } const pedersen = new Pedersen(); function buffPedersenHash(buffer) { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { const [hash] = yield pedersen.unpackPoint(buffer); return pedersen.toStringBuffer(hash); }); } -var __async$7 = (__this, __arguments, generator) => { +var __async$8 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -6187,7 +6402,7 @@ var __async$7 = (__this, __arguments, generator) => { }); }; function createDeposit(_0) { - return __async$7(this, arguments, function* ({ nullifier, secret }) { + return __async$8(this, arguments, function* ({ nullifier, secret }) { const preimage = new Uint8Array([...leInt2Buff(nullifier), ...leInt2Buff(secret)]); const noteHex = toFixedHex(bytesToBN(preimage), 62); const commitment = BigInt(yield buffPedersenHash(preimage)); @@ -6247,7 +6462,7 @@ class Deposit { ); } static createNote(_0) { - return __async$7(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) { + return __async$8(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) { if (!nullifier) { nullifier = rBigInt(31); } @@ -6274,7 +6489,7 @@ class Deposit { }); } static parseNote(noteString) { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { const noteRegex = new RegExp("tornado-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{124})", "g"); const match = noteRegex.exec(noteString); if (!match) { @@ -6533,6 +6748,350 @@ class TornadoFeeOracle { } } +var __defProp$1 = Object.defineProperty; +var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols; +var __hasOwnProp$1 = Object.prototype.hasOwnProperty; +var __propIsEnum$1 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$1 = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp$1.call(b, prop)) + __defNormalProp$1(a, prop, b[prop]); + if (__getOwnPropSymbols$1) + for (var prop of __getOwnPropSymbols$1(b)) { + if (__propIsEnum$1.call(b, prop)) + __defNormalProp$1(a, prop, b[prop]); + } + return a; +}; +var __async$7 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +const INDEX_DB_ERROR = "A mutation operation was attempted on a database that did not allow mutations."; +class IndexedDB { + constructor({ dbName, stores }) { + this.dbExists = false; + this.isBlocked = false; + this.options = { + upgrade(db) { + Object.values(db.objectStoreNames).forEach((value) => { + db.deleteObjectStore(value); + }); + [{ name: "keyval" }, ...stores || []].forEach(({ name, keyPath, indexes }) => { + const store = db.createObjectStore(name, { + keyPath, + autoIncrement: true + }); + if (Array.isArray(indexes)) { + indexes.forEach(({ name: name2, unique = false }) => { + store.createIndex(name2, name2, { unique }); + }); + } + }); + } + }; + this.dbName = dbName; + this.dbVersion = 34; + } + initDB() { + return __async$7(this, null, function* () { + try { + if (this.dbExists || this.isBlocked) { + return; + } + this.db = yield idb.openDB(this.dbName, this.dbVersion, this.options); + this.db.addEventListener("onupgradeneeded", () => __async$7(this, null, function* () { + yield this._removeExist(); + })); + this.dbExists = true; + } catch (err) { + if (err.message.includes(INDEX_DB_ERROR)) { + console.log("This browser does not support IndexedDB!"); + this.isBlocked = true; + return; + } + if (err.message.includes("less than the existing version")) { + console.log(`Upgrading DB ${this.dbName} to ${this.dbVersion}`); + yield this._removeExist(); + return; + } + console.error(`Method initDB has error: ${err.message}`); + } + }); + } + _removeExist() { + return __async$7(this, null, function* () { + yield idb.deleteDB(this.dbName); + this.dbExists = false; + yield this.initDB(); + }); + } + getFromIndex(_0) { + return __async$7(this, arguments, function* ({ + storeName, + indexName, + key + }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + return yield this.db.getFromIndex(storeName, indexName, key); + } catch (err) { + throw new Error(`Method getFromIndex has error: ${err.message}`); + } + }); + } + getAllFromIndex(_0) { + return __async$7(this, arguments, function* ({ + storeName, + indexName, + key, + count + }) { + yield this.initDB(); + if (!this.db) { + return []; + } + try { + return yield this.db.getAllFromIndex(storeName, indexName, key, count); + } catch (err) { + throw new Error(`Method getAllFromIndex has error: ${err.message}`); + } + }); + } + getItem(_0) { + return __async$7(this, arguments, function* ({ storeName, key }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const store = this.db.transaction(storeName).objectStore(storeName); + return yield store.get(key); + } catch (err) { + throw new Error(`Method getItem has error: ${err.message}`); + } + }); + } + addItem(_0) { + return __async$7(this, arguments, function* ({ storeName, data, key = "" }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, "readwrite"); + const isExist = yield tx.objectStore(storeName).get(key); + if (!isExist) { + yield tx.objectStore(storeName).add(data); + } + } catch (err) { + throw new Error(`Method addItem has error: ${err.message}`); + } + }); + } + putItem(_0) { + return __async$7(this, arguments, function* ({ storeName, data, key }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, "readwrite"); + yield tx.objectStore(storeName).put(data, key); + } catch (err) { + throw 
new Error(`Method putItem has error: ${err.message}`); + } + }); + } + deleteItem(_0) { + return __async$7(this, arguments, function* ({ storeName, key }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, "readwrite"); + yield tx.objectStore(storeName).delete(key); + } catch (err) { + throw new Error(`Method deleteItem has error: ${err.message}`); + } + }); + } + getAll(_0) { + return __async$7(this, arguments, function* ({ storeName }) { + yield this.initDB(); + if (!this.db) { + return []; + } + try { + const tx = this.db.transaction(storeName, "readonly"); + return yield tx.objectStore(storeName).getAll(); + } catch (err) { + throw new Error(`Method getAll has error: ${err.message}`); + } + }); + } + /** + * Simple key-value store inspired by idb-keyval package + */ + getValue(key) { + return this.getItem({ storeName: "keyval", key }); + } + setValue(key, data) { + return this.putItem({ storeName: "keyval", key, data }); + } + delValue(key) { + return this.deleteItem({ storeName: "keyval", key }); + } + clearStore(_0) { + return __async$7(this, arguments, function* ({ storeName, mode = "readwrite" }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, mode); + yield tx.objectStore(storeName).clear(); + } catch (err) { + throw new Error(`Method clearStore has error: ${err.message}`); + } + }); + } + createTransactions(_0) { + return __async$7(this, arguments, function* ({ + storeName, + data, + mode = "readwrite" + }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, mode); + yield tx.objectStore(storeName).add(data); + yield tx.done; + } catch (err) { + throw new Error(`Method createTransactions has error: ${err.message}`); + } + }); + } + createMultipleTransactions(_0) { + return __async$7(this, arguments, function* ({ + storeName, + data, + index, + mode = "readwrite" + }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, mode); + for (const item of data) { + if (item) { + yield tx.store.put(__spreadValues$1(__spreadValues$1({}, item), index)); + } + } + } catch (err) { + throw new Error(`Method createMultipleTransactions has error: ${err.message}`); + } + }); + } +} +function getIndexedDB(netId) { + return __async$7(this, null, function* () { + if (!netId) { + const idb2 = new IndexedDB({ dbName: "tornado-core" }); + yield idb2.initDB(); + return idb2; + } + const DEPOSIT_INDEXES = [ + { name: "transactionHash", unique: false }, + { name: "commitment", unique: true } + ]; + const WITHDRAWAL_INDEXES = [ + { name: "nullifierHash", unique: true } + // keys on which the index is created + ]; + const LAST_EVENT_INDEXES = [{ name: "name", unique: false }]; + const defaultState = [ + { + name: "encrypted_events", + keyPath: "transactionHash" + }, + { + name: "lastEvents", + keyPath: "name", + indexes: LAST_EVENT_INDEXES + } + ]; + const config = getConfig(netId); + const { tokens, nativeCurrency } = config; + const stores = [...defaultState]; + if (netId === NetId.MAINNET) { + stores.push({ + name: "register_events", + keyPath: "ensName" + }); + } + Object.entries(tokens).forEach(([token, { instanceAddress }]) => { + Object.keys(instanceAddress).forEach((amount) => { + if (nativeCurrency === token) { + stores.push({ + name: `stringify_bloom_${netId}_${token}_${amount}`, + keyPath: "hashBloom" + }); + } + stores.push( + { + name: 
`deposits_${netId}_${token}_${amount}`, + keyPath: "leafIndex", + // the key by which it refers to the object must be in all instances of the storage + indexes: DEPOSIT_INDEXES + }, + { + name: `withdrawals_${netId}_${token}_${amount}`, + keyPath: "blockNumber", + indexes: WITHDRAWAL_INDEXES + }, + { + name: `stringify_tree_${netId}_${token}_${amount}`, + keyPath: "hashTree" + } + ); + }); + }); + const idb = new IndexedDB({ + dbName: `tornado_core_${netId}`, + stores + }); + yield idb.initDB(); + return idb; + }); +} + var __async$6 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { @@ -7179,6 +7738,7 @@ exports.BaseTornadoService = BaseTornadoService; exports.BatchBlockService = BatchBlockService; exports.BatchEventsService = BatchEventsService; exports.BatchTransactionService = BatchTransactionService; +exports.DBTornadoService = DBTornadoService; exports.DEPOSIT = DEPOSIT; exports.Deposit = Deposit; exports.ENS__factory = ENS__factory; @@ -7192,6 +7752,8 @@ exports.GET_NOTE_ACCOUNTS = GET_NOTE_ACCOUNTS; exports.GET_REGISTERED = GET_REGISTERED; exports.GET_STATISTIC = GET_STATISTIC; exports.GET_WITHDRAWALS = GET_WITHDRAWALS; +exports.INDEX_DB_ERROR = INDEX_DB_ERROR; +exports.IndexedDB = IndexedDB; exports.Invoice = Invoice; exports.MAX_FEE = MAX_FEE; exports.MAX_TOVARISH_EVENTS = MAX_TOVARISH_EVENTS; @@ -7241,6 +7803,7 @@ exports.defaultConfig = defaultConfig; exports.defaultUserAgent = defaultUserAgent; exports.depositsEventsSchema = depositsEventsSchema; exports.digest = digest; +exports.downloadZip = downloadZip; exports.echoEventsSchema = echoEventsSchema; exports.enabledChains = enabledChains; exports.encryptedNotesSchema = encryptedNotesSchema; @@ -7263,6 +7826,7 @@ exports.getEventsSchemaValidator = getEventsSchemaValidator; exports.getGovernanceEvents = getGovernanceEvents; exports.getGraphEchoEvents = getGraphEchoEvents; exports.getHttpAgent = getHttpAgent; +exports.getIndexedDB = getIndexedDB; exports.getInstanceByAddress = getInstanceByAddress; exports.getMeta = getMeta; exports.getNetworkConfig = getNetworkConfig; @@ -7285,6 +7849,8 @@ exports.isNode = isNode; exports.jobsSchema = jobsSchema; exports.leBuff2Int = leBuff2Int; exports.leInt2Buff = leInt2Buff; +exports.loadDBEvents = loadDBEvents; +exports.loadRemoteEvents = loadRemoteEvents; exports.mimc = mimc; exports.multicall = multicall; exports.packEncryptedMessage = packEncryptedMessage; @@ -7295,10 +7861,13 @@ exports.proofSchemaType = proofSchemaType; exports.queryGraph = queryGraph; exports.rBigInt = rBigInt; exports.registeredEventsSchema = registeredEventsSchema; +exports.saveDBEvents = saveDBEvents; exports.sleep = sleep; exports.substring = substring; exports.toFixedHex = toFixedHex; exports.toFixedLength = toFixedLength; exports.unpackEncryptedMessage = unpackEncryptedMessage; +exports.unzipAsync = unzipAsync; exports.validateUrl = validateUrl; exports.withdrawalsEventsSchema = withdrawalsEventsSchema; +exports.zipAsync = zipAsync; diff --git a/dist/index.mjs b/dist/index.mjs index a951d81..41a554b 100644 --- a/dist/index.mjs +++ b/dist/index.mjs @@ -3,14 +3,16 @@ import crossFetch from 'cross-fetch'; import { webcrypto } from 'crypto'; import BN from 'bn.js'; import Ajv from 'ajv'; +import { zip, unzip } from 'fflate'; import { buildPedersenHash, buildMimcSponge } from 'circomlibjs'; import { getEncryptionPublicKey, encrypt, decrypt } from '@metamask/eth-sig-util'; +import { openDB, deleteDB } from 'idb'; import { Worker as Worker$1 } from 
'worker_threads'; import { MerkleTree, PartialMerkleTree } from '@tornado/fixed-merkle-tree'; import * as websnarkUtils from '@tornado/websnark/src/utils'; import websnarkGroth from '@tornado/websnark/src/groth16'; -var __async$e = (__this, __arguments, generator) => { +var __async$h = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -119,34 +121,34 @@ function substring(str, length = 10) { return `${str.substring(0, length)}...${str.substring(str.length - length)}`; } function digest(bytes, algo = "SHA-384") { - return __async$e(this, null, function* () { + return __async$h(this, null, function* () { return new Uint8Array(yield crypto.subtle.digest(algo, bytes)); }); } -var __defProp$7 = Object.defineProperty; +var __defProp$8 = Object.defineProperty; var __defProps$6 = Object.defineProperties; var __getOwnPropDescs$6 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$7 = Object.getOwnPropertySymbols; +var __getOwnPropSymbols$8 = Object.getOwnPropertySymbols; var __getProtoOf$2 = Object.getPrototypeOf; -var __hasOwnProp$7 = Object.prototype.hasOwnProperty; -var __propIsEnum$7 = Object.prototype.propertyIsEnumerable; +var __hasOwnProp$8 = Object.prototype.hasOwnProperty; +var __propIsEnum$8 = Object.prototype.propertyIsEnumerable; var __reflectGet$2 = Reflect.get; -var __defNormalProp$7 = (obj, key, value) => key in obj ? __defProp$7(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$7 = (a, b) => { +var __defNormalProp$8 = (obj, key, value) => key in obj ? __defProp$8(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$8 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$7.call(b, prop)) - __defNormalProp$7(a, prop, b[prop]); - if (__getOwnPropSymbols$7) - for (var prop of __getOwnPropSymbols$7(b)) { - if (__propIsEnum$7.call(b, prop)) - __defNormalProp$7(a, prop, b[prop]); + if (__hasOwnProp$8.call(b, prop)) + __defNormalProp$8(a, prop, b[prop]); + if (__getOwnPropSymbols$8) + for (var prop of __getOwnPropSymbols$8(b)) { + if (__propIsEnum$8.call(b, prop)) + __defNormalProp$8(a, prop, b[prop]); } return a; }; var __spreadProps$6 = (a, b) => __defProps$6(a, __getOwnPropDescs$6(b)); var __superGet$2 = (cls, obj, key) => __reflectGet$2(__getProtoOf$2(cls), key, obj); -var __async$d = (__this, __arguments, generator) => { +var __async$g = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -195,7 +197,7 @@ function getHttpAgent({ } } function fetchData(_0) { - return __async$d(this, arguments, function* (url, options = {}) { + return __async$g(this, arguments, function* (url, options = {}) { var _a, _b, _c; const MAX_RETRY = (_a = options.maxRetry) != null ? _a : 3; const RETRY_ON = (_b = options.retryOn) != null ? 
_b : 500; @@ -287,7 +289,7 @@ function fetchData(_0) { throw errorObject; }); } -const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$d(void 0, null, function* () { +const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$g(void 0, null, function* () { let signal; if (_signal) { const controller = new AbortController(); @@ -296,7 +298,7 @@ const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$d(void 0, nu controller.abort(); }); } - const init = __spreadProps$6(__spreadValues$7({}, options), { + const init = __spreadProps$6(__spreadValues$8({}, options), { method: req.method || "POST", headers: req.headers, body: req.body || void 0, @@ -318,7 +320,7 @@ const fetchGetUrlFunc = (options = {}) => (req, _signal) => __async$d(void 0, nu }; }); function getProvider(rpcUrl, fetchOptions) { - return __async$d(this, null, function* () { + return __async$g(this, null, function* () { const fetchReq = new FetchRequest(rpcUrl); fetchReq.getUrlFunc = fetchGetUrlFunc(fetchOptions); const staticNetwork = yield new JsonRpcProvider(fetchReq).getNetwork(); @@ -349,7 +351,7 @@ function getProviderWithNetId(netId, rpcUrl, config, fetchOptions) { }); return provider; } -const populateTransaction = (signer, tx) => __async$d(void 0, null, function* () { +const populateTransaction = (signer, tx) => __async$g(void 0, null, function* () { const provider = signer.provider; if (!tx.from) { tx.from = signer.address; @@ -410,7 +412,7 @@ class TornadoWallet extends Wallet { return new TornadoWallet(privateKey, provider, options); } populateTransaction(tx) { - return __async$d(this, null, function* () { + return __async$g(this, null, function* () { const txObject = yield populateTransaction(this, tx); this.nonce = Number(txObject.nonce); return __superGet$2(TornadoWallet.prototype, this, "populateTransaction").call(this, txObject); @@ -426,7 +428,7 @@ class TornadoVoidSigner extends VoidSigner { this.bumpNonce = bumpNonce != null ? bumpNonce : false; } populateTransaction(tx) { - return __async$d(this, null, function* () { + return __async$g(this, null, function* () { const txObject = yield populateTransaction(this, tx); this.nonce = Number(txObject.nonce); return __superGet$2(TornadoVoidSigner.prototype, this, "populateTransaction").call(this, txObject); @@ -442,7 +444,7 @@ class TornadoRpcSigner extends JsonRpcSigner { this.bumpNonce = bumpNonce != null ? bumpNonce : false; } sendUncheckedTransaction(tx) { - return __async$d(this, null, function* () { + return __async$g(this, null, function* () { return __superGet$2(TornadoRpcSigner.prototype, this, "sendUncheckedTransaction").call(this, yield populateTransaction(this, tx)); }); } @@ -453,7 +455,7 @@ class TornadoBrowserProvider extends BrowserProvider { this.options = options; } getSigner(address) { - return __async$d(this, null, function* () { + return __async$g(this, null, function* () { var _a, _b, _c, _d, _e, _f, _g, _h, _i; const signerAddress = (yield __superGet$2(TornadoBrowserProvider.prototype, this, "getSigner").call(this, address)).address; if (((_a = this.options) == null ? void 0 : _a.webChainId) && ((_b = this.options) == null ? void 0 : _b.connectWallet) && Number(yield __superGet$2(TornadoBrowserProvider.prototype, this, "send").call(this, "eth_chainId", [])) !== Number((_c = this.options) == null ? 
void 0 : _c.webChainId)) { @@ -662,26 +664,26 @@ const GET_GOVERNANCE_APY = ` } `; -var __defProp$6 = Object.defineProperty; +var __defProp$7 = Object.defineProperty; var __defProps$5 = Object.defineProperties; var __getOwnPropDescs$5 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$6 = Object.getOwnPropertySymbols; -var __hasOwnProp$6 = Object.prototype.hasOwnProperty; -var __propIsEnum$6 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$6 = (a, b) => { +var __getOwnPropSymbols$7 = Object.getOwnPropertySymbols; +var __hasOwnProp$7 = Object.prototype.hasOwnProperty; +var __propIsEnum$7 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$7 = (obj, key, value) => key in obj ? __defProp$7(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$7 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$6.call(b, prop)) - __defNormalProp$6(a, prop, b[prop]); - if (__getOwnPropSymbols$6) - for (var prop of __getOwnPropSymbols$6(b)) { - if (__propIsEnum$6.call(b, prop)) - __defNormalProp$6(a, prop, b[prop]); + if (__hasOwnProp$7.call(b, prop)) + __defNormalProp$7(a, prop, b[prop]); + if (__getOwnPropSymbols$7) + for (var prop of __getOwnPropSymbols$7(b)) { + if (__propIsEnum$7.call(b, prop)) + __defNormalProp$7(a, prop, b[prop]); } return a; }; var __spreadProps$5 = (a, b) => __defProps$5(a, __getOwnPropDescs$5(b)); -var __async$c = (__this, __arguments, generator) => { +var __async$f = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -704,7 +706,7 @@ var __async$c = (__this, __arguments, generator) => { const isEmptyArray = (arr) => !Array.isArray(arr) || !arr.length; const GRAPHQL_LIMIT = 1e3; function queryGraph(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, query, @@ -713,7 +715,7 @@ function queryGraph(_0) { }) { var _a; const graphUrl = `${graphApi}/subgraphs/name/${subgraphName}`; - const { data, errors } = yield fetchData(graphUrl, __spreadProps$5(__spreadValues$6({}, fetchDataOptions2), { + const { data, errors } = yield fetchData(graphUrl, __spreadProps$5(__spreadValues$7({}, fetchDataOptions2), { method: "POST", headers: { "Content-Type": "application/json" @@ -733,7 +735,7 @@ function queryGraph(_0) { }); } function getStatistic(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -780,7 +782,7 @@ function getStatistic(_0) { }); } function getMeta(_0) { - return __async$c(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) { + return __async$f(this, arguments, function* ({ graphApi, subgraphName, fetchDataOptions: fetchDataOptions2 }) { try { const { _meta: { @@ -825,7 +827,7 @@ function getRegisters({ }); } function getAllRegisters(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -914,7 +916,7 @@ function getDeposits({ }); } function getAllDeposits(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -1011,7 +1013,7 @@ function getWithdrawals({ }); } function 
getAllWithdrawals(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, currency, @@ -1087,7 +1089,7 @@ function getAllWithdrawals(_0) { }); } function getNoteAccounts(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, address, @@ -1140,7 +1142,7 @@ function getGraphEchoEvents({ }); } function getAllGraphEchoEvents(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -1229,7 +1231,7 @@ function getEncryptedNotes({ }); } function getAllEncryptedNotes(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -1314,7 +1316,7 @@ function getGovernanceEvents({ }); } function getAllGovernanceEvents(_0) { - return __async$c(this, arguments, function* ({ + return __async$f(this, arguments, function* ({ graphApi, subgraphName, fromBlock, @@ -1473,7 +1475,7 @@ var graph = /*#__PURE__*/Object.freeze({ queryGraph: queryGraph }); -var __async$b = (__this, __arguments, generator) => { +var __async$e = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -1512,7 +1514,7 @@ class BatchBlockService { this.retryOn = retryOn; } getBlock(blockTag) { - return __async$b(this, null, function* () { + return __async$e(this, null, function* () { const blockObject = yield this.provider.getBlock(blockTag); if (!blockObject) { const errMsg = `No block for ${blockTag}`; @@ -1522,9 +1524,9 @@ class BatchBlockService { }); } createBatchRequest(batchArray) { - return batchArray.map((blocks, index) => __async$b(this, null, function* () { + return batchArray.map((blocks, index) => __async$e(this, null, function* () { yield sleep(20 * index); - return (() => __async$b(this, null, function* () { + return (() => __async$e(this, null, function* () { let retries = 0; let err; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -1541,7 +1543,7 @@ class BatchBlockService { })); } getBatchBlocks(blocks) { - return __async$b(this, null, function* () { + return __async$e(this, null, function* () { let blockCount = 0; const results = []; for (const chunks of chunk(blocks, this.concurrencySize * this.batchSize)) { @@ -1579,7 +1581,7 @@ class BatchTransactionService { this.retryOn = retryOn; } getTransaction(txHash) { - return __async$b(this, null, function* () { + return __async$e(this, null, function* () { const txObject = yield this.provider.getTransaction(txHash); if (!txObject) { const errMsg = `No transaction for ${txHash}`; @@ -1589,9 +1591,9 @@ class BatchTransactionService { }); } createBatchRequest(batchArray) { - return batchArray.map((txs, index) => __async$b(this, null, function* () { + return batchArray.map((txs, index) => __async$e(this, null, function* () { yield sleep(20 * index); - return (() => __async$b(this, null, function* () { + return (() => __async$e(this, null, function* () { let retries = 0; let err; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -1608,7 +1610,7 @@ class BatchTransactionService { })); } getBatchTransactions(txs) { - return __async$b(this, null, function* () { + return __async$e(this, null, function* () { let txCount = 0; const results = []; for (const chunks of chunk(txs, this.concurrencySize * this.batchSize)) { @@ 
-1644,7 +1646,7 @@ class BatchEventsService { this.retryOn = retryOn; } getPastEvents(_0) { - return __async$b(this, arguments, function* ({ fromBlock, toBlock, type }) { + return __async$e(this, arguments, function* ({ fromBlock, toBlock, type }) { let err; let retries = 0; while (!this.shouldRetry && retries === 0 || this.shouldRetry && retries < this.retryMax) { @@ -1664,13 +1666,13 @@ class BatchEventsService { }); } createBatchRequest(batchArray) { - return batchArray.map((event, index) => __async$b(this, null, function* () { + return batchArray.map((event, index) => __async$e(this, null, function* () { yield sleep(20 * index); return this.getPastEvents(event); })); } getBatchEvents(_0) { - return __async$b(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) { + return __async$e(this, arguments, function* ({ fromBlock, toBlock, type = "*" }) { if (!toBlock) { toBlock = yield this.provider.getBlockNumber(); } @@ -1701,19 +1703,19 @@ class BatchEventsService { } } -var __defProp$5 = Object.defineProperty; -var __getOwnPropSymbols$5 = Object.getOwnPropertySymbols; -var __hasOwnProp$5 = Object.prototype.hasOwnProperty; -var __propIsEnum$5 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$5 = (a, b) => { +var __defProp$6 = Object.defineProperty; +var __getOwnPropSymbols$6 = Object.getOwnPropertySymbols; +var __hasOwnProp$6 = Object.prototype.hasOwnProperty; +var __propIsEnum$6 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$6 = (obj, key, value) => key in obj ? __defProp$6(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$6 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$5.call(b, prop)) - __defNormalProp$5(a, prop, b[prop]); - if (__getOwnPropSymbols$5) - for (var prop of __getOwnPropSymbols$5(b)) { - if (__propIsEnum$5.call(b, prop)) - __defNormalProp$5(a, prop, b[prop]); + if (__hasOwnProp$6.call(b, prop)) + __defNormalProp$6(a, prop, b[prop]); + if (__getOwnPropSymbols$6) + for (var prop of __getOwnPropSymbols$6(b)) { + if (__propIsEnum$6.call(b, prop)) + __defNormalProp$6(a, prop, b[prop]); } return a; }; @@ -2273,10 +2275,10 @@ function addNetwork(newConfig) { enabledChains.push( ...Object.keys(newConfig).map((netId) => Number(netId)).filter((netId) => !enabledChains.includes(netId)) ); - customConfig = __spreadValues$5(__spreadValues$5({}, customConfig), newConfig); + customConfig = __spreadValues$6(__spreadValues$6({}, customConfig), newConfig); } function getNetworkConfig() { - const allConfig = __spreadValues$5(__spreadValues$5({}, defaultConfig), customConfig); + const allConfig = __spreadValues$6(__spreadValues$6({}, defaultConfig), customConfig); return enabledChains.reduce((acc, curr) => { acc[curr] = allConfig[curr]; return acc; @@ -2355,21 +2357,21 @@ ajv.addKeyword({ errors: true }); -var __defProp$4 = Object.defineProperty; +var __defProp$5 = Object.defineProperty; var __defProps$4 = Object.defineProperties; var __getOwnPropDescs$4 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols; -var __hasOwnProp$4 = Object.prototype.hasOwnProperty; -var __propIsEnum$4 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$4 = (obj, key, value) => key in obj ? 
__defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$4 = (a, b) => { +var __getOwnPropSymbols$5 = Object.getOwnPropertySymbols; +var __hasOwnProp$5 = Object.prototype.hasOwnProperty; +var __propIsEnum$5 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$5 = (obj, key, value) => key in obj ? __defProp$5(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$5 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$4.call(b, prop)) - __defNormalProp$4(a, prop, b[prop]); - if (__getOwnPropSymbols$4) - for (var prop of __getOwnPropSymbols$4(b)) { - if (__propIsEnum$4.call(b, prop)) - __defNormalProp$4(a, prop, b[prop]); + if (__hasOwnProp$5.call(b, prop)) + __defNormalProp$5(a, prop, b[prop]); + if (__getOwnPropSymbols$5) + for (var prop of __getOwnPropSymbols$5(b)) { + if (__propIsEnum$5.call(b, prop)) + __defNormalProp$5(a, prop, b[prop]); } return a; }; @@ -2382,23 +2384,23 @@ const addressSchemaType = { const bnSchemaType = { type: "string", BN: true }; const proofSchemaType = { type: "string", pattern: "^0x[a-fA-F0-9]{512}$" }; const bytes32SchemaType = { type: "string", pattern: "^0x[a-fA-F0-9]{64}$" }; -const bytes32BNSchemaType = __spreadProps$4(__spreadValues$4({}, bytes32SchemaType), { BN: true }); +const bytes32BNSchemaType = __spreadProps$4(__spreadValues$5({}, bytes32SchemaType), { BN: true }); -var __defProp$3 = Object.defineProperty; +var __defProp$4 = Object.defineProperty; var __defProps$3 = Object.defineProperties; var __getOwnPropDescs$3 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols; -var __hasOwnProp$3 = Object.prototype.hasOwnProperty; -var __propIsEnum$3 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$3 = (a, b) => { +var __getOwnPropSymbols$4 = Object.getOwnPropertySymbols; +var __hasOwnProp$4 = Object.prototype.hasOwnProperty; +var __propIsEnum$4 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$4 = (obj, key, value) => key in obj ? 
__defProp$4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$4 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$3.call(b, prop)) - __defNormalProp$3(a, prop, b[prop]); - if (__getOwnPropSymbols$3) - for (var prop of __getOwnPropSymbols$3(b)) { - if (__propIsEnum$3.call(b, prop)) - __defNormalProp$3(a, prop, b[prop]); + if (__hasOwnProp$4.call(b, prop)) + __defNormalProp$4(a, prop, b[prop]); + if (__getOwnPropSymbols$4) + for (var prop of __getOwnPropSymbols$4(b)) { + if (__propIsEnum$4.call(b, prop)) + __defNormalProp$4(a, prop, b[prop]); } return a; }; @@ -2419,7 +2421,7 @@ const governanceEventsSchema = { anyOf: [ { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { event: { type: "string" }, id: { type: "number" }, proposer: addressSchemaType, @@ -2442,7 +2444,7 @@ const governanceEventsSchema = { }, { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { event: { type: "string" }, proposalId: { type: "number" }, voter: addressSchemaType, @@ -2456,7 +2458,7 @@ const governanceEventsSchema = { }, { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { event: { type: "string" }, account: addressSchemaType, delegateTo: addressSchemaType @@ -2466,7 +2468,7 @@ const governanceEventsSchema = { }, { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { event: { type: "string" }, account: addressSchemaType, delegateFrom: addressSchemaType @@ -2481,7 +2483,7 @@ const registeredEventsSchema = { type: "array", items: { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { ensName: { type: "string" }, relayerAddress: addressSchemaType }), @@ -2493,7 +2495,7 @@ const depositsEventsSchema = { type: "array", items: { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { commitment: bytes32SchemaType, leafIndex: { type: "number" }, timestamp: { type: "number" }, @@ -2507,7 +2509,7 @@ const withdrawalsEventsSchema = { type: "array", items: { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { nullifierHash: bytes32SchemaType, to: addressSchemaType, fee: bnSchemaType, @@ -2521,7 +2523,7 @@ const echoEventsSchema = { type: "array", items: { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { address: addressSchemaType, encryptedAccount: { type: "string" } }), @@ -2533,7 +2535,7 @@ const encryptedNotesSchema = { type: "array", items: { type: "object", - properties: __spreadProps$3(__spreadValues$3({}, baseEventsSchemaProperty), { + properties: __spreadProps$3(__spreadValues$4({}, baseEventsSchemaProperty), { encryptedNote: { type: "string" } }), required: 
[...baseEventsSchemaRequired, "encryptedNote"], @@ -2692,26 +2694,26 @@ const jobsSchema = { required: ["id", "status"] }; -var __defProp$2 = Object.defineProperty; +var __defProp$3 = Object.defineProperty; var __defProps$2 = Object.defineProperties; var __getOwnPropDescs$2 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols; -var __hasOwnProp$2 = Object.prototype.hasOwnProperty; -var __propIsEnum$2 = Object.prototype.propertyIsEnumerable; -var __defNormalProp$2 = (obj, key, value) => key in obj ? __defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$2 = (a, b) => { +var __getOwnPropSymbols$3 = Object.getOwnPropertySymbols; +var __hasOwnProp$3 = Object.prototype.hasOwnProperty; +var __propIsEnum$3 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$3 = (obj, key, value) => key in obj ? __defProp$3(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$3 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$2.call(b, prop)) - __defNormalProp$2(a, prop, b[prop]); - if (__getOwnPropSymbols$2) - for (var prop of __getOwnPropSymbols$2(b)) { - if (__propIsEnum$2.call(b, prop)) - __defNormalProp$2(a, prop, b[prop]); + if (__hasOwnProp$3.call(b, prop)) + __defNormalProp$3(a, prop, b[prop]); + if (__getOwnPropSymbols$3) + for (var prop of __getOwnPropSymbols$3(b)) { + if (__propIsEnum$3.call(b, prop)) + __defNormalProp$3(a, prop, b[prop]); } return a; }; var __spreadProps$2 = (a, b) => __defProps$2(a, __getOwnPropDescs$2(b)); -var __async$a = (__this, __arguments, generator) => { +var __async$d = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -2777,7 +2779,7 @@ class RelayerClient { this.tovarish = false; } askRelayerStatus(_0) { - return __async$a(this, arguments, function* ({ + return __async$d(this, arguments, function* ({ hostname, url, relayerAddress @@ -2790,7 +2792,7 @@ class RelayerClient { } else { url = ""; } - const rawStatus = yield fetchData(`${url}status`, __spreadProps$2(__spreadValues$2({}, this.fetchDataOptions), { + const rawStatus = yield fetchData(`${url}status`, __spreadProps$2(__spreadValues$3({}, this.fetchDataOptions), { headers: { "Content-Type": "application/json, application/x-www-form-urlencoded" }, @@ -2801,7 +2803,7 @@ class RelayerClient { if (!statusValidator(rawStatus)) { throw new Error("Invalid status schema"); } - const status = __spreadProps$2(__spreadValues$2({}, rawStatus), { + const status = __spreadProps$2(__spreadValues$3({}, rawStatus), { url }); if (status.currentQueue > 5) { @@ -2817,7 +2819,7 @@ class RelayerClient { }); } filterRelayer(relayer) { - return __async$a(this, null, function* () { + return __async$d(this, null, function* () { var _a; const hostname = relayer.hostnames[this.netId]; const { ensName, relayerAddress } = relayer; @@ -2851,7 +2853,7 @@ class RelayerClient { }); } getValidRelayers(relayers) { - return __async$a(this, null, function* () { + return __async$d(this, null, function* () { const invalidRelayers = []; const validRelayers = (yield Promise.all(relayers.map((relayer) => this.filterRelayer(relayer)))).filter((r) => { if (!r) { @@ -2873,9 +2875,9 @@ class RelayerClient { return pickWeightedRandomRelayer(relayers); } tornadoWithdraw(_0, _1) { - return __async$a(this, arguments, function* ({ contract, proof, args }, callback) { + return __async$d(this, arguments, 
function* ({ contract, proof, args }, callback) { const { url } = this.selectedRelayer; - const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps$2(__spreadValues$2({}, this.fetchDataOptions), { + const withdrawResponse = yield fetchData(`${url}v1/tornadoWithdraw`, __spreadProps$2(__spreadValues$3({}, this.fetchDataOptions), { method: "POST", headers: { "Content-Type": "application/json" @@ -2895,7 +2897,7 @@ class RelayerClient { console.log(`Job submitted: ${jobUrl} `); while (!relayerStatus || !["FAILED", "CONFIRMED"].includes(relayerStatus)) { - const jobResponse = yield fetchData(jobUrl, __spreadProps$2(__spreadValues$2({}, this.fetchDataOptions), { + const jobResponse = yield fetchData(jobUrl, __spreadProps$2(__spreadValues$3({}, this.fetchDataOptions), { method: "GET", headers: { "Content-Type": "application/json" @@ -2938,29 +2940,29 @@ class RelayerClient { } } -var __defProp$1 = Object.defineProperty; +var __defProp$2 = Object.defineProperty; var __defProps$1 = Object.defineProperties; var __getOwnPropDescs$1 = Object.getOwnPropertyDescriptors; -var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols; +var __getOwnPropSymbols$2 = Object.getOwnPropertySymbols; var __getProtoOf$1 = Object.getPrototypeOf; -var __hasOwnProp$1 = Object.prototype.hasOwnProperty; -var __propIsEnum$1 = Object.prototype.propertyIsEnumerable; +var __hasOwnProp$2 = Object.prototype.hasOwnProperty; +var __propIsEnum$2 = Object.prototype.propertyIsEnumerable; var __reflectGet$1 = Reflect.get; -var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; -var __spreadValues$1 = (a, b) => { +var __defNormalProp$2 = (obj, key, value) => key in obj ? 
__defProp$2(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$2 = (a, b) => { for (var prop in b || (b = {})) - if (__hasOwnProp$1.call(b, prop)) - __defNormalProp$1(a, prop, b[prop]); - if (__getOwnPropSymbols$1) - for (var prop of __getOwnPropSymbols$1(b)) { - if (__propIsEnum$1.call(b, prop)) - __defNormalProp$1(a, prop, b[prop]); + if (__hasOwnProp$2.call(b, prop)) + __defNormalProp$2(a, prop, b[prop]); + if (__getOwnPropSymbols$2) + for (var prop of __getOwnPropSymbols$2(b)) { + if (__propIsEnum$2.call(b, prop)) + __defNormalProp$2(a, prop, b[prop]); } return a; }; var __spreadProps$1 = (a, b) => __defProps$1(a, __getOwnPropDescs$1(b)); var __superGet$1 = (cls, obj, key) => __reflectGet$1(__getProtoOf$1(cls), key, obj); -var __async$9 = (__this, __arguments, generator) => { +var __async$c = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -3040,7 +3042,7 @@ class BaseEventsService { } /* eslint-enable @typescript-eslint/no-unused-vars */ formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return yield new Promise((resolve) => resolve(events)); }); } @@ -3048,7 +3050,7 @@ class BaseEventsService { * Get saved or cached events */ getEventsFromDB() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return { events: [], lastBlock: 0 @@ -3059,7 +3061,7 @@ class BaseEventsService { * Events from remote cache (Either from local cache, CDN, or from IPFS) */ getEventsFromCache() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return { events: [], lastBlock: 0, @@ -3068,7 +3070,7 @@ class BaseEventsService { }); } getSavedEvents() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { let dbEvents = yield this.getEventsFromDB(); if (!dbEvents.lastBlock) { dbEvents = yield this.getEventsFromCache(); @@ -3080,7 +3082,7 @@ class BaseEventsService { * Get latest events */ getEventsFromGraph(_0) { - return __async$9(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ fromBlock, methodName = "" }) { @@ -3090,7 +3092,7 @@ class BaseEventsService { lastBlock: fromBlock }; } - const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$1({ + const { events, lastSyncBlock } = yield graph[methodName || this.getGraphMethod()](__spreadValues$2({ fromBlock }, this.getGraphParams())); return { @@ -3100,7 +3102,7 @@ class BaseEventsService { }); } getEventsFromRpc(_0) { - return __async$9(this, arguments, function* ({ + return __async$c(this, arguments, function* ({ fromBlock, toBlock }) { @@ -3132,7 +3134,7 @@ class BaseEventsService { }); } getLatestEvents(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { + return __async$c(this, arguments, function* ({ fromBlock }) { var _a; if (((_a = this.tovarishClient) == null ? 
void 0 : _a.selectedRelayer) && ![DEPOSIT, WITHDRAWAL].includes(this.type.toLowerCase())) { const { events, lastSyncBlock: lastBlock } = yield this.tovarishClient.getEvents({ @@ -3161,14 +3163,14 @@ class BaseEventsService { */ // eslint-disable-next-line @typescript-eslint/no-unused-vars saveEvents(_0) { - return __async$9(this, arguments, function* ({ events, lastBlock }) { + return __async$c(this, arguments, function* ({ events, lastBlock }) { }); } /** * Trigger saving and receiving latest events */ updateEvents() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { var _a; const savedEvents = yield this.getSavedEvents(); let fromBlock = this.deployedBlock; @@ -3253,7 +3255,7 @@ class BaseTornadoService extends BaseEventsService { }; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { const type = this.getType().toLowerCase(); if (type === DEPOSIT) { const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { @@ -3272,7 +3274,7 @@ class BaseTornadoService extends BaseEventsService { ]); return formattedEvents.map((event) => { const { from } = txs.find(({ hash }) => hash === event.transactionHash); - return __spreadProps$1(__spreadValues$1({}, event), { + return __spreadProps$1(__spreadValues$2({}, event), { from }); }); @@ -3293,7 +3295,7 @@ class BaseTornadoService extends BaseEventsService { ]); return formattedEvents.map((event) => { const { timestamp } = blocks.find(({ number }) => number === event.blockNumber); - return __spreadProps$1(__spreadValues$1({}, event), { + return __spreadProps$1(__spreadValues$2({}, event), { timestamp }); }); @@ -3310,7 +3312,7 @@ class BaseTornadoService extends BaseEventsService { } } getLatestEvents(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { + return __async$c(this, arguments, function* ({ fromBlock }) { var _a; if ((_a = this.tovarishClient) == null ? 
void 0 : _a.selectedRelayer) { const { events, lastSyncBlock: lastBlock } = yield this.tovarishClient.getEvents({ @@ -3360,7 +3362,7 @@ class BaseEchoService extends BaseEventsService { return "getAllGraphEchoEvents"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const { who, data } = args; if (who && data) { @@ -3369,7 +3371,7 @@ class BaseEchoService extends BaseEventsService { logIndex, transactionHash }; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + return __spreadProps$1(__spreadValues$2({}, eventObjects), { address: who, encryptedAccount: data }); @@ -3378,7 +3380,7 @@ class BaseEchoService extends BaseEventsService { }); } getEventsFromGraph(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { + return __async$c(this, arguments, function* ({ fromBlock }) { if (!this.graphApi || this.graphApi.includes("api.thegraph.com")) { return { events: [], @@ -3424,7 +3426,7 @@ class BaseEncryptedNotesService extends BaseEventsService { return "getAllEncryptedNotes"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const { encryptedNote } = args; if (encryptedNote && encryptedNote !== "0x") { @@ -3433,7 +3435,7 @@ class BaseEncryptedNotesService extends BaseEventsService { logIndex, transactionHash }; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + return __spreadProps$1(__spreadValues$2({}, eventObjects), { encryptedNote }); } @@ -3480,7 +3482,7 @@ class BaseGovernanceService extends BaseEventsService { return "getAllGovernanceEvents"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { const proposalEvents = []; const votedEvents = []; const delegatedEvents = []; @@ -3494,7 +3496,7 @@ class BaseGovernanceService extends BaseEventsService { }; if (event === "ProposalCreated") { const { id, proposer, target, startTime, endTime, description } = args; - proposalEvents.push(__spreadProps$1(__spreadValues$1({}, eventObjects), { + proposalEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), { id: Number(id), proposer, target, @@ -3505,7 +3507,7 @@ class BaseGovernanceService extends BaseEventsService { } if (event === "Voted") { const { proposalId, voter, support, votes } = args; - votedEvents.push(__spreadProps$1(__spreadValues$1({}, eventObjects), { + votedEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), { proposalId: Number(proposalId), voter, support, @@ -3516,14 +3518,14 @@ class BaseGovernanceService extends BaseEventsService { } if (event === "Delegated") { const { account, to: delegateTo } = args; - delegatedEvents.push(__spreadProps$1(__spreadValues$1({}, eventObjects), { + delegatedEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), { account, delegateTo })); } if (event === "Undelegated") { const { account, from: delegateFrom } = args; - undelegatedEvents.push(__spreadProps$1(__spreadValues$1({}, eventObjects), { + undelegatedEvents.push(__spreadProps$1(__spreadValues$2({}, eventObjects), { account, delegateFrom })); @@ -3547,7 +3549,7 @@ class BaseGovernanceService extends BaseEventsService { }); } getEventsFromGraph(_0) { - return __async$9(this, arguments, function* ({ fromBlock }) { + return __async$c(this, arguments, function* ({ 
fromBlock }) { if (!this.graphApi || !this.subgraphName || this.graphApi.includes("api.thegraph.com")) { return { events: [], @@ -3559,12 +3561,12 @@ class BaseGovernanceService extends BaseEventsService { } } function getTovarishNetworks(registryService, relayers) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { yield Promise.all( - relayers.filter((r) => r.tovarishHost).map((relayer) => __async$9(this, null, function* () { + relayers.filter((r) => r.tovarishHost).map((relayer) => __async$c(this, null, function* () { var _a, _b; try { - relayer.tovarishNetworks = yield fetchData(relayer.tovarishHost, __spreadProps$1(__spreadValues$1({}, registryService.fetchDataOptions), { + relayer.tovarishNetworks = yield fetchData(relayer.tovarishHost, __spreadProps$1(__spreadValues$2({}, registryService.fetchDataOptions), { headers: { "Content-Type": "application/json" }, @@ -3629,14 +3631,14 @@ class BaseRegistryService extends BaseEventsService { return "getAllRegisters"; } formatEvents(events) { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => { const eventObjects = { blockNumber, logIndex, transactionHash }; - return __spreadProps$1(__spreadValues$1({}, eventObjects), { + return __spreadProps$1(__spreadValues$2({}, eventObjects), { ensName: args.ensName, relayerAddress: args.relayerAddress }); @@ -3647,7 +3649,7 @@ class BaseRegistryService extends BaseEventsService { * Get saved or cached relayers */ getRelayersFromDB() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return { lastBlock: 0, timestamp: 0, @@ -3659,7 +3661,7 @@ class BaseRegistryService extends BaseEventsService { * Relayers from remote cache (Either from local cache, CDN, or from IPFS) */ getRelayersFromCache() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { return { lastBlock: 0, timestamp: 0, @@ -3669,7 +3671,7 @@ class BaseRegistryService extends BaseEventsService { }); } getSavedRelayers() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { let cachedRelayers = yield this.getRelayersFromDB(); if (!cachedRelayers || !cachedRelayers.relayers.length) { cachedRelayers = yield this.getRelayersFromCache(); @@ -3678,7 +3680,7 @@ class BaseRegistryService extends BaseEventsService { }); } getLatestRelayers() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { const { events, lastBlock } = yield this.updateEvents(); const subdomains = Object.values(this.relayerEnsSubdomains); const registerSet = /* @__PURE__ */ new Set(); @@ -3735,14 +3737,14 @@ class BaseRegistryService extends BaseEventsService { */ // eslint-disable-next-line @typescript-eslint/no-unused-vars saveRelayers(_0) { - return __async$9(this, arguments, function* ({ lastBlock, timestamp, relayers }) { + return __async$c(this, arguments, function* ({ lastBlock, timestamp, relayers }) { }); } /** * Get cached or latest relayer and save to local */ updateRelayers() { - return __async$9(this, null, function* () { + return __async$c(this, null, function* () { let { lastBlock, timestamp, relayers, fromCache } = yield this.getSavedRelayers(); let shouldSave = fromCache != null ? 
fromCache : false; if (!relayers.length || timestamp + this.updateInterval < Math.floor(Date.now() / 1e3)) { @@ -3758,6 +3760,219 @@ class BaseRegistryService extends BaseEventsService { } } +var __async$b = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +function zipAsync(file) { + return new Promise((res, rej) => { + zip(file, { mtime: /* @__PURE__ */ new Date("1/1/1980") }, (err, data) => { + if (err) { + rej(err); + return; + } + res(data); + }); + }); +} +function unzipAsync(data) { + return new Promise((res, rej) => { + unzip(data, {}, (err, data2) => { + if (err) { + rej(err); + return; + } + res(data2); + }); + }); +} +function downloadZip(_0) { + return __async$b(this, arguments, function* ({ + staticUrl = "", + zipName, + zipDigest, + parseJson = true + }) { + const url = `${staticUrl}/${zipName}.zip`; + const resp = yield fetchData(url, { + method: "GET", + returnResponse: true + }); + const data = new Uint8Array(yield resp.arrayBuffer()); + if (zipDigest) { + const hash = "sha384-" + bytesToBase64(yield digest(data)); + if (zipDigest !== hash) { + const errMsg = `Invalid digest hash for file ${url}, wants ${zipDigest} has ${hash}`; + throw new Error(errMsg); + } + } + const { [zipName]: content } = yield unzipAsync(data); + if (parseJson) { + return JSON.parse(new TextDecoder().decode(content)); + } + return content; + }); +} + +var __async$a = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +function saveDBEvents(_0) { + return __async$a(this, arguments, function* ({ + idb, + instanceName, + events, + lastBlock + }) { + try { + yield idb.createMultipleTransactions({ + data: events, + storeName: instanceName + }); + yield idb.putItem({ + data: { + blockNumber: lastBlock, + name: instanceName + }, + storeName: "lastEvents" + }); + } catch (err) { + console.log("Method saveDBEvents has error"); + console.log(err); + } + }); +} +function loadDBEvents(_0) { + return __async$a(this, arguments, function* ({ + idb, + instanceName + }) { + try { + const lastBlockStore = yield idb.getItem({ + storeName: "lastEvents", + key: instanceName + }); + if (!(lastBlockStore == null ? 
void 0 : lastBlockStore.blockNumber)) { + return { + events: [], + lastBlock: 0 + }; + } + return { + events: yield idb.getAll({ storeName: instanceName }), + lastBlock: lastBlockStore.blockNumber + }; + } catch (err) { + console.log("Method loadDBEvents has error"); + console.log(err); + return { + events: [], + lastBlock: 0 + }; + } + }); +} +function loadRemoteEvents(_0) { + return __async$a(this, arguments, function* ({ + staticUrl, + instanceName, + deployedBlock + }) { + var _a; + try { + const zipName = `${instanceName}.json`.toLowerCase(); + const events = yield downloadZip({ + staticUrl, + zipName + }); + if (!Array.isArray(events)) { + const errStr = `Invalid events from ${staticUrl}/${zipName}`; + throw new Error(errStr); + } + return { + events, + lastBlock: ((_a = events[events.length - 1]) == null ? void 0 : _a.blockNumber) || deployedBlock, + fromCache: true + }; + } catch (err) { + console.log("Method loadRemoteEvents has error"); + console.log(err); + return { + events: [], + lastBlock: deployedBlock, + fromCache: true + }; + } + }); +} +class DBTornadoService extends BaseTornadoService { + constructor(params) { + super(params); + this.staticUrl = params.staticUrl; + this.idb = params.idb; + } + getEventsFromDB() { + return __async$a(this, null, function* () { + return yield loadDBEvents({ + idb: this.idb, + instanceName: this.getInstanceName() + }); + }); + } + getEventsFromCache() { + return __async$a(this, null, function* () { + return yield loadRemoteEvents({ + staticUrl: this.staticUrl, + instanceName: this.getInstanceName(), + deployedBlock: this.deployedBlock + }); + }); + } + saveEvents(_0) { + return __async$a(this, arguments, function* ({ events, lastBlock }) { + yield saveDBEvents({ + idb: this.idb, + instanceName: this.getInstanceName(), + events, + lastBlock + }); + }); + } +} + const _abi$5 = [ { constant: true, @@ -6095,7 +6310,7 @@ var index = /*#__PURE__*/Object.freeze({ ReverseRecords__factory: ReverseRecords__factory }); -var __async$8 = (__this, __arguments, generator) => { +var __async$9 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -6120,13 +6335,13 @@ class Pedersen { this.pedersenPromise = this.initPedersen(); } initPedersen() { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { this.pedersenHash = yield buildPedersenHash(); this.babyJub = this.pedersenHash.babyJub; }); } unpackPoint(buffer) { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { var _a, _b; yield this.pedersenPromise; return (_b = this.babyJub) == null ? void 0 : _b.unpackPoint((_a = this.pedersenHash) == null ? 
void 0 : _a.hash(buffer)); @@ -6139,13 +6354,13 @@ class Pedersen { } const pedersen = new Pedersen(); function buffPedersenHash(buffer) { - return __async$8(this, null, function* () { + return __async$9(this, null, function* () { const [hash] = yield pedersen.unpackPoint(buffer); return pedersen.toStringBuffer(hash); }); } -var __async$7 = (__this, __arguments, generator) => { +var __async$8 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { try { @@ -6166,7 +6381,7 @@ var __async$7 = (__this, __arguments, generator) => { }); }; function createDeposit(_0) { - return __async$7(this, arguments, function* ({ nullifier, secret }) { + return __async$8(this, arguments, function* ({ nullifier, secret }) { const preimage = new Uint8Array([...leInt2Buff(nullifier), ...leInt2Buff(secret)]); const noteHex = toFixedHex(bytesToBN(preimage), 62); const commitment = BigInt(yield buffPedersenHash(preimage)); @@ -6226,7 +6441,7 @@ class Deposit { ); } static createNote(_0) { - return __async$7(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) { + return __async$8(this, arguments, function* ({ currency, amount, netId, nullifier, secret }) { if (!nullifier) { nullifier = rBigInt(31); } @@ -6253,7 +6468,7 @@ class Deposit { }); } static parseNote(noteString) { - return __async$7(this, null, function* () { + return __async$8(this, null, function* () { const noteRegex = new RegExp("tornado-(?\\w+)-(?[\\d.]+)-(?\\d+)-0x(?[0-9a-fA-F]{124})", "g"); const match = noteRegex.exec(noteString); if (!match) { @@ -6512,6 +6727,350 @@ class TornadoFeeOracle { } } +var __defProp$1 = Object.defineProperty; +var __getOwnPropSymbols$1 = Object.getOwnPropertySymbols; +var __hasOwnProp$1 = Object.prototype.hasOwnProperty; +var __propIsEnum$1 = Object.prototype.propertyIsEnumerable; +var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues$1 = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp$1.call(b, prop)) + __defNormalProp$1(a, prop, b[prop]); + if (__getOwnPropSymbols$1) + for (var prop of __getOwnPropSymbols$1(b)) { + if (__propIsEnum$1.call(b, prop)) + __defNormalProp$1(a, prop, b[prop]); + } + return a; +}; +var __async$7 = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; +const INDEX_DB_ERROR = "A mutation operation was attempted on a database that did not allow mutations."; +class IndexedDB { + constructor({ dbName, stores }) { + this.dbExists = false; + this.isBlocked = false; + this.options = { + upgrade(db) { + Object.values(db.objectStoreNames).forEach((value) => { + db.deleteObjectStore(value); + }); + [{ name: "keyval" }, ...stores || []].forEach(({ name, keyPath, indexes }) => { + const store = db.createObjectStore(name, { + keyPath, + autoIncrement: true + }); + if (Array.isArray(indexes)) { + indexes.forEach(({ name: name2, unique = false }) => { + store.createIndex(name2, name2, { unique }); + }); + } + }); + } + }; + this.dbName = dbName; + this.dbVersion = 34; + } + initDB() { + return __async$7(this, null, function* () { + try { + if (this.dbExists || this.isBlocked) { + return; + } + this.db = yield openDB(this.dbName, this.dbVersion, this.options); + this.db.addEventListener("onupgradeneeded", () => __async$7(this, null, function* () { + yield this._removeExist(); + })); + this.dbExists = true; + } catch (err) { + if (err.message.includes(INDEX_DB_ERROR)) { + console.log("This browser does not support IndexedDB!"); + this.isBlocked = true; + return; + } + if (err.message.includes("less than the existing version")) { + console.log(`Upgrading DB ${this.dbName} to ${this.dbVersion}`); + yield this._removeExist(); + return; + } + console.error(`Method initDB has error: ${err.message}`); + } + }); + } + _removeExist() { + return __async$7(this, null, function* () { + yield deleteDB(this.dbName); + this.dbExists = false; + yield this.initDB(); + }); + } + getFromIndex(_0) { + return __async$7(this, arguments, function* ({ + storeName, + indexName, + key + }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + return yield this.db.getFromIndex(storeName, indexName, key); + } catch (err) { + throw new Error(`Method getFromIndex has error: ${err.message}`); + } + }); + } + getAllFromIndex(_0) { + return __async$7(this, arguments, function* ({ + storeName, + indexName, + key, + count + }) { + yield this.initDB(); + if (!this.db) { + return []; + } + try { + return yield this.db.getAllFromIndex(storeName, indexName, key, count); + } catch (err) { + throw new Error(`Method getAllFromIndex has error: ${err.message}`); + } + }); + } + getItem(_0) { + return __async$7(this, arguments, function* ({ storeName, key }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const store = this.db.transaction(storeName).objectStore(storeName); + return yield store.get(key); + } catch (err) { + throw new Error(`Method getItem has error: ${err.message}`); + } + }); + } + addItem(_0) { + return __async$7(this, arguments, function* ({ storeName, data, key = "" }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, "readwrite"); + const isExist = yield tx.objectStore(storeName).get(key); + if (!isExist) { + yield tx.objectStore(storeName).add(data); + } + } catch (err) { + throw new Error(`Method addItem has error: ${err.message}`); + } + }); + } + putItem(_0) { + return __async$7(this, arguments, function* ({ storeName, data, key }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, "readwrite"); + yield tx.objectStore(storeName).put(data, key); + } catch (err) { + throw new 
Error(`Method putItem has error: ${err.message}`); + } + }); + } + deleteItem(_0) { + return __async$7(this, arguments, function* ({ storeName, key }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, "readwrite"); + yield tx.objectStore(storeName).delete(key); + } catch (err) { + throw new Error(`Method deleteItem has error: ${err.message}`); + } + }); + } + getAll(_0) { + return __async$7(this, arguments, function* ({ storeName }) { + yield this.initDB(); + if (!this.db) { + return []; + } + try { + const tx = this.db.transaction(storeName, "readonly"); + return yield tx.objectStore(storeName).getAll(); + } catch (err) { + throw new Error(`Method getAll has error: ${err.message}`); + } + }); + } + /** + * Simple key-value store inspired by idb-keyval package + */ + getValue(key) { + return this.getItem({ storeName: "keyval", key }); + } + setValue(key, data) { + return this.putItem({ storeName: "keyval", key, data }); + } + delValue(key) { + return this.deleteItem({ storeName: "keyval", key }); + } + clearStore(_0) { + return __async$7(this, arguments, function* ({ storeName, mode = "readwrite" }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, mode); + yield tx.objectStore(storeName).clear(); + } catch (err) { + throw new Error(`Method clearStore has error: ${err.message}`); + } + }); + } + createTransactions(_0) { + return __async$7(this, arguments, function* ({ + storeName, + data, + mode = "readwrite" + }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, mode); + yield tx.objectStore(storeName).add(data); + yield tx.done; + } catch (err) { + throw new Error(`Method createTransactions has error: ${err.message}`); + } + }); + } + createMultipleTransactions(_0) { + return __async$7(this, arguments, function* ({ + storeName, + data, + index, + mode = "readwrite" + }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, mode); + for (const item of data) { + if (item) { + yield tx.store.put(__spreadValues$1(__spreadValues$1({}, item), index)); + } + } + } catch (err) { + throw new Error(`Method createMultipleTransactions has error: ${err.message}`); + } + }); + } +} +function getIndexedDB(netId) { + return __async$7(this, null, function* () { + if (!netId) { + const idb2 = new IndexedDB({ dbName: "tornado-core" }); + yield idb2.initDB(); + return idb2; + } + const DEPOSIT_INDEXES = [ + { name: "transactionHash", unique: false }, + { name: "commitment", unique: true } + ]; + const WITHDRAWAL_INDEXES = [ + { name: "nullifierHash", unique: true } + // keys on which the index is created + ]; + const LAST_EVENT_INDEXES = [{ name: "name", unique: false }]; + const defaultState = [ + { + name: "encrypted_events", + keyPath: "transactionHash" + }, + { + name: "lastEvents", + keyPath: "name", + indexes: LAST_EVENT_INDEXES + } + ]; + const config = getConfig(netId); + const { tokens, nativeCurrency } = config; + const stores = [...defaultState]; + if (netId === NetId.MAINNET) { + stores.push({ + name: "register_events", + keyPath: "ensName" + }); + } + Object.entries(tokens).forEach(([token, { instanceAddress }]) => { + Object.keys(instanceAddress).forEach((amount) => { + if (nativeCurrency === token) { + stores.push({ + name: `stringify_bloom_${netId}_${token}_${amount}`, + keyPath: "hashBloom" + }); + } + stores.push( + { + name: 
`deposits_${netId}_${token}_${amount}`, + keyPath: "leafIndex", + // the key by which it refers to the object must be in all instances of the storage + indexes: DEPOSIT_INDEXES + }, + { + name: `withdrawals_${netId}_${token}_${amount}`, + keyPath: "blockNumber", + indexes: WITHDRAWAL_INDEXES + }, + { + name: `stringify_tree_${netId}_${token}_${amount}`, + keyPath: "hashTree" + } + ); + }); + }); + const idb = new IndexedDB({ + dbName: `tornado_core_${netId}`, + stores + }); + yield idb.initDB(); + return idb; + }); +} + var __async$6 = (__this, __arguments, generator) => { return new Promise((resolve, reject) => { var fulfilled = (value) => { @@ -7149,4 +7708,4 @@ function calculateSnarkProof(input, circuit, provingKey) { }); } -export { BaseEchoService, BaseEncryptedNotesService, BaseEventsService, BaseGovernanceService, BaseRegistryService, BaseTornadoService, BatchBlockService, BatchEventsService, BatchTransactionService, DEPOSIT, Deposit, ENS__factory, ERC20__factory, GET_DEPOSITS, GET_ECHO_EVENTS, GET_ENCRYPTED_NOTES, GET_GOVERNANCE_APY, GET_GOVERNANCE_EVENTS, GET_NOTE_ACCOUNTS, GET_REGISTERED, GET_STATISTIC, GET_WITHDRAWALS, Invoice, MAX_FEE, MAX_TOVARISH_EVENTS, MIN_FEE, MIN_STAKE_BALANCE, MerkleTreeService, Mimc, Multicall__factory, NetId, NoteAccount, OffchainOracle__factory, OvmGasPriceOracle__factory, Pedersen, RelayerClient, ReverseRecords__factory, TokenPriceOracle, TornadoBrowserProvider, TornadoFeeOracle, TornadoRpcSigner, TornadoVoidSigner, TornadoWallet, TovarishClient, WITHDRAWAL, _META, addNetwork, addressSchemaType, ajv, base64ToBytes, bigIntReplacer, bnSchemaType, bnToBytes, buffPedersenHash, bufferToBytes, bytes32BNSchemaType, bytes32SchemaType, bytesToBN, bytesToBase64, bytesToHex, calculateScore, calculateSnarkProof, chunk, concatBytes, convertETHToTokenAmount, createDeposit, crypto, customConfig, defaultConfig, defaultUserAgent, depositsEventsSchema, digest, echoEventsSchema, enabledChains, encryptedNotesSchema, index as factories, fetch, fetchData, fetchGetUrlFunc, getActiveTokenInstances, getActiveTokens, getAllDeposits, getAllEncryptedNotes, getAllGovernanceEvents, getAllGraphEchoEvents, getAllRegisters, getAllWithdrawals, getConfig, getDeposits, getEncryptedNotes, getEventsSchemaValidator, getGovernanceEvents, getGraphEchoEvents, getHttpAgent, getInstanceByAddress, getMeta, getNetworkConfig, getNoteAccounts, getProvider, getProviderWithNetId, getRegisters, getRelayerEnsSubdomains, getStatistic, getStatusSchema, getSupportedInstances, getTokenBalances, getTovarishNetworks, getWeightRandom, getWithdrawals, governanceEventsSchema, hexToBytes, initGroth16, isNode, jobsSchema, leBuff2Int, leInt2Buff, mimc, multicall, packEncryptedMessage, pedersen, pickWeightedRandomRelayer, populateTransaction, proofSchemaType, queryGraph, rBigInt, registeredEventsSchema, sleep, substring, toFixedHex, toFixedLength, unpackEncryptedMessage, validateUrl, withdrawalsEventsSchema }; +export { BaseEchoService, BaseEncryptedNotesService, BaseEventsService, BaseGovernanceService, BaseRegistryService, BaseTornadoService, BatchBlockService, BatchEventsService, BatchTransactionService, DBTornadoService, DEPOSIT, Deposit, ENS__factory, ERC20__factory, GET_DEPOSITS, GET_ECHO_EVENTS, GET_ENCRYPTED_NOTES, GET_GOVERNANCE_APY, GET_GOVERNANCE_EVENTS, GET_NOTE_ACCOUNTS, GET_REGISTERED, GET_STATISTIC, GET_WITHDRAWALS, INDEX_DB_ERROR, IndexedDB, Invoice, MAX_FEE, MAX_TOVARISH_EVENTS, MIN_FEE, MIN_STAKE_BALANCE, MerkleTreeService, Mimc, Multicall__factory, NetId, NoteAccount, OffchainOracle__factory, 
OvmGasPriceOracle__factory, Pedersen, RelayerClient, ReverseRecords__factory, TokenPriceOracle, TornadoBrowserProvider, TornadoFeeOracle, TornadoRpcSigner, TornadoVoidSigner, TornadoWallet, TovarishClient, WITHDRAWAL, _META, addNetwork, addressSchemaType, ajv, base64ToBytes, bigIntReplacer, bnSchemaType, bnToBytes, buffPedersenHash, bufferToBytes, bytes32BNSchemaType, bytes32SchemaType, bytesToBN, bytesToBase64, bytesToHex, calculateScore, calculateSnarkProof, chunk, concatBytes, convertETHToTokenAmount, createDeposit, crypto, customConfig, defaultConfig, defaultUserAgent, depositsEventsSchema, digest, downloadZip, echoEventsSchema, enabledChains, encryptedNotesSchema, index as factories, fetch, fetchData, fetchGetUrlFunc, getActiveTokenInstances, getActiveTokens, getAllDeposits, getAllEncryptedNotes, getAllGovernanceEvents, getAllGraphEchoEvents, getAllRegisters, getAllWithdrawals, getConfig, getDeposits, getEncryptedNotes, getEventsSchemaValidator, getGovernanceEvents, getGraphEchoEvents, getHttpAgent, getIndexedDB, getInstanceByAddress, getMeta, getNetworkConfig, getNoteAccounts, getProvider, getProviderWithNetId, getRegisters, getRelayerEnsSubdomains, getStatistic, getStatusSchema, getSupportedInstances, getTokenBalances, getTovarishNetworks, getWeightRandom, getWithdrawals, governanceEventsSchema, hexToBytes, initGroth16, isNode, jobsSchema, leBuff2Int, leInt2Buff, loadDBEvents, loadRemoteEvents, mimc, multicall, packEncryptedMessage, pedersen, pickWeightedRandomRelayer, populateTransaction, proofSchemaType, queryGraph, rBigInt, registeredEventsSchema, saveDBEvents, sleep, substring, toFixedHex, toFixedLength, unpackEncryptedMessage, unzipAsync, validateUrl, withdrawalsEventsSchema, zipAsync }; diff --git a/dist/tornado.umd.js b/dist/tornado.umd.js index 1d80c91..a5436f2 100644 --- a/dist/tornado.umd.js +++ b/dist/tornado.umd.js @@ -59906,6 +59906,167 @@ class BaseRegistryService extends BaseEventsService { } +/***/ }), + +/***/ 12591: +/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => { + +"use strict"; +/* harmony export */ __webpack_require__.d(__webpack_exports__, { +/* harmony export */ Fb: () => (/* binding */ saveDBEvents), +/* harmony export */ Oz: () => (/* binding */ loadRemoteEvents), +/* harmony export */ f8: () => (/* binding */ DBTornadoService), +/* harmony export */ w8: () => (/* binding */ loadDBEvents) +/* harmony export */ }); +/* harmony import */ var _zip__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(18995); +/* harmony import */ var _base__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(71304); + +var __async = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; + + +function saveDBEvents(_0) { + return __async(this, arguments, function* ({ + idb, + instanceName, + events, + lastBlock + }) { + try { + yield idb.createMultipleTransactions({ + data: events, + storeName: instanceName + }); + yield idb.putItem({ + data: { + blockNumber: lastBlock, + name: instanceName + }, + storeName: "lastEvents" + }); + } catch (err) { + console.log("Method saveDBEvents has error"); + console.log(err); + } + }); +} +function loadDBEvents(_0) { + return __async(this, arguments, function* ({ + idb, + instanceName + }) { + try { + const lastBlockStore = yield idb.getItem({ + storeName: "lastEvents", + key: instanceName + }); + if (!(lastBlockStore == null ? void 0 : lastBlockStore.blockNumber)) { + return { + events: [], + lastBlock: 0 + }; + } + return { + events: yield idb.getAll({ storeName: instanceName }), + lastBlock: lastBlockStore.blockNumber + }; + } catch (err) { + console.log("Method loadDBEvents has error"); + console.log(err); + return { + events: [], + lastBlock: 0 + }; + } + }); +} +function loadRemoteEvents(_0) { + return __async(this, arguments, function* ({ + staticUrl, + instanceName, + deployedBlock + }) { + var _a; + try { + const zipName = `${instanceName}.json`.toLowerCase(); + const events = yield (0,_zip__WEBPACK_IMPORTED_MODULE_0__/* .downloadZip */ ._6)({ + staticUrl, + zipName + }); + if (!Array.isArray(events)) { + const errStr = `Invalid events from ${staticUrl}/${zipName}`; + throw new Error(errStr); + } + return { + events, + lastBlock: ((_a = events[events.length - 1]) == null ? void 0 : _a.blockNumber) || deployedBlock, + fromCache: true + }; + } catch (err) { + console.log("Method loadRemoteEvents has error"); + console.log(err); + return { + events: [], + lastBlock: deployedBlock, + fromCache: true + }; + } + }); +} +class DBTornadoService extends _base__WEBPACK_IMPORTED_MODULE_1__/* .BaseTornadoService */ .e0 { + constructor(params) { + super(params); + this.staticUrl = params.staticUrl; + this.idb = params.idb; + } + getEventsFromDB() { + return __async(this, null, function* () { + return yield loadDBEvents({ + idb: this.idb, + instanceName: this.getInstanceName() + }); + }); + } + getEventsFromCache() { + return __async(this, null, function* () { + return yield loadRemoteEvents({ + staticUrl: this.staticUrl, + instanceName: this.getInstanceName(), + deployedBlock: this.deployedBlock + }); + }); + } + saveEvents(_0) { + return __async(this, arguments, function* ({ events, lastBlock }) { + yield saveDBEvents({ + idb: this.idb, + instanceName: this.getInstanceName(), + events, + lastBlock + }); + }); + } +} + + /***/ }), /***/ 94513: @@ -59920,9 +60081,13 @@ __webpack_require__.r(__webpack_exports__); /* harmony export */ BaseGovernanceService: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.JJ), /* harmony export */ BaseRegistryService: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.cE), /* harmony export */ BaseTornadoService: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.e0), +/* harmony export */ DBTornadoService: () => (/* reexport safe */ _db__WEBPACK_IMPORTED_MODULE_2__.f8), /* harmony export */ DEPOSIT: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.Lx), /* harmony export */ WITHDRAWAL: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.oW), -/* harmony export */ getTovarishNetworks: () => (/* reexport safe */ 
_base__WEBPACK_IMPORTED_MODULE_1__.EU) +/* harmony export */ getTovarishNetworks: () => (/* reexport safe */ _base__WEBPACK_IMPORTED_MODULE_1__.EU), +/* harmony export */ loadDBEvents: () => (/* reexport safe */ _db__WEBPACK_IMPORTED_MODULE_2__.w8), +/* harmony export */ loadRemoteEvents: () => (/* reexport safe */ _db__WEBPACK_IMPORTED_MODULE_2__.Oz), +/* harmony export */ saveDBEvents: () => (/* reexport safe */ _db__WEBPACK_IMPORTED_MODULE_2__.Fb) /* harmony export */ }); /* harmony import */ var _types__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(61060); /* harmony import */ var _types__WEBPACK_IMPORTED_MODULE_0___default = /*#__PURE__*/__webpack_require__.n(_types__WEBPACK_IMPORTED_MODULE_0__); @@ -59930,6 +60095,8 @@ __webpack_require__.r(__webpack_exports__); /* harmony reexport (unknown) */ for(const __WEBPACK_IMPORT_KEY__ in _types__WEBPACK_IMPORTED_MODULE_0__) if(__WEBPACK_IMPORT_KEY__ !== "default") __WEBPACK_REEXPORT_OBJECT__[__WEBPACK_IMPORT_KEY__] = () => _types__WEBPACK_IMPORTED_MODULE_0__[__WEBPACK_IMPORT_KEY__] /* harmony reexport (unknown) */ __webpack_require__.d(__webpack_exports__, __WEBPACK_REEXPORT_OBJECT__); /* harmony import */ var _base__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(71304); +/* harmony import */ var _db__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(12591); + @@ -61055,6 +61222,678 @@ function getAllGovernanceEvents(_0) { } +/***/ }), + +/***/ 83968: +/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => { + +"use strict"; + +// EXPORTS +__webpack_require__.d(__webpack_exports__, { + Fl: () => (/* binding */ INDEX_DB_ERROR), + mc: () => (/* binding */ IndexedDB), + W7: () => (/* binding */ getIndexedDB) +}); + +;// ./node_modules/idb/build/index.js +const instanceOfAny = (object, constructors) => constructors.some((c) => object instanceof c); + +let idbProxyableTypes; +let cursorAdvanceMethods; +// This is a function to prevent it throwing up in node environments. +function getIdbProxyableTypes() { + return (idbProxyableTypes || + (idbProxyableTypes = [ + IDBDatabase, + IDBObjectStore, + IDBIndex, + IDBCursor, + IDBTransaction, + ])); +} +// This is a function to prevent it throwing up in node environments. +function getCursorAdvanceMethods() { + return (cursorAdvanceMethods || + (cursorAdvanceMethods = [ + IDBCursor.prototype.advance, + IDBCursor.prototype.continue, + IDBCursor.prototype.continuePrimaryKey, + ])); +} +const transactionDoneMap = new WeakMap(); +const transformCache = new WeakMap(); +const reverseTransformCache = new WeakMap(); +function promisifyRequest(request) { + const promise = new Promise((resolve, reject) => { + const unlisten = () => { + request.removeEventListener('success', success); + request.removeEventListener('error', error); + }; + const success = () => { + resolve(wrap(request.result)); + unlisten(); + }; + const error = () => { + reject(request.error); + unlisten(); + }; + request.addEventListener('success', success); + request.addEventListener('error', error); + }); + // This mapping exists in reverseTransformCache but doesn't doesn't exist in transformCache. This + // is because we create many promises from a single IDBRequest. + reverseTransformCache.set(promise, request); + return promise; +} +function cacheDonePromiseForTransaction(tx) { + // Early bail if we've already created a done promise for this transaction. 
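+    // Note: the promise cached here is what the proxy traps below hand back for `tx.done`;
+    // it settles when the native transaction fires 'complete' (resolve) or 'error'/'abort' (reject).
+    // A hedged usage sketch of the resulting API (the 'keyval' store name is illustrative):
+    //   const tx = db.transaction('keyval', 'readwrite');
+    //   tx.store.put(data, key);
+    //   await tx.done; // resolves once the whole transaction has committed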
+ if (transactionDoneMap.has(tx)) + return; + const done = new Promise((resolve, reject) => { + const unlisten = () => { + tx.removeEventListener('complete', complete); + tx.removeEventListener('error', error); + tx.removeEventListener('abort', error); + }; + const complete = () => { + resolve(); + unlisten(); + }; + const error = () => { + reject(tx.error || new DOMException('AbortError', 'AbortError')); + unlisten(); + }; + tx.addEventListener('complete', complete); + tx.addEventListener('error', error); + tx.addEventListener('abort', error); + }); + // Cache it for later retrieval. + transactionDoneMap.set(tx, done); +} +let idbProxyTraps = { + get(target, prop, receiver) { + if (target instanceof IDBTransaction) { + // Special handling for transaction.done. + if (prop === 'done') + return transactionDoneMap.get(target); + // Make tx.store return the only store in the transaction, or undefined if there are many. + if (prop === 'store') { + return receiver.objectStoreNames[1] + ? undefined + : receiver.objectStore(receiver.objectStoreNames[0]); + } + } + // Else transform whatever we get back. + return wrap(target[prop]); + }, + set(target, prop, value) { + target[prop] = value; + return true; + }, + has(target, prop) { + if (target instanceof IDBTransaction && + (prop === 'done' || prop === 'store')) { + return true; + } + return prop in target; + }, +}; +function replaceTraps(callback) { + idbProxyTraps = callback(idbProxyTraps); +} +function wrapFunction(func) { + // Due to expected object equality (which is enforced by the caching in `wrap`), we + // only create one new func per func. + // Cursor methods are special, as the behaviour is a little more different to standard IDB. In + // IDB, you advance the cursor and wait for a new 'success' on the IDBRequest that gave you the + // cursor. It's kinda like a promise that can resolve with many values. That doesn't make sense + // with real promises, so each advance methods returns a new promise for the cursor object, or + // undefined if the end of the cursor has been reached. + if (getCursorAdvanceMethods().includes(func)) { + return function (...args) { + // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use + // the original object. + func.apply(unwrap(this), args); + return wrap(this.request); + }; + } + return function (...args) { + // Calling the original function with the proxy as 'this' causes ILLEGAL INVOCATION, so we use + // the original object. + return wrap(func.apply(unwrap(this), args)); + }; +} +function transformCachableValue(value) { + if (typeof value === 'function') + return wrapFunction(value); + // This doesn't return, it just creates a 'done' promise for the transaction, + // which is later returned for transaction.done (see idbObjectHandler). + if (value instanceof IDBTransaction) + cacheDonePromiseForTransaction(value); + if (instanceOfAny(value, getIdbProxyableTypes())) + return new Proxy(value, idbProxyTraps); + // Return the same value back if we're not going to transform it. + return value; +} +function wrap(value) { + // We sometimes generate multiple promises from a single IDBRequest (eg when cursoring), because + // IDB is weird and a single IDBRequest can yield many responses, so these can't be cached. + if (value instanceof IDBRequest) + return promisifyRequest(value); + // If we've already transformed this value before, reuse the transformed value. + // This is faster, but it also provides object equality. 
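+    // For everything except IDBRequests (handled above), caching means wrap(x) hands back the
+    // same proxy every time, so `wrap(x) === wrap(x)` holds and unwrap() can use
+    // reverseTransformCache to map each proxy back to exactly one native IDB object.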
+ if (transformCache.has(value)) + return transformCache.get(value); + const newValue = transformCachableValue(value); + // Not all types are transformed. + // These may be primitive types, so they can't be WeakMap keys. + if (newValue !== value) { + transformCache.set(value, newValue); + reverseTransformCache.set(newValue, value); + } + return newValue; +} +const unwrap = (value) => reverseTransformCache.get(value); + +/** + * Open a database. + * + * @param name Name of the database. + * @param version Schema version. + * @param callbacks Additional callbacks. + */ +function openDB(name, version, { blocked, upgrade, blocking, terminated } = {}) { + const request = indexedDB.open(name, version); + const openPromise = wrap(request); + if (upgrade) { + request.addEventListener('upgradeneeded', (event) => { + upgrade(wrap(request.result), event.oldVersion, event.newVersion, wrap(request.transaction), event); + }); + } + if (blocked) { + request.addEventListener('blocked', (event) => blocked( + // Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405 + event.oldVersion, event.newVersion, event)); + } + openPromise + .then((db) => { + if (terminated) + db.addEventListener('close', () => terminated()); + if (blocking) { + db.addEventListener('versionchange', (event) => blocking(event.oldVersion, event.newVersion, event)); + } + }) + .catch(() => { }); + return openPromise; +} +/** + * Delete a database. + * + * @param name Name of the database. + */ +function deleteDB(name, { blocked } = {}) { + const request = indexedDB.deleteDatabase(name); + if (blocked) { + request.addEventListener('blocked', (event) => blocked( + // Casting due to https://github.com/microsoft/TypeScript-DOM-lib-generator/pull/1405 + event.oldVersion, event)); + } + return wrap(request).then(() => undefined); +} + +const readMethods = ['get', 'getKey', 'getAll', 'getAllKeys', 'count']; +const writeMethods = ['put', 'add', 'delete', 'clear']; +const cachedMethods = new Map(); +function getMethod(target, prop) { + if (!(target instanceof IDBDatabase && + !(prop in target) && + typeof prop === 'string')) { + return; + } + if (cachedMethods.get(prop)) + return cachedMethods.get(prop); + const targetFuncName = prop.replace(/FromIndex$/, ''); + const useIndex = prop !== targetFuncName; + const isWrite = writeMethods.includes(targetFuncName); + if ( + // Bail if the target doesn't exist on the target. Eg, getAll isn't in Edge. + !(targetFuncName in (useIndex ? IDBIndex : IDBObjectStore).prototype) || + !(isWrite || readMethods.includes(targetFuncName))) { + return; + } + const method = async function (storeName, ...args) { + // isWrite ? 'readwrite' : undefined gzipps better, but fails in Edge :( + const tx = this.transaction(storeName, isWrite ? 'readwrite' : 'readonly'); + let target = tx.store; + if (useIndex) + target = target.index(args.shift()); + // Must reject if op rejects. + // If it's a write operation, must reject if tx.done rejects. + // Must reject with op rejection first. + // Must resolve with op value. 
+ // Must handle both promises (no unhandled rejections) + return (await Promise.all([ + target[targetFuncName](...args), + isWrite && tx.done, + ]))[0]; + }; + cachedMethods.set(prop, method); + return method; +} +replaceTraps((oldTraps) => ({ + ...oldTraps, + get: (target, prop, receiver) => getMethod(target, prop) || oldTraps.get(target, prop, receiver), + has: (target, prop) => !!getMethod(target, prop) || oldTraps.has(target, prop), +})); + +const advanceMethodProps = ['continue', 'continuePrimaryKey', 'advance']; +const methodMap = {}; +const advanceResults = new WeakMap(); +const ittrProxiedCursorToOriginalProxy = new WeakMap(); +const cursorIteratorTraps = { + get(target, prop) { + if (!advanceMethodProps.includes(prop)) + return target[prop]; + let cachedFunc = methodMap[prop]; + if (!cachedFunc) { + cachedFunc = methodMap[prop] = function (...args) { + advanceResults.set(this, ittrProxiedCursorToOriginalProxy.get(this)[prop](...args)); + }; + } + return cachedFunc; + }, +}; +async function* iterate(...args) { + // tslint:disable-next-line:no-this-assignment + let cursor = this; + if (!(cursor instanceof IDBCursor)) { + cursor = await cursor.openCursor(...args); + } + if (!cursor) + return; + cursor = cursor; + const proxiedCursor = new Proxy(cursor, cursorIteratorTraps); + ittrProxiedCursorToOriginalProxy.set(proxiedCursor, cursor); + // Map this double-proxy back to the original, so other cursor methods work. + reverseTransformCache.set(proxiedCursor, unwrap(cursor)); + while (cursor) { + yield proxiedCursor; + // If one of the advancing methods was not called, call continue(). + cursor = await (advanceResults.get(proxiedCursor) || cursor.continue()); + advanceResults.delete(proxiedCursor); + } +} +function isIteratorProp(target, prop) { + return ((prop === Symbol.asyncIterator && + instanceOfAny(target, [IDBIndex, IDBObjectStore, IDBCursor])) || + (prop === 'iterate' && instanceOfAny(target, [IDBIndex, IDBObjectStore]))); +} +replaceTraps((oldTraps) => ({ + ...oldTraps, + get(target, prop, receiver) { + if (isIteratorProp(target, prop)) + return iterate; + return oldTraps.get(target, prop, receiver); + }, + has(target, prop) { + return isIteratorProp(target, prop) || oldTraps.has(target, prop); + }, +})); + + + +// EXTERNAL MODULE: ./src/networkConfig.ts +var networkConfig = __webpack_require__(59499); +;// ./src/idb.ts + +var __defProp = Object.defineProperty; +var __getOwnPropSymbols = Object.getOwnPropertySymbols; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __propIsEnum = Object.prototype.propertyIsEnumerable; +var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __spreadValues = (a, b) => { + for (var prop in b || (b = {})) + if (__hasOwnProp.call(b, prop)) + __defNormalProp(a, prop, b[prop]); + if (__getOwnPropSymbols) + for (var prop of __getOwnPropSymbols(b)) { + if (__propIsEnum.call(b, prop)) + __defNormalProp(a, prop, b[prop]); + } + return a; +}; +var __async = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? 
resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; + + +const INDEX_DB_ERROR = "A mutation operation was attempted on a database that did not allow mutations."; +class IndexedDB { + constructor({ dbName, stores }) { + this.dbExists = false; + this.isBlocked = false; + this.options = { + upgrade(db) { + Object.values(db.objectStoreNames).forEach((value) => { + db.deleteObjectStore(value); + }); + [{ name: "keyval" }, ...stores || []].forEach(({ name, keyPath, indexes }) => { + const store = db.createObjectStore(name, { + keyPath, + autoIncrement: true + }); + if (Array.isArray(indexes)) { + indexes.forEach(({ name: name2, unique = false }) => { + store.createIndex(name2, name2, { unique }); + }); + } + }); + } + }; + this.dbName = dbName; + this.dbVersion = 34; + } + initDB() { + return __async(this, null, function* () { + try { + if (this.dbExists || this.isBlocked) { + return; + } + this.db = yield openDB(this.dbName, this.dbVersion, this.options); + this.db.addEventListener("onupgradeneeded", () => __async(this, null, function* () { + yield this._removeExist(); + })); + this.dbExists = true; + } catch (err) { + if (err.message.includes(INDEX_DB_ERROR)) { + console.log("This browser does not support IndexedDB!"); + this.isBlocked = true; + return; + } + if (err.message.includes("less than the existing version")) { + console.log(`Upgrading DB ${this.dbName} to ${this.dbVersion}`); + yield this._removeExist(); + return; + } + console.error(`Method initDB has error: ${err.message}`); + } + }); + } + _removeExist() { + return __async(this, null, function* () { + yield deleteDB(this.dbName); + this.dbExists = false; + yield this.initDB(); + }); + } + getFromIndex(_0) { + return __async(this, arguments, function* ({ + storeName, + indexName, + key + }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + return yield this.db.getFromIndex(storeName, indexName, key); + } catch (err) { + throw new Error(`Method getFromIndex has error: ${err.message}`); + } + }); + } + getAllFromIndex(_0) { + return __async(this, arguments, function* ({ + storeName, + indexName, + key, + count + }) { + yield this.initDB(); + if (!this.db) { + return []; + } + try { + return yield this.db.getAllFromIndex(storeName, indexName, key, count); + } catch (err) { + throw new Error(`Method getAllFromIndex has error: ${err.message}`); + } + }); + } + getItem(_0) { + return __async(this, arguments, function* ({ storeName, key }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const store = this.db.transaction(storeName).objectStore(storeName); + return yield store.get(key); + } catch (err) { + throw new Error(`Method getItem has error: ${err.message}`); + } + }); + } + addItem(_0) { + return __async(this, arguments, function* ({ storeName, data, key = "" }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, "readwrite"); + const isExist = yield tx.objectStore(storeName).get(key); + if (!isExist) { + yield tx.objectStore(storeName).add(data); + } + } catch (err) { + throw new Error(`Method addItem has error: ${err.message}`); + } + }); + } + putItem(_0) { + return __async(this, arguments, function* ({ storeName, data, key }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, "readwrite"); + yield tx.objectStore(storeName).put(data, key); + } catch (err) { + throw new Error(`Method 
putItem has error: ${err.message}`); + } + }); + } + deleteItem(_0) { + return __async(this, arguments, function* ({ storeName, key }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, "readwrite"); + yield tx.objectStore(storeName).delete(key); + } catch (err) { + throw new Error(`Method deleteItem has error: ${err.message}`); + } + }); + } + getAll(_0) { + return __async(this, arguments, function* ({ storeName }) { + yield this.initDB(); + if (!this.db) { + return []; + } + try { + const tx = this.db.transaction(storeName, "readonly"); + return yield tx.objectStore(storeName).getAll(); + } catch (err) { + throw new Error(`Method getAll has error: ${err.message}`); + } + }); + } + /** + * Simple key-value store inspired by idb-keyval package + */ + getValue(key) { + return this.getItem({ storeName: "keyval", key }); + } + setValue(key, data) { + return this.putItem({ storeName: "keyval", key, data }); + } + delValue(key) { + return this.deleteItem({ storeName: "keyval", key }); + } + clearStore(_0) { + return __async(this, arguments, function* ({ storeName, mode = "readwrite" }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, mode); + yield tx.objectStore(storeName).clear(); + } catch (err) { + throw new Error(`Method clearStore has error: ${err.message}`); + } + }); + } + createTransactions(_0) { + return __async(this, arguments, function* ({ + storeName, + data, + mode = "readwrite" + }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, mode); + yield tx.objectStore(storeName).add(data); + yield tx.done; + } catch (err) { + throw new Error(`Method createTransactions has error: ${err.message}`); + } + }); + } + createMultipleTransactions(_0) { + return __async(this, arguments, function* ({ + storeName, + data, + index, + mode = "readwrite" + }) { + yield this.initDB(); + if (!this.db) { + return; + } + try { + const tx = this.db.transaction(storeName, mode); + for (const item of data) { + if (item) { + yield tx.store.put(__spreadValues(__spreadValues({}, item), index)); + } + } + } catch (err) { + throw new Error(`Method createMultipleTransactions has error: ${err.message}`); + } + }); + } +} +function getIndexedDB(netId) { + return __async(this, null, function* () { + if (!netId) { + const idb2 = new IndexedDB({ dbName: "tornado-core" }); + yield idb2.initDB(); + return idb2; + } + const DEPOSIT_INDEXES = [ + { name: "transactionHash", unique: false }, + { name: "commitment", unique: true } + ]; + const WITHDRAWAL_INDEXES = [ + { name: "nullifierHash", unique: true } + // keys on which the index is created + ]; + const LAST_EVENT_INDEXES = [{ name: "name", unique: false }]; + const defaultState = [ + { + name: "encrypted_events", + keyPath: "transactionHash" + }, + { + name: "lastEvents", + keyPath: "name", + indexes: LAST_EVENT_INDEXES + } + ]; + const config = (0,networkConfig/* getConfig */.zj)(netId); + const { tokens, nativeCurrency } = config; + const stores = [...defaultState]; + if (netId === networkConfig/* NetId */.zr.MAINNET) { + stores.push({ + name: "register_events", + keyPath: "ensName" + }); + } + Object.entries(tokens).forEach(([token, { instanceAddress }]) => { + Object.keys(instanceAddress).forEach((amount) => { + if (nativeCurrency === token) { + stores.push({ + name: `stringify_bloom_${netId}_${token}_${amount}`, + keyPath: "hashBloom" + }); + } + stores.push( + { + name: 
`deposits_${netId}_${token}_${amount}`, + keyPath: "leafIndex", + // the key by which it refers to the object must be in all instances of the storage + indexes: DEPOSIT_INDEXES + }, + { + name: `withdrawals_${netId}_${token}_${amount}`, + keyPath: "blockNumber", + indexes: WITHDRAWAL_INDEXES + }, + { + name: `stringify_tree_${netId}_${token}_${amount}`, + keyPath: "hashTree" + } + ); + }); + }); + const idb = new IndexedDB({ + dbName: `tornado_core_${netId}`, + stores + }); + yield idb.initDB(); + return idb; + }); +} + + /***/ }), /***/ 5217: @@ -75150,6 +75989,2767 @@ function calculateSnarkProof(input, circuit, provingKey) { } +/***/ }), + +/***/ 18995: +/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => { + +"use strict"; + +// EXPORTS +__webpack_require__.d(__webpack_exports__, { + _6: () => (/* binding */ downloadZip), + fY: () => (/* binding */ unzipAsync), + a8: () => (/* binding */ zipAsync) +}); + +;// ./node_modules/fflate/esm/browser.js +// DEFLATE is a complex format; to read this code, you should probably check the RFC first: +// https://tools.ietf.org/html/rfc1951 +// You may also wish to take a look at the guide I made about this program: +// https://gist.github.com/101arrowz/253f31eb5abc3d9275ab943003ffecad +// Some of the following code is similar to that of UZIP.js: +// https://github.com/photopea/UZIP.js +// However, the vast majority of the codebase has diverged from UZIP.js to increase performance and reduce bundle size. +// Sometimes 0 will appear where -1 would be more appropriate. This is because using a uint +// is better for memory in most engines (I *think*). +var ch2 = {}; +var wk = (function (c, id, msg, transfer, cb) { + var w = new Worker(ch2[id] || (ch2[id] = URL.createObjectURL(new Blob([ + c + ';addEventListener("error",function(e){e=e.error;postMessage({$e$:[e.message,e.code,e.stack]})})' + ], { type: 'text/javascript' })))); + w.onmessage = function (e) { + var d = e.data, ed = d.$e$; + if (ed) { + var err = new Error(ed[0]); + err['code'] = ed[1]; + err.stack = ed[2]; + cb(err, null); + } + else + cb(null, d); + }; + w.postMessage(msg, transfer); + return w; +}); + +// aliases for shorter compressed code (most minifers don't do this) +var u8 = Uint8Array, u16 = Uint16Array, i32 = Int32Array; +// fixed length extra bits +var fleb = new u8([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, /* unused */ 0, 0, /* impossible */ 0]); +// fixed distance extra bits +var fdeb = new u8([0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, /* unused */ 0, 0]); +// code length index map +var clim = new u8([16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15]); +// get base, reverse index map from extra bits +var freb = function (eb, start) { + var b = new u16(31); + for (var i = 0; i < 31; ++i) { + b[i] = start += 1 << eb[i - 1]; + } + // numbers here are at max 18 bits + var r = new i32(b[30]); + for (var i = 1; i < 30; ++i) { + for (var j = b[i]; j < b[i + 1]; ++j) { + r[j] = ((j - b[i]) << 5) | i; + } + } + return { b: b, r: r }; +}; +var _a = freb(fleb, 2), fl = _a.b, revfl = _a.r; +// we can ignore the fact that the other numbers are wrong; they never happen anyway +fl[28] = 258, revfl[258] = 28; +var _b = freb(fdeb, 0), fd = _b.b, revfd = _b.r; +// map of value to reverse (assuming 16 bits) +var rev = new u16(32768); +for (var i = 0; i < 32768; ++i) { + // reverse table algorithm from SO + var x = ((i & 0xAAAA) >> 1) | ((i & 0x5555) << 
1); + x = ((x & 0xCCCC) >> 2) | ((x & 0x3333) << 2); + x = ((x & 0xF0F0) >> 4) | ((x & 0x0F0F) << 4); + rev[i] = (((x & 0xFF00) >> 8) | ((x & 0x00FF) << 8)) >> 1; +} +// create huffman tree from u8 "map": index -> code length for code index +// mb (max bits) must be at most 15 +// TODO: optimize/split up? +var hMap = (function (cd, mb, r) { + var s = cd.length; + // index + var i = 0; + // u16 "map": index -> # of codes with bit length = index + var l = new u16(mb); + // length of cd must be 288 (total # of codes) + for (; i < s; ++i) { + if (cd[i]) + ++l[cd[i] - 1]; + } + // u16 "map": index -> minimum code for bit length = index + var le = new u16(mb); + for (i = 1; i < mb; ++i) { + le[i] = (le[i - 1] + l[i - 1]) << 1; + } + var co; + if (r) { + // u16 "map": index -> number of actual bits, symbol for code + co = new u16(1 << mb); + // bits to remove for reverser + var rvb = 15 - mb; + for (i = 0; i < s; ++i) { + // ignore 0 lengths + if (cd[i]) { + // num encoding both symbol and bits read + var sv = (i << 4) | cd[i]; + // free bits + var r_1 = mb - cd[i]; + // start value + var v = le[cd[i] - 1]++ << r_1; + // m is end value + for (var m = v | ((1 << r_1) - 1); v <= m; ++v) { + // every 16 bit value starting with the code yields the same result + co[rev[v] >> rvb] = sv; + } + } + } + } + else { + co = new u16(s); + for (i = 0; i < s; ++i) { + if (cd[i]) { + co[i] = rev[le[cd[i] - 1]++] >> (15 - cd[i]); + } + } + } + return co; +}); +// fixed length tree +var flt = new u8(288); +for (var i = 0; i < 144; ++i) + flt[i] = 8; +for (var i = 144; i < 256; ++i) + flt[i] = 9; +for (var i = 256; i < 280; ++i) + flt[i] = 7; +for (var i = 280; i < 288; ++i) + flt[i] = 8; +// fixed distance tree +var fdt = new u8(32); +for (var i = 0; i < 32; ++i) + fdt[i] = 5; +// fixed length map +var flm = /*#__PURE__*/ hMap(flt, 9, 0), flrm = /*#__PURE__*/ hMap(flt, 9, 1); +// fixed distance map +var fdm = /*#__PURE__*/ hMap(fdt, 5, 0), fdrm = /*#__PURE__*/ hMap(fdt, 5, 1); +// find max of array +var max = function (a) { + var m = a[0]; + for (var i = 1; i < a.length; ++i) { + if (a[i] > m) + m = a[i]; + } + return m; +}; +// read d, starting at bit p and mask with m +var bits = function (d, p, m) { + var o = (p / 8) | 0; + return ((d[o] | (d[o + 1] << 8)) >> (p & 7)) & m; +}; +// read d, starting at bit p continuing for at least 16 bits +var bits16 = function (d, p) { + var o = (p / 8) | 0; + return ((d[o] | (d[o + 1] << 8) | (d[o + 2] << 16)) >> (p & 7)); +}; +// get end of byte +var shft = function (p) { return ((p + 7) / 8) | 0; }; +// typed array slice - allows garbage collector to free original reference, +// while being more compatible than .slice +var slc = function (v, s, e) { + if (s == null || s < 0) + s = 0; + if (e == null || e > v.length) + e = v.length; + // can't use .constructor in case user-supplied + return new u8(v.subarray(s, e)); +}; +/** + * Codes for errors generated within this library + */ +var FlateErrorCode = { + UnexpectedEOF: 0, + InvalidBlockType: 1, + InvalidLengthLiteral: 2, + InvalidDistance: 3, + StreamFinished: 4, + NoStreamHandler: 5, + InvalidHeader: 6, + NoCallback: 7, + InvalidUTF8: 8, + ExtraFieldTooLong: 9, + InvalidDate: 10, + FilenameTooLong: 11, + StreamFinishing: 12, + InvalidZipData: 13, + UnknownCompressionMethod: 14 +}; +// error codes +var ec = [ + 'unexpected EOF', + 'invalid block type', + 'invalid length/literal', + 'invalid distance', + 'stream finished', + 'no stream handler', + , + 'no callback', + 'invalid UTF-8 data', + 'extra field too long', + 'date 
not in range 1980-2099', + 'filename too long', + 'stream finishing', + 'invalid zip data' + // determined by unknown compression method +]; +; +var err = function (ind, msg, nt) { + var e = new Error(msg || ec[ind]); + e.code = ind; + if (Error.captureStackTrace) + Error.captureStackTrace(e, err); + if (!nt) + throw e; + return e; +}; +// expands raw DEFLATE data +var inflt = function (dat, st, buf, dict) { + // source length dict length + var sl = dat.length, dl = dict ? dict.length : 0; + if (!sl || st.f && !st.l) + return buf || new u8(0); + var noBuf = !buf; + // have to estimate size + var resize = noBuf || st.i != 2; + // no state + var noSt = st.i; + // Assumes roughly 33% compression ratio average + if (noBuf) + buf = new u8(sl * 3); + // ensure buffer can fit at least l elements + var cbuf = function (l) { + var bl = buf.length; + // need to increase size to fit + if (l > bl) { + // Double or set to necessary, whichever is greater + var nbuf = new u8(Math.max(bl * 2, l)); + nbuf.set(buf); + buf = nbuf; + } + }; + // last chunk bitpos bytes + var final = st.f || 0, pos = st.p || 0, bt = st.b || 0, lm = st.l, dm = st.d, lbt = st.m, dbt = st.n; + // total bits + var tbts = sl * 8; + do { + if (!lm) { + // BFINAL - this is only 1 when last chunk is next + final = bits(dat, pos, 1); + // type: 0 = no compression, 1 = fixed huffman, 2 = dynamic huffman + var type = bits(dat, pos + 1, 3); + pos += 3; + if (!type) { + // go to end of byte boundary + var s = shft(pos) + 4, l = dat[s - 4] | (dat[s - 3] << 8), t = s + l; + if (t > sl) { + if (noSt) + err(0); + break; + } + // ensure size + if (resize) + cbuf(bt + l); + // Copy over uncompressed data + buf.set(dat.subarray(s, t), bt); + // Get new bitpos, update byte count + st.b = bt += l, st.p = pos = t * 8, st.f = final; + continue; + } + else if (type == 1) + lm = flrm, dm = fdrm, lbt = 9, dbt = 5; + else if (type == 2) { + // literal lengths + var hLit = bits(dat, pos, 31) + 257, hcLen = bits(dat, pos + 10, 15) + 4; + var tl = hLit + bits(dat, pos + 5, 31) + 1; + pos += 14; + // length+distance tree + var ldt = new u8(tl); + // code length tree + var clt = new u8(19); + for (var i = 0; i < hcLen; ++i) { + // use index map to get real code + clt[clim[i]] = bits(dat, pos + i * 3, 7); + } + pos += hcLen * 3; + // code lengths bits + var clb = max(clt), clbmsk = (1 << clb) - 1; + // code lengths map + var clm = hMap(clt, clb, 1); + for (var i = 0; i < tl;) { + var r = clm[bits(dat, pos, clbmsk)]; + // bits read + pos += r & 15; + // symbol + var s = r >> 4; + // code length to copy + if (s < 16) { + ldt[i++] = s; + } + else { + // copy count + var c = 0, n = 0; + if (s == 16) + n = 3 + bits(dat, pos, 3), pos += 2, c = ldt[i - 1]; + else if (s == 17) + n = 3 + bits(dat, pos, 7), pos += 3; + else if (s == 18) + n = 11 + bits(dat, pos, 127), pos += 7; + while (n--) + ldt[i++] = c; + } + } + // length tree distance tree + var lt = ldt.subarray(0, hLit), dt = ldt.subarray(hLit); + // max length bits + lbt = max(lt); + // max dist bits + dbt = max(dt); + lm = hMap(lt, lbt, 1); + dm = hMap(dt, dbt, 1); + } + else + err(1); + if (pos > tbts) { + if (noSt) + err(0); + break; + } + } + // Make sure the buffer can hold this + the largest possible addition + // Maximum chunk size (practically, theoretically infinite) is 2^17 + if (resize) + cbuf(bt + 131072); + var lms = (1 << lbt) - 1, dms = (1 << dbt) - 1; + var lpos = pos; + for (;; lpos = pos) { + // bits read, code + var c = lm[bits16(dat, pos) & lms], sym = c >> 4; + pos += c & 15; + if (pos > 
tbts) { + if (noSt) + err(0); + break; + } + if (!c) + err(2); + if (sym < 256) + buf[bt++] = sym; + else if (sym == 256) { + lpos = pos, lm = null; + break; + } + else { + var add = sym - 254; + // no extra bits needed if less + if (sym > 264) { + // index + var i = sym - 257, b = fleb[i]; + add = bits(dat, pos, (1 << b) - 1) + fl[i]; + pos += b; + } + // dist + var d = dm[bits16(dat, pos) & dms], dsym = d >> 4; + if (!d) + err(3); + pos += d & 15; + var dt = fd[dsym]; + if (dsym > 3) { + var b = fdeb[dsym]; + dt += bits16(dat, pos) & (1 << b) - 1, pos += b; + } + if (pos > tbts) { + if (noSt) + err(0); + break; + } + if (resize) + cbuf(bt + 131072); + var end = bt + add; + if (bt < dt) { + var shift = dl - dt, dend = Math.min(dt, end); + if (shift + bt < 0) + err(3); + for (; bt < dend; ++bt) + buf[bt] = dict[shift + bt]; + } + for (; bt < end; ++bt) + buf[bt] = buf[bt - dt]; + } + } + st.l = lm, st.p = lpos, st.b = bt, st.f = final; + if (lm) + final = 1, st.m = lbt, st.d = dm, st.n = dbt; + } while (!final); + // don't reallocate for streams or user buffers + return bt != buf.length && noBuf ? slc(buf, 0, bt) : buf.subarray(0, bt); +}; +// starting at p, write the minimum number of bits that can hold v to d +var wbits = function (d, p, v) { + v <<= p & 7; + var o = (p / 8) | 0; + d[o] |= v; + d[o + 1] |= v >> 8; +}; +// starting at p, write the minimum number of bits (>8) that can hold v to d +var wbits16 = function (d, p, v) { + v <<= p & 7; + var o = (p / 8) | 0; + d[o] |= v; + d[o + 1] |= v >> 8; + d[o + 2] |= v >> 16; +}; +// creates code lengths from a frequency table +var hTree = function (d, mb) { + // Need extra info to make a tree + var t = []; + for (var i = 0; i < d.length; ++i) { + if (d[i]) + t.push({ s: i, f: d[i] }); + } + var s = t.length; + var t2 = t.slice(); + if (!s) + return { t: et, l: 0 }; + if (s == 1) { + var v = new u8(t[0].s + 1); + v[t[0].s] = 1; + return { t: v, l: 1 }; + } + t.sort(function (a, b) { return a.f - b.f; }); + // after i2 reaches last ind, will be stopped + // freq must be greater than largest possible number of symbols + t.push({ s: -1, f: 25001 }); + var l = t[0], r = t[1], i0 = 0, i1 = 1, i2 = 2; + t[0] = { s: -1, f: l.f + r.f, l: l, r: r }; + // efficient algorithm from UZIP.js + // i0 is lookbehind, i2 is lookahead - after processing two low-freq + // symbols that combined have high freq, will start processing i2 (high-freq, + // non-composite) symbols instead + // see https://reddit.com/r/photopea/comments/ikekht/uzipjs_questions/ + while (i1 != s - 1) { + l = t[t[i0].f < t[i2].f ? i0++ : i2++]; + r = t[i0 != i1 && t[i0].f < t[i2].f ? 
i0++ : i2++]; + t[i1++] = { s: -1, f: l.f + r.f, l: l, r: r }; + } + var maxSym = t2[0].s; + for (var i = 1; i < s; ++i) { + if (t2[i].s > maxSym) + maxSym = t2[i].s; + } + // code lengths + var tr = new u16(maxSym + 1); + // max bits in tree + var mbt = ln(t[i1 - 1], tr, 0); + if (mbt > mb) { + // more algorithms from UZIP.js + // TODO: find out how this code works (debt) + // ind debt + var i = 0, dt = 0; + // left cost + var lft = mbt - mb, cst = 1 << lft; + t2.sort(function (a, b) { return tr[b.s] - tr[a.s] || a.f - b.f; }); + for (; i < s; ++i) { + var i2_1 = t2[i].s; + if (tr[i2_1] > mb) { + dt += cst - (1 << (mbt - tr[i2_1])); + tr[i2_1] = mb; + } + else + break; + } + dt >>= lft; + while (dt > 0) { + var i2_2 = t2[i].s; + if (tr[i2_2] < mb) + dt -= 1 << (mb - tr[i2_2]++ - 1); + else + ++i; + } + for (; i >= 0 && dt; --i) { + var i2_3 = t2[i].s; + if (tr[i2_3] == mb) { + --tr[i2_3]; + ++dt; + } + } + mbt = mb; + } + return { t: new u8(tr), l: mbt }; +}; +// get the max length and assign length codes +var ln = function (n, l, d) { + return n.s == -1 + ? Math.max(ln(n.l, l, d + 1), ln(n.r, l, d + 1)) + : (l[n.s] = d); +}; +// length codes generation +var lc = function (c) { + var s = c.length; + // Note that the semicolon was intentional + while (s && !c[--s]) + ; + var cl = new u16(++s); + // ind num streak + var cli = 0, cln = c[0], cls = 1; + var w = function (v) { cl[cli++] = v; }; + for (var i = 1; i <= s; ++i) { + if (c[i] == cln && i != s) + ++cls; + else { + if (!cln && cls > 2) { + for (; cls > 138; cls -= 138) + w(32754); + if (cls > 2) { + w(cls > 10 ? ((cls - 11) << 5) | 28690 : ((cls - 3) << 5) | 12305); + cls = 0; + } + } + else if (cls > 3) { + w(cln), --cls; + for (; cls > 6; cls -= 6) + w(8304); + if (cls > 2) + w(((cls - 3) << 5) | 8208), cls = 0; + } + while (cls--) + w(cln); + cls = 1; + cln = c[i]; + } + } + return { c: cl.subarray(0, cli), n: s }; +}; +// calculate the length of output from tree, code lengths +var clen = function (cf, cl) { + var l = 0; + for (var i = 0; i < cl.length; ++i) + l += cf[i] * cl[i]; + return l; +}; +// writes a fixed block +// returns the new bit pos +var wfblk = function (out, pos, dat) { + // no need to write 00 as type: TypedArray defaults to 0 + var s = dat.length; + var o = shft(pos + 2); + out[o] = s & 255; + out[o + 1] = s >> 8; + out[o + 2] = out[o] ^ 255; + out[o + 3] = out[o + 1] ^ 255; + for (var i = 0; i < s; ++i) + out[o + i + 4] = dat[i]; + return (o + 4 + s) * 8; +}; +// writes a block +var wblk = function (dat, out, final, syms, lf, df, eb, li, bs, bl, p) { + wbits(out, p++, final); + ++lf[256]; + var _a = hTree(lf, 15), dlt = _a.t, mlb = _a.l; + var _b = hTree(df, 15), ddt = _b.t, mdb = _b.l; + var _c = lc(dlt), lclt = _c.c, nlc = _c.n; + var _d = lc(ddt), lcdt = _d.c, ndc = _d.n; + var lcfreq = new u16(19); + for (var i = 0; i < lclt.length; ++i) + ++lcfreq[lclt[i] & 31]; + for (var i = 0; i < lcdt.length; ++i) + ++lcfreq[lcdt[i] & 31]; + var _e = hTree(lcfreq, 7), lct = _e.t, mlcb = _e.l; + var nlcc = 19; + for (; nlcc > 4 && !lct[clim[nlcc - 1]]; --nlcc) + ; + var flen = (bl + 5) << 3; + var ftlen = clen(lf, flt) + clen(df, fdt) + eb; + var dtlen = clen(lf, dlt) + clen(df, ddt) + eb + 14 + 3 * nlcc + clen(lcfreq, lct) + 2 * lcfreq[16] + 3 * lcfreq[17] + 7 * lcfreq[18]; + if (bs >= 0 && flen <= ftlen && flen <= dtlen) + return wfblk(out, p, dat.subarray(bs, bs + bl)); + var lm, ll, dm, dl; + wbits(out, p, 1 + (dtlen < ftlen)), p += 2; + if (dtlen < ftlen) { + lm = hMap(dlt, mlb, 0), ll = dlt, dm = hMap(ddt, mdb, 
0), dl = ddt; + var llm = hMap(lct, mlcb, 0); + wbits(out, p, nlc - 257); + wbits(out, p + 5, ndc - 1); + wbits(out, p + 10, nlcc - 4); + p += 14; + for (var i = 0; i < nlcc; ++i) + wbits(out, p + 3 * i, lct[clim[i]]); + p += 3 * nlcc; + var lcts = [lclt, lcdt]; + for (var it = 0; it < 2; ++it) { + var clct = lcts[it]; + for (var i = 0; i < clct.length; ++i) { + var len = clct[i] & 31; + wbits(out, p, llm[len]), p += lct[len]; + if (len > 15) + wbits(out, p, (clct[i] >> 5) & 127), p += clct[i] >> 12; + } + } + } + else { + lm = flm, ll = flt, dm = fdm, dl = fdt; + } + for (var i = 0; i < li; ++i) { + var sym = syms[i]; + if (sym > 255) { + var len = (sym >> 18) & 31; + wbits16(out, p, lm[len + 257]), p += ll[len + 257]; + if (len > 7) + wbits(out, p, (sym >> 23) & 31), p += fleb[len]; + var dst = sym & 31; + wbits16(out, p, dm[dst]), p += dl[dst]; + if (dst > 3) + wbits16(out, p, (sym >> 5) & 8191), p += fdeb[dst]; + } + else { + wbits16(out, p, lm[sym]), p += ll[sym]; + } + } + wbits16(out, p, lm[256]); + return p + ll[256]; +}; +// deflate options (nice << 13) | chain +var deo = /*#__PURE__*/ new i32([65540, 131080, 131088, 131104, 262176, 1048704, 1048832, 2114560, 2117632]); +// empty +var et = /*#__PURE__*/ new u8(0); +// compresses data into a raw DEFLATE buffer +var dflt = function (dat, lvl, plvl, pre, post, st) { + var s = st.z || dat.length; + var o = new u8(pre + s + 5 * (1 + Math.ceil(s / 7000)) + post); + // writing to this writes to the output buffer + var w = o.subarray(pre, o.length - post); + var lst = st.l; + var pos = (st.r || 0) & 7; + if (lvl) { + if (pos) + w[0] = st.r >> 3; + var opt = deo[lvl - 1]; + var n = opt >> 13, c = opt & 8191; + var msk_1 = (1 << plvl) - 1; + // prev 2-byte val map curr 2-byte val map + var prev = st.p || new u16(32768), head = st.h || new u16(msk_1 + 1); + var bs1_1 = Math.ceil(plvl / 3), bs2_1 = 2 * bs1_1; + var hsh = function (i) { return (dat[i] ^ (dat[i + 1] << bs1_1) ^ (dat[i + 2] << bs2_1)) & msk_1; }; + // 24576 is an arbitrary number of maximum symbols per block + // 424 buffer for last block + var syms = new i32(25000); + // length/literal freq distance freq + var lf = new u16(288), df = new u16(32); + // l/lcnt exbits index l/lind waitdx blkpos + var lc_1 = 0, eb = 0, i = st.i || 0, li = 0, wi = st.w || 0, bs = 0; + for (; i + 2 < s; ++i) { + // hash value + var hv = hsh(i); + // index mod 32768 previous index mod + var imod = i & 32767, pimod = head[hv]; + prev[imod] = pimod; + head[hv] = imod; + // We always should modify head and prev, but only add symbols if + // this data is not yet processed ("wait" for wait index) + if (wi <= i) { + // bytes remaining + var rem = s - i; + if ((lc_1 > 7000 || li > 24576) && (rem > 423 || !lst)) { + pos = wblk(dat, w, 0, syms, lf, df, eb, li, bs, i - bs, pos); + li = lc_1 = eb = 0, bs = i; + for (var j = 0; j < 286; ++j) + lf[j] = 0; + for (var j = 0; j < 30; ++j) + df[j] = 0; + } + // len dist chain + var l = 2, d = 0, ch_1 = c, dif = imod - pimod & 32767; + if (rem > 2 && hv == hsh(i - dif)) { + var maxn = Math.min(n, rem) - 1; + var maxd = Math.min(32767, i); + // max possible length + // not capped at dif because decompressors implement "rolling" index population + var ml = Math.min(258, rem); + while (dif <= maxd && --ch_1 && imod != pimod) { + if (dat[i + l] == dat[i + l - dif]) { + var nl = 0; + for (; nl < ml && dat[i + nl] == dat[i + nl - dif]; ++nl) + ; + if (nl > l) { + l = nl, d = dif; + // break out early when we reach "nice" (we are satisfied enough) + if (nl > maxn) + break; + 
// now, find the rarest 2-byte sequence within this + // length of literals and search for that instead. + // Much faster than just using the start + var mmd = Math.min(dif, nl - 2); + var md = 0; + for (var j = 0; j < mmd; ++j) { + var ti = i - dif + j & 32767; + var pti = prev[ti]; + var cd = ti - pti & 32767; + if (cd > md) + md = cd, pimod = ti; + } + } + } + // check the previous match + imod = pimod, pimod = prev[imod]; + dif += imod - pimod & 32767; + } + } + // d will be nonzero only when a match was found + if (d) { + // store both dist and len data in one int32 + // Make sure this is recognized as a len/dist with 28th bit (2^28) + syms[li++] = 268435456 | (revfl[l] << 18) | revfd[d]; + var lin = revfl[l] & 31, din = revfd[d] & 31; + eb += fleb[lin] + fdeb[din]; + ++lf[257 + lin]; + ++df[din]; + wi = i + l; + ++lc_1; + } + else { + syms[li++] = dat[i]; + ++lf[dat[i]]; + } + } + } + for (i = Math.max(i, wi); i < s; ++i) { + syms[li++] = dat[i]; + ++lf[dat[i]]; + } + pos = wblk(dat, w, lst, syms, lf, df, eb, li, bs, i - bs, pos); + if (!lst) { + st.r = (pos & 7) | w[(pos / 8) | 0] << 3; + // shft(pos) now 1 less if pos & 7 != 0 + pos -= 7; + st.h = head, st.p = prev, st.i = i, st.w = wi; + } + } + else { + for (var i = st.w || 0; i < s + lst; i += 65535) { + // end + var e = i + 65535; + if (e >= s) { + // write final block + w[(pos / 8) | 0] = lst; + e = s; + } + pos = wfblk(w, pos + 1, dat.subarray(i, e)); + } + st.i = s; + } + return slc(o, 0, pre + shft(pos) + post); +}; +// CRC32 table +var crct = /*#__PURE__*/ (function () { + var t = new Int32Array(256); + for (var i = 0; i < 256; ++i) { + var c = i, k = 9; + while (--k) + c = ((c & 1) && -306674912) ^ (c >>> 1); + t[i] = c; + } + return t; +})(); +// CRC32 +var crc = function () { + var c = -1; + return { + p: function (d) { + // closures have awful performance + var cr = c; + for (var i = 0; i < d.length; ++i) + cr = crct[(cr & 255) ^ d[i]] ^ (cr >>> 8); + c = cr; + }, + d: function () { return ~c; } + }; +}; +// Adler32 +var adler = function () { + var a = 1, b = 0; + return { + p: function (d) { + // closures have awful performance + var n = a, m = b; + var l = d.length | 0; + for (var i = 0; i != l;) { + var e = Math.min(i + 2655, l); + for (; i < e; ++i) + m += n += d[i]; + n = (n & 65535) + 15 * (n >> 16), m = (m & 65535) + 15 * (m >> 16); + } + a = n, b = m; + }, + d: function () { + a %= 65521, b %= 65521; + return (a & 255) << 24 | (a & 0xFF00) << 8 | (b & 255) << 8 | (b >> 8); + } + }; +}; +; +// deflate with opts +var dopt = function (dat, opt, pre, post, st) { + if (!st) { + st = { l: 1 }; + if (opt.dictionary) { + var dict = opt.dictionary.subarray(-32768); + var newDat = new u8(dict.length + dat.length); + newDat.set(dict); + newDat.set(dat, dict.length); + dat = newDat; + st.w = dict.length; + } + } + return dflt(dat, opt.level == null ? 6 : opt.level, opt.mem == null ? (st.l ? Math.ceil(Math.max(8, Math.min(13, Math.log(dat.length))) * 1.5) : 20) : (12 + opt.mem), pre, post, st); +}; +// Walmart object spread +var mrg = function (a, b) { + var o = {}; + for (var k in a) + o[k] = a[k]; + for (var k in b) + o[k] = b[k]; + return o; +}; +// worker clone +// This is possibly the craziest part of the entire codebase, despite how simple it may seem. +// The only parameter to this function is a closure that returns an array of variables outside of the function scope. +// We're going to try to figure out the variable names used in the closure as strings because that is crucial for workerization. 
+// We will return an object mapping of true variable name to value (basically, the current scope as a JS object). +// The reason we can't just use the original variable names is minifiers mangling the toplevel scope. +// This took me three weeks to figure out how to do. +var wcln = function (fn, fnStr, td) { + var dt = fn(); + var st = fn.toString(); + var ks = st.slice(st.indexOf('[') + 1, st.lastIndexOf(']')).replace(/\s+/g, '').split(','); + for (var i = 0; i < dt.length; ++i) { + var v = dt[i], k = ks[i]; + if (typeof v == 'function') { + fnStr += ';' + k + '='; + var st_1 = v.toString(); + if (v.prototype) { + // for global objects + if (st_1.indexOf('[native code]') != -1) { + var spInd = st_1.indexOf(' ', 8) + 1; + fnStr += st_1.slice(spInd, st_1.indexOf('(', spInd)); + } + else { + fnStr += st_1; + for (var t in v.prototype) + fnStr += ';' + k + '.prototype.' + t + '=' + v.prototype[t].toString(); + } + } + else + fnStr += st_1; + } + else + td[k] = v; + } + return fnStr; +}; +var ch = []; +// clone bufs +var cbfs = function (v) { + var tl = []; + for (var k in v) { + if (v[k].buffer) { + tl.push((v[k] = new v[k].constructor(v[k])).buffer); + } + } + return tl; +}; +// use a worker to execute code +var wrkr = function (fns, init, id, cb) { + if (!ch[id]) { + var fnStr = '', td_1 = {}, m = fns.length - 1; + for (var i = 0; i < m; ++i) + fnStr = wcln(fns[i], fnStr, td_1); + ch[id] = { c: wcln(fns[m], fnStr, td_1), e: td_1 }; + } + var td = mrg({}, ch[id].e); + return wk(ch[id].c + ';onmessage=function(e){for(var k in e.data)self[k]=e.data[k];onmessage=' + init.toString() + '}', id, td, cbfs(td), cb); +}; +// base async inflate fn +var bInflt = function () { return [u8, u16, i32, fleb, fdeb, clim, fl, fd, flrm, fdrm, rev, ec, hMap, max, bits, bits16, shft, slc, err, inflt, inflateSync, pbf, gopt]; }; +var bDflt = function () { return [u8, u16, i32, fleb, fdeb, clim, revfl, revfd, flm, flt, fdm, fdt, rev, deo, et, hMap, wbits, wbits16, hTree, ln, lc, clen, wfblk, wblk, shft, slc, dflt, dopt, deflateSync, pbf]; }; +// gzip extra +var gze = function () { return [gzh, gzhl, wbytes, crc, crct]; }; +// gunzip extra +var guze = function () { return [gzs, gzl]; }; +// zlib extra +var zle = function () { return [zlh, wbytes, adler]; }; +// unzlib extra +var zule = function () { return [zls]; }; +// post buf +var pbf = function (msg) { return postMessage(msg, [msg.buffer]); }; +// get opts +var gopt = function (o) { return o && { + out: o.size && new u8(o.size), + dictionary: o.dictionary +}; }; +// async helper +var cbify = function (dat, opts, fns, init, id, cb) { + var w = wrkr(fns, init, id, function (err, dat) { + w.terminate(); + cb(err, dat); + }); + w.postMessage([dat, opts], opts.consume ? 
[dat.buffer] : []); + return function () { w.terminate(); }; +}; +// auto stream +var astrm = function (strm) { + strm.ondata = function (dat, final) { return postMessage([dat, final], [dat.buffer]); }; + return function (ev) { + if (ev.data.length) { + strm.push(ev.data[0], ev.data[1]); + postMessage([ev.data[0].length]); + } + else + strm.flush(); + }; +}; +// async stream attach +var astrmify = function (fns, strm, opts, init, id, flush, ext) { + var t; + var w = wrkr(fns, init, id, function (err, dat) { + if (err) + w.terminate(), strm.ondata.call(strm, err); + else if (!Array.isArray(dat)) + ext(dat); + else if (dat.length == 1) { + strm.queuedSize -= dat[0]; + if (strm.ondrain) + strm.ondrain(dat[0]); + } + else { + if (dat[1]) + w.terminate(); + strm.ondata.call(strm, err, dat[0], dat[1]); + } + }); + w.postMessage(opts); + strm.queuedSize = 0; + strm.push = function (d, f) { + if (!strm.ondata) + err(5); + if (t) + strm.ondata(err(4, 0, 1), null, !!f); + strm.queuedSize += d.length; + w.postMessage([d, t = f], [d.buffer]); + }; + strm.terminate = function () { w.terminate(); }; + if (flush) { + strm.flush = function () { w.postMessage([]); }; + } +}; +// read 2 bytes +var b2 = function (d, b) { return d[b] | (d[b + 1] << 8); }; +// read 4 bytes +var b4 = function (d, b) { return (d[b] | (d[b + 1] << 8) | (d[b + 2] << 16) | (d[b + 3] << 24)) >>> 0; }; +var b8 = function (d, b) { return b4(d, b) + (b4(d, b + 4) * 4294967296); }; +// write bytes +var wbytes = function (d, b, v) { + for (; v; ++b) + d[b] = v, v >>>= 8; +}; +// gzip header +var gzh = function (c, o) { + var fn = o.filename; + c[0] = 31, c[1] = 139, c[2] = 8, c[8] = o.level < 2 ? 4 : o.level == 9 ? 2 : 0, c[9] = 3; // assume Unix + if (o.mtime != 0) + wbytes(c, 4, Math.floor(new Date(o.mtime || Date.now()) / 1000)); + if (fn) { + c[3] = 8; + for (var i = 0; i <= fn.length; ++i) + c[i + 10] = fn.charCodeAt(i); + } +}; +// gzip footer: -8 to -4 = CRC, -4 to -0 is length +// gzip start +var gzs = function (d) { + if (d[0] != 31 || d[1] != 139 || d[2] != 8) + err(6, 'invalid gzip data'); + var flg = d[3]; + var st = 10; + if (flg & 4) + st += (d[10] | d[11] << 8) + 2; + for (var zs = (flg >> 3 & 1) + (flg >> 4 & 1); zs > 0; zs -= !d[st++]) + ; + return st + (flg & 2); +}; +// gzip length +var gzl = function (d) { + var l = d.length; + return (d[l - 4] | d[l - 3] << 8 | d[l - 2] << 16 | d[l - 1] << 24) >>> 0; +}; +// gzip header length +var gzhl = function (o) { return 10 + (o.filename ? o.filename.length + 1 : 0); }; +// zlib header +var zlh = function (c, o) { + var lv = o.level, fl = lv == 0 ? 0 : lv < 6 ? 1 : lv == 9 ? 3 : 2; + c[0] = 120, c[1] = (fl << 6) | (o.dictionary && 32); + c[1] |= 31 - ((c[0] << 8) | c[1]) % 31; + if (o.dictionary) { + var h = adler(); + h.p(o.dictionary); + wbytes(c, 2, h.d()); + } +}; +// zlib start +var zls = function (d, dict) { + if ((d[0] & 15) != 8 || (d[0] >> 4) > 7 || ((d[0] << 8 | d[1]) % 31)) + err(6, 'invalid zlib data'); + if ((d[1] >> 5 & 1) == +!dict) + err(6, 'invalid zlib data: ' + (d[1] & 32 ? 
'need' : 'unexpected') + ' dictionary'); + return (d[1] >> 3 & 4) + 2; +}; +function StrmOpt(opts, cb) { + if (typeof opts == 'function') + cb = opts, opts = {}; + this.ondata = cb; + return opts; +} +/** + * Streaming DEFLATE compression + */ +var Deflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function Deflate(opts, cb) { + if (typeof opts == 'function') + cb = opts, opts = {}; + this.ondata = cb; + this.o = opts || {}; + this.s = { l: 0, i: 32768, w: 32768, z: 32768 }; + // Buffer length must always be 0 mod 32768 for index calculations to be correct when modifying head and prev + // 98304 = 32768 (lookback) + 65536 (common chunk size) + this.b = new u8(98304); + if (this.o.dictionary) { + var dict = this.o.dictionary.subarray(-32768); + this.b.set(dict, 32768 - dict.length); + this.s.i = 32768 - dict.length; + } + } + Deflate.prototype.p = function (c, f) { + this.ondata(dopt(c, this.o, 0, 0, this.s), f); + }; + /** + * Pushes a chunk to be deflated + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + Deflate.prototype.push = function (chunk, final) { + if (!this.ondata) + err(5); + if (this.s.l) + err(4); + var endLen = chunk.length + this.s.z; + if (endLen > this.b.length) { + if (endLen > 2 * this.b.length - 32768) { + var newBuf = new u8(endLen & -32768); + newBuf.set(this.b.subarray(0, this.s.z)); + this.b = newBuf; + } + var split = this.b.length - this.s.z; + this.b.set(chunk.subarray(0, split), this.s.z); + this.s.z = this.b.length; + this.p(this.b, false); + this.b.set(this.b.subarray(-32768)); + this.b.set(chunk.subarray(split), 32768); + this.s.z = chunk.length - split + 32768; + this.s.i = 32766, this.s.w = 32768; + } + else { + this.b.set(chunk, this.s.z); + this.s.z += chunk.length; + } + this.s.l = final & 1; + if (this.s.z > this.s.w + 8191 || final) { + this.p(this.b, final || false); + this.s.w = this.s.i, this.s.i -= 2; + } + }; + /** + * Flushes buffered uncompressed data. Useful to immediately retrieve the + * deflated output for small inputs. 
+ */ + Deflate.prototype.flush = function () { + if (!this.ondata) + err(5); + if (this.s.l) + err(4); + this.p(this.b, false); + this.s.w = this.s.i, this.s.i -= 2; + }; + return Deflate; +}()))); + +/** + * Asynchronous streaming DEFLATE compression + */ +var AsyncDeflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function AsyncDeflate(opts, cb) { + astrmify([ + bDflt, + function () { return [astrm, Deflate]; } + ], this, StrmOpt.call(this, opts, cb), function (ev) { + var strm = new Deflate(ev.data); + onmessage = astrm(strm); + }, 6, 1); + } + return AsyncDeflate; +}()))); + +function deflate(data, opts, cb) { + if (!cb) + cb = opts, opts = {}; + if (typeof cb != 'function') + err(7); + return cbify(data, opts, [ + bDflt, + ], function (ev) { return pbf(deflateSync(ev.data[0], ev.data[1])); }, 0, cb); +} +/** + * Compresses data with DEFLATE without any wrapper + * @param data The data to compress + * @param opts The compression options + * @returns The deflated version of the data + */ +function deflateSync(data, opts) { + return dopt(data, opts || {}, 0, 0); +} +/** + * Streaming DEFLATE decompression + */ +var Inflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function Inflate(opts, cb) { + // no StrmOpt here to avoid adding to workerizer + if (typeof opts == 'function') + cb = opts, opts = {}; + this.ondata = cb; + var dict = opts && opts.dictionary && opts.dictionary.subarray(-32768); + this.s = { i: 0, b: dict ? dict.length : 0 }; + this.o = new u8(32768); + this.p = new u8(0); + if (dict) + this.o.set(dict); + } + Inflate.prototype.e = function (c) { + if (!this.ondata) + err(5); + if (this.d) + err(4); + if (!this.p.length) + this.p = c; + else if (c.length) { + var n = new u8(this.p.length + c.length); + n.set(this.p), n.set(c, this.p.length), this.p = n; + } + }; + Inflate.prototype.c = function (final) { + this.s.i = +(this.d = final || false); + var bts = this.s.b; + var dt = inflt(this.p, this.s, this.o); + this.ondata(slc(dt, bts, this.s.b), this.d); + this.o = slc(dt, this.s.b - 32768), this.s.b = this.o.length; + this.p = slc(this.p, (this.s.p / 8) | 0), this.s.p &= 7; + }; + /** + * Pushes a chunk to be inflated + * @param chunk The chunk to push + * @param final Whether this is the final chunk + */ + Inflate.prototype.push = function (chunk, final) { + this.e(chunk), this.c(final); + }; + return Inflate; +}()))); + +/** + * Asynchronous streaming DEFLATE decompression + */ +var AsyncInflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function AsyncInflate(opts, cb) { + astrmify([ + bInflt, + function () { return [astrm, Inflate]; } + ], this, StrmOpt.call(this, opts, cb), function (ev) { + var strm = new Inflate(ev.data); + onmessage = astrm(strm); + }, 7, 0); + } + return AsyncInflate; +}()))); + +function inflate(data, opts, cb) { + if (!cb) + cb = opts, opts = {}; + if (typeof cb != 'function') + err(7); + return cbify(data, opts, [ + bInflt + ], function (ev) { return pbf(inflateSync(ev.data[0], gopt(ev.data[1]))); }, 1, cb); +} +/** + * Expands DEFLATE data with no wrapper + * @param data The data to decompress + * @param opts The decompression options + * @returns The decompressed version of the data + */ +function inflateSync(data, opts) { + return inflt(data, { i: 2 }, opts && opts.out, opts && opts.dictionary); +} +// before you yell at me for not just using extends, my reason is that TS inheritance is hard to workerize. 
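+// A minimal round-trip sketch for the raw DEFLATE helpers defined above. This
+// is illustrative only and assumes the deflateSync/inflateSync signatures match
+// upstream fflate (opts.level 0-9, no container format); strToU8 and strFromU8
+// are the UTF-8 helpers defined further down in this bundle.
+//
+//   var input = strToU8('tornado cache payload');     // string -> Uint8Array
+//   var packed = deflateSync(input, { level: 9 });    // raw DEFLATE, no header
+//   var restored = inflateSync(packed);               // Uint8Array
+//   strFromU8(restored) === 'tornado cache payload';  // true
+//
+// The GZIP, Zlib and ZIP wrappers that follow reuse this same core and add only
+// their respective headers and checksums (CRC32 or Adler32).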
+/** + * Streaming GZIP compression + */ +var Gzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function Gzip(opts, cb) { + this.c = crc(); + this.l = 0; + this.v = 1; + Deflate.call(this, opts, cb); + } + /** + * Pushes a chunk to be GZIPped + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + Gzip.prototype.push = function (chunk, final) { + this.c.p(chunk); + this.l += chunk.length; + Deflate.prototype.push.call(this, chunk, final); + }; + Gzip.prototype.p = function (c, f) { + var raw = dopt(c, this.o, this.v && gzhl(this.o), f && 8, this.s); + if (this.v) + gzh(raw, this.o), this.v = 0; + if (f) + wbytes(raw, raw.length - 8, this.c.d()), wbytes(raw, raw.length - 4, this.l); + this.ondata(raw, f); + }; + /** + * Flushes buffered uncompressed data. Useful to immediately retrieve the + * GZIPped output for small inputs. + */ + Gzip.prototype.flush = function () { + Deflate.prototype.flush.call(this); + }; + return Gzip; +}()))); + +/** + * Asynchronous streaming GZIP compression + */ +var AsyncGzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function AsyncGzip(opts, cb) { + astrmify([ + bDflt, + gze, + function () { return [astrm, Deflate, Gzip]; } + ], this, StrmOpt.call(this, opts, cb), function (ev) { + var strm = new Gzip(ev.data); + onmessage = astrm(strm); + }, 8, 1); + } + return AsyncGzip; +}()))); + +function gzip(data, opts, cb) { + if (!cb) + cb = opts, opts = {}; + if (typeof cb != 'function') + err(7); + return cbify(data, opts, [ + bDflt, + gze, + function () { return [gzipSync]; } + ], function (ev) { return pbf(gzipSync(ev.data[0], ev.data[1])); }, 2, cb); +} +/** + * Compresses data with GZIP + * @param data The data to compress + * @param opts The compression options + * @returns The gzipped version of the data + */ +function gzipSync(data, opts) { + if (!opts) + opts = {}; + var c = crc(), l = data.length; + c.p(data); + var d = dopt(data, opts, gzhl(opts), 8), s = d.length; + return gzh(d, opts), wbytes(d, s - 8, c.d()), wbytes(d, s - 4, l), d; +} +/** + * Streaming single or multi-member GZIP decompression + */ +var Gunzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function Gunzip(opts, cb) { + this.v = 1; + this.r = 0; + Inflate.call(this, opts, cb); + } + /** + * Pushes a chunk to be GUNZIPped + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + Gunzip.prototype.push = function (chunk, final) { + Inflate.prototype.e.call(this, chunk); + this.r += chunk.length; + if (this.v) { + var p = this.p.subarray(this.v - 1); + var s = p.length > 3 ? 
gzs(p) : 4; + if (s > p.length) { + if (!final) + return; + } + else if (this.v > 1 && this.onmember) { + this.onmember(this.r - p.length); + } + this.p = p.subarray(s), this.v = 0; + } + // necessary to prevent TS from using the closure value + // This allows for workerization to function correctly + Inflate.prototype.c.call(this, final); + // process concatenated GZIP + if (this.s.f && !this.s.l && !final) { + this.v = shft(this.s.p) + 9; + this.s = { i: 0 }; + this.o = new u8(0); + this.push(new u8(0), final); + } + }; + return Gunzip; +}()))); + +/** + * Asynchronous streaming single or multi-member GZIP decompression + */ +var AsyncGunzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function AsyncGunzip(opts, cb) { + var _this = this; + astrmify([ + bInflt, + guze, + function () { return [astrm, Inflate, Gunzip]; } + ], this, StrmOpt.call(this, opts, cb), function (ev) { + var strm = new Gunzip(ev.data); + strm.onmember = function (offset) { return postMessage(offset); }; + onmessage = astrm(strm); + }, 9, 0, function (offset) { return _this.onmember && _this.onmember(offset); }); + } + return AsyncGunzip; +}()))); + +function gunzip(data, opts, cb) { + if (!cb) + cb = opts, opts = {}; + if (typeof cb != 'function') + err(7); + return cbify(data, opts, [ + bInflt, + guze, + function () { return [gunzipSync]; } + ], function (ev) { return pbf(gunzipSync(ev.data[0], ev.data[1])); }, 3, cb); +} +/** + * Expands GZIP data + * @param data The data to decompress + * @param opts The decompression options + * @returns The decompressed version of the data + */ +function gunzipSync(data, opts) { + var st = gzs(data); + if (st + 8 > data.length) + err(6, 'invalid gzip data'); + return inflt(data.subarray(st, -8), { i: 2 }, opts && opts.out || new u8(gzl(data)), opts && opts.dictionary); +} +/** + * Streaming Zlib compression + */ +var Zlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function Zlib(opts, cb) { + this.c = adler(); + this.v = 1; + Deflate.call(this, opts, cb); + } + /** + * Pushes a chunk to be zlibbed + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + Zlib.prototype.push = function (chunk, final) { + this.c.p(chunk); + Deflate.prototype.push.call(this, chunk, final); + }; + Zlib.prototype.p = function (c, f) { + var raw = dopt(c, this.o, this.v && (this.o.dictionary ? 6 : 2), f && 4, this.s); + if (this.v) + zlh(raw, this.o), this.v = 0; + if (f) + wbytes(raw, raw.length - 4, this.c.d()); + this.ondata(raw, f); + }; + /** + * Flushes buffered uncompressed data. Useful to immediately retrieve the + * zlibbed output for small inputs. 
+ */ + Zlib.prototype.flush = function () { + Deflate.prototype.flush.call(this); + }; + return Zlib; +}()))); + +/** + * Asynchronous streaming Zlib compression + */ +var AsyncZlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function AsyncZlib(opts, cb) { + astrmify([ + bDflt, + zle, + function () { return [astrm, Deflate, Zlib]; } + ], this, StrmOpt.call(this, opts, cb), function (ev) { + var strm = new Zlib(ev.data); + onmessage = astrm(strm); + }, 10, 1); + } + return AsyncZlib; +}()))); + +function zlib(data, opts, cb) { + if (!cb) + cb = opts, opts = {}; + if (typeof cb != 'function') + err(7); + return cbify(data, opts, [ + bDflt, + zle, + function () { return [zlibSync]; } + ], function (ev) { return pbf(zlibSync(ev.data[0], ev.data[1])); }, 4, cb); +} +/** + * Compress data with Zlib + * @param data The data to compress + * @param opts The compression options + * @returns The zlib-compressed version of the data + */ +function zlibSync(data, opts) { + if (!opts) + opts = {}; + var a = adler(); + a.p(data); + var d = dopt(data, opts, opts.dictionary ? 6 : 2, 4); + return zlh(d, opts), wbytes(d, d.length - 4, a.d()), d; +} +/** + * Streaming Zlib decompression + */ +var Unzlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function Unzlib(opts, cb) { + Inflate.call(this, opts, cb); + this.v = opts && opts.dictionary ? 2 : 1; + } + /** + * Pushes a chunk to be unzlibbed + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + Unzlib.prototype.push = function (chunk, final) { + Inflate.prototype.e.call(this, chunk); + if (this.v) { + if (this.p.length < 6 && !final) + return; + this.p = this.p.subarray(zls(this.p, this.v - 1)), this.v = 0; + } + if (final) { + if (this.p.length < 4) + err(6, 'invalid zlib data'); + this.p = this.p.subarray(0, -4); + } + // necessary to prevent TS from using the closure value + // This allows for workerization to function correctly + Inflate.prototype.c.call(this, final); + }; + return Unzlib; +}()))); + +/** + * Asynchronous streaming Zlib decompression + */ +var AsyncUnzlib = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function AsyncUnzlib(opts, cb) { + astrmify([ + bInflt, + zule, + function () { return [astrm, Inflate, Unzlib]; } + ], this, StrmOpt.call(this, opts, cb), function (ev) { + var strm = new Unzlib(ev.data); + onmessage = astrm(strm); + }, 11, 0); + } + return AsyncUnzlib; +}()))); + +function unzlib(data, opts, cb) { + if (!cb) + cb = opts, opts = {}; + if (typeof cb != 'function') + err(7); + return cbify(data, opts, [ + bInflt, + zule, + function () { return [unzlibSync]; } + ], function (ev) { return pbf(unzlibSync(ev.data[0], gopt(ev.data[1]))); }, 5, cb); +} +/** + * Expands Zlib data + * @param data The data to decompress + * @param opts The decompression options + * @returns The decompressed version of the data + */ +function unzlibSync(data, opts) { + return inflt(data.subarray(zls(data, opts && opts.dictionary), -4), { i: 2 }, opts && opts.out, opts && opts.dictionary); +} +// Default algorithm for compression (used because having a known output size allows faster decompression) + + +/** + * Streaming GZIP, Zlib, or raw DEFLATE decompression + */ +var Decompress = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function Decompress(opts, cb) { + this.o = StrmOpt.call(this, opts, cb) || {}; + this.G = Gunzip; + this.I = Inflate; + this.Z = Unzlib; + } + // init 
substream + // overriden by AsyncDecompress + Decompress.prototype.i = function () { + var _this = this; + this.s.ondata = function (dat, final) { + _this.ondata(dat, final); + }; + }; + /** + * Pushes a chunk to be decompressed + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + Decompress.prototype.push = function (chunk, final) { + if (!this.ondata) + err(5); + if (!this.s) { + if (this.p && this.p.length) { + var n = new u8(this.p.length + chunk.length); + n.set(this.p), n.set(chunk, this.p.length); + } + else + this.p = chunk; + if (this.p.length > 2) { + this.s = (this.p[0] == 31 && this.p[1] == 139 && this.p[2] == 8) + ? new this.G(this.o) + : ((this.p[0] & 15) != 8 || (this.p[0] >> 4) > 7 || ((this.p[0] << 8 | this.p[1]) % 31)) + ? new this.I(this.o) + : new this.Z(this.o); + this.i(); + this.s.push(this.p, final); + this.p = null; + } + } + else + this.s.push(chunk, final); + }; + return Decompress; +}()))); + +/** + * Asynchronous streaming GZIP, Zlib, or raw DEFLATE decompression + */ +var AsyncDecompress = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function AsyncDecompress(opts, cb) { + Decompress.call(this, opts, cb); + this.queuedSize = 0; + this.G = AsyncGunzip; + this.I = AsyncInflate; + this.Z = AsyncUnzlib; + } + AsyncDecompress.prototype.i = function () { + var _this = this; + this.s.ondata = function (err, dat, final) { + _this.ondata(err, dat, final); + }; + this.s.ondrain = function (size) { + _this.queuedSize -= size; + if (_this.ondrain) + _this.ondrain(size); + }; + }; + /** + * Pushes a chunk to be decompressed + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + AsyncDecompress.prototype.push = function (chunk, final) { + this.queuedSize += chunk.length; + Decompress.prototype.push.call(this, chunk, final); + }; + return AsyncDecompress; +}()))); + +function decompress(data, opts, cb) { + if (!cb) + cb = opts, opts = {}; + if (typeof cb != 'function') + err(7); + return (data[0] == 31 && data[1] == 139 && data[2] == 8) + ? gunzip(data, opts, cb) + : ((data[0] & 15) != 8 || (data[0] >> 4) > 7 || ((data[0] << 8 | data[1]) % 31)) + ? inflate(data, opts, cb) + : unzlib(data, opts, cb); +} +/** + * Expands compressed GZIP, Zlib, or raw DEFLATE data, automatically detecting the format + * @param data The data to decompress + * @param opts The decompression options + * @returns The decompressed version of the data + */ +function decompressSync(data, opts) { + return (data[0] == 31 && data[1] == 139 && data[2] == 8) + ? gunzipSync(data, opts) + : ((data[0] & 15) != 8 || (data[0] >> 4) > 7 || ((data[0] << 8 | data[1]) % 31)) + ? 
inflateSync(data, opts) + : unzlibSync(data, opts); +} +// flatten a directory structure +var fltn = function (d, p, t, o) { + for (var k in d) { + var val = d[k], n = p + k, op = o; + if (Array.isArray(val)) + op = mrg(o, val[1]), val = val[0]; + if (val instanceof u8) + t[n] = [val, op]; + else { + t[n += '/'] = [new u8(0), op]; + fltn(val, n, t, o); + } + } +}; +// text encoder +var te = typeof TextEncoder != 'undefined' && /*#__PURE__*/ new TextEncoder(); +// text decoder +var td = typeof TextDecoder != 'undefined' && /*#__PURE__*/ new TextDecoder(); +// text decoder stream +var tds = 0; +try { + td.decode(et, { stream: true }); + tds = 1; +} +catch (e) { } +// decode UTF8 +var dutf8 = function (d) { + for (var r = '', i = 0;;) { + var c = d[i++]; + var eb = (c > 127) + (c > 223) + (c > 239); + if (i + eb > d.length) + return { s: r, r: slc(d, i - 1) }; + if (!eb) + r += String.fromCharCode(c); + else if (eb == 3) { + c = ((c & 15) << 18 | (d[i++] & 63) << 12 | (d[i++] & 63) << 6 | (d[i++] & 63)) - 65536, + r += String.fromCharCode(55296 | (c >> 10), 56320 | (c & 1023)); + } + else if (eb & 1) + r += String.fromCharCode((c & 31) << 6 | (d[i++] & 63)); + else + r += String.fromCharCode((c & 15) << 12 | (d[i++] & 63) << 6 | (d[i++] & 63)); + } +}; +/** + * Streaming UTF-8 decoding + */ +var DecodeUTF8 = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + /** + * Creates a UTF-8 decoding stream + * @param cb The callback to call whenever data is decoded + */ + function DecodeUTF8(cb) { + this.ondata = cb; + if (tds) + this.t = new TextDecoder(); + else + this.p = et; + } + /** + * Pushes a chunk to be decoded from UTF-8 binary + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + DecodeUTF8.prototype.push = function (chunk, final) { + if (!this.ondata) + err(5); + final = !!final; + if (this.t) { + this.ondata(this.t.decode(chunk, { stream: true }), final); + if (final) { + if (this.t.decode().length) + err(8); + this.t = null; + } + return; + } + if (!this.p) + err(4); + var dat = new u8(this.p.length + chunk.length); + dat.set(this.p); + dat.set(chunk, this.p.length); + var _a = dutf8(dat), s = _a.s, r = _a.r; + if (final) { + if (r.length) + err(8); + this.p = null; + } + else + this.p = r; + this.ondata(s, final); + }; + return DecodeUTF8; +}()))); + +/** + * Streaming UTF-8 encoding + */ +var EncodeUTF8 = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + /** + * Creates a UTF-8 decoding stream + * @param cb The callback to call whenever data is encoded + */ + function EncodeUTF8(cb) { + this.ondata = cb; + } + /** + * Pushes a chunk to be encoded to UTF-8 + * @param chunk The string data to push + * @param final Whether this is the last chunk + */ + EncodeUTF8.prototype.push = function (chunk, final) { + if (!this.ondata) + err(5); + if (this.d) + err(4); + this.ondata(strToU8(chunk), this.d = final || false); + }; + return EncodeUTF8; +}()))); + +/** + * Converts a string into a Uint8Array for use with compression/decompression methods + * @param str The string to encode + * @param latin1 Whether or not to interpret the data as Latin-1. This should + * not need to be true unless decoding a binary string. 
+ * @returns The string encoded in UTF-8/Latin-1 binary + */ +function strToU8(str, latin1) { + if (latin1) { + var ar_1 = new u8(str.length); + for (var i = 0; i < str.length; ++i) + ar_1[i] = str.charCodeAt(i); + return ar_1; + } + if (te) + return te.encode(str); + var l = str.length; + var ar = new u8(str.length + (str.length >> 1)); + var ai = 0; + var w = function (v) { ar[ai++] = v; }; + for (var i = 0; i < l; ++i) { + if (ai + 5 > ar.length) { + var n = new u8(ai + 8 + ((l - i) << 1)); + n.set(ar); + ar = n; + } + var c = str.charCodeAt(i); + if (c < 128 || latin1) + w(c); + else if (c < 2048) + w(192 | (c >> 6)), w(128 | (c & 63)); + else if (c > 55295 && c < 57344) + c = 65536 + (c & 1023 << 10) | (str.charCodeAt(++i) & 1023), + w(240 | (c >> 18)), w(128 | ((c >> 12) & 63)), w(128 | ((c >> 6) & 63)), w(128 | (c & 63)); + else + w(224 | (c >> 12)), w(128 | ((c >> 6) & 63)), w(128 | (c & 63)); + } + return slc(ar, 0, ai); +} +/** + * Converts a Uint8Array to a string + * @param dat The data to decode to string + * @param latin1 Whether or not to interpret the data as Latin-1. This should + * not need to be true unless encoding to binary string. + * @returns The original UTF-8/Latin-1 string + */ +function strFromU8(dat, latin1) { + if (latin1) { + var r = ''; + for (var i = 0; i < dat.length; i += 16384) + r += String.fromCharCode.apply(null, dat.subarray(i, i + 16384)); + return r; + } + else if (td) { + return td.decode(dat); + } + else { + var _a = dutf8(dat), s = _a.s, r = _a.r; + if (r.length) + err(8); + return s; + } +} +; +// deflate bit flag +var dbf = function (l) { return l == 1 ? 3 : l < 6 ? 2 : l == 9 ? 1 : 0; }; +// skip local zip header +var slzh = function (d, b) { return b + 30 + b2(d, b + 26) + b2(d, b + 28); }; +// read zip header +var zh = function (d, b, z) { + var fnl = b2(d, b + 28), fn = strFromU8(d.subarray(b + 46, b + 46 + fnl), !(b2(d, b + 8) & 2048)), es = b + 46 + fnl, bs = b4(d, b + 20); + var _a = z && bs == 4294967295 ? z64e(d, es) : [bs, b4(d, b + 24), b4(d, b + 42)], sc = _a[0], su = _a[1], off = _a[2]; + return [b2(d, b + 10), sc, su, fn, es + b2(d, b + 30) + b2(d, b + 32), off]; +}; +// read zip64 extra field +var z64e = function (d, b) { + for (; b2(d, b) != 1; b += 4 + b2(d, b + 2)) + ; + return [b8(d, b + 12), b8(d, b + 4), b8(d, b + 20)]; +}; +// extra field length +var exfl = function (ex) { + var le = 0; + if (ex) { + for (var k in ex) { + var l = ex[k].length; + if (l > 65535) + err(9); + le += l + 4; + } + } + return le; +}; +// write zip header +var wzh = function (d, b, f, fn, u, c, ce, co) { + var fl = fn.length, ex = f.extra, col = co && co.length; + var exl = exfl(ex); + wbytes(d, b, ce != null ? 0x2014B50 : 0x4034B50), b += 4; + if (ce != null) + d[b++] = 20, d[b++] = f.os; + d[b] = 20, b += 2; // spec compliance? what's that? + d[b++] = (f.flag << 1) | (c < 0 && 8), d[b++] = u && 8; + d[b++] = f.compression & 255, d[b++] = f.compression >> 8; + var dt = new Date(f.mtime == null ? Date.now() : f.mtime), y = dt.getFullYear() - 1980; + if (y < 0 || y > 119) + err(10); + wbytes(d, b, (y << 25) | ((dt.getMonth() + 1) << 21) | (dt.getDate() << 16) | (dt.getHours() << 11) | (dt.getMinutes() << 5) | (dt.getSeconds() >> 1)), b += 4; + if (c != -1) { + wbytes(d, b, f.crc); + wbytes(d, b + 4, c < 0 ? 
-c - 2 : c); + wbytes(d, b + 8, f.size); + } + wbytes(d, b + 12, fl); + wbytes(d, b + 14, exl), b += 16; + if (ce != null) { + wbytes(d, b, col); + wbytes(d, b + 6, f.attrs); + wbytes(d, b + 10, ce), b += 14; + } + d.set(fn, b); + b += fl; + if (exl) { + for (var k in ex) { + var exf = ex[k], l = exf.length; + wbytes(d, b, +k); + wbytes(d, b + 2, l); + d.set(exf, b + 4), b += 4 + l; + } + } + if (col) + d.set(co, b), b += col; + return b; +}; +// write zip footer (end of central directory) +var wzf = function (o, b, c, d, e) { + wbytes(o, b, 0x6054B50); // skip disk + wbytes(o, b + 8, c); + wbytes(o, b + 10, c); + wbytes(o, b + 12, d); + wbytes(o, b + 16, e); +}; +/** + * A pass-through stream to keep data uncompressed in a ZIP archive. + */ +var ZipPassThrough = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + /** + * Creates a pass-through stream that can be added to ZIP archives + * @param filename The filename to associate with this data stream + */ + function ZipPassThrough(filename) { + this.filename = filename; + this.c = crc(); + this.size = 0; + this.compression = 0; + } + /** + * Processes a chunk and pushes to the output stream. You can override this + * method in a subclass for custom behavior, but by default this passes + * the data through. You must call this.ondata(err, chunk, final) at some + * point in this method. + * @param chunk The chunk to process + * @param final Whether this is the last chunk + */ + ZipPassThrough.prototype.process = function (chunk, final) { + this.ondata(null, chunk, final); + }; + /** + * Pushes a chunk to be added. If you are subclassing this with a custom + * compression algorithm, note that you must push data from the source + * file only, pre-compression. + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + ZipPassThrough.prototype.push = function (chunk, final) { + if (!this.ondata) + err(5); + this.c.p(chunk); + this.size += chunk.length; + if (final) + this.crc = this.c.d(); + this.process(chunk, final || false); + }; + return ZipPassThrough; +}()))); + +// I don't extend because TypeScript extension adds 1kB of runtime bloat +/** + * Streaming DEFLATE compression for ZIP archives. 
Prefer using AsyncZipDeflate + * for better performance + */ +var ZipDeflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + /** + * Creates a DEFLATE stream that can be added to ZIP archives + * @param filename The filename to associate with this data stream + * @param opts The compression options + */ + function ZipDeflate(filename, opts) { + var _this = this; + if (!opts) + opts = {}; + ZipPassThrough.call(this, filename); + this.d = new Deflate(opts, function (dat, final) { + _this.ondata(null, dat, final); + }); + this.compression = 8; + this.flag = dbf(opts.level); + } + ZipDeflate.prototype.process = function (chunk, final) { + try { + this.d.push(chunk, final); + } + catch (e) { + this.ondata(e, null, final); + } + }; + /** + * Pushes a chunk to be deflated + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + ZipDeflate.prototype.push = function (chunk, final) { + ZipPassThrough.prototype.push.call(this, chunk, final); + }; + return ZipDeflate; +}()))); + +/** + * Asynchronous streaming DEFLATE compression for ZIP archives + */ +var AsyncZipDeflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + /** + * Creates an asynchronous DEFLATE stream that can be added to ZIP archives + * @param filename The filename to associate with this data stream + * @param opts The compression options + */ + function AsyncZipDeflate(filename, opts) { + var _this = this; + if (!opts) + opts = {}; + ZipPassThrough.call(this, filename); + this.d = new AsyncDeflate(opts, function (err, dat, final) { + _this.ondata(err, dat, final); + }); + this.compression = 8; + this.flag = dbf(opts.level); + this.terminate = this.d.terminate; + } + AsyncZipDeflate.prototype.process = function (chunk, final) { + this.d.push(chunk, final); + }; + /** + * Pushes a chunk to be deflated + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + AsyncZipDeflate.prototype.push = function (chunk, final) { + ZipPassThrough.prototype.push.call(this, chunk, final); + }; + return AsyncZipDeflate; +}()))); + +// TODO: Better tree shaking +/** + * A zippable archive to which files can incrementally be added + */ +var Zip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + /** + * Creates an empty ZIP archive to which files can be added + * @param cb The callback to call whenever data for the generated ZIP archive + * is available + */ + function Zip(cb) { + this.ondata = cb; + this.u = []; + this.d = 1; + } + /** + * Adds a file to the ZIP archive + * @param file The file stream to add + */ + Zip.prototype.add = function (file) { + var _this = this; + if (!this.ondata) + err(5); + // finishing or finished + if (this.d & 2) + this.ondata(err(4 + (this.d & 1) * 8, 0, 1), null, false); + else { + var f = strToU8(file.filename), fl_1 = f.length; + var com = file.comment, o = com && strToU8(com); + var u = fl_1 != file.filename.length || (o && (com.length != o.length)); + var hl_1 = fl_1 + exfl(file.extra) + 30; + if (fl_1 > 65535) + this.ondata(err(11, 0, 1), null, false); + var header = new u8(hl_1); + wzh(header, 0, file, f, u, -1); + var chks_1 = [header]; + var pAll_1 = function () { + for (var _i = 0, chks_2 = chks_1; _i < chks_2.length; _i++) { + var chk = chks_2[_i]; + _this.ondata(null, chk, false); + } + chks_1 = []; + }; + var tr_1 = this.d; + this.d = 0; + var ind_1 = this.u.length; + var uf_1 = mrg(file, { + f: f, + u: u, + o: o, + t: function () { + if 
(file.terminate) + file.terminate(); + }, + r: function () { + pAll_1(); + if (tr_1) { + var nxt = _this.u[ind_1 + 1]; + if (nxt) + nxt.r(); + else + _this.d = 1; + } + tr_1 = 1; + } + }); + var cl_1 = 0; + file.ondata = function (err, dat, final) { + if (err) { + _this.ondata(err, dat, final); + _this.terminate(); + } + else { + cl_1 += dat.length; + chks_1.push(dat); + if (final) { + var dd = new u8(16); + wbytes(dd, 0, 0x8074B50); + wbytes(dd, 4, file.crc); + wbytes(dd, 8, cl_1); + wbytes(dd, 12, file.size); + chks_1.push(dd); + uf_1.c = cl_1, uf_1.b = hl_1 + cl_1 + 16, uf_1.crc = file.crc, uf_1.size = file.size; + if (tr_1) + uf_1.r(); + tr_1 = 1; + } + else if (tr_1) + pAll_1(); + } + }; + this.u.push(uf_1); + } + }; + /** + * Ends the process of adding files and prepares to emit the final chunks. + * This *must* be called after adding all desired files for the resulting + * ZIP file to work properly. + */ + Zip.prototype.end = function () { + var _this = this; + if (this.d & 2) { + this.ondata(err(4 + (this.d & 1) * 8, 0, 1), null, true); + return; + } + if (this.d) + this.e(); + else + this.u.push({ + r: function () { + if (!(_this.d & 1)) + return; + _this.u.splice(-1, 1); + _this.e(); + }, + t: function () { } + }); + this.d = 3; + }; + Zip.prototype.e = function () { + var bt = 0, l = 0, tl = 0; + for (var _i = 0, _a = this.u; _i < _a.length; _i++) { + var f = _a[_i]; + tl += 46 + f.f.length + exfl(f.extra) + (f.o ? f.o.length : 0); + } + var out = new u8(tl + 22); + for (var _b = 0, _c = this.u; _b < _c.length; _b++) { + var f = _c[_b]; + wzh(out, bt, f, f.f, f.u, -f.c - 2, l, f.o); + bt += 46 + f.f.length + exfl(f.extra) + (f.o ? f.o.length : 0), l += f.b; + } + wzf(out, bt, this.u.length, tl, l); + this.ondata(null, out, true); + this.d = 2; + }; + /** + * A method to terminate any internal workers used by the stream. Subsequent + * calls to add() will fail. + */ + Zip.prototype.terminate = function () { + for (var _i = 0, _a = this.u; _i < _a.length; _i++) { + var f = _a[_i]; + f.t(); + } + this.d = 2; + }; + return Zip; +}()))); + +function zip(data, opts, cb) { + if (!cb) + cb = opts, opts = {}; + if (typeof cb != 'function') + err(7); + var r = {}; + fltn(data, '', r, opts); + var k = Object.keys(r); + var lft = k.length, o = 0, tot = 0; + var slft = lft, files = new Array(lft); + var term = []; + var tAll = function () { + for (var i = 0; i < term.length; ++i) + term[i](); + }; + var cbd = function (a, b) { + mt(function () { cb(a, b); }); + }; + mt(function () { cbd = cb; }); + var cbf = function () { + var out = new u8(tot + 22), oe = o, cdl = tot - o; + tot = 0; + for (var i = 0; i < slft; ++i) { + var f = files[i]; + try { + var l = f.c.length; + wzh(out, tot, f, f.f, f.u, l); + var badd = 30 + f.f.length + exfl(f.extra); + var loc = tot + badd; + out.set(f.c, loc); + wzh(out, o, f, f.f, f.u, l, tot, f.m), o += 16 + badd + (f.m ? f.m.length : 0), tot = loc + l; + } + catch (e) { + return cbd(e, null); + } + } + wzf(out, o, files.length, cdl, oe); + cbd(null, out); + }; + if (!lft) + cbf(); + var _loop_1 = function (i) { + var fn = k[i]; + var _a = r[fn], file = _a[0], p = _a[1]; + var c = crc(), size = file.length; + c.p(file); + var f = strToU8(fn), s = f.length; + var com = p.comment, m = com && strToU8(com), ms = m && m.length; + var exl = exfl(p.extra); + var compression = p.level == 0 ? 
0 : 8; + var cbl = function (e, d) { + if (e) { + tAll(); + cbd(e, null); + } + else { + var l = d.length; + files[i] = mrg(p, { + size: size, + crc: c.d(), + c: d, + f: f, + m: m, + u: s != fn.length || (m && (com.length != ms)), + compression: compression + }); + o += 30 + s + exl + l; + tot += 76 + 2 * (s + exl) + (ms || 0) + l; + if (!--lft) + cbf(); + } + }; + if (s > 65535) + cbl(err(11, 0, 1), null); + if (!compression) + cbl(null, file); + else if (size < 160000) { + try { + cbl(null, deflateSync(file, p)); + } + catch (e) { + cbl(e, null); + } + } + else + term.push(deflate(file, p, cbl)); + }; + // Cannot use lft because it can decrease + for (var i = 0; i < slft; ++i) { + _loop_1(i); + } + return tAll; +} +/** + * Synchronously creates a ZIP file. Prefer using `zip` for better performance + * with more than one file. + * @param data The directory structure for the ZIP archive + * @param opts The main options, merged with per-file options + * @returns The generated ZIP archive + */ +function zipSync(data, opts) { + if (!opts) + opts = {}; + var r = {}; + var files = []; + fltn(data, '', r, opts); + var o = 0; + var tot = 0; + for (var fn in r) { + var _a = r[fn], file = _a[0], p = _a[1]; + var compression = p.level == 0 ? 0 : 8; + var f = strToU8(fn), s = f.length; + var com = p.comment, m = com && strToU8(com), ms = m && m.length; + var exl = exfl(p.extra); + if (s > 65535) + err(11); + var d = compression ? deflateSync(file, p) : file, l = d.length; + var c = crc(); + c.p(file); + files.push(mrg(p, { + size: file.length, + crc: c.d(), + c: d, + f: f, + m: m, + u: s != fn.length || (m && (com.length != ms)), + o: o, + compression: compression + })); + o += 30 + s + exl + l; + tot += 76 + 2 * (s + exl) + (ms || 0) + l; + } + var out = new u8(tot + 22), oe = o, cdl = tot - o; + for (var i = 0; i < files.length; ++i) { + var f = files[i]; + wzh(out, f.o, f, f.f, f.u, f.c.length); + var badd = 30 + f.f.length + exfl(f.extra); + out.set(f.c, f.o + badd); + wzh(out, o, f, f.f, f.u, f.c.length, f.o, f.m), o += 16 + badd + (f.m ? f.m.length : 0); + } + wzf(out, o, files.length, cdl, oe); + return out; +} +/** + * Streaming pass-through decompression for ZIP archives + */ +var UnzipPassThrough = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + function UnzipPassThrough() { + } + UnzipPassThrough.prototype.push = function (data, final) { + this.ondata(null, data, final); + }; + UnzipPassThrough.compression = 0; + return UnzipPassThrough; +}()))); + +/** + * Streaming DEFLATE decompression for ZIP archives. Prefer AsyncZipInflate for + * better performance. 
+ */ +var UnzipInflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + /** + * Creates a DEFLATE decompression that can be used in ZIP archives + */ + function UnzipInflate() { + var _this = this; + this.i = new Inflate(function (dat, final) { + _this.ondata(null, dat, final); + }); + } + UnzipInflate.prototype.push = function (data, final) { + try { + this.i.push(data, final); + } + catch (e) { + this.ondata(e, null, final); + } + }; + UnzipInflate.compression = 8; + return UnzipInflate; +}()))); + +/** + * Asynchronous streaming DEFLATE decompression for ZIP archives + */ +var AsyncUnzipInflate = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + /** + * Creates a DEFLATE decompression that can be used in ZIP archives + */ + function AsyncUnzipInflate(_, sz) { + var _this = this; + if (sz < 320000) { + this.i = new Inflate(function (dat, final) { + _this.ondata(null, dat, final); + }); + } + else { + this.i = new AsyncInflate(function (err, dat, final) { + _this.ondata(err, dat, final); + }); + this.terminate = this.i.terminate; + } + } + AsyncUnzipInflate.prototype.push = function (data, final) { + if (this.i.terminate) + data = slc(data, 0); + this.i.push(data, final); + }; + AsyncUnzipInflate.compression = 8; + return AsyncUnzipInflate; +}()))); + +/** + * A ZIP archive decompression stream that emits files as they are discovered + */ +var Unzip = /*#__PURE__*/ ((/* unused pure expression or super */ null && (function () { + /** + * Creates a ZIP decompression stream + * @param cb The callback to call whenever a file in the ZIP archive is found + */ + function Unzip(cb) { + this.onfile = cb; + this.k = []; + this.o = { + 0: UnzipPassThrough + }; + this.p = et; + } + /** + * Pushes a chunk to be unzipped + * @param chunk The chunk to push + * @param final Whether this is the last chunk + */ + Unzip.prototype.push = function (chunk, final) { + var _this = this; + if (!this.onfile) + err(5); + if (!this.p) + err(4); + if (this.c > 0) { + var len = Math.min(this.c, chunk.length); + var toAdd = chunk.subarray(0, len); + this.c -= len; + if (this.d) + this.d.push(toAdd, !this.c); + else + this.k[0].push(toAdd); + chunk = chunk.subarray(len); + if (chunk.length) + return this.push(chunk, final); + } + else { + var f = 0, i = 0, is = void 0, buf = void 0; + if (!this.p.length) + buf = chunk; + else if (!chunk.length) + buf = this.p; + else { + buf = new u8(this.p.length + chunk.length); + buf.set(this.p), buf.set(chunk, this.p.length); + } + var l = buf.length, oc = this.c, add = oc && this.d; + var _loop_2 = function () { + var _a; + var sig = b4(buf, i); + if (sig == 0x4034B50) { + f = 1, is = i; + this_1.d = null; + this_1.c = 0; + var bf = b2(buf, i + 6), cmp_1 = b2(buf, i + 8), u = bf & 2048, dd = bf & 8, fnl = b2(buf, i + 26), es = b2(buf, i + 28); + if (l > i + 30 + fnl + es) { + var chks_3 = []; + this_1.k.unshift(chks_3); + f = 2; + var sc_1 = b4(buf, i + 18), su_1 = b4(buf, i + 22); + var fn_1 = strFromU8(buf.subarray(i + 30, i += 30 + fnl), !u); + if (sc_1 == 4294967295) { + _a = dd ? [-2] : z64e(buf, i), sc_1 = _a[0], su_1 = _a[1]; + } + else if (dd) + sc_1 = -1; + i += es; + this_1.c = sc_1; + var d_1; + var file_1 = { + name: fn_1, + compression: cmp_1, + start: function () { + if (!file_1.ondata) + err(5); + if (!sc_1) + file_1.ondata(null, et, true); + else { + var ctr = _this.o[cmp_1]; + if (!ctr) + file_1.ondata(err(14, 'unknown compression type ' + cmp_1, 1), null, false); + d_1 = sc_1 < 0 ? 
new ctr(fn_1) : new ctr(fn_1, sc_1, su_1); + d_1.ondata = function (err, dat, final) { file_1.ondata(err, dat, final); }; + for (var _i = 0, chks_4 = chks_3; _i < chks_4.length; _i++) { + var dat = chks_4[_i]; + d_1.push(dat, false); + } + if (_this.k[0] == chks_3 && _this.c) + _this.d = d_1; + else + d_1.push(et, true); + } + }, + terminate: function () { + if (d_1 && d_1.terminate) + d_1.terminate(); + } + }; + if (sc_1 >= 0) + file_1.size = sc_1, file_1.originalSize = su_1; + this_1.onfile(file_1); + } + return "break"; + } + else if (oc) { + if (sig == 0x8074B50) { + is = i += 12 + (oc == -2 && 8), f = 3, this_1.c = 0; + return "break"; + } + else if (sig == 0x2014B50) { + is = i -= 4, f = 3, this_1.c = 0; + return "break"; + } + } + }; + var this_1 = this; + for (; i < l - 4; ++i) { + var state_1 = _loop_2(); + if (state_1 === "break") + break; + } + this.p = et; + if (oc < 0) { + var dat = f ? buf.subarray(0, is - 12 - (oc == -2 && 8) - (b4(buf, is - 16) == 0x8074B50 && 4)) : buf.subarray(0, i); + if (add) + add.push(dat, !!f); + else + this.k[+(f == 2)].push(dat); + } + if (f & 2) + return this.push(buf.subarray(i), final); + this.p = buf.subarray(i); + } + if (final) { + if (this.c) + err(13); + this.p = null; + } + }; + /** + * Registers a decoder with the stream, allowing for files compressed with + * the compression type provided to be expanded correctly + * @param decoder The decoder constructor + */ + Unzip.prototype.register = function (decoder) { + this.o[decoder.compression] = decoder; + }; + return Unzip; +}()))); + +var mt = typeof queueMicrotask == 'function' ? queueMicrotask : typeof setTimeout == 'function' ? setTimeout : function (fn) { fn(); }; +function unzip(data, opts, cb) { + if (!cb) + cb = opts, opts = {}; + if (typeof cb != 'function') + err(7); + var term = []; + var tAll = function () { + for (var i = 0; i < term.length; ++i) + term[i](); + }; + var files = {}; + var cbd = function (a, b) { + mt(function () { cb(a, b); }); + }; + mt(function () { cbd = cb; }); + var e = data.length - 22; + for (; b4(data, e) != 0x6054B50; --e) { + if (!e || data.length - e > 65558) { + cbd(err(13, 0, 1), null); + return tAll; + } + } + ; + var lft = b2(data, e + 8); + if (lft) { + var c = lft; + var o = b4(data, e + 16); + var z = o == 4294967295 || c == 65535; + if (z) { + var ze = b4(data, e - 12); + z = b4(data, ze) == 0x6064B50; + if (z) { + c = lft = b4(data, ze + 32); + o = b4(data, ze + 48); + } + } + var fltr = opts && opts.filter; + var _loop_3 = function (i) { + var _a = zh(data, o, z), c_1 = _a[0], sc = _a[1], su = _a[2], fn = _a[3], no = _a[4], off = _a[5], b = slzh(data, off); + o = no; + var cbl = function (e, d) { + if (e) { + tAll(); + cbd(e, null); + } + else { + if (d) + files[fn] = d; + if (!--lft) + cbd(null, files); + } + }; + if (!fltr || fltr({ + name: fn, + size: sc, + originalSize: su, + compression: c_1 + })) { + if (!c_1) + cbl(null, slc(data, b, b + sc)); + else if (c_1 == 8) { + var infl = data.subarray(b, b + sc); + // Synchronously decompress under 512KB, or barely-compressed data + if (su < 524288 || sc > 0.8 * su) { + try { + cbl(null, inflateSync(infl, { out: new u8(su) })); + } + catch (e) { + cbl(e, null); + } + } + else + term.push(inflate(infl, { size: su }, cbl)); + } + else + cbl(err(14, 'unknown compression type ' + c_1, 1), null); + } + else + cbl(null, null); + }; + for (var i = 0; i < c; ++i) { + _loop_3(i); + } + } + else + cbd(null, {}); + return tAll; +} +/** + * Synchronously decompresses a ZIP archive. 
Prefer using `unzip` for better + * performance with more than one file. + * @param data The raw compressed ZIP file + * @param opts The ZIP extraction options + * @returns The decompressed files + */ +function unzipSync(data, opts) { + var files = {}; + var e = data.length - 22; + for (; b4(data, e) != 0x6054B50; --e) { + if (!e || data.length - e > 65558) + err(13); + } + ; + var c = b2(data, e + 8); + if (!c) + return {}; + var o = b4(data, e + 16); + var z = o == 4294967295 || c == 65535; + if (z) { + var ze = b4(data, e - 12); + z = b4(data, ze) == 0x6064B50; + if (z) { + c = b4(data, ze + 32); + o = b4(data, ze + 48); + } + } + var fltr = opts && opts.filter; + for (var i = 0; i < c; ++i) { + var _a = zh(data, o, z), c_2 = _a[0], sc = _a[1], su = _a[2], fn = _a[3], no = _a[4], off = _a[5], b = slzh(data, off); + o = no; + if (!fltr || fltr({ + name: fn, + size: sc, + originalSize: su, + compression: c_2 + })) { + if (!c_2) + files[fn] = slc(data, b, b + sc); + else if (c_2 == 8) + files[fn] = inflateSync(data.subarray(b, b + sc), { out: new u8(su) }); + else + err(14, 'unknown compression type ' + c_2); + } + } + return files; +} + +// EXTERNAL MODULE: ./src/providers.ts + 46 modules +var providers = __webpack_require__(68434); +// EXTERNAL MODULE: ./src/utils.ts +var utils = __webpack_require__(67418); +;// ./src/zip.ts + +var __async = (__this, __arguments, generator) => { + return new Promise((resolve, reject) => { + var fulfilled = (value) => { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + }; + var rejected = (value) => { + try { + step(generator.throw(value)); + } catch (e) { + reject(e); + } + }; + var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected); + step((generator = generator.apply(__this, __arguments)).next()); + }); +}; + + + +function zipAsync(file) { + return new Promise((res, rej) => { + zip(file, { mtime: /* @__PURE__ */ new Date("1/1/1980") }, (err, data) => { + if (err) { + rej(err); + return; + } + res(data); + }); + }); +} +function unzipAsync(data) { + return new Promise((res, rej) => { + unzip(data, {}, (err, data2) => { + if (err) { + rej(err); + return; + } + res(data2); + }); + }); +} +function downloadZip(_0) { + return __async(this, arguments, function* ({ + staticUrl = "", + zipName, + zipDigest, + parseJson = true + }) { + const url = `${staticUrl}/${zipName}.zip`; + const resp = yield (0,providers/* fetchData */.Fd)(url, { + method: "GET", + returnResponse: true + }); + const data = new Uint8Array(yield resp.arrayBuffer()); + if (zipDigest) { + const hash = "sha384-" + (0,utils/* bytesToBase64 */["if"])(yield (0,utils/* digest */.br)(data)); + if (zipDigest !== hash) { + const errMsg = `Invalid digest hash for file ${url}, wants ${zipDigest} has ${hash}`; + throw new Error(errMsg); + } + } + const { [zipName]: content } = yield unzipAsync(data); + if (parseJson) { + return JSON.parse(new TextDecoder().decode(content)); + } + return content; + }); +} + + /***/ }), /***/ 32019: @@ -173567,115 +177167,121 @@ __webpack_require__.r(__webpack_exports__); /* harmony export */ GET_REGISTERED: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_REGISTERED), /* harmony export */ GET_STATISTIC: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_STATISTIC), /* harmony export */ GET_WITHDRAWALS: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.GET_WITHDRAWALS), +/* harmony export */ INDEX_DB_ERROR: () => (/* reexport safe */ 
_idb__WEBPACK_IMPORTED_MODULE_8__.Fl), +/* harmony export */ IndexedDB: () => (/* reexport safe */ _idb__WEBPACK_IMPORTED_MODULE_8__.mc), /* harmony export */ Invoice: () => (/* reexport safe */ _deposits__WEBPACK_IMPORTED_MODULE_5__.qO), -/* harmony export */ MAX_FEE: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.KN), -/* harmony export */ MAX_TOVARISH_EVENTS: () => (/* reexport safe */ _tovarishClient__WEBPACK_IMPORTED_MODULE_17__.o), -/* harmony export */ MIN_FEE: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.Ss), -/* harmony export */ MIN_STAKE_BALANCE: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.pO), -/* harmony export */ MerkleTreeService: () => (/* reexport safe */ _merkleTree__WEBPACK_IMPORTED_MODULE_8__.s), -/* harmony export */ Mimc: () => (/* reexport safe */ _mimc__WEBPACK_IMPORTED_MODULE_9__.p), +/* harmony export */ MAX_FEE: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.KN), +/* harmony export */ MAX_TOVARISH_EVENTS: () => (/* reexport safe */ _tovarishClient__WEBPACK_IMPORTED_MODULE_18__.o), +/* harmony export */ MIN_FEE: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.Ss), +/* harmony export */ MIN_STAKE_BALANCE: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.pO), +/* harmony export */ MerkleTreeService: () => (/* reexport safe */ _merkleTree__WEBPACK_IMPORTED_MODULE_9__.s), +/* harmony export */ Mimc: () => (/* reexport safe */ _mimc__WEBPACK_IMPORTED_MODULE_10__.p), /* harmony export */ Multicall__factory: () => (/* reexport safe */ _typechain__WEBPACK_IMPORTED_MODULE_3__.Q2), -/* harmony export */ NetId: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.zr), +/* harmony export */ NetId: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.zr), /* harmony export */ NoteAccount: () => (/* reexport safe */ _encryptedNotes__WEBPACK_IMPORTED_MODULE_6__.Ad), /* harmony export */ OffchainOracle__factory: () => (/* reexport safe */ _typechain__WEBPACK_IMPORTED_MODULE_3__.Hk), /* harmony export */ OvmGasPriceOracle__factory: () => (/* reexport safe */ _typechain__WEBPACK_IMPORTED_MODULE_3__.Ld), -/* harmony export */ Pedersen: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_12__.Hr), -/* harmony export */ RelayerClient: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.OR), +/* harmony export */ Pedersen: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_13__.Hr), +/* harmony export */ RelayerClient: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.OR), /* harmony export */ ReverseRecords__factory: () => (/* reexport safe */ _typechain__WEBPACK_IMPORTED_MODULE_3__.Rp), -/* harmony export */ TokenPriceOracle: () => (/* reexport safe */ _prices__WEBPACK_IMPORTED_MODULE_13__.T), -/* harmony export */ TornadoBrowserProvider: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.D2), +/* harmony export */ TokenPriceOracle: () => (/* reexport safe */ _prices__WEBPACK_IMPORTED_MODULE_14__.T), +/* harmony export */ TornadoBrowserProvider: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.D2), /* harmony export */ TornadoFeeOracle: () => (/* reexport safe */ _fees__WEBPACK_IMPORTED_MODULE_7__.o), -/* harmony export */ TornadoRpcSigner: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.Vr), -/* harmony export */ TornadoVoidSigner: () => (/* reexport safe */ 
_providers__WEBPACK_IMPORTED_MODULE_14__.Gd), -/* harmony export */ TornadoWallet: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.nA), -/* harmony export */ TovarishClient: () => (/* reexport safe */ _tovarishClient__WEBPACK_IMPORTED_MODULE_17__.E), +/* harmony export */ TornadoRpcSigner: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.Vr), +/* harmony export */ TornadoVoidSigner: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.Gd), +/* harmony export */ TornadoWallet: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.nA), +/* harmony export */ TovarishClient: () => (/* reexport safe */ _tovarishClient__WEBPACK_IMPORTED_MODULE_18__.E), /* harmony export */ _META: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__._META), -/* harmony export */ addNetwork: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.AE), +/* harmony export */ addNetwork: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.AE), /* harmony export */ addressSchemaType: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.SC), /* harmony export */ ajv: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.SS), -/* harmony export */ base64ToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.Kp), -/* harmony export */ bigIntReplacer: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.gn), +/* harmony export */ base64ToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.Kp), +/* harmony export */ bigIntReplacer: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.gn), /* harmony export */ bnSchemaType: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.iL), -/* harmony export */ bnToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.jm), -/* harmony export */ buffPedersenHash: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_12__.UB), -/* harmony export */ bufferToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.lY), +/* harmony export */ bnToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.jm), +/* harmony export */ buffPedersenHash: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_13__.UB), +/* harmony export */ bufferToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.lY), /* harmony export */ bytes32BNSchemaType: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.i1), /* harmony export */ bytes32SchemaType: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.yF), -/* harmony export */ bytesToBN: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.Ju), -/* harmony export */ bytesToBase64: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__["if"]), -/* harmony export */ bytesToHex: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.My), -/* harmony export */ calculateScore: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.zy), -/* harmony export */ calculateSnarkProof: () => (/* reexport safe */ _websnark__WEBPACK_IMPORTED_MODULE_19__.i), -/* harmony export */ chunk: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.iv), -/* harmony export */ concatBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.Id), +/* harmony export */ bytesToBN: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.Ju), +/* harmony export */ bytesToBase64: () => (/* reexport safe 
*/ _utils__WEBPACK_IMPORTED_MODULE_19__["if"]), +/* harmony export */ bytesToHex: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.My), +/* harmony export */ calculateScore: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.zy), +/* harmony export */ calculateSnarkProof: () => (/* reexport safe */ _websnark__WEBPACK_IMPORTED_MODULE_20__.i), +/* harmony export */ chunk: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.iv), +/* harmony export */ concatBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.Id), /* harmony export */ convertETHToTokenAmount: () => (/* reexport safe */ _fees__WEBPACK_IMPORTED_MODULE_7__.N), /* harmony export */ createDeposit: () => (/* reexport safe */ _deposits__WEBPACK_IMPORTED_MODULE_5__.Hr), -/* harmony export */ crypto: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.Et), -/* harmony export */ customConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.cX), -/* harmony export */ defaultConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.sb), -/* harmony export */ defaultUserAgent: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.mJ), +/* harmony export */ crypto: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.Et), +/* harmony export */ customConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.cX), +/* harmony export */ defaultConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.sb), +/* harmony export */ defaultUserAgent: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.mJ), /* harmony export */ depositsEventsSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.CI), -/* harmony export */ digest: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.br), +/* harmony export */ digest: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.br), +/* harmony export */ downloadZip: () => (/* reexport safe */ _zip__WEBPACK_IMPORTED_MODULE_21__._6), /* harmony export */ echoEventsSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.ME), -/* harmony export */ enabledChains: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.Af), +/* harmony export */ enabledChains: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.Af), /* harmony export */ encryptedNotesSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.XW), /* harmony export */ factories: () => (/* reexport safe */ _typechain__WEBPACK_IMPORTED_MODULE_3__.XB), -/* harmony export */ fetch: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.hd), -/* harmony export */ fetchData: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.Fd), -/* harmony export */ fetchGetUrlFunc: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.uY), -/* harmony export */ getActiveTokenInstances: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.oY), -/* harmony export */ getActiveTokens: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.h9), +/* harmony export */ fetch: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.hd), +/* harmony export */ fetchData: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.Fd), +/* harmony export */ fetchGetUrlFunc: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.uY), +/* harmony export */ 
getActiveTokenInstances: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.oY), +/* harmony export */ getActiveTokens: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.h9), /* harmony export */ getAllDeposits: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllDeposits), /* harmony export */ getAllEncryptedNotes: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllEncryptedNotes), /* harmony export */ getAllGovernanceEvents: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllGovernanceEvents), /* harmony export */ getAllGraphEchoEvents: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllGraphEchoEvents), /* harmony export */ getAllRegisters: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllRegisters), /* harmony export */ getAllWithdrawals: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getAllWithdrawals), -/* harmony export */ getConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.zj), +/* harmony export */ getConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.zj), /* harmony export */ getDeposits: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getDeposits), /* harmony export */ getEncryptedNotes: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getEncryptedNotes), /* harmony export */ getEventsSchemaValidator: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.ZC), /* harmony export */ getGovernanceEvents: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getGovernanceEvents), /* harmony export */ getGraphEchoEvents: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getGraphEchoEvents), -/* harmony export */ getHttpAgent: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.WU), -/* harmony export */ getInstanceByAddress: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.Zh), +/* harmony export */ getHttpAgent: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.WU), +/* harmony export */ getIndexedDB: () => (/* reexport safe */ _idb__WEBPACK_IMPORTED_MODULE_8__.W7), +/* harmony export */ getInstanceByAddress: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.Zh), /* harmony export */ getMeta: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getMeta), -/* harmony export */ getNetworkConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.RY), +/* harmony export */ getNetworkConfig: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.RY), /* harmony export */ getNoteAccounts: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getNoteAccounts), -/* harmony export */ getProvider: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.sO), -/* harmony export */ getProviderWithNetId: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.MF), +/* harmony export */ getProvider: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.sO), +/* harmony export */ getProviderWithNetId: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_15__.MF), /* harmony export */ getRegisters: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getRegisters), -/* harmony export */ getRelayerEnsSubdomains: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_11__.o2), +/* harmony export */ 
getRelayerEnsSubdomains: () => (/* reexport safe */ _networkConfig__WEBPACK_IMPORTED_MODULE_12__.o2), /* harmony export */ getStatistic: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getStatistic), /* harmony export */ getStatusSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.c_), -/* harmony export */ getSupportedInstances: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.XF), -/* harmony export */ getTokenBalances: () => (/* reexport safe */ _tokens__WEBPACK_IMPORTED_MODULE_16__.H), -/* harmony export */ getWeightRandom: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.c$), +/* harmony export */ getSupportedInstances: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.XF), +/* harmony export */ getTokenBalances: () => (/* reexport safe */ _tokens__WEBPACK_IMPORTED_MODULE_17__.H), +/* harmony export */ getWeightRandom: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.c$), /* harmony export */ getWithdrawals: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.getWithdrawals), /* harmony export */ governanceEventsSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.FR), -/* harmony export */ hexToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.aT), -/* harmony export */ initGroth16: () => (/* reexport safe */ _websnark__WEBPACK_IMPORTED_MODULE_19__.O), -/* harmony export */ isNode: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.Ll), +/* harmony export */ hexToBytes: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.aT), +/* harmony export */ initGroth16: () => (/* reexport safe */ _websnark__WEBPACK_IMPORTED_MODULE_20__.O), +/* harmony export */ isNode: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.Ll), /* harmony export */ jobsSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.Us), -/* harmony export */ leBuff2Int: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.ae), -/* harmony export */ leInt2Buff: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.EI), -/* harmony export */ mimc: () => (/* reexport safe */ _mimc__WEBPACK_IMPORTED_MODULE_9__.f), -/* harmony export */ multicall: () => (/* reexport safe */ _multicall__WEBPACK_IMPORTED_MODULE_10__.C), +/* harmony export */ leBuff2Int: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.ae), +/* harmony export */ leInt2Buff: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.EI), +/* harmony export */ mimc: () => (/* reexport safe */ _mimc__WEBPACK_IMPORTED_MODULE_10__.f), +/* harmony export */ multicall: () => (/* reexport safe */ _multicall__WEBPACK_IMPORTED_MODULE_11__.C), /* harmony export */ packEncryptedMessage: () => (/* reexport safe */ _encryptedNotes__WEBPACK_IMPORTED_MODULE_6__.Fr), -/* harmony export */ pedersen: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_12__.NO), -/* harmony export */ pickWeightedRandomRelayer: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_15__.sN), -/* harmony export */ populateTransaction: () => (/* reexport safe */ _providers__WEBPACK_IMPORTED_MODULE_14__.zr), +/* harmony export */ pedersen: () => (/* reexport safe */ _pedersen__WEBPACK_IMPORTED_MODULE_13__.NO), +/* harmony export */ pickWeightedRandomRelayer: () => (/* reexport safe */ _relayerClient__WEBPACK_IMPORTED_MODULE_16__.sN), +/* harmony export */ populateTransaction: () => (/* reexport safe */ 
_providers__WEBPACK_IMPORTED_MODULE_15__.zr), /* harmony export */ proofSchemaType: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.Y6), /* harmony export */ queryGraph: () => (/* reexport safe */ _graphql__WEBPACK_IMPORTED_MODULE_1__.queryGraph), -/* harmony export */ rBigInt: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.ib), +/* harmony export */ rBigInt: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.ib), /* harmony export */ registeredEventsSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.dX), -/* harmony export */ sleep: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.yy), -/* harmony export */ substring: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.uU), -/* harmony export */ toFixedHex: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.$W), -/* harmony export */ toFixedLength: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.sY), +/* harmony export */ sleep: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.yy), +/* harmony export */ substring: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.uU), +/* harmony export */ toFixedHex: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.$W), +/* harmony export */ toFixedLength: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.sY), /* harmony export */ unpackEncryptedMessage: () => (/* reexport safe */ _encryptedNotes__WEBPACK_IMPORTED_MODULE_6__.ol), -/* harmony export */ validateUrl: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_18__.wv), -/* harmony export */ withdrawalsEventsSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.$j) +/* harmony export */ unzipAsync: () => (/* reexport safe */ _zip__WEBPACK_IMPORTED_MODULE_21__.fY), +/* harmony export */ validateUrl: () => (/* reexport safe */ _utils__WEBPACK_IMPORTED_MODULE_19__.wv), +/* harmony export */ withdrawalsEventsSchema: () => (/* reexport safe */ _schemas__WEBPACK_IMPORTED_MODULE_2__.$j), +/* harmony export */ zipAsync: () => (/* reexport safe */ _zip__WEBPACK_IMPORTED_MODULE_21__.a8) /* harmony export */ }); /* harmony import */ var _events__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(94513); /* harmony reexport (unknown) */ var __WEBPACK_REEXPORT_OBJECT__ = {}; @@ -173688,18 +177294,22 @@ __webpack_require__.r(__webpack_exports__); /* harmony import */ var _deposits__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(7240); /* harmony import */ var _encryptedNotes__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(33298); /* harmony import */ var _fees__WEBPACK_IMPORTED_MODULE_7__ = __webpack_require__(37182); -/* harmony import */ var _merkleTree__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(5217); -/* harmony import */ var _mimc__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(22901); -/* harmony import */ var _multicall__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(48486); -/* harmony import */ var _networkConfig__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(59499); -/* harmony import */ var _pedersen__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(85111); -/* harmony import */ var _prices__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(34525); -/* harmony import */ var _providers__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(68434); -/* harmony import */ var _relayerClient__WEBPACK_IMPORTED_MODULE_15__ = __webpack_require__(57194); -/* harmony import */ var _tokens__WEBPACK_IMPORTED_MODULE_16__ = 
__webpack_require__(7393); -/* harmony import */ var _tovarishClient__WEBPACK_IMPORTED_MODULE_17__ = __webpack_require__(96838); -/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_18__ = __webpack_require__(67418); -/* harmony import */ var _websnark__WEBPACK_IMPORTED_MODULE_19__ = __webpack_require__(26746); +/* harmony import */ var _idb__WEBPACK_IMPORTED_MODULE_8__ = __webpack_require__(83968); +/* harmony import */ var _merkleTree__WEBPACK_IMPORTED_MODULE_9__ = __webpack_require__(5217); +/* harmony import */ var _mimc__WEBPACK_IMPORTED_MODULE_10__ = __webpack_require__(22901); +/* harmony import */ var _multicall__WEBPACK_IMPORTED_MODULE_11__ = __webpack_require__(48486); +/* harmony import */ var _networkConfig__WEBPACK_IMPORTED_MODULE_12__ = __webpack_require__(59499); +/* harmony import */ var _pedersen__WEBPACK_IMPORTED_MODULE_13__ = __webpack_require__(85111); +/* harmony import */ var _prices__WEBPACK_IMPORTED_MODULE_14__ = __webpack_require__(34525); +/* harmony import */ var _providers__WEBPACK_IMPORTED_MODULE_15__ = __webpack_require__(68434); +/* harmony import */ var _relayerClient__WEBPACK_IMPORTED_MODULE_16__ = __webpack_require__(57194); +/* harmony import */ var _tokens__WEBPACK_IMPORTED_MODULE_17__ = __webpack_require__(7393); +/* harmony import */ var _tovarishClient__WEBPACK_IMPORTED_MODULE_18__ = __webpack_require__(96838); +/* harmony import */ var _utils__WEBPACK_IMPORTED_MODULE_19__ = __webpack_require__(67418); +/* harmony import */ var _websnark__WEBPACK_IMPORTED_MODULE_20__ = __webpack_require__(26746); +/* harmony import */ var _zip__WEBPACK_IMPORTED_MODULE_21__ = __webpack_require__(18995); + + diff --git a/dist/zip.d.ts b/dist/zip.d.ts new file mode 100644 index 0000000..e924664 --- /dev/null +++ b/dist/zip.d.ts @@ -0,0 +1,9 @@ +import { AsyncZippable, Unzipped } from 'fflate'; +export declare function zipAsync(file: AsyncZippable): Promise; +export declare function unzipAsync(data: Uint8Array): Promise; +export declare function downloadZip({ staticUrl, zipName, zipDigest, parseJson, }: { + staticUrl?: string; + zipName: string; + zipDigest?: string; + parseJson?: boolean; +}): Promise; diff --git a/package.json b/package.json index be55ebe..56b8546 100644 --- a/package.json +++ b/package.json @@ -42,7 +42,8 @@ "cross-fetch": "^4.0.0", "ethers": "^6.13.2", "ffjavascript": "0.2.48", - "fflate": "^0.8.2" + "fflate": "^0.8.2", + "idb": "^8.0.0" }, "devDependencies": { "@rollup/plugin-commonjs": "^28.0.0", diff --git a/src/events/db.ts b/src/events/db.ts new file mode 100644 index 0000000..7780442 --- /dev/null +++ b/src/events/db.ts @@ -0,0 +1,150 @@ +import { downloadZip } from '../zip'; +import { IndexedDB } from '../idb'; + +import { BaseTornadoService, BaseTornadoServiceConstructor } from './base'; +import { BaseEvents, MinimalEvents, DepositsEvents, WithdrawalsEvents, CachedEvents } from './types'; + +export async function saveDBEvents({ + idb, + instanceName, + events, + lastBlock, +}: { + idb: IndexedDB; + instanceName: string; + events: T[]; + lastBlock: number; +}) { + try { + await idb.createMultipleTransactions({ + data: events, + storeName: instanceName, + }); + + await idb.putItem({ + data: { + blockNumber: lastBlock, + name: instanceName, + }, + storeName: 'lastEvents', + }); + } catch (err) { + console.log('Method saveDBEvents has error'); + console.log(err); + } +} + +export async function loadDBEvents({ + idb, + instanceName, +}: { + idb: IndexedDB; + instanceName: string; +}): Promise> { + try { + const lastBlockStore = await 
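+      // the shared 'lastEvents' store holds one { name, blockNumber } checkpoint per instance, written by saveDBEvents above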
idb.getItem<{ blockNumber: number; name: string }>({ + storeName: 'lastEvents', + key: instanceName, + }); + + if (!lastBlockStore?.blockNumber) { + return { + events: [], + lastBlock: 0, + }; + } + + return { + events: await idb.getAll({ storeName: instanceName }), + lastBlock: lastBlockStore.blockNumber, + }; + } catch (err) { + console.log('Method loadDBEvents has error'); + console.log(err); + + return { + events: [], + lastBlock: 0, + }; + } +} + +export async function loadRemoteEvents({ + staticUrl, + instanceName, + deployedBlock, +}: { + staticUrl: string; + instanceName: string; + deployedBlock: number; +}): Promise> { + try { + const zipName = `${instanceName}.json`.toLowerCase(); + + const events = await downloadZip({ + staticUrl, + zipName, + }); + + if (!Array.isArray(events)) { + const errStr = `Invalid events from ${staticUrl}/${zipName}`; + throw new Error(errStr); + } + + return { + events, + lastBlock: events[events.length - 1]?.blockNumber || deployedBlock, + fromCache: true, + }; + } catch (err) { + console.log('Method loadRemoteEvents has error'); + console.log(err); + + return { + events: [], + lastBlock: deployedBlock, + fromCache: true, + }; + } +} + +export interface DBTornadoServiceConstructor extends BaseTornadoServiceConstructor { + staticUrl: string; + idb: IndexedDB; +} + +export class DBTornadoService extends BaseTornadoService { + staticUrl: string; + idb: IndexedDB; + + constructor(params: DBTornadoServiceConstructor) { + super(params); + + this.staticUrl = params.staticUrl; + this.idb = params.idb; + } + + async getEventsFromDB() { + return await loadDBEvents({ + idb: this.idb, + instanceName: this.getInstanceName(), + }); + } + + async getEventsFromCache() { + return await loadRemoteEvents({ + staticUrl: this.staticUrl, + instanceName: this.getInstanceName(), + deployedBlock: this.deployedBlock, + }); + } + + async saveEvents({ events, lastBlock }: BaseEvents) { + await saveDBEvents({ + idb: this.idb, + instanceName: this.getInstanceName(), + events, + lastBlock, + }); + } +} diff --git a/src/events/index.ts b/src/events/index.ts index 63fa69b..77219eb 100644 --- a/src/events/index.ts +++ b/src/events/index.ts @@ -1,2 +1,3 @@ export * from './types'; export * from './base'; +export * from './db'; diff --git a/src/idb.ts b/src/idb.ts new file mode 100644 index 0000000..794f0ce --- /dev/null +++ b/src/idb.ts @@ -0,0 +1,395 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { openDB, deleteDB, OpenDBCallbacks, IDBPDatabase } from 'idb'; +import { getConfig, getNetworkConfig, NetId, NetIdType } from './networkConfig'; + +export const INDEX_DB_ERROR = 'A mutation operation was attempted on a database that did not allow mutations.'; + +export interface IDBIndex { + name: string; + unique?: boolean; +} + +export interface IDBStores { + name: string; + keyPath?: string; + indexes?: IDBIndex[]; +} + +export interface IDBConstructor { + dbName: string; + stores?: IDBStores[]; +} + +export class IndexedDB { + dbExists: boolean; + isBlocked: boolean; + // todo: TestDBSchema on any + options: OpenDBCallbacks; + dbName: string; + dbVersion: number; + db?: IDBPDatabase; + + constructor({ dbName, stores }: IDBConstructor) { + this.dbExists = false; + this.isBlocked = false; + + this.options = { + upgrade(db) { + Object.values(db.objectStoreNames).forEach((value) => { + db.deleteObjectStore(value); + }); + + [{ name: 'keyval' }, ...(stores || [])].forEach(({ name, keyPath, indexes }) => { + const store = db.createObjectStore(name, { + keyPath, + 
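+            // autoIncrement adds a key generator; stores that define a keyPath (e.g. 'leafIndex') use in-line keys, while the bare 'keyval' store is written with explicit keys through putItem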
autoIncrement: true, + }); + + if (Array.isArray(indexes)) { + indexes.forEach(({ name, unique = false }) => { + store.createIndex(name, name, { unique }); + }); + } + }); + }, + }; + + this.dbName = dbName; + this.dbVersion = 34; + } + + async initDB() { + try { + if (this.dbExists || this.isBlocked) { + return; + } + + this.db = await openDB(this.dbName, this.dbVersion, this.options); + this.db.addEventListener('onupgradeneeded', async () => { + await this._removeExist(); + }); + + this.dbExists = true; + } catch (err: any) { + // needed for private mode firefox browser + if (err.message.includes(INDEX_DB_ERROR)) { + console.log('This browser does not support IndexedDB!'); + this.isBlocked = true; + return; + } + + if (err.message.includes('less than the existing version')) { + console.log(`Upgrading DB ${this.dbName} to ${this.dbVersion}`); + await this._removeExist(); + return; + } + + console.error(`Method initDB has error: ${err.message}`); + } + } + + async _removeExist() { + await deleteDB(this.dbName); + this.dbExists = false; + + await this.initDB(); + } + + async getFromIndex({ + storeName, + indexName, + key, + }: { + storeName: string; + indexName: string; + key?: string; + }): Promise { + await this.initDB(); + + if (!this.db) { + return; + } + + try { + return (await this.db.getFromIndex(storeName, indexName, key)) as T; + } catch (err: any) { + throw new Error(`Method getFromIndex has error: ${err.message}`); + } + } + + async getAllFromIndex({ + storeName, + indexName, + key, + count, + }: { + storeName: string; + indexName: string; + key?: string; + count?: number; + }): Promise { + await this.initDB(); + + if (!this.db) { + return [] as T; + } + + try { + return (await this.db.getAllFromIndex(storeName, indexName, key, count)) as T; + } catch (err: any) { + throw new Error(`Method getAllFromIndex has error: ${err.message}`); + } + } + + async getItem({ storeName, key }: { storeName: string; key: string }): Promise { + await this.initDB(); + + if (!this.db) { + return; + } + + try { + const store = this.db.transaction(storeName).objectStore(storeName); + + return (await store.get(key)) as T; + } catch (err: any) { + throw new Error(`Method getItem has error: ${err.message}`); + } + } + + async addItem({ storeName, data, key = '' }: { storeName: string; data: any; key: string }) { + await this.initDB(); + + if (!this.db) { + return; + } + + try { + const tx = this.db.transaction(storeName, 'readwrite'); + const isExist = await tx.objectStore(storeName).get(key); + + if (!isExist) { + await tx.objectStore(storeName).add(data); + } + } catch (err: any) { + throw new Error(`Method addItem has error: ${err.message}`); + } + } + + async putItem({ storeName, data, key }: { storeName: string; data: any; key?: string }) { + await this.initDB(); + + if (!this.db) { + return; + } + + try { + const tx = this.db.transaction(storeName, 'readwrite'); + + await tx.objectStore(storeName).put(data, key); + } catch (err: any) { + throw new Error(`Method putItem has error: ${err.message}`); + } + } + + async deleteItem({ storeName, key }: { storeName: string; key: string }) { + await this.initDB(); + + if (!this.db) { + return; + } + + try { + const tx = this.db.transaction(storeName, 'readwrite'); + + await tx.objectStore(storeName).delete(key); + } catch (err: any) { + throw new Error(`Method deleteItem has error: ${err.message}`); + } + } + + async getAll({ storeName }: { storeName: string }): Promise { + await this.initDB(); + + if (!this.db) { + return [] as T; + } + + try { + const tx 
= this.db.transaction(storeName, 'readonly'); + + return (await tx.objectStore(storeName).getAll()) as T; + } catch (err: any) { + throw new Error(`Method getAll has error: ${err.message}`); + } + } + + /** + * Simple key-value store inspired by idb-keyval package + */ + getValue(key: string) { + return this.getItem({ storeName: 'keyval', key }); + } + + setValue(key: string, data: any) { + return this.putItem({ storeName: 'keyval', key, data }); + } + + delValue(key: string) { + return this.deleteItem({ storeName: 'keyval', key }); + } + + async clearStore({ storeName, mode = 'readwrite' }: { storeName: string; mode: IDBTransactionMode }) { + await this.initDB(); + + if (!this.db) { + return; + } + + try { + const tx = this.db.transaction(storeName, mode); + + await (tx.objectStore(storeName).clear as () => Promise)(); + } catch (err: any) { + throw new Error(`Method clearStore has error: ${err.message}`); + } + } + + async createTransactions({ + storeName, + data, + mode = 'readwrite', + }: { + storeName: string; + data: any; + mode: IDBTransactionMode; + }) { + await this.initDB(); + + if (!this.db) { + return; + } + + try { + const tx = this.db.transaction(storeName, mode); + + await (tx.objectStore(storeName).add as (value: any, key?: any) => Promise)(data); + await tx.done; + } catch (err: any) { + throw new Error(`Method createTransactions has error: ${err.message}`); + } + } + + async createMultipleTransactions({ + storeName, + data, + index, + mode = 'readwrite', + }: { + storeName: string; + data: any[]; + index?: any; + mode?: IDBTransactionMode; + }) { + await this.initDB(); + + if (!this.db) { + return; + } + + try { + const tx = this.db.transaction(storeName, mode); + + for (const item of data) { + if (item) { + await (tx.store.put as (value: any, key?: any) => Promise)({ ...item, ...index }); + } + } + } catch (err: any) { + throw new Error(`Method createMultipleTransactions has error: ${err.message}`); + } + } +} + +/** + * Should check if DB is initialized well + */ +export async function getIndexedDB(netId?: NetIdType) { + // key-value db for settings + if (!netId) { + const idb = new IndexedDB({ dbName: 'tornado-core' }); + await idb.initDB(); + return idb; + } + + const DEPOSIT_INDEXES = [ + { name: 'transactionHash', unique: false }, + { name: 'commitment', unique: true }, + ]; + const WITHDRAWAL_INDEXES = [ + { name: 'nullifierHash', unique: true }, // keys on which the index is created + ]; + const LAST_EVENT_INDEXES = [{ name: 'name', unique: false }]; + + const defaultState = [ + { + name: 'encrypted_events', + keyPath: 'transactionHash', + }, + { + name: 'lastEvents', + keyPath: 'name', + indexes: LAST_EVENT_INDEXES, + }, + ]; + + const config = getConfig(netId); + + const { tokens, nativeCurrency } = config; + + const stores = [...defaultState]; + + if (netId === NetId.MAINNET) { + stores.push({ + name: 'register_events', + keyPath: 'ensName', + }); + } + + Object.entries(tokens).forEach(([token, { instanceAddress }]) => { + Object.keys(instanceAddress).forEach((amount) => { + if (nativeCurrency === token) { + stores.push({ + name: `stringify_bloom_${netId}_${token}_${amount}`, + keyPath: 'hashBloom', + }); + } + + stores.push( + { + name: `deposits_${netId}_${token}_${amount}`, + keyPath: 'leafIndex', // the key by which it refers to the object must be in all instances of the storage + indexes: DEPOSIT_INDEXES, + }, + { + name: `withdrawals_${netId}_${token}_${amount}`, + keyPath: 'blockNumber', + indexes: WITHDRAWAL_INDEXES, + }, + { + name: 
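+          // presumably the stringified merkle tree snapshot for this instance, keyed by its hash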
`stringify_tree_${netId}_${token}_${amount}`, + keyPath: 'hashTree', + }, + ); + }); + }); + + const idb = new IndexedDB({ + dbName: `tornado_core_${netId}`, + stores, + }); + + await idb.initDB(); + + return idb; +} diff --git a/src/index.ts b/src/index.ts index 26de59f..4a79754 100644 --- a/src/index.ts +++ b/src/index.ts @@ -6,6 +6,7 @@ export * from './batch'; export * from './deposits'; export * from './encryptedNotes'; export * from './fees'; +export * from './idb'; export * from './merkleTree'; export * from './mimc'; export * from './multicall'; @@ -18,3 +19,4 @@ export * from './tokens'; export * from './tovarishClient'; export * from './utils'; export * from './websnark'; +export * from './zip'; diff --git a/src/zip.ts b/src/zip.ts new file mode 100644 index 0000000..037f6fb --- /dev/null +++ b/src/zip.ts @@ -0,0 +1,66 @@ +import { zip, unzip, AsyncZippable, Unzipped } from 'fflate'; +import { fetchData } from './providers'; +import { bytesToBase64, digest } from './utils'; + +export function zipAsync(file: AsyncZippable): Promise { + return new Promise((res, rej) => { + zip(file, { mtime: new Date('1/1/1980') }, (err, data) => { + if (err) { + rej(err); + return; + } + res(data); + }); + }); +} + +export function unzipAsync(data: Uint8Array): Promise { + return new Promise((res, rej) => { + unzip(data, {}, (err, data) => { + if (err) { + rej(err); + return; + } + res(data); + }); + }); +} + +export async function downloadZip({ + staticUrl = '', + zipName, + zipDigest, + parseJson = true, +}: { + staticUrl?: string; + zipName: string; + zipDigest?: string; + parseJson?: boolean; +}): Promise { + const url = `${staticUrl}/${zipName}.zip`; + + const resp = (await fetchData(url, { + method: 'GET', + returnResponse: true, + })) as Response; + + const data = new Uint8Array(await resp.arrayBuffer()); + + // If the zip has digest value, compare it + if (zipDigest) { + const hash = 'sha384-' + bytesToBase64(await digest(data)); + + if (zipDigest !== hash) { + const errMsg = `Invalid digest hash for file ${url}, wants ${zipDigest} has ${hash}`; + throw new Error(errMsg); + } + } + + const { [zipName]: content } = await unzipAsync(data); + + if (parseJson) { + return JSON.parse(new TextDecoder().decode(content)) as T; + } + + return content as T; +} diff --git a/yarn.lock b/yarn.lock index dc339ac..c081d1e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3096,6 +3096,11 @@ iconv-lite@^0.4.24: dependencies: safer-buffer ">= 2.1.2 < 3" +idb@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/idb/-/idb-8.0.0.tgz#33d7ed894ed36e23bcb542fb701ad579bfaad41f" + integrity sha512-l//qvlAKGmQO31Qn7xdzagVPPaHTxXx199MhrAFuVBTPqydcPYBWjkrbv4Y0ktB+GmWOiwHl237UUOrLmQxLvw== + ieee754@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
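A minimal usage sketch of the new cache flow, assuming only the exports introduced above (getIndexedDB, loadDBEvents, loadRemoteEvents, saveDBEvents, NetId); the instance name, static URL and deployed block are hypothetical placeholders, and the import path assumes the package entry point re-exports src/index.ts:

import { getIndexedDB, loadDBEvents, loadRemoteEvents, saveDBEvents, NetId, DepositsEvents } from './index';

async function syncDepositEvents() {
  // Per-network database; getIndexedDB(netId) registers the deposits_/withdrawals_/lastEvents stores.
  const idb = await getIndexedDB(NetId.MAINNET);

  const instanceName = 'deposits_1_eth_1';        // hypothetical instance store name
  const staticUrl = 'https://static.example.org'; // hypothetical static events mirror
  const deployedBlock = 9116966;                  // hypothetical deployment block

  // Local IndexedDB cache first.
  let { events, lastBlock } = await loadDBEvents<DepositsEvents>({ idb, instanceName });

  // Fall back to the zipped JSON at `${staticUrl}/${instanceName}.json.zip` and persist it locally.
  if (!events.length) {
    ({ events, lastBlock } = await loadRemoteEvents<DepositsEvents>({ staticUrl, instanceName, deployedBlock }));
    await saveDBEvents<DepositsEvents>({ idb, instanceName, events, lastBlock });
  }

  return { events, lastBlock };
}

// The bare key-value database (getIndexedDB() with no netId) can hold simple settings:
//   const settings = await getIndexedDB();
//   await settings.setValue('rpc', 'https://rpc.example.org');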