Use Subgraph & Batched Events #3

Merged
Theo merged 9 commits from tornadocontrib/nova-ui:events-simple into master on 2024-05-09 00:34:31 +03:00
32 changed files with 3335 additions and 107 deletions

View File

@ -2,19 +2,29 @@
## Build Setup

If you use the latest Node.js version, you should set the NODE_OPTIONS env variable first:

```bash
export NODE_OPTIONS="--openssl-legacy-provider"
```

```bash
# install dependencies
$ yarn install

-# serve with hot reload at localhost:3000
-$ yarn dev

# build for production and launch server
$ yarn build
$ yarn start

# generate static project
$ yarn generate

+# serve with hot reload at localhost:3000
+# run yarn build first if worker files have changed
+$ yarn dev

+# update cached events from node & subgraphs
+$ yarn update:events
```

For a detailed explanation of how things work, check out the [Nuxt.js docs](https://nuxtjs.org).

View File

@ -1,17 +1,17 @@
-/* eslint-disable @typescript-eslint/no-require-imports */
+import { AES, HmacSHA256, enc } from 'crypto-js'
+import { isEmpty } from 'lodash'
+import { BigNumber, Contract } from 'ethers'
+import { poseidon } from '@tornado/circomlib'
+import { decrypt } from 'eth-sig-util'
-const { AES, HmacSHA256, enc } = require('crypto-js')
-const { isEmpty } = require('lodash')
-const { BigNumber } = require('ethers')
-const { poseidon } = require('@tornado/circomlib')
-const { decrypt } = require('eth-sig-util')
-const { IndexedDB } = require('../services/idb')
-const { sleep } = require('../utilities/helpers')
-const { workerEvents, numbers } = require('../constants/worker')
-const { ExtendedProvider } = require('../services/ether/ExtendedProvider')
-const { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST } = require('../constants/contracts')
-const { TornadoPool__factory: TornadoPoolFactory } = require('../_contracts')
+import { IndexedDB } from './services/idb'
+import { BatchEventsService } from './services/batch'
+import { getAllCommitments } from './services/graph'
+import { ExtendedProvider } from './services/provider'
+import { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST, workerEvents, numbers } from './services/constants'
+import { sleep } from './services/utilities'
+import { poolAbi } from './services/pool'
+import { downloadEvents } from './services/downloadEvents'
const getProviderWithSigner = (chainId) => {
return new ExtendedProvider(RPC_LIST[chainId], chainId, FALLBACK_RPC_LIST[chainId])
@ -61,14 +61,59 @@ const initWorker = (chainId) => {
setTornadoPool(chainId, provider)
}
const setTornadoPool = (chainId, provider) => {
-self.poolContract = TornadoPoolFactory.connect(POOL_CONTRACT[chainId], provider)
+self.poolContract = new Contract(POOL_CONTRACT[chainId], poolAbi, provider)
+self.BatchEventsService = new BatchEventsService({
+provider,
+contract: self.poolContract
+})
}
const getCommitmentBatch = async ({ blockFrom, blockTo, cachedEvents, withCache }) => {
-const filter = self.poolContract.filters.NewCommitment()
-const events = await self.poolContract.queryFilter(filter, blockFrom, blockTo)
+const events = []
-const commitmentEvents = events.map(({ blockNumber, transactionHash, args }) => ({
+let { events: graphEvents, lastSyncBlock } = await getAllCommitments({
+fromBlock: blockFrom,
+toBlock: blockTo,
+chainId
+})
+if (lastSyncBlock) {
+graphEvents = graphEvents
+.filter(({ blockNumber }) => {
+if (blockFrom && blockTo) {
+return Number(blockFrom) <= Number(blockNumber) && Number(blockNumber) <= Number(blockTo)
+} else if (blockTo) {
+return Number(blockNumber) <= Number(blockTo)
+}
+// does not filter by default
+return true
+})
+.map(({ blockNumber, transactionHash, index, commitment, encryptedOutput }) => ({
+blockNumber,
+transactionHash,
+index: Number(index),
+commitment,
+encryptedOutput,
+}))
+console.log({
+graphEvents
+})
+events.push(...graphEvents)
+blockFrom = lastSyncBlock
+}
+if (!blockTo || blockTo > blockFrom) {
+let nodeEvents = await self.BatchEventsService.getBatchEvents({
+fromBlock: blockFrom,
+toBlock: blockTo,
+type: 'NewCommitment'
+})
+if (nodeEvents && nodeEvents.length) {
+nodeEvents = nodeEvents.map(({ blockNumber, transactionHash, args }) => ({
blockNumber,
transactionHash,
index: Number(args.index),
@ -76,7 +121,15 @@ const getCommitmentBatch = async ({ blockFrom, blockTo, cachedEvents, withCache
commitment: args.commitment,
encryptedOutput: args.encryptedOutput,
}))
-return commitmentEvents.filter((el) => {
+console.log({
+nodeEvents
+})
+events.push(...nodeEvents)
+}
+}
+return events.filter((el) => {
if (!withCache && cachedEvents && cachedEvents.length) {
return cachedEvents.find((cached) => {
return el.transactionHash === cached.transactionHash && el.index === cached.index
@ -113,6 +166,14 @@ const getCommitments = async ({ withCache, lastSyncBlock }) => {
return { commitmentEvents: cachedEvents }
}
blockFrom = newBlockFrom > currentBlock ? currentBlock : newBlockFrom
+} else {
+const downloadedEvents = await downloadEvents(`commitments_${self.chainId}.json`, blockFrom)
+if (downloadedEvents.events.length) {
+cachedEvents.push(...downloadedEvents.events)
+blockFrom = downloadedEvents.lastBlock
+}
+}
const commitmentEvents = await getCommitmentBatch({ blockFrom, blockTo: currentBlock, cachedEvents, withCache })

View File

@ -1,14 +1,14 @@
-/* eslint-disable @typescript-eslint/no-require-imports */
-const { isEmpty } = require('lodash')
-const { BigNumber } = require('ethers')
+import { isEmpty } from 'lodash'
+import { BigNumber, Contract } from 'ethers'
-const { IndexedDB } = require('../services/idb')
-const { sleep } = require('../utilities/helpers')
-const { workerEvents, numbers } = require('../constants/worker')
-const { ExtendedProvider } = require('../services/ether/ExtendedProvider')
-const { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST } = require('../constants/contracts')
-const { TornadoPool__factory: TornadoPoolFactory } = require('../_contracts')
+import { IndexedDB } from './services/idb'
+import { BatchEventsService } from './services/batch'
+import { getAllNullifiers } from './services/graph'
+import { ExtendedProvider } from './services/provider'
+import { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST, workerEvents, numbers } from './services/constants'
+import { sleep } from './services/utilities'
+import { poolAbi } from './services/pool'
+import { downloadEvents } from './services/downloadEvents'
const getProviderWithSigner = (chainId) => {
return new ExtendedProvider(RPC_LIST[chainId], chainId, FALLBACK_RPC_LIST[chainId])
@ -47,7 +47,12 @@ const initWorker = (chainId) => {
}
const setTornadoPool = (chainId, provider) => {
-self.poolContract = TornadoPoolFactory.connect(POOL_CONTRACT[chainId], provider)
+self.poolContract = new Contract(POOL_CONTRACT[chainId], poolAbi, provider)
+self.BatchEventsService = new BatchEventsService({
+provider,
+contract: self.poolContract
+})
}
const saveEvents = async ({ events }) => {
@ -116,6 +121,14 @@ const getCachedEvents = async () => {
return { blockFrom, cachedEvents }
}
blockFrom = newBlockFrom > currentBlock ? currentBlock : newBlockFrom
+} else {
+const downloadedEvents = await downloadEvents(`nullifiers_${self.chainId}.json`, blockFrom)
+if (downloadedEvents.events.length) {
+cachedEvents.push(...downloadedEvents.events)
+blockFrom = downloadedEvents.lastBlock
+}
+}
return { blockFrom, cachedEvents }
@ -123,14 +136,39 @@ const getCachedEvents = async () => {
const getNullifiers = async (blockFrom) => {
try {
-const filter = self.poolContract.filters.NewNullifier()
-const events = await self.poolContract.queryFilter(filter, blockFrom)
+const events = []
-return events.map(({ blockNumber, transactionHash, args }) => ({
+let { events: graphEvents, lastSyncBlock } = await getAllNullifiers({ fromBlock: blockFrom, chainId })
+if (lastSyncBlock) {
+console.log({
+graphEvents
+})
+events.push(...graphEvents)
+blockFrom = lastSyncBlock
+}
+let nodeEvents = await self.BatchEventsService.getBatchEvents({
+fromBlock: blockFrom,
+type: 'NewNullifier'
+})
+if (nodeEvents && nodeEvents.length) {
+nodeEvents = nodeEvents.map(({ blockNumber, transactionHash, args }) => ({
blockNumber,
transactionHash,
nullifier: args.nullifier,
}))
+console.log({
+nodeEvents
+})
+events.push(...nodeEvents)
+}
+return events
} catch (err) {
console.error('getNullifiers', err.message)
return []

assets/services/batch.js (new file, 85 lines)
View File

@ -0,0 +1,85 @@
import { sleep, getBatches } from './utilities'
export class BatchEventsService {
constructor({
provider,
contract,
concurrencySize = 10,
blocksPerRequest = 2000,
shouldRetry = true,
retryMax = 5,
retryOn = 500,
}) {
this.provider = provider;
this.contract = contract;
this.concurrencySize = concurrencySize;
this.blocksPerRequest = blocksPerRequest;
this.shouldRetry = shouldRetry;
this.retryMax = retryMax;
this.retryOn = retryOn;
}
async getPastEvents({ fromBlock, toBlock, type }) {
let err;
let retries = 0;
// eslint-disable-next-line no-unmodified-loop-condition
while ((!this.shouldRetry && retries === 0) || (this.shouldRetry && retries < this.retryMax)) {
try {
return (await this.contract.queryFilter(type, fromBlock, toBlock));
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} catch (e) {
err = e;
retries++;
// If provider.getBlockNumber returned last block that isn't accepted (happened on Avalanche/Gnosis),
// get events to last accepted block
if (e.message.includes('after last accepted block')) {
const acceptedBlock = parseInt(e.message.split('after last accepted block ')[1]);
toBlock = acceptedBlock;
}
// retry on 0.5 seconds
await sleep(this.retryOn);
}
}
throw err;
}
createBatchRequest(batchArray) {
return batchArray.map(async (event, index) => {
await sleep(20 * index);
return this.getPastEvents(event);
});
}
async getBatchEvents({ fromBlock, toBlock, type = '*' }) {
if (!toBlock) {
toBlock = await this.provider.getBlockNumber();
}
const eventsToSync = [];
for (let i = fromBlock; i < toBlock; i += this.blocksPerRequest) {
const j = i + this.blocksPerRequest - 1 > toBlock ? toBlock : i + this.blocksPerRequest - 1;
eventsToSync.push({ fromBlock: i, toBlock: j, type });
}
const events = [];
const eventChunk = getBatches(eventsToSync, this.concurrencySize);
let chunkCount = 0;
for (const chunk of eventChunk) {
chunkCount++;
const fetchedEvents = (await Promise.all(this.createBatchRequest(chunk))).flat();
events.push(...fetchedEvents);
}
return events;
}
}
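
For reference, a minimal usage sketch of the class above (not from this changeset); it assumes the pool address, ABI, and deploy block exported by the constants and pool modules added in this PR, plus an ethers v5 provider:

```js
import { Contract, providers } from 'ethers'
import { BatchEventsService } from './batch'
import { poolAbi } from './pool'
import { POOL_CONTRACT, RPC_LIST, ChainId, CHAINS } from './constants'

const chainId = ChainId.XDAI
const provider = new providers.StaticJsonRpcProvider(RPC_LIST[chainId], chainId)
const contract = new Contract(POOL_CONTRACT[chainId], poolAbi, provider)

// Splits the range into 2000-block windows and fetches 10 windows at a time,
// retrying each failed request up to 5 times with a 500ms pause.
const service = new BatchEventsService({ provider, contract })

service
  .getBatchEvents({ fromBlock: CHAINS[chainId].deployBlock, type: 'NewCommitment' })
  .then((events) => console.log(`fetched ${events.length} NewCommitment events`))
```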

View File

@ -0,0 +1,237 @@
export const bridgeAbi = [
{
inputs: [
{
internalType: "contract IOmnibridge",
name: "_bridge",
type: "address",
},
{
internalType: "contract IWETH",
name: "_weth",
type: "address",
},
{
internalType: "address",
name: "_owner",
type: "address",
},
],
stateMutability: "nonpayable",
type: "constructor",
},
{
anonymous: false,
inputs: [
{
indexed: true,
internalType: "address",
name: "owner",
type: "address",
},
{
indexed: false,
internalType: "bytes",
name: "key",
type: "bytes",
},
],
name: "PublicKey",
type: "event",
},
{
inputs: [],
name: "WETH",
outputs: [
{
internalType: "contract IWETH",
name: "",
type: "address",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [],
name: "bridge",
outputs: [
{
internalType: "contract IOmnibridge",
name: "",
type: "address",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_token",
type: "address",
},
{
internalType: "address",
name: "_to",
type: "address",
},
],
name: "claimTokens",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_token",
type: "address",
},
{
internalType: "uint256",
name: "_value",
type: "uint256",
},
{
internalType: "bytes",
name: "_data",
type: "bytes",
},
],
name: "onTokenBridged",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [],
name: "owner",
outputs: [
{
internalType: "address",
name: "",
type: "address",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
components: [
{
internalType: "address",
name: "owner",
type: "address",
},
{
internalType: "bytes",
name: "publicKey",
type: "bytes",
},
],
internalType: "struct L1Helper.Account",
name: "_account",
type: "tuple",
},
],
name: "register",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_newOwner",
type: "address",
},
],
name: "transferOwnership",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [],
name: "wrapAndRelayTokens",
outputs: [],
stateMutability: "payable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_receiver",
type: "address",
},
{
internalType: "bytes",
name: "_data",
type: "bytes",
},
],
name: "wrapAndRelayTokens",
outputs: [],
stateMutability: "payable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_receiver",
type: "address",
},
{
internalType: "bytes",
name: "_data",
type: "bytes",
},
{
components: [
{
internalType: "address",
name: "owner",
type: "address",
},
{
internalType: "bytes",
name: "publicKey",
type: "bytes",
},
],
internalType: "struct L1Helper.Account",
name: "_account",
type: "tuple",
},
],
name: "wrapAndRelayTokens",
outputs: [],
stateMutability: "payable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_receiver",
type: "address",
},
],
name: "wrapAndRelayTokens",
outputs: [],
stateMutability: "payable",
type: "function",
},
{
stateMutability: "payable",
type: "receive",
},
]
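
For reference, a sketch of how this ABI gets used (not from this changeset): the account sync in assets/syncEvents.js reads PublicKey registrations from the mainnet bridge helper. The address and deploy block below are the mainnet values from the constants module:

```js
import { Contract, providers } from 'ethers'
import { bridgeAbi } from './bridgeHelper'

// BRIDGE_HELPER[ChainId.MAINNET] and the mainnet deployBlock, per ./constants
const provider = new providers.StaticJsonRpcProvider('https://tornadocash-rpc.com/mainnet', 1)
const helper = new Contract('0xCa0840578f57fE71599D29375e16783424023357', bridgeAbi, provider)

// PublicKey(owner, key) is emitted when an account registers its encryption key
const events = await helper.queryFilter(helper.filters.PublicKey(), 13494216)
console.log(`found ${events.length} PublicKey registrations`)
```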

View File

@ -0,0 +1,177 @@
export const BSC_CHAIN_ID = 56
export const XDAI_CHAIN_ID = 100
export const MAINNET_CHAIN_ID = 1
export const ChainId = {
BSC: BSC_CHAIN_ID,
XDAI: XDAI_CHAIN_ID,
MAINNET: MAINNET_CHAIN_ID,
}
export const OFFCHAIN_ORACLE_CONTRACT = '0x07D91f5fb9Bf7798734C3f606dB065549F6893bb'
export const POOL_CONTRACT = {
[ChainId.XDAI]: '0xD692Fd2D0b2Fbd2e52CFa5B5b9424bC981C30696', // ETH
// [ChainId.XDAI]: '0x772F007F13604ac286312C85b9Cd9B2D691B353E', // BNB
}
export const REDGISTRY_CONTRACT = {
[ChainId.MAINNET]: '0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2',
}
export const AGGREGATOR_FACTORY = {
[ChainId.MAINNET]: '0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49',
}
export const WRAPPED_TOKEN = {
[ChainId.MAINNET]: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2', // WETH on mainnet
[ChainId.XDAI]: '0x6A023CCd1ff6F2045C3309768eAd9E68F978f6e1', // WETH on xdai
[ChainId.BSC]: '0xCa8d20f3e0144a72C6B5d576e9Bd3Fd8557E2B04', // WBNB on xdai
}
export const RPC_LIST = {
[ChainId.BSC]: 'https://tornadocash-rpc.com/bsc',
[ChainId.MAINNET]: 'https://tornadocash-rpc.com/mainnet',
[ChainId.XDAI]: 'https://tornadocash-rpc.com/gnosis',
}
export const FALLBACK_RPC_LIST = {
[ChainId.BSC]: [
'https://binance.nodereal.io',
// 'https://rpc.ankr.com/bsc/dbe08b852ba176a8aeac783cc1fa8becaf4f107235dfdae79241063fbf52ca4a',
],
[ChainId.MAINNET]: [
'https://rpc.mevblocker.io',
// 'https://rpc.ankr.com/eth/dbe08b852ba176a8aeac783cc1fa8becaf4f107235dfdae79241063fbf52ca4a',
],
[ChainId.XDAI]: [
// 'https://rpc.ankr.com/gnosis/dbe08b852ba176a8aeac783cc1fa8becaf4f107235dfdae79241063fbf52ca4a',
'https://tornadocash-rpc.com/gnosis',
],
}
export const RPC_WS_LIST = {
[ChainId.MAINNET]: 'wss://mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607',
[ChainId.BSC]: 'wss://bsc-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607',
[ChainId.XDAI]: 'wss://gnosis-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607',
}
export const MULTICALL = {
[ChainId.BSC]: '0xf072f255A3324198C7F653237B44E1C4e66f8C42',
[ChainId.XDAI]: '0x8677b93D543d0217B32B8FDc20F2316E138D619B',
[ChainId.MAINNET]: '0x1F98415757620B543A52E61c46B32eB19261F984',
}
export const BRIDGE_PROXY = {
[ChainId.BSC]: '0x05185872898b6f94AA600177EF41B9334B1FA48B',
[ChainId.MAINNET]: '0x4c36d2919e407f0cc2ee3c993ccf8ac26d9ce64e',
}
export const AMB_BRIDGE = {
[ChainId.XDAI]: '0x75Df5AF045d91108662D8080fD1FEFAd6aA0bb59', // ETH
// [ChainId.XDAI]: '0x162E898bD0aacB578C8D5F8d6ca588c13d2A383F', // BNB
[ChainId.MAINNET]: '0x162E898bD0aacB578C8D5F8d6ca588c13d2A383F',
}
export const BRIDGE_HELPER = {
[ChainId.MAINNET]: '0xCa0840578f57fE71599D29375e16783424023357',
[ChainId.BSC]: '0x8845F740F8B01bC7D9A4C82a6fD4A60320c07AF1',
}
export const BRIDGE_FEE_MANAGER = {
[ChainId.XDAI]: '0x5dbC897aEf6B18394D845A922BF107FA98E3AC55',
}
export const FOREIGN_OMNIBRIDGE = {
[ChainId.MAINNET]: '0x88ad09518695c6c3712AC10a214bE5109a655671',
}
export const OMNIBRIDGE = {
[ChainId.XDAI]: '0xf6A78083ca3e2a662D6dd1703c939c8aCE2e268d',
}
export const SANCTION_LIST = {
[ChainId.MAINNET]: '0x40C57923924B5c5c5455c48D93317139ADDaC8fb',
}
export const CHAINS = {
[ChainId.XDAI]: {
symbol: 'XDAI',
name: 'xdai',
shortName: 'xdai',
icon: 'ethereum',
network: 'XDAI',
blockDuration: 3000, // ms
deployBlock: 19097755, // ETH
// deployBlock: 20446605, // BNB
blockGasLimit: 144000000, // rpc block gas limit
hexChainId: '0x64',
isEipSupported: false,
ensSubdomainKey: 'gnosis-nova',
blockExplorerUrl: 'https://gnosisscan.io'
},
[ChainId.MAINNET]: {
symbol: 'ETH',
name: 'ethereum',
shortName: 'eth',
icon: 'ethereum',
network: 'Mainnet',
deployBlock: 13494216,
blockDuration: 15000,
blockGasLimit: 144000000,
hexChainId: '0x1',
isEipSupported: true,
ensSubdomainKey: 'mainnet-tornado',
blockExplorerUrl: 'https://etherscan.io'
},
[ChainId.BSC]: {
symbol: 'BNB',
name: 'bsc',
shortName: 'bsc',
icon: 'binance',
network: 'BSC',
deployBlock: 14931075,
blockDuration: 3000,
blockGasLimit: 144000000,
hexChainId: '0x38',
isEipSupported: false,
ensSubdomainKey: 'bsc-tornado',
blockExplorerUrl: 'https://bscscan.com'
},
}
export const workerEvents = {
INIT_WORKER: 'initWorker',
GET_COMMITMENT_EVENTS: 'get_commitment_events',
// nullifier
GET_UNSPENT_EVENTS: 'get_unspent_events',
GET_NULLIFIER_EVENT: 'get_nullifier_event',
GET_NULLIFIER_EVENTS_FROM_TX_HASH: 'get_nullifier_events_from_tx_hash',
UPDATE_NULLIFIER_EVENTS: 'update_nullifier_events',
// events
GET_BATCH_EVENTS: 'get_batch_events',
GET_BATCH_COMMITMENTS_EVENTS: 'get_batch_commitments_events',
GET_EVENTS_FROM_TX_HASH: 'get_events_from_tx_hash',
SAVE_EVENTS: 'save_events',
GET_CACHED_EVENTS: 'get_cached_events',
GET_CACHED_COMMITMENTS_EVENTS: 'get_cached_commitments_events',
SAVE_LAST_SYNC_BLOCK: 'save_last_sync_block',
}
export const numbers = {
ZERO: 0,
TWO: 2,
ONE: 1,
BYTES_31: 31,
BYTES_62: 62,
IS_SPENT_INDEX: 1,
OX_LENGTH: 2,
RECALL_DELAY: 500,
NULLIFIER_LENGTH: 66,
NONCE_BUF_LENGTH: 24,
COMMITMENTS_CHAIN: 100,
DEPLOYED_BLOCK: 19097755,
DECRYPT_WORKERS_COUNT: 8,
MIN_BLOCKS_INTERVAL_LINE: 200000,
EPHEM_PUBLIC_KEY_BUF_LENGTH: 56,
}

View File

@ -0,0 +1,37 @@
import { unzipAsync } from "./zip"
export async function downloadEvents(fileName, deployedBlock) {
fileName = fileName.toLowerCase()
// @ts-ignore
const prefix = __webpack_public_path__.slice(0, -7)
try {
const resp = await fetch(`${prefix}/${fileName}.zip`, {
method: 'GET',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
}
})
const arrayBuffer = await resp.arrayBuffer()
const { [fileName]: content } = await unzipAsync(new Uint8Array(arrayBuffer))
const events = JSON.parse(new TextDecoder().decode(content))
const lastBlock = events && Array.isArray(events) && events[events.length - 1]
? events[events.length - 1].blockNumber
: deployedBlock
return {
events,
lastBlock
}
} catch {
return {
events: [],
lastBlock: deployedBlock
}
}
}
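
A usage sketch (not from this changeset), assuming the zipped snapshot was produced by `yarn update:events` and shipped under static/; 19097755 is the Gnosis deploy block from the constants module:

```js
import { downloadEvents } from './downloadEvents'

// falls back to { events: [], lastBlock: 19097755 } if the snapshot is missing
const { events, lastBlock } = await downloadEvents('commitments_100.json', 19097755)
console.log(`loaded ${events.length} cached commitments, resume sync from block ${lastBlock + 1}`)
```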

View File

@ -0,0 +1,279 @@
import { isEmpty } from 'lodash'
import { ApolloClient, InMemoryCache, gql } from '@apollo/client/core'
import { utils } from 'ethers'
import { GET_ACCOUNTS, GET_COMMITMENT, GET_NULLIFIER } from './queries'
import { ChainId, numbers } from '../constants'
const { getAddress } = utils
const first = 1000
const breakLength = 900
const CHAIN_GRAPH_URLS = {
[ChainId.BSC]: 'https://api.thegraph.com/subgraphs/name/dan1kov/bsc-tornado-pool-subgraph',
[ChainId.MAINNET]: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/mainnet-tornado-pool-subgraph',
[ChainId.XDAI]: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/gnosis-tornado-nova-subgraph',
}
const link = (operation) => {
const { chainId } = operation.getContext()
return CHAIN_GRAPH_URLS[chainId]
}
const client = new ApolloClient({
uri: link,
cache: new InMemoryCache(),
})
export async function getAccounts({ fromBlock, chainId }) {
const { data } = await client.query({
context: {
chainId,
},
query: gql(GET_ACCOUNTS),
variables: { first, fromBlock },
})
if (!data) {
// no data returned from the subgraph; avoid dereferencing it and report no sync block
return {
results: [],
lastSyncBlock: 0
}
}
return {
results: data.accounts,
lastSyncBlock: data._meta.block.number
}
}
export async function getAllAccounts({ fromBlock, toBlock, chainId }) {
try {
let accounts = []
let lastSyncBlock
while (true) {
let { results, lastSyncBlock: lastBlock } = await getAccounts({ fromBlock, chainId })
lastSyncBlock = lastBlock
if (isEmpty(results)) {
break
}
if (results.length < breakLength) {
accounts = accounts.concat(results)
break
}
const [lastEvent] = results.slice(-numbers.ONE)
results = results.filter((e) => e.blockNumber !== lastEvent.blockNumber)
fromBlock = Number(lastEvent.blockNumber)
accounts = accounts.concat(results)
if (toBlock && fromBlock >= Number(toBlock)) {
break
}
}
if (!accounts) {
return {
lastSyncBlock,
events: [],
}
}
const data = accounts.map((e) => ({
key: e.key,
owner: getAddress(e.owner),
blockNumber: Number(e.blockNumber),
}))
const [lastEvent] = data.slice(-numbers.ONE)
return {
events: data,
lastSyncBlock: (lastEvent && lastEvent.blockNumber >= lastSyncBlock)
? lastEvent.blockNumber + numbers.ONE
: lastSyncBlock,
}
} catch (err) {
console.log('Error from getAllAccounts')
console.log(err)
return {
lastSyncBlock: '',
events: [],
}
}
}
export async function getCommitments({ fromBlock, chainId }) {
const { data } = await client.query({
context: {
chainId,
},
query: gql(GET_COMMITMENT),
variables: { first, fromBlock },
})
if (!data) {
// no data returned from the subgraph; avoid dereferencing it and report no sync block
return {
results: [],
lastSyncBlock: 0
}
}
return {
results: data.commitments,
lastSyncBlock: data._meta.block.number
}
}
export async function getAllCommitments({ fromBlock, toBlock, chainId }) {
try {
let commitments = []
let lastSyncBlock
while (true) {
let { results, lastSyncBlock: lastBlock } = await getCommitments({ fromBlock, chainId })
lastSyncBlock = lastBlock
if (isEmpty(results)) {
break
}
if (results.length < breakLength) {
commitments = commitments.concat(results)
break
}
const [lastEvent] = results.slice(-numbers.ONE)
results = results.filter((e) => e.blockNumber !== lastEvent.blockNumber)
fromBlock = Number(lastEvent.blockNumber)
commitments = commitments.concat(results)
if (toBlock && fromBlock >= Number(toBlock)) {
break
}
}
if (!commitments) {
return {
lastSyncBlock,
events: [],
}
}
const data = commitments
.map((e) => ({
blockNumber: Number(e.blockNumber),
transactionHash: e.transactionHash,
index: Number(e.index),
commitment: e.commitment,
encryptedOutput: e.encryptedOutput
}))
.sort((a, b) => a.index - b.index)
const [lastEvent] = data.slice(-numbers.ONE)
return {
events: data,
lastSyncBlock: (lastEvent && lastEvent.blockNumber >= lastSyncBlock)
? lastEvent.blockNumber + numbers.ONE
: lastSyncBlock,
}
} catch (err) {
console.log('Error from getAllCommitments')
console.log(err)
return {
lastSyncBlock: '',
events: [],
}
}
}
export async function getNullifiers({ fromBlock, chainId }) {
const { data } = await client.query({
context: {
chainId,
},
query: gql(GET_NULLIFIER),
variables: { first, fromBlock },
})
if (!data) {
// no data returned from the subgraph; avoid dereferencing it and report no sync block
return {
results: [],
lastSyncBlock: 0
}
}
return {
results: data.nullifiers,
lastSyncBlock: data._meta.block.number
}
}
export async function getAllNullifiers({ fromBlock, chainId }) {
try {
let nullifiers = []
let lastSyncBlock
while (true) {
let { results, lastSyncBlock: lastBlock } = await getNullifiers({ fromBlock, chainId })
lastSyncBlock = lastBlock
if (isEmpty(results)) {
break
}
if (results.length < breakLength) {
nullifiers = nullifiers.concat(results)
break
}
const [lastEvent] = results.slice(-numbers.ONE)
results = results.filter((e) => e.blockNumber !== lastEvent.blockNumber)
fromBlock = Number(lastEvent.blockNumber)
nullifiers = nullifiers.concat(results)
}
if (!nullifiers) {
return {
lastSyncBlock,
events: [],
}
}
const data = nullifiers.map((e) => ({
nullifier: e.nullifier,
blockNumber: Number(e.blockNumber),
transactionHash: e.transactionHash
}))
const [lastEvent] = data.slice(-numbers.ONE)
return {
events: data,
lastSyncBlock: (lastEvent && lastEvent.blockNumber >= lastSyncBlock)
? lastEvent.blockNumber + numbers.ONE
: lastSyncBlock,
}
} catch (err) {
console.log('Error from getAllNullifiers')
console.log(err)
return {
lastSyncBlock: '',
events: [],
}
}
}
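
The getAll* helpers above page through the subgraph 1000 rows at a time and drop the trailing block of every full page, so one block's events are never split across pages. A usage sketch (not from this changeset); the chain id and deploy block are the Gnosis values from the constants module:

```js
import { getAllCommitments } from './graph'

const { events, lastSyncBlock } = await getAllCommitments({ fromBlock: 19097755, chainId: 100 })
// events is sorted by commitment index; lastSyncBlock is where the
// node-based BatchEventsService should take over
console.log(events.length, lastSyncBlock)
```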

View File

@ -0,0 +1,56 @@
export const GET_ACCOUNTS = `
query getAccounts($first: Int, $fromBlock: Int) {
accounts(first: $first, orderBy: blockNumber, orderDirection: asc, where: {
blockNumber_gte: $fromBlock
}) {
id
key
owner
blockNumber
}
_meta {
block {
number
}
hasIndexingErrors
}
}
`
export const GET_COMMITMENT = `
query getCommitment($first: Int, $fromBlock: Int) {
commitments(first: $first, orderBy: blockNumber, orderDirection: asc, where: {
blockNumber_gte: $fromBlock
}) {
index
commitment
blockNumber
encryptedOutput
transactionHash
}
_meta {
block {
number
}
hasIndexingErrors
}
}
`
export const GET_NULLIFIER = `
query getNullifier($first: Int, $fromBlock: Int) {
nullifiers(first: $first, orderBy: blockNumber, orderDirection: asc, where: {
blockNumber_gte: $fromBlock
}) {
nullifier
blockNumber
transactionHash
}
_meta {
block {
number
}
hasIndexingErrors
}
}
`
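
A sketch of running one of these queries directly (not from this changeset), mirroring the Apollo setup in graph.js; the URI is the Gnosis subgraph endpoint from that file:

```js
import { ApolloClient, InMemoryCache, gql } from '@apollo/client/core'
import { GET_NULLIFIER } from './queries'

const client = new ApolloClient({
  uri: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/gnosis-tornado-nova-subgraph',
  cache: new InMemoryCache(),
})

const { data } = await client.query({
  query: gql(GET_NULLIFIER),
  variables: { first: 1000, fromBlock: 19097755 },
})
// data.nullifiers: up to 1000 rows ordered by blockNumber
// data._meta.block.number: the subgraph's last indexed block
console.log(data.nullifiers.length, data._meta.block.number)
```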

assets/services/idb.js (new file, 222 lines)
View File

@ -0,0 +1,222 @@
import { deleteDB, openDB } from 'idb'
export const VERSION_ERROR = 'less than the existing version'
export const INDEX_DB_ERROR = 'A mutation operation was attempted on a database that did not allow mutations.'
export const IDB_VERSION = 9
// TODO method for migration, remove indexed
export class IndexedDB {
constructor({ stores, dbName }) {
this.dbExists = false
this.isBlocked = false
this.options = {
upgrade(db) {
Object.values(db.objectStoreNames).forEach((value) => {
db.deleteObjectStore(value)
})
stores.forEach(({ name, keyPath, indexes }) => {
const store = db.createObjectStore(name, {
keyPath,
autoIncrement: true,
})
if (Array.isArray(indexes)) {
indexes.forEach(({ name, unique = false }) => {
store.createIndex(name, String(name), { unique })
})
}
})
},
}
this.dbName = dbName
}
async initDB() {
try {
if (this.dbExists) {
return
}
this.db = await openDB(this.dbName, IDB_VERSION, this.options) // version (optional): Schema version, or undefined to open the current version.
this.onEventHandler()
this.dbExists = true
} catch (err) {
// need for private mode firefox browser
if (err.message.includes(INDEX_DB_ERROR)) {
this.isBlocked = true
return
}
if (err.message.includes(VERSION_ERROR)) {
await this.removeExist()
}
console.error(`initDB has error: ${err.message}`)
}
}
async createTransactions({ storeName, data, mode = 'readwrite' }) {
try {
const tx = this.db.transaction(storeName, mode)
const storedItem = tx.objectStore(storeName)
if (storedItem.add) {
await storedItem.add(data)
await tx.done
}
} catch (err) {
throw new Error(`Method createTransactions has error: ${err.message}`)
}
}
createMultipleTransactions({
storeName,
data,
index,
mode = 'readwrite',
}) {
try {
const tx = this.db.transaction(storeName, mode)
data.forEach((item) => {
if (item && tx.store && tx.store.put) {
tx.store.put({ ...item, ...index })
}
})
} catch (err) {
throw new Error(`Method createMultipleTransactions has error: ${err.message}`)
}
}
async getFromIndex(params) {
if (this.isBlocked) {
return
}
try {
const item = await this.getFromIndexHandler(params)
return item
} catch (err) {
return undefined
}
}
async getItem({ storeName, key }) {
try {
if (this.isBlocked) {
return
}
const store = this.db.transaction(storeName).objectStore(storeName)
const value = await store.get(key)
return value
} catch (err) {
throw new Error(`Method getItem has error: ${err.message}`)
}
}
async addItem({ storeName, data, key }) {
try {
const tx = this.db.transaction(storeName, 'readwrite')
const isExist = await tx.objectStore(storeName).get(key)
if (!isExist) {
await tx.objectStore(storeName).add(data)
}
} catch (err) {
throw new Error(`Method addItem has error: ${err.message}`)
}
}
async putItem({ storeName, data }) {
try {
if (this.isBlocked) {
return
}
const tx = this.db.transaction(storeName, 'readwrite')
await tx.objectStore(storeName).put(data)
} catch (err) {
throw new Error(`Method putItem has error: ${err.message}`)
}
}
async getAll({ storeName }) {
try {
if (this.isBlocked || !this.dbExists) {
return []
}
const tx = this.db.transaction(storeName, 'readonly')
const store = tx.objectStore(storeName)
const data = await store.getAll()
return data
} catch (err) {
throw new Error(`Method getAll has error: ${err.message}`)
}
}
async clearStore({ storeName, mode = 'readwrite' }) {
try {
const tx = this.db.transaction(storeName, mode)
const storedItem = tx.objectStore(storeName)
if (storedItem.clear) {
await storedItem.clear()
}
} catch (err) {
throw new Error(`Method clearStore has error: ${err.message}`)
}
}
async getAllFromIndex(params) {
if (this.isBlocked) {
return []
}
try {
const items = await this.getAllFromIndexHandler(params)
return items
} catch (err) {
return []
}
}
onEventHandler() {
// eslint-disable-next-line @typescript-eslint/no-misused-promises
this.db.addEventListener('onupgradeneeded', async () => {
await this.removeExist()
})
}
async removeExist() {
await deleteDB(this.dbName)
this.dbExists = false
await this.initDB()
}
async getFromIndexHandler({ storeName, indexName, key }) {
try {
const value = await this.db.getFromIndex(storeName, indexName, key)
return value
} catch (err) {
throw new Error(`Method getFromIndexHandler has error: ${err.message}`)
}
}
async getAllFromIndexHandler({ storeName, indexName, key, count }) {
try {
const value = await this.db.getAllFromIndex(storeName, indexName, key, count)
return value
} catch (err) {
throw new Error(`Method getAllFromIndex has error: ${err.message}`)
}
}
}
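
A usage sketch (not from this changeset); the database and store names here are illustrative rather than the app's actual schema:

```js
import { IndexedDB } from './idb'

const idb = new IndexedDB({
  dbName: 'nova_events',
  stores: [{ name: 'commitment_events', keyPath: 'index', indexes: [{ name: 'transactionHash' }] }],
})

await idb.initDB() // quietly flips into blocked mode in Firefox private browsing
await idb.putItem({ storeName: 'commitment_events', data: { index: 0, commitment: '0x00' } })
console.log(await idb.getAll({ storeName: 'commitment_events' }))
```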

assets/services/pool.js (new file, 1040 lines)

File diff suppressed because it is too large.

View File

@ -0,0 +1,86 @@
import { ethers } from 'ethers'
import { fetchJson } from 'ethers/lib/utils'
import { numbers } from './constants'
const defaultRetryAttempt = 0
export class ExtendedProvider extends ethers.providers.StaticJsonRpcProvider {
constructor(url, network, fallbackRpcs) {
super(url, network)
this.fallbackRpcs = fallbackRpcs
}
async send(method, params, retryAttempt = defaultRetryAttempt) {
try {
return await super.send(method, params)
} catch (err) {
if (!retryAttempt) {
const TIME_OUT = 3000
await this.sleep(TIME_OUT)
if (this.fallbackRpcs) {
return await this.fallbackSend(method, params, this.fallbackRpcs)
}
return this.send(method, params, ++retryAttempt)
}
throw err
}
}
// eslint-disable-next-line
async fallbackSend(method, params, fallbackRpcs, retryAttempt = defaultRetryAttempt) {
function getResult(payload) {
if (payload.error) {
const error = new Error(payload.error.message)
error.code = payload.error.code
error.data = payload.error.data
// eslint-disable-next-line @typescript-eslint/no-throw-literal
throw error
}
return payload.result
}
try {
const request = {
method: method,
params: params,
id: this._nextId + numbers.ONE,
jsonrpc: '2.0',
}
const result = fetchJson({ url: fallbackRpcs[retryAttempt] }, JSON.stringify(request), getResult).then(
(result) => result,
(error) => {
throw error
},
)
return await result
} catch (err) {
retryAttempt += numbers.ONE
if (!fallbackRpcs[retryAttempt]) {
throw err
} else {
return await this.fallbackSend(method, params, fallbackRpcs, retryAttempt)
}
}
}
async sleep(ms) {
return await new Promise((resolve) => setTimeout(resolve, ms))
}
// private checkRpcError(err: { data: string; code: string; message: string }) {
// const code = String(err?.code)
// const data = err.data?.toLowerCase()
// const message = err.message?.toLowerCase()
// const ERROR_DATA = 'too many concurrent request'
// const ERROR_MESSAGE = 'timeout'
// const ERROR_CODE = '-32017'
// return (data?.includes(ERROR_DATA) || message?.includes(ERROR_MESSAGE)) && code === ERROR_CODE
// }
}
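
A usage sketch (not from this changeset): after one failed attempt the provider waits three seconds and replays the call against FALLBACK_RPC_LIST in order, so callers treat it like any other ethers provider:

```js
import { ExtendedProvider } from './provider'
import { RPC_LIST, FALLBACK_RPC_LIST, ChainId } from './constants'

const chainId = ChainId.XDAI
const provider = new ExtendedProvider(RPC_LIST[chainId], chainId, FALLBACK_RPC_LIST[chainId])

// any failed JSON-RPC call is transparently re-sent to the fallback RPCs
const blockNumber = await provider.getBlockNumber()
console.log({ blockNumber })
```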

View File

@ -0,0 +1,13 @@
export const ZERO_ELEMENT = 0
export function getBatches(array, batchSize) {
const batches = []
while (array.length) {
batches.push(array.splice(ZERO_ELEMENT, batchSize))
}
return batches
}
export async function sleep(ms) {
return await new Promise((resolve) => setTimeout(resolve, ms))
}
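
One behavior worth flagging: getBatches consumes its input with splice, so pass a copy when the original array is still needed. A quick sketch (not from this changeset):

```js
import { getBatches, sleep } from './utilities'

const source = [1, 2, 3, 4, 5]
const chunks = getBatches([...source], 2)
console.log(chunks) // [ [1, 2], [3, 4], [5] ]
console.log(source.length) // 5, because the copy was consumed, not source

await sleep(500) // used between retries and to stagger concurrent RPC batches
```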

assets/services/zip.js (new file, 25 lines)
View File

@ -0,0 +1,25 @@
import { zip, unzip } from 'fflate'
export function zipAsync(file) {
return new Promise((res, rej) => {
zip(file, { mtime: new Date('1/1/1980') }, (err, data) => {
if (err) {
rej(err);
return;
}
res(data);
});
});
}
export function unzipAsync(data) {
return new Promise((res, rej) => {
unzip(data, {}, (err, data) => {
if (err) {
rej(err);
return;
}
res(data);
});
});
}
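
A round-trip sketch (not from this changeset). Pinning mtime to 1/1/1980 makes the archives deterministic, so a regenerated snapshot only differs when the events themselves do:

```js
import { zipAsync, unzipAsync } from './zip'

const archive = await zipAsync({ 'events.json': new TextEncoder().encode('[]') })
const { 'events.json': raw } = await unzipAsync(archive)
console.log(new TextDecoder().decode(raw)) // "[]"
```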

assets/syncEvents.js (new file, 285 lines)
View File

@ -0,0 +1,285 @@
import path from 'path'
import { stat, readFile, writeFile } from 'fs/promises'
import { Contract, providers, utils } from 'ethers'
import { BatchEventsService } from './services/batch'
import { getAllAccounts, getAllCommitments, getAllNullifiers } from './services/graph'
import { POOL_CONTRACT, BRIDGE_HELPER, RPC_LIST, ChainId, CHAINS, numbers } from './services/constants'
import { zipAsync, unzipAsync } from './services/zip'
import { poolAbi } from './services/pool'
import { bridgeAbi } from './services/bridgeHelper'
const { getAddress } = utils
const { StaticJsonRpcProvider } = providers
const EVENT_PATH = './static'
async function existsAsync(fileOrDir) {
try {
await stat(fileOrDir);
return true;
} catch {
return false;
}
}
const getProvider = (chainId) => {
return new StaticJsonRpcProvider({ skipFetchSetup: true, url: RPC_LIST[chainId] }, chainId)
}
const getTornadoPool = (chainId, provider) => {
const TornadoPool = new Contract(POOL_CONTRACT[chainId], poolAbi, provider)
return {
TornadoPool,
BatchEventsService: new BatchEventsService({
provider,
contract: TornadoPool
})
}
}
const getBridgeHelper = (chainId, provider) => {
const BridgeHelper = new Contract(BRIDGE_HELPER[chainId], bridgeAbi, provider)
return {
BridgeHelper,
BridgeEventsService: new BatchEventsService({
provider,
contract: BridgeHelper
})
}
}
const loadEvents = async (fileName, deployedBlock) => {
fileName = fileName.toLowerCase()
const filePath = path.join(EVENT_PATH, fileName + '.zip')
if (!(await existsAsync(filePath))) {
return {
events: [],
lastBlock: deployedBlock
}
}
try {
const data = await readFile(filePath)
const { [fileName]: content } = await unzipAsync(data)
const events = JSON.parse(new TextDecoder().decode(content))
const lastBlock = events && Array.isArray(events) && events[events.length - 1]
? events[events.length - 1].blockNumber
: deployedBlock
return {
events,
lastBlock
}
} catch {
return {
events: [],
lastBlock: deployedBlock
}
}
}
const saveEvents = async (fileName, events) => {
fileName = fileName.toLowerCase()
const filePath = path.join(EVENT_PATH, fileName + '.zip')
const payload = await zipAsync({
[fileName]: new TextEncoder().encode(JSON.stringify(events, null, 2) + '\n')
})
await writeFile(filePath, payload)
}
const syncAccounts = async (chainId, BatchEventsService) => {
const fileName = `accounts_${chainId}.json`
console.log(`Syncing ${fileName}`)
const cachedEvents = await loadEvents(fileName, CHAINS[chainId].deployBlock)
const events = [...cachedEvents.events]
let fromBlock = cachedEvents.lastBlock + numbers.ONE
console.log({
cachedEvents: events.length,
cachedBlock: fromBlock
})
const { events: graphEvents, lastSyncBlock } = await getAllAccounts({
fromBlock,
chainId
})
console.log({
graphEvents: graphEvents.length,
graphBlock: lastSyncBlock
})
if (lastSyncBlock) {
events.push(...graphEvents)
fromBlock = lastSyncBlock
}
let nodeEvents = await BatchEventsService.getBatchEvents({
fromBlock,
type: 'PublicKey'
})
console.log({
nodeEvents: nodeEvents.length,
nodeBlock: nodeEvents && nodeEvents[nodeEvents.length - 1] ? nodeEvents[nodeEvents.length - 1].blockNumber : undefined
})
if (nodeEvents && nodeEvents.length) {
nodeEvents = nodeEvents.map(({ blockNumber, args }) => ({
key: args.key,
owner: getAddress(args.owner),
blockNumber,
}))
events.push(...nodeEvents)
}
await saveEvents(fileName, events)
}
const syncCommitments = async (chainId, BatchEventsService) => {
const fileName = `commitments_${chainId}.json`
console.log(`Syncing ${fileName}`)
const cachedEvents = await loadEvents(fileName, CHAINS[chainId].deployBlock)
const events = [...cachedEvents.events]
let fromBlock = cachedEvents.lastBlock + numbers.ONE
console.log({
cachedEvents: events.length,
cachedBlock: fromBlock
})
const { events: graphEvents, lastSyncBlock } = await getAllCommitments({
fromBlock,
chainId
})
console.log({
graphEvents: graphEvents.length,
graphBlock: lastSyncBlock
})
if (lastSyncBlock) {
events.push(...graphEvents)
fromBlock = lastSyncBlock
}
let nodeEvents = await BatchEventsService.getBatchEvents({
fromBlock,
type: 'NewCommitment'
})
console.log({
nodeEvents: nodeEvents.length,
nodeBlock: nodeEvents && nodeEvents[nodeEvents.length - 1] ? nodeEvents[nodeEvents.length - 1].blockNumber : undefined
})
if (nodeEvents && nodeEvents.length) {
nodeEvents = nodeEvents.map(({ blockNumber, transactionHash, args }) => ({
blockNumber,
transactionHash,
index: Number(args.index),
commitment: args.commitment,
encryptedOutput: args.encryptedOutput,
}))
events.push(...nodeEvents)
}
await saveEvents(fileName, events)
}
const syncNullifiers = async (chainId, BatchEventsService) => {
const fileName = `nullifiers_${chainId}.json`
console.log(`Syncing ${fileName}`)
const cachedEvents = await loadEvents(fileName, CHAINS[chainId].deployBlock)
const events = [...cachedEvents.events]
let fromBlock = cachedEvents.lastBlock + numbers.ONE
console.log({
cachedEvents: events.length,
cachedBlock: fromBlock
})
const { events: graphEvents, lastSyncBlock } = await getAllNullifiers({
fromBlock,
chainId
})
console.log({
graphEvents: graphEvents.length,
graphBlock: lastSyncBlock
})
if (lastSyncBlock) {
events.push(...graphEvents)
fromBlock = lastSyncBlock
}
let nodeEvents = await BatchEventsService.getBatchEvents({
fromBlock,
type: 'NewNullifier'
})
console.log({
nodeEvents: nodeEvents.length,
nodeBlock: nodeEvents && nodeEvents[nodeEvents.length - 1] ? nodeEvents[nodeEvents.length - 1].blockNumber : undefined
})
if (nodeEvents && nodeEvents.length) {
nodeEvents = nodeEvents.map(({ blockNumber, transactionHash, args }) => ({
blockNumber,
transactionHash,
nullifier: args.nullifier,
}))
events.push(...nodeEvents)
}
await saveEvents(fileName, events)
}
const main = async () => {
const chainId = ChainId.XDAI
const ethChainId = ChainId.MAINNET
const provider = getProvider(chainId)
const ethProvider = getProvider(ethChainId)
const { BatchEventsService } = getTornadoPool(chainId, provider)
const { BridgeEventsService } = getBridgeHelper(ethChainId, ethProvider)
console.log(`Connected with ${chainId}: (block: ${await provider.getBlockNumber()})`)
console.log(`Connected with ${ethChainId}: (block: ${await ethProvider.getBlockNumber()})`)
await syncAccounts(ethChainId, BridgeEventsService)
await syncCommitments(chainId, BatchEventsService)
await syncNullifiers(chainId, BatchEventsService)
}
main()

View File

@ -55,7 +55,6 @@ export default {
'setProvider',
'changeChain',
'checkNetwork',
-'checkSanction',
'setWalletParams',
'getWalletBalance',
]),
@ -93,9 +92,6 @@ export default {
const address = await provider.setupProvider()
const network = await provider.checkNetworkVersion()
-if (address) {
-await this.checkSanction(address)
-}
await this.setProvider({ network, name: key })
await this.setAccountData(address)
@ -126,8 +122,6 @@ export default {
if (address) {
const checksumAddress = toChecksumAddress(address)
-await this.checkSanction(checksumAddress)
if (!this.isConnected) {
return
}

copyFile.ts (new file, 15 lines)
View File

@ -0,0 +1,15 @@
import { argv } from 'process'
import { copyFile } from 'fs'
function copyFiles() {
const [, , inFile, outFile] = argv
copyFile(inFile, outFile, function(err) {
if (err) {
throw err
}
console.log(`Copied ${inFile} to ${outFile}`)
})
}
copyFiles()

View File

@ -123,6 +123,7 @@ const config: NuxtConfig = {
config.output.globalObject = 'this'
}
if (config?.module != null) {
+/**
if (isClient) {
config.module.rules.push({
test: /\.worker\.js$/,
@ -133,6 +134,7 @@ const config: NuxtConfig = {
},
})
}
+**/
config.module.rules.push({
test: /\.bin$/,

View File

@ -10,12 +10,12 @@
"lint": "eslint --ext .js,.ts",
"lint:fix": "eslint --ext .js,.ts --quiet --fix",
"compile": "typechain --target ethers-v5 --out-dir ./_contracts './abi/*.json'",
"generate": "nuxt generate && cp dist/404.html dist/ipfs-404.html",
"copyFile": "node --loader ts-node/esm copyFile.ts",
"generate": "yarn worker:compile && nuxt generate && yarn copyFile dist/404.html dist/ipfs-404.html",
"prepare": "husky install",
"ipfs:upload": "node --loader ts-node/esm ipfsUpload.ts",
"worker:compile": "nuxt generate && yarn compile:events && yarn compile:nullifier",
"compile:events": "babel dist/_nuxt/workers/events.worker.js --out-file static/events.worker.js",
"compile:nullifier": "babel dist/_nuxt/workers/nullifier.worker.js --out-file static/nullifier.worker.js"
"worker:compile": "webpack",
"update:events": "webpack && node ./syncEvents.cjs"
},
"dependencies": {
"@apollo/client": "^3.4.16",
@ -74,7 +74,7 @@
"eslint-plugin-prettier": "^3.4.0",
"eslint-plugin-promise": "^5.1.0",
"eslint-plugin-vue": "^7.16.0",
"fibers": "^5.0.0",
"fflate": "^0.8.2",
"form-data": "^4.0.0",
"husky": "^6.0.0",
"lint-staged": "10.2.11",
@ -87,6 +87,7 @@
"typechain": "^5.1.0",
"typescript": "^4.3.4",
"vue-eslint-parser": "^7.6.0",
"webpack-cli": "^4.10.0",
"worker-loader": "^3.0.8"
},
"husky": {

services/events/batch.ts (new file, 109 lines)
View File

@ -0,0 +1,109 @@
import { Provider, Contract, EventLog } from "ethers";
import { sleep, getBatches } from "@/utilities";
export interface BatchEventServiceConstructor {
provider: Provider;
contract: Contract;
concurrencySize?: number;
blocksPerRequest?: number;
shouldRetry?: boolean;
retryMax?: number;
retryOn?: number;
}
export type EventInput = {
fromBlock: number;
toBlock: number;
type: string;
};
export class BatchEventsService {
provider: Provider;
contract: Contract;
concurrencySize: number;
blocksPerRequest: number;
shouldRetry: boolean;
retryMax: number;
retryOn: number;
constructor({
provider,
contract,
concurrencySize = 10,
blocksPerRequest = 2000,
shouldRetry = true,
retryMax = 5,
retryOn = 500,
}: BatchEventServiceConstructor) {
this.provider = provider;
this.contract = contract;
this.concurrencySize = concurrencySize;
this.blocksPerRequest = blocksPerRequest;
this.shouldRetry = shouldRetry;
this.retryMax = retryMax;
this.retryOn = retryOn;
}
async getPastEvents({ fromBlock, toBlock, type }: EventInput): Promise<EventLog[]> {
let err;
let retries = 0;
// eslint-disable-next-line no-unmodified-loop-condition
while ((!this.shouldRetry && retries === 0) || (this.shouldRetry && retries < this.retryMax)) {
try {
return (await this.contract.queryFilter(type, fromBlock, toBlock)) as EventLog[];
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} catch (e: any) {
err = e;
retries++;
// If provider.getBlockNumber returned last block that isn't accepted (happened on Avalanche/Gnosis),
// get events to last accepted block
if (e.message.includes('after last accepted block')) {
const acceptedBlock = parseInt(e.message.split('after last accepted block ')[1]);
toBlock = acceptedBlock;
}
// retry on 0.5 seconds
await sleep(this.retryOn);
}
}
throw err;
}
createBatchRequest(batchArray: EventInput[]): Promise<EventLog[]>[] {
return batchArray.map(async (event: EventInput, index: number) => {
await sleep(20 * index);
return this.getPastEvents(event);
});
}
async getBatchEvents({ fromBlock, toBlock, type = '*' }: EventInput): Promise<EventLog[]> {
if (!toBlock) {
toBlock = await this.provider.getBlockNumber();
}
const eventsToSync: EventInput[] = [];
for (let i = fromBlock; i < toBlock; i += this.blocksPerRequest) {
const j = i + this.blocksPerRequest - 1 > toBlock ? toBlock : i + this.blocksPerRequest - 1;
eventsToSync.push({ fromBlock: i, toBlock: j, type } as EventInput);
}
const events = [];
const eventChunk = getBatches<EventInput>(eventsToSync, this.concurrencySize);
let chunkCount = 0;
for (const chunk of eventChunk) {
chunkCount++;
const fetchedEvents = (await Promise.all(this.createBatchRequest(chunk))).flat();
events.push(...fetchedEvents);
}
return events;
}
}

View File

@ -7,6 +7,9 @@ import { isEmpty, sleep, toChecksumAddress } from '@/utilities'
import { getBridgeHelper, getBridgeProxy, getAmbBridge } from '@/contracts'
import { EventsClass, GetAffirmationParams, GetRelayedMessageParams, SaveEventsParams } from './@types'
import { downloadEvents } from './load'
+export * from './batch'
class EventAggregator implements EventsClass {
public async getBackupedAddressFromPublicKey(publicKey: string) {
@ -31,19 +34,31 @@ class EventAggregator implements EventsClass {
storeName: `${IndexDBStores.ACCOUNT_EVENTS}_${chainId}`,
})
+const newEvents = []
if (cachedEvents?.length) {
const [latestEvent] = cachedEvents.slice(-numbers.ONE)
blockFrom = Number(latestEvent.blockNumber) + numbers.ONE
+} else {
+const downloadedEvents = await downloadEvents(`accounts_${chainId}.json`, blockFrom)
+if (downloadedEvents.events.length) {
+newEvents.push(...downloadedEvents.events)
+blockFrom = downloadedEvents.lastBlock
+}
+}
const { events: graphEvents, lastSyncBlock } = await getAllAccounts({ fromBlock: blockFrom, chainId })
-const [account] = graphEvents.filter((e: { key: string }) => e.key === publicKey)
+newEvents.push(...graphEvents)
+const [account] = newEvents.filter((e: { key: string }) => e.key === publicKey)
if (account) {
this.saveEvents({
chainId,
-events: graphEvents,
+events: newEvents,
storeName: IndexDBStores.ACCOUNT_EVENTS,
})
return account.owner
@ -66,7 +81,7 @@ class EventAggregator implements EventsClass {
}
})
-const newEvents = graphEvents.concat(accountEvents)
+newEvents.push(...accountEvents)
this.saveEvents({
chainId,
@ -74,7 +89,7 @@ class EventAggregator implements EventsClass {
storeName: IndexDBStores.ACCOUNT_EVENTS,
})
-const events = cachedEvents.concat(newEvents).filter((e: { key: string }) => e.key === publicKey)
+const events = newEvents.filter((e: { key: string }) => e.key === publicKey)
if (isEmpty(events)) {
return undefined
@ -85,6 +100,7 @@ class EventAggregator implements EventsClass {
return event.owner
} catch (err) {
console.log(err)
+return undefined
}
}
@ -111,19 +127,30 @@ class EventAggregator implements EventsClass {
storeName: `${IndexDBStores.ACCOUNT_EVENTS}_${chainId}`,
})
+const newEvents = []
if (cachedEvents?.length) {
const [latestEvent] = cachedEvents.slice(-numbers.ONE)
blockFrom = Number(latestEvent.blockNumber) + numbers.ONE
+} else {
+const downloadedEvents = await downloadEvents(`accounts_${chainId}.json`, blockFrom)
+if (downloadedEvents.events.length) {
+newEvents.push(...downloadedEvents.events)
+blockFrom = downloadedEvents.lastBlock
+}
+}
const { events: graphEvents, lastSyncBlock } = await getAllAccounts({ fromBlock: blockFrom, chainId })
+newEvents.push(...graphEvents)
-const [account] = graphEvents.filter((e: { owner: string }) => toChecksumAddress(e.owner) === toChecksumAddress(address))
+const [account] = newEvents.filter((e: { owner: string }) => toChecksumAddress(e.owner) === toChecksumAddress(address))
if (account) {
this.saveEvents({
chainId,
-events: graphEvents,
+events: newEvents,
storeName: IndexDBStores.ACCOUNT_EVENTS,
})
return account.key
@ -146,7 +173,7 @@ class EventAggregator implements EventsClass {
}
})
-const newEvents = graphEvents.concat(accountEvents)
+newEvents.push(...accountEvents)
this.saveEvents({
chainId,
@ -167,6 +194,7 @@ class EventAggregator implements EventsClass {
return event.key
} catch (err) {
console.log(err)
+return undefined
}
}

services/events/load.ts (new file, 49 lines)
View File

@ -0,0 +1,49 @@
import { unzip } from 'fflate'
export function unzipAsync(data: Uint8Array) {
return new Promise((res, rej) => {
unzip(data, {}, (err, data) => {
if (err) {
rej(err);
return;
}
res(data);
});
});
}
export async function downloadEvents(fileName: string, deployedBlock: number) {
fileName = fileName.toLowerCase()
// @ts-ignore
const prefix = __webpack_public_path__.slice(0, -7)
try {
const resp = await fetch(`${prefix}/${fileName}.zip`, {
method: 'GET',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
}
})
const arrayBuffer = await resp.arrayBuffer()
const { [fileName]: content } = (await unzipAsync(new Uint8Array(arrayBuffer))) as any
const events = JSON.parse(new TextDecoder().decode(content))
const lastBlock = events && Array.isArray(events) && events[events.length - 1]
? events[events.length - 1].blockNumber
: deployedBlock
return {
events,
lastBlock
}
} catch {
return {
events: [],
lastBlock: deployedBlock
}
}
}

View File

@ -12,3 +12,21 @@ export type Account = {
}
export type Accounts = Account[]
export type Commitment = {
index: string
commitment: string
blockNumber: string
encryptedOutput: string
transactionHash: string
}
export type Commitments = Commitment[]
export type Nullifier = {
nullifier: string
blockNumber: string
transactionHash: string
}
export type Nullifiers = Nullifier[]

View File

@ -5,8 +5,8 @@ import { ChainId } from '@/types'
import { numbers } from '@/constants'
import { isEmpty, toChecksumAddress } from '@/utilities'
-import { Params, Accounts } from './@types'
-import { _META, GET_ACCOUNTS, GET_REGISTERED } from './queries'
+import { Params, Accounts, Commitments, Nullifiers } from './@types'
+import { _META, GET_ACCOUNTS, GET_REGISTERED, GET_COMMITMENT, GET_NULLIFIER } from './queries'
const first = 1000
const breakLength = 900
@ -18,7 +18,8 @@ const link = (operation: Operation) => {
const CHAIN_GRAPH_URLS: { [chainId in ChainId]: string } = {
[ChainId.BSC]: 'https://api.thegraph.com/subgraphs/name/dan1kov/bsc-tornado-pool-subgraph',
-[ChainId.MAINNET]: 'https://api.thegraph.com/subgraphs/name/tornadocash/mainnet-tornado-pool-subgraph',
+[ChainId.MAINNET]: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/mainnet-tornado-pool-subgraph',
[ChainId.XDAI]: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/gnosis-tornado-nova-subgraph',
}
const client = new ApolloClient({
@ -27,7 +28,7 @@ const client = new ApolloClient({
})
const registryClient = new ApolloClient({
-uri: 'https://api.thegraph.com/subgraphs/name/tornadocash/tornado-relayer-registry',
+uri: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/tornado-relayer-registry',
cache: new InMemoryCache(),
})
@ -169,3 +170,166 @@ async function getMeta({ chainId }: Params) {
return undefined
}
}
export async function getCommitments({ fromBlock, chainId }: Params): Promise<{
results: Commitments,
lastSyncBlock: number
}> {
const { data } = await client.query({
context: {
chainId,
},
query: gql(GET_COMMITMENT),
variables: { first, fromBlock },
})
if (!data) {
// no data returned from the subgraph; avoid dereferencing it and report no sync block
return {
results: [],
lastSyncBlock: 0
}
}
return {
results: data.commitments,
lastSyncBlock: data._meta.block.number
}
}
export async function getAllCommitments({ fromBlock, chainId }: Params) {
try {
let commitments: Commitments = []
let lastSyncBlock
while (true) {
let { results, lastSyncBlock: lastBlock } = await getCommitments({ fromBlock, chainId })
lastSyncBlock = lastBlock
if (isEmpty(results)) {
break
}
if (results.length < breakLength) {
commitments = commitments.concat(results)
break
}
const [lastEvent] = results.slice(-numbers.ONE)
results = results.filter((e) => e.blockNumber !== lastEvent.blockNumber)
fromBlock = Number(lastEvent.blockNumber)
commitments = commitments.concat(results)
}
if (!commitments) {
return {
lastSyncBlock,
events: [],
}
}
const data = commitments
.map((e) => ({
index: Number(e.index),
commitment: e.commitment,
blockNumber: Number(e.blockNumber),
encryptedOutput: e.encryptedOutput,
transactionHash: e.transactionHash
}))
.sort((a, b) => a.index - b.index)
const [lastEvent] = data.slice(-numbers.ONE)
return {
events: data,
lastSyncBlock: lastEvent?.blockNumber > lastSyncBlock ? lastEvent.blockNumber + numbers.ONE : lastSyncBlock,
}
} catch {
return {
lastSyncBlock: '',
events: [],
}
}
}
export async function getNullifiers({ fromBlock, chainId }: Params): Promise<{
results: Nullifiers,
lastSyncBlock: number
}> {
const { data } = await client.query({
context: {
chainId,
},
query: gql(GET_NULLIFIER),
variables: { first, fromBlock },
})
if (!data) {
// no data returned from the subgraph; avoid dereferencing it and report no sync block
return {
results: [],
lastSyncBlock: 0
}
}
return {
results: data.nullifiers,
lastSyncBlock: data._meta.block.number
}
}
export async function getAllNullifiers({ fromBlock, chainId }: Params) {
try {
let nullifiers: Nullifiers = []
let lastSyncBlock
while (true) {
let { results, lastSyncBlock: lastBlock } = await getNullifiers({ fromBlock, chainId })
lastSyncBlock = lastBlock
if (isEmpty(results)) {
break
}
if (results.length < breakLength) {
nullifiers = nullifiers.concat(results)
break
}
const [lastEvent] = results.slice(-numbers.ONE)
results = results.filter((e) => e.blockNumber !== lastEvent.blockNumber)
fromBlock = Number(lastEvent.blockNumber)
nullifiers = nullifiers.concat(results)
}
if (!nullifiers) {
return {
lastSyncBlock,
events: [],
}
}
const data = nullifiers.map((e) => ({
nullifier: e.nullifier,
blockNumber: Number(e.blockNumber),
transactionHash: e.transactionHash
}))
const [lastEvent] = data.slice(-numbers.ONE)
return {
events: data,
lastSyncBlock: lastEvent?.blockNumber > lastSyncBlock ? lastEvent.blockNumber + numbers.ONE : lastSyncBlock,
}
} catch {
return {
lastSyncBlock: '',
events: [],
}
}
}

View File

@ -33,3 +33,41 @@ export const GET_REGISTERED = `
}
}
`
export const GET_COMMITMENT = `
query getCommitment($first: Int, $fromBlock: Int) {
commitments(first: $first, orderBy: blockNumber, orderDirection: asc, where: {
blockNumber_gte: $fromBlock
}) {
index
commitment
blockNumber
encryptedOutput
transactionHash
}
_meta {
block {
number
}
hasIndexingErrors
}
}
`
export const GET_NULLIFIER = `
query getNullifier($first: Int, $fromBlock: Int) {
nullifiers(first: $first, orderBy: blockNumber, orderDirection: asc, where: {
blockNumber_gte: $fromBlock
}) {
nullifier
blockNumber
transactionHash
}
_meta {
block {
number
}
hasIndexingErrors
}
}
`

View File

@ -7,12 +7,6 @@ import { CommitmentEvents, NullifierEvents } from '@/services/events/@types'
import { EventsPayload, DecryptedEvents, GetEventsFromTxHashParams } from './@types'
-import '@/assets/events.worker.js'
-import '@/assets/nullifier.worker.js'
-// import NWorker from '@/assets/nullifier.worker.js'
-// import EWorker from '@/assets/events.worker.js'
export interface WorkerProvider {
workerSetup: (chainId: ChainId) => void
getCommitmentEvents: () => Promise<CommitmentEvents>
@ -41,13 +35,8 @@ class Provider implements WorkerProvider {
const basePath = `${window.location.origin}${ipfsPathPrefix}`
-this.nullifierWorkers = new Array(CORES).fill('').map(() => new Worker(`${basePath}/_nuxt/workers/nullifier.worker.js`))
-this.eventsWorkers = new Array(CORES).fill('').map(() => new Worker(`${basePath}/_nuxt/workers/events.worker.js`))
-// // @ts-expect-error
-// this.nullifierWorkers = new Array(CORES).fill('').map(() => new NWorker())
-// // @ts-expect-error
-// this.eventsWorkers = new Array(CORES).fill('').map(() => new EWorker())
+this.nullifierWorkers = new Array(CORES).fill('').map(() => new Worker(`${basePath}/nullifier.worker.js`))
+this.eventsWorkers = new Array(CORES).fill('').map(() => new Worker(`${basePath}/events.worker.js`))
}
public workerSetup = (chainId: ChainId) => {

static/accounts_1.json.zip (new binary file, not shown)

Binary file not shown.

Binary file not shown.

View File

@ -19,17 +19,6 @@ export const actions: ActionTree<WalletState, RootState> = {
}
},
-async checkSanction({ getters }, address) {
-const contract = getSanctionList(getters.dependencies.l1ChainId)
-const isSanctioned = await contract.callStatic.isSanctioned(address)
-if (isSanctioned) {
-window.onbeforeunload = null
-// ToDo add type
-// @ts-expect-error
-window.location = 'https://twitter.com/TornadoCash/status/1514904975037669386'
-}
-},
checkAppNetwork({ commit }, network) {
try {
// TODO create a selector for active network

webpack.config.js (new file, 53 lines)
View File

@ -0,0 +1,53 @@
import path from 'path'
import webpack from 'webpack'
export default [
{
mode: 'production',
entry: './assets/events.worker.js',
output: {
path: path.resolve('static'),
filename: 'events.worker.js',
}
},
{
mode: 'production',
entry: './assets/nullifier.worker.js',
output: {
path: path.resolve('static'),
filename: 'nullifier.worker.js',
}
},
{
mode: 'production',
entry: './assets/syncEvents.js',
output: {
path: path.resolve('.'),
filename: 'syncEvents.cjs',
},
target: 'node',
plugins: [
new webpack.BannerPlugin({
banner: '#!/usr/bin/env node\n',
raw: true
})
],
module: {
rules: [
{
test: /\.mjs$/,
include: /node_modules/,
type: 'javascript/auto'
}
]
},
resolve: {
alias: {
'fflate': 'fflate/esm'
}
},
optimization: {
minimize: false,
}
}
]
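Because the config exports an array, all three bundles (the two browser workers plus the Node-targeted `syncEvents.cjs` CLI) build in one pass. A minimal driver using webpack's Node API, assuming it is saved as a separate script outside this PR; invoking `webpack-cli` against the config works equally well:

```js
// build.js — illustrative driver for the multi-config build above.
import webpack from 'webpack'
import configs from './webpack.config.js'

webpack(configs, (err, stats) => {
  // Fail loudly on fatal errors or compilation errors in any of the bundles.
  if (err || stats.hasErrors()) {
    console.error(err || stats.toString('errors-only'))
    process.exit(1)
  }
  console.log(stats.toString({ colors: true }))
})
```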

yarn.lock
View File

@ -1320,7 +1320,7 @@
resolved "https://registry.yarnpkg.com/@csstools/selector-specificity/-/selector-specificity-2.2.0.tgz#2cbcf822bf3764c9658c4d2e568bd0c0cb748016"
integrity "sha1-LLz4Ir83ZMlljE0uVovQwMt0gBY= sha512-+OJ9konv95ClSTOJCmMZqpd5+YGsB2S+x6w3E1oaM8UuR5j8nTNHYSz8c9BEPGDOCMQYIEEGlVPj/VY64iTbGw=="
"@discoveryjs/json-ext@0.5.7":
"@discoveryjs/json-ext@0.5.7", "@discoveryjs/json-ext@^0.5.0":
version "0.5.7"
resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70"
integrity "sha1-HVcr+74Ut3BOC6Dzm3SBW4SHDXA= sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw=="
@ -3040,6 +3040,23 @@
"@webassemblyjs/wast-parser" "1.9.0"
"@xtuc/long" "4.2.2"
"@webpack-cli/configtest@^1.2.0":
version "1.2.0"
resolved "https://registry.yarnpkg.com/@webpack-cli/configtest/-/configtest-1.2.0.tgz#7b20ce1c12533912c3b217ea68262365fa29a6f5"
integrity sha512-4FB8Tj6xyVkyqjj1OaTqCjXYULB9FMkqQ8yGrZjRDrYh0nOE+7Lhs45WioWQQMV+ceFlE368Ukhe6xdvJM9Egg==
"@webpack-cli/info@^1.5.0":
version "1.5.0"
resolved "https://registry.yarnpkg.com/@webpack-cli/info/-/info-1.5.0.tgz#6c78c13c5874852d6e2dd17f08a41f3fe4c261b1"
integrity sha512-e8tSXZpw2hPl2uMJY6fsMswaok5FdlGNRTktvFk2sD8RjH0hE2+XistawJx1vmKteh4NmGmNUrp+Tb2w+udPcQ==
dependencies:
envinfo "^7.7.3"
"@webpack-cli/serve@^1.7.0":
version "1.7.0"
resolved "https://registry.yarnpkg.com/@webpack-cli/serve/-/serve-1.7.0.tgz#e1993689ac42d2b16e9194376cfb6753f6254db1"
integrity sha512-oxnCNGj88fL+xzV+dacXs44HcDwf1ovs3AuEzvP7mqXw7fQntqIhQ1BRmynh4qEKQSSSRSWVyXRjmTbZIX9V2Q==
"@wry/context@^0.7.0":
version "0.7.3"
resolved "https://registry.yarnpkg.com/@wry/context/-/context-0.7.3.tgz#240f6dfd4db5ef54f81f6597f6714e58d4f476a1"
@ -4317,6 +4334,15 @@ cliui@^6.0.0:
strip-ansi "^6.0.0"
wrap-ansi "^6.2.0"
clone-deep@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387"
integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==
dependencies:
is-plain-object "^2.0.4"
kind-of "^6.0.2"
shallow-clone "^3.0.0"
clone-response@^1.0.2:
version "1.0.3"
resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.3.tgz#af2032aa47816399cf5f0a1d0db902f517abb8c3"
@ -4371,7 +4397,7 @@ colord@^2.9.1:
resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43"
integrity "sha1-T4zpGd5Fbx1cHDaMMH/iDz5Z+0M= sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw=="
colorette@^2.0.10:
colorette@^2.0.10, colorette@^2.0.14:
version "2.0.20"
resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a"
integrity "sha1-nreT5oMwZ/cjWQL807CZF6AAqVo= sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="
@ -4407,7 +4433,7 @@ commander@^5.1.0:
resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae"
integrity "sha1-Rqu9FlL44Fm92u+Zu9yyrZzxea4= sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg=="
commander@^7.2.0:
commander@^7.0.0, commander@^7.2.0:
version "7.2.0"
resolved "https://registry.yarnpkg.com/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7"
integrity "sha1-o2y1fQtQHOEI5NIFWaFQo5HZerc= sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw=="
@ -5108,11 +5134,6 @@ detect-indent@^5.0.0:
resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-5.0.0.tgz#3871cc0a6a002e8c3e5b3cf7f336264675f06b9d"
integrity "sha1-OHHMCmoALow+Wzz38zYmRnXwa50= sha512-rlpvsxUtM0PQvy9iZe640/IWwWYyBsTApREbA1pHOpmOUIl9MkP/U4z7vTtg4Oaojvqhxt7sdufnT0EzGaR31g=="
detect-libc@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
integrity "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups= sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg=="
devalue@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/devalue/-/devalue-2.0.1.tgz#5d368f9adc0928e47b77eea53ca60d2f346f9762"
@ -5364,6 +5385,11 @@ entities@^4.2.0:
resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48"
integrity "sha1-XSaOpecRPsdMTQM7eepaNaSI+0g= sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="
envinfo@^7.7.3:
version "7.13.0"
resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.13.0.tgz#81fbb81e5da35d74e814941aeab7c325a606fb31"
integrity sha512-cvcaMr7KqXVh4nyzGTVqTum+gAiL265x5jUWQIDLq//zOGbW+gSW/C+OWLleY/rs9Qole6AZLMXPbtIFQbqu+Q==
errno@^0.1.3, errno@~0.1.7:
version "0.1.8"
resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.8.tgz#8bb3e9c7d463be4976ff888f76b4809ebc2e811f"
@ -6212,6 +6238,11 @@ fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6:
resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
integrity "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="
fastest-levenshtein@^1.0.12:
version "1.0.16"
resolved "https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz#210e61b6ff181de91ea9b3d1b84fdedd47e034e5"
integrity sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==
fastfile@0.0.18:
version "0.0.18"
resolved "https://registry.yarnpkg.com/fastfile/-/fastfile-0.0.18.tgz#2b69bbbfd2fcccc9bc8099c27de1379b89756a4b"
@ -6291,6 +6322,11 @@ ffjavascript@^0.2.48:
wasmcurves "0.2.2"
web-worker "^1.2.0"
fflate@^0.8.2:
version "0.8.2"
resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.8.2.tgz#fc8631f5347812ad6028bbe4a2308b2792aa1dea"
integrity sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==
ffwasm@0.0.7:
version "0.0.7"
resolved "https://registry.yarnpkg.com/ffwasm/-/ffwasm-0.0.7.tgz#23bb9a3537ecc87c0f24fcfb3a9ddd0e86855fff"
@ -6299,13 +6335,6 @@ ffwasm@0.0.7:
big-integer "^1.6.48"
wasmbuilder "0.0.10"
fibers@^5.0.0:
version "5.0.3"
resolved "https://registry.yarnpkg.com/fibers/-/fibers-5.0.3.tgz#2fd03acb255db66fe693d15beafbf5ae92193fd7"
integrity "sha1-L9A6yyVdtm/mk9Fb6vv1rpIZP9c= sha512-/qYTSoZydQkM21qZpGLDLuCq8c+B8KhuCQ1kLPvnRNhxhVbvrpmH9l2+Lblf5neDuEsY4bfT7LeO553TXQDvJw=="
dependencies:
detect-libc "^1.0.3"
figgy-pudding@^3.5.1:
version "3.5.2"
resolved "https://registry.yarnpkg.com/figgy-pudding/-/figgy-pudding-3.5.2.tgz#b4eee8148abb01dcf1d1ac34367d59e12fa61d6e"
@ -6702,6 +6731,11 @@ function-bind@^1.1.1:
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
integrity "sha1-pWiZ0+o8m6uHS7l3O3xe3pL0iV0= sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
function-bind@^1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c"
integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==
function.prototype.name@^1.1.5:
version "1.1.5"
resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621"
@ -7121,6 +7155,13 @@ hash.js@1.1.7, hash.js@^1.0.0, hash.js@^1.0.3, hash.js@^1.1.7:
inherits "^2.0.3"
minimalistic-assert "^1.0.1"
hasown@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003"
integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==
dependencies:
function-bind "^1.1.2"
he@1.2.0, he@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
@ -7326,6 +7367,14 @@ import-fresh@^3.0.0, import-fresh@^3.1.0, import-fresh@^3.2.1:
parent-module "^1.0.0"
resolve-from "^4.0.0"
import-local@^3.0.2:
version "3.1.0"
resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4"
integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==
dependencies:
pkg-dir "^4.2.0"
resolve-cwd "^3.0.0"
imurmurhash@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
@ -7416,6 +7465,11 @@ internal-slot@^1.0.5:
has "^1.0.3"
side-channel "^1.0.4"
interpret@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9"
integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==
invert-kv@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02"
@ -7513,6 +7567,13 @@ is-core-module@^2.11.0:
dependencies:
has "^1.0.3"
is-core-module@^2.13.0:
version "2.13.1"
resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384"
integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==
dependencies:
hasown "^2.0.0"
is-data-descriptor@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56"
@ -9493,7 +9554,7 @@ pkg-dir@^3.0.0:
dependencies:
find-up "^3.0.0"
pkg-dir@^4.1.0:
pkg-dir@^4.1.0, pkg-dir@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3"
integrity "sha1-8JkTPfft5CLoHR2ESCcO6z5CYfM= sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ=="
@ -10601,6 +10662,13 @@ readline@^1.3.0:
resolved "https://registry.yarnpkg.com/readline/-/readline-1.3.0.tgz#c580d77ef2cfc8752b132498060dc9793a7ac01c"
integrity sha512-k2d6ACCkiNYz222Fs/iNze30rRJ1iIicW7JuX/7/cozvih6YCkFZH+J6mAFDVgv0dRBaAyr4jDqC95R2y4IADg==
rechoir@^0.7.0:
version "0.7.1"
resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.7.1.tgz#9478a96a1ca135b5e88fc027f03ee92d6c645686"
integrity sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg==
dependencies:
resolve "^1.9.0"
regenerate-unicode-properties@^10.1.0:
version "10.1.0"
resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.1.0.tgz#7c3192cab6dd24e21cb4461e5ddd7dd24fa8374c"
@ -10758,11 +10826,23 @@ resolve-alpn@^1.0.0, resolve-alpn@^1.2.0:
resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz#b7adbdac3546aaaec20b45e7d8265927072726f9"
integrity "sha1-t629rDVGqq7CC0Xn2CZZJwcnJvk= sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g=="
resolve-cwd@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d"
integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==
dependencies:
resolve-from "^5.0.0"
resolve-from@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
integrity "sha1-SrzYUq0y3Xuqv+m0DgCjbbXzkuY= sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="
resolve-from@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69"
integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==
resolve-url@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a"
@ -10777,6 +10857,15 @@ resolve@^1.1.7, resolve@^1.10.1, resolve@^1.14.2, resolve@^1.22.0, resolve@^1.22
path-parse "^1.0.7"
supports-preserve-symlinks-flag "^1.0.0"
resolve@^1.9.0:
version "1.22.8"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d"
integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==
dependencies:
is-core-module "^2.13.0"
path-parse "^1.0.7"
supports-preserve-symlinks-flag "^1.0.0"
response-iterator@^0.2.6:
version "0.2.6"
resolved "https://registry.yarnpkg.com/response-iterator/-/response-iterator-0.2.6.tgz#249005fb14d2e4eeb478a3f735a28fd8b4c9f3da"
@ -11149,6 +11238,13 @@ sha.js@^2.4.0, sha.js@^2.4.8:
inherits "^2.0.1"
safe-buffer "^5.0.1"
shallow-clone@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3"
integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==
dependencies:
kind-of "^6.0.2"
shebang-command@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
@ -12948,6 +13044,24 @@ webpack-bundle-analyzer@^4.9.0:
sirv "^1.0.7"
ws "^7.3.1"
webpack-cli@^4.10.0:
version "4.10.0"
resolved "https://registry.yarnpkg.com/webpack-cli/-/webpack-cli-4.10.0.tgz#37c1d69c8d85214c5a65e589378f53aec64dab31"
integrity sha512-NLhDfH/h4O6UOy+0LSso42xvYypClINuMNBVVzX4vX98TmTaTUxwRbXdhucbFMd2qLaCTcLq/PdYrvi8onw90w==
dependencies:
"@discoveryjs/json-ext" "^0.5.0"
"@webpack-cli/configtest" "^1.2.0"
"@webpack-cli/info" "^1.5.0"
"@webpack-cli/serve" "^1.7.0"
colorette "^2.0.14"
commander "^7.0.0"
cross-spawn "^7.0.3"
fastest-levenshtein "^1.0.12"
import-local "^3.0.2"
interpret "^2.2.0"
rechoir "^0.7.0"
webpack-merge "^5.7.3"
webpack-dev-middleware@^5.3.3:
version "5.3.3"
resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz#efae67c2793908e7311f1d9b06f2a08dcc97e51f"
@ -12968,6 +13082,15 @@ webpack-hot-middleware@^2.25.3:
html-entities "^2.1.0"
strip-ansi "^6.0.0"
webpack-merge@^5.7.3:
version "5.10.0"
resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.10.0.tgz#a3ad5d773241e9c682803abf628d4cd62b8a4177"
integrity sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==
dependencies:
clone-deep "^4.0.1"
flat "^5.0.2"
wildcard "^2.0.0"
webpack-node-externals@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/webpack-node-externals/-/webpack-node-externals-3.0.0.tgz#1a3407c158d547a9feb4229a9e3385b7b60c9917"
@ -13089,6 +13212,11 @@ widest-line@^3.1.0:
dependencies:
string-width "^4.0.0"
wildcard@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.1.tgz#5ab10d02487198954836b6349f74fff961e10f67"
integrity sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==
word-wrap@~1.2.3:
version "1.2.3"
resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"