Create Events Cache

Tornado Contrib 2024-05-08 16:55:36 +00:00
parent d86d3ff125
commit 9414b751f7
Signed by: tornadocontrib
GPG Key ID: 60B4DF1A076C64B1
22 changed files with 65243 additions and 134 deletions

@@ -1,18 +1,16 @@
-/* eslint-disable @typescript-eslint/no-require-imports */
-const { AES, HmacSHA256, enc } = require('crypto-js')
-const { isEmpty } = require('lodash')
-const { BigNumber, Contract } = require('ethers')
-const { poseidon } = require('@tornado/circomlib')
-const { decrypt } = require('eth-sig-util')
-const { IndexedDB } = require('./services/idb')
-const { BatchEventsService } = require('./services/batch')
-const { getAllCommitments } = require('./services/graph')
-const { ExtendedProvider } = require('./services/provider')
-const { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST, workerEvents, numbers } = require('./services/constants')
-const { sleep } = require('./services/utilities')
-const { poolAbi } = require('./services/pool')
+import { AES, HmacSHA256, enc } from 'crypto-js'
+import { isEmpty } from 'lodash'
+import { BigNumber, Contract } from 'ethers'
+import { poseidon } from '@tornado/circomlib'
+import { decrypt } from 'eth-sig-util'
+import { IndexedDB } from './services/idb'
+import { BatchEventsService } from './services/batch'
+import { getAllCommitments } from './services/graph'
+import { ExtendedProvider } from './services/provider'
+import { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST, workerEvents, numbers } from './services/constants'
+import { sleep } from './services/utilities'
+import { poolAbi } from './services/pool'
 const getProviderWithSigner = (chainId) => {
   return new ExtendedProvider(RPC_LIST[chainId], chainId, FALLBACK_RPC_LIST[chainId])
@@ -103,7 +101,7 @@ const getCommitmentBatch = async ({ blockFrom, blockTo, cachedEvents, withCache
     })
     events.push(...graphEvents)
-    blockFrom = lastSyncBlock + numbers.ONE
+    blockFrom = lastSyncBlock
   }
   if (!blockTo || blockTo > blockFrom) {

@@ -1,14 +1,13 @@
-/* eslint-disable @typescript-eslint/no-require-imports */
-const { isEmpty } = require('lodash')
-const { BigNumber, Contract } = require('ethers')
-const { IndexedDB } = require('./services/idb')
-const { BatchEventsService } = require('./services/batch')
-const { getAllNullifiers } = require('./services/graph')
-const { ExtendedProvider } = require('./services/provider')
-const { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST, workerEvents, numbers } = require('./services/constants')
-const { sleep } = require('./services/utilities')
-const { poolAbi } = require('./services/pool')
+import { isEmpty } from 'lodash'
+import { BigNumber, Contract } from 'ethers'
+import { IndexedDB } from './services/idb'
+import { BatchEventsService } from './services/batch'
+import { getAllNullifiers } from './services/graph'
+import { ExtendedProvider } from './services/provider'
+import { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST, workerEvents, numbers } from './services/constants'
+import { sleep } from './services/utilities'
+import { poolAbi } from './services/pool'
 const getProviderWithSigner = (chainId) => {
   return new ExtendedProvider(RPC_LIST[chainId], chainId, FALLBACK_RPC_LIST[chainId])
@@ -138,7 +137,7 @@ const getNullifiers = async (blockFrom) => {
     })
     events.push(...graphEvents)
-    blockFrom = lastSyncBlock + numbers.ONE
+    blockFrom = lastSyncBlock
   }
   let nodeEvents = await self.BatchEventsService.getBatchEvents({

@@ -1,6 +1,6 @@
-const { sleep, getBatches } = require('./utilities')
+import { sleep, getBatches } from './utilities'
-class BatchEventsService {
+export class BatchEventsService {
   constructor({
     provider,
     contract,
@@ -83,5 +83,3 @@ class BatchEventsService {
     return events;
   }
 }
-
-module.exports = { BatchEventsService }

@@ -0,0 +1,237 @@
export const bridgeAbi = [
{
inputs: [
{
internalType: "contract IOmnibridge",
name: "_bridge",
type: "address",
},
{
internalType: "contract IWETH",
name: "_weth",
type: "address",
},
{
internalType: "address",
name: "_owner",
type: "address",
},
],
stateMutability: "nonpayable",
type: "constructor",
},
{
anonymous: false,
inputs: [
{
indexed: true,
internalType: "address",
name: "owner",
type: "address",
},
{
indexed: false,
internalType: "bytes",
name: "key",
type: "bytes",
},
],
name: "PublicKey",
type: "event",
},
{
inputs: [],
name: "WETH",
outputs: [
{
internalType: "contract IWETH",
name: "",
type: "address",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [],
name: "bridge",
outputs: [
{
internalType: "contract IOmnibridge",
name: "",
type: "address",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_token",
type: "address",
},
{
internalType: "address",
name: "_to",
type: "address",
},
],
name: "claimTokens",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_token",
type: "address",
},
{
internalType: "uint256",
name: "_value",
type: "uint256",
},
{
internalType: "bytes",
name: "_data",
type: "bytes",
},
],
name: "onTokenBridged",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [],
name: "owner",
outputs: [
{
internalType: "address",
name: "",
type: "address",
},
],
stateMutability: "view",
type: "function",
},
{
inputs: [
{
components: [
{
internalType: "address",
name: "owner",
type: "address",
},
{
internalType: "bytes",
name: "publicKey",
type: "bytes",
},
],
internalType: "struct L1Helper.Account",
name: "_account",
type: "tuple",
},
],
name: "register",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_newOwner",
type: "address",
},
],
name: "transferOwnership",
outputs: [],
stateMutability: "nonpayable",
type: "function",
},
{
inputs: [],
name: "wrapAndRelayTokens",
outputs: [],
stateMutability: "payable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_receiver",
type: "address",
},
{
internalType: "bytes",
name: "_data",
type: "bytes",
},
],
name: "wrapAndRelayTokens",
outputs: [],
stateMutability: "payable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_receiver",
type: "address",
},
{
internalType: "bytes",
name: "_data",
type: "bytes",
},
{
components: [
{
internalType: "address",
name: "owner",
type: "address",
},
{
internalType: "bytes",
name: "publicKey",
type: "bytes",
},
],
internalType: "struct L1Helper.Account",
name: "_account",
type: "tuple",
},
],
name: "wrapAndRelayTokens",
outputs: [],
stateMutability: "payable",
type: "function",
},
{
inputs: [
{
internalType: "address",
name: "_receiver",
type: "address",
},
],
name: "wrapAndRelayTokens",
outputs: [],
stateMutability: "payable",
type: "function",
},
{
stateMutability: "payable",
type: "receive",
},
]

@@ -1,40 +1,40 @@
-const BSC_CHAIN_ID = 56
-const XDAI_CHAIN_ID = 100
-const MAINNET_CHAIN_ID = 1
+export const BSC_CHAIN_ID = 56
+export const XDAI_CHAIN_ID = 100
+export const MAINNET_CHAIN_ID = 1
-const ChainId = {
+export const ChainId = {
   BSC: BSC_CHAIN_ID,
   XDAI: XDAI_CHAIN_ID,
   MAINNET: MAINNET_CHAIN_ID,
 }
-const OFFCHAIN_ORACLE_CONTRACT = '0x07D91f5fb9Bf7798734C3f606dB065549F6893bb'
+export const OFFCHAIN_ORACLE_CONTRACT = '0x07D91f5fb9Bf7798734C3f606dB065549F6893bb'
-const POOL_CONTRACT = {
+export const POOL_CONTRACT = {
   [ChainId.XDAI]: '0xD692Fd2D0b2Fbd2e52CFa5B5b9424bC981C30696', // ETH
   // [ChainId.XDAI]: '0x772F007F13604ac286312C85b9Cd9B2D691B353E', // BNB
 }
-const REDGISTRY_CONTRACT = {
+export const REDGISTRY_CONTRACT = {
   [ChainId.MAINNET]: '0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2',
 }
-const AGGREGATOR_FACTORY = {
+export const AGGREGATOR_FACTORY = {
   [ChainId.MAINNET]: '0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49',
 }
-const WRAPPED_TOKEN = {
+export const WRAPPED_TOKEN = {
   [ChainId.MAINNET]: '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2', // WETH on mainnet
   [ChainId.XDAI]: '0x6A023CCd1ff6F2045C3309768eAd9E68F978f6e1', // WETH on xdai
   [ChainId.BSC]: '0xCa8d20f3e0144a72C6B5d576e9Bd3Fd8557E2B04', // WBNB on xdai
 }
-const RPC_LIST = {
+export const RPC_LIST = {
   [ChainId.BSC]: 'https://tornadocash-rpc.com/bsc',
   [ChainId.MAINNET]: 'https://tornadocash-rpc.com/mainnet',
   [ChainId.XDAI]: 'https://tornadocash-rpc.com/gnosis',
 }
-const FALLBACK_RPC_LIST = {
+export const FALLBACK_RPC_LIST = {
   [ChainId.BSC]: [
     'https://binance.nodereal.io',
     // 'https://rpc.ankr.com/bsc/dbe08b852ba176a8aeac783cc1fa8becaf4f107235dfdae79241063fbf52ca4a',
@@ -49,52 +49,98 @@ const FALLBACK_RPC_LIST = {
   ],
 }
-const RPC_WS_LIST = {
+export const RPC_WS_LIST = {
   [ChainId.MAINNET]: 'wss://mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607',
   [ChainId.BSC]: 'wss://bsc-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607',
   [ChainId.XDAI]: 'wss://gnosis-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607',
 }
-const MULTICALL = {
+export const MULTICALL = {
   [ChainId.BSC]: '0xf072f255A3324198C7F653237B44E1C4e66f8C42',
   [ChainId.XDAI]: '0x8677b93D543d0217B32B8FDc20F2316E138D619B',
   [ChainId.MAINNET]: '0x1F98415757620B543A52E61c46B32eB19261F984',
 }
-const BRIDGE_PROXY = {
+export const BRIDGE_PROXY = {
   [ChainId.BSC]: '0x05185872898b6f94AA600177EF41B9334B1FA48B',
   [ChainId.MAINNET]: '0x4c36d2919e407f0cc2ee3c993ccf8ac26d9ce64e',
 }
-const AMB_BRIDGE = {
+export const AMB_BRIDGE = {
   [ChainId.XDAI]: '0x75Df5AF045d91108662D8080fD1FEFAd6aA0bb59', // ETH
   // [ChainId.XDAI]: '0x162E898bD0aacB578C8D5F8d6ca588c13d2A383F', // BNB
   [ChainId.MAINNET]: '0x162E898bD0aacB578C8D5F8d6ca588c13d2A383F',
 }
-const BRIDGE_HELPER = {
+export const BRIDGE_HELPER = {
   [ChainId.MAINNET]: '0xCa0840578f57fE71599D29375e16783424023357',
   [ChainId.BSC]: '0x8845F740F8B01bC7D9A4C82a6fD4A60320c07AF1',
 }
-const BRIDGE_FEE_MANAGER = {
+export const BRIDGE_FEE_MANAGER = {
   [ChainId.XDAI]: '0x5dbC897aEf6B18394D845A922BF107FA98E3AC55',
 }
-const FOREIGN_OMNIBRIDGE = {
+export const FOREIGN_OMNIBRIDGE = {
   [ChainId.MAINNET]: '0x88ad09518695c6c3712AC10a214bE5109a655671',
 }
-const OMNIBRIDGE = {
+export const OMNIBRIDGE = {
   [ChainId.XDAI]: '0xf6A78083ca3e2a662D6dd1703c939c8aCE2e268d',
 }
-const SANCTION_LIST = {
+export const SANCTION_LIST = {
   [ChainId.MAINNET]: '0x40C57923924B5c5c5455c48D93317139ADDaC8fb',
 }
+export const CHAINS = {
+  [ChainId.XDAI]: {
+    symbol: 'XDAI',
+    name: 'xdai',
+    shortName: 'xdai',
+    icon: 'ethereum',
+    network: 'XDAI',
+    blockDuration: 3000, // ms
+    deployBlock: 19097755, // ETH
+    // deployBlock: 20446605, // BNB
+    blockGasLimit: 144000000, // rpc block gas limit
+    hexChainId: '0x64',
+    isEipSupported: false,
+    ensSubdomainKey: 'gnosis-nova',
+    blockExplorerUrl: 'https://gnosisscan.io'
+  },
+  [ChainId.MAINNET]: {
+    symbol: 'ETH',
+    name: 'ethereum',
+    shortName: 'eth',
+    icon: 'ethereum',
+    network: 'Mainnet',
+    deployBlock: 13494216,
+    blockDuration: 15000,
+    blockGasLimit: 144000000,
+    hexChainId: '0x1',
+    isEipSupported: true,
+    ensSubdomainKey: 'mainnet-tornado',
+    blockExplorerUrl: 'https://etherscan.io'
+  },
+  [ChainId.BSC]: {
+    symbol: 'BNB',
+    name: 'bsc',
+    shortName: 'bsc',
+    icon: 'binance',
+    network: 'BSC',
+    deployBlock: 14931075,
+    blockDuration: 3000,
+    blockGasLimit: 144000000,
+    hexChainId: '0x38',
+    isEipSupported: false,
+    ensSubdomainKey: 'bsc-tornado',
+    blockExplorerUrl: 'https://bscscan.com'
+  },
+}
-const workerEvents = {
+export const workerEvents = {
   INIT_WORKER: 'initWorker',
   GET_COMMITMENT_EVENTS: 'get_commitment_events',
   // nullifier
@@ -112,7 +158,7 @@ const workerEvents = {
   SAVE_LAST_SYNC_BLOCK: 'save_last_sync_block',
 }
-const numbers = {
+export const numbers = {
   ZERO: 0,
   TWO: 2,
   ONE: 1,
@@ -129,12 +175,3 @@ const numbers = {
   MIN_BLOCKS_INTERVAL_LINE: 200000,
   EPHEM_PUBLIC_KEY_BUF_LENGTH: 56,
 }
-
-module.exports = {
-  ChainId,
-  POOL_CONTRACT,
-  RPC_LIST,
-  FALLBACK_RPC_LIST,
-  workerEvents,
-  numbers
-}

@@ -1,8 +1,11 @@
-const { isEmpty } = require('lodash')
-const { ApolloClient, InMemoryCache, gql } = require('@apollo/client/core')
-const { GET_COMMITMENT, GET_NULLIFIER } = require('./queries')
-const { ChainId, numbers } = require('../constants')
+import { isEmpty } from 'lodash'
+import { ApolloClient, InMemoryCache, gql } from '@apollo/client/core'
+import { utils } from 'ethers'
+import { GET_ACCOUNTS, GET_COMMITMENT, GET_NULLIFIER } from './queries'
+import { ChainId, numbers } from '../constants'
+const { getAddress } = utils
 const first = 1000
 const breakLength = 900
@@ -23,7 +26,91 @@ const client = new ApolloClient({
   cache: new InMemoryCache(),
 })
-async function getCommitments({ fromBlock, chainId }) {
+export async function getAccounts({ fromBlock, chainId }) {
+  const { data } = await client.query({
+    context: {
+      chainId,
+    },
+    query: gql(GET_ACCOUNTS),
+    variables: { first, fromBlock },
+  })
+  if (!data) {
+    return {
+      results: [],
+      lastSyncBlock: data._meta.block.number
+    }
+  }
+  return {
+    results: data.accounts,
+    lastSyncBlock: data._meta.block.number
+  }
+}
+export async function getAllAccounts({ fromBlock, toBlock, chainId }) {
+  try {
+    let accounts = []
+    let lastSyncBlock
+    while (true) {
+      let { results, lastSyncBlock: lastBlock } = await getAccounts({ fromBlock, chainId })
+      lastSyncBlock = lastBlock
+      if (isEmpty(results)) {
+        break
+      }
+      if (results.length < breakLength) {
+        accounts = accounts.concat(results)
+        break
+      }
+      const [lastEvent] = results.slice(-numbers.ONE)
+      results = results.filter((e) => e.blockNumber !== lastEvent.blockNumber)
+      fromBlock = Number(lastEvent.blockNumber)
+      accounts = accounts.concat(results)
+      if (toBlock && fromBlock >= Number(toBlock)) {
+        break
+      }
+    }
+    if (!accounts) {
+      return {
+        lastSyncBlock,
+        events: [],
+      }
+    }
+    const data = accounts.map((e) => ({
+      key: e.key,
+      owner: getAddress(e.owner),
+      blockNumber: Number(e.blockNumber),
+    }))
+    const [lastEvent] = data.slice(-numbers.ONE)
+    return {
+      events: data,
+      lastSyncBlock: (lastEvent && lastEvent.blockNumber >= lastSyncBlock)
+        ? lastEvent.blockNumber + numbers.ONE
+        : lastSyncBlock,
+    }
+  } catch (err) {
+    console.log('Error from getAllAccounts')
+    console.log(err)
+    return {
+      lastSyncBlock: '',
+      events: [],
+    }
+  }
+}
+export async function getCommitments({ fromBlock, chainId }) {
   const { data } = await client.query({
     context: {
       chainId,
@@ -45,7 +132,7 @@ async function getCommitments({ fromBlock, chainId }) {
   }
 }
-async function getAllCommitments({ fromBlock, toBlock, chainId }) {
+export async function getAllCommitments({ fromBlock, toBlock, chainId }) {
   try {
     let commitments = []
     let lastSyncBlock
@@ -84,18 +171,18 @@ async function getAllCommitments({ fromBlock, toBlock, chainId }) {
     }
     const data = commitments.map((e) => ({
+      blockNumber: Number(e.blockNumber),
+      transactionHash: e.transactionHash,
       index: Number(e.index),
       commitment: e.commitment,
-      blockNumber: Number(e.blockNumber),
-      encryptedOutput: e.encryptedOutput,
-      transactionHash: e.transactionHash
+      encryptedOutput: e.encryptedOutput
     }))
     const [lastEvent] = data.slice(-numbers.ONE)
     return {
       events: data,
-      lastSyncBlock: (lastEvent && lastEvent.blockNumber > lastSyncBlock)
+      lastSyncBlock: (lastEvent && lastEvent.blockNumber >= lastSyncBlock)
         ? lastEvent.blockNumber + numbers.ONE
         : lastSyncBlock,
     }
@@ -109,7 +196,7 @@ async function getAllCommitments({ fromBlock, toBlock, chainId }) {
   }
 }
-async function getNullifiers({ fromBlock, chainId }) {
+export async function getNullifiers({ fromBlock, chainId }) {
   const { data } = await client.query({
     context: {
       chainId,
@@ -131,7 +218,7 @@ async function getNullifiers({ fromBlock, chainId }) {
   }
 }
-async function getAllNullifiers({ fromBlock, chainId }) {
+export async function getAllNullifiers({ fromBlock, chainId }) {
   try {
     let nullifiers = []
     let lastSyncBlock
@@ -175,7 +262,7 @@ async function getAllNullifiers({ fromBlock, chainId }) {
     return {
       events: data,
-      lastSyncBlock: (lastEvent && lastEvent.blockNumber > lastSyncBlock)
+      lastSyncBlock: (lastEvent && lastEvent.blockNumber >= lastSyncBlock)
         ? lastEvent.blockNumber + numbers.ONE
         : lastSyncBlock,
     }
@@ -188,8 +275,3 @@ async function getAllNullifiers({ fromBlock, chainId }) {
     }
   }
 }
-
-module.exports = {
-  getAllCommitments,
-  getAllNullifiers
-}

@@ -1,4 +1,23 @@
-const GET_COMMITMENT = `
+export const GET_ACCOUNTS = `
+  query getAccounts($first: Int, $fromBlock: Int) {
+    accounts(first: $first, orderBy: blockNumber, orderDirection: asc, where: {
+      blockNumber_gte: $fromBlock
+    }) {
+      id
+      key
+      owner
+      blockNumber
+    }
+    _meta {
+      block {
+        number
+      }
+      hasIndexingErrors
+    }
+  }
+`
+export const GET_COMMITMENT = `
   query getCommitment($first: Int, $fromBlock: Int) {
     commitments(first: $first, orderBy: blockNumber, orderDirection: asc, where: {
       blockNumber_gte: $fromBlock
@@ -18,7 +37,7 @@ const GET_COMMITMENT = `
   }
 `
-const GET_NULLIFIER = `
+export const GET_NULLIFIER = `
   query getNullifier($first: Int, $fromBlock: Int) {
     nullifiers(first: $first, orderBy: blockNumber, orderDirection: asc, where: {
       blockNumber_gte: $fromBlock
@@ -35,5 +54,3 @@ const GET_NULLIFIER = `
     }
   }
 `
-
-module.exports = { GET_COMMITMENT, GET_NULLIFIER }

@@ -1,12 +1,12 @@
-const { deleteDB, openDB } = require('idb')
+import { deleteDB, openDB } from 'idb'
-const VERSION_ERROR = 'less than the existing version'
-const INDEX_DB_ERROR = 'A mutation operation was attempted on a database that did not allow mutations.'
-const IDB_VERSION = 9
+export const VERSION_ERROR = 'less than the existing version'
+export const INDEX_DB_ERROR = 'A mutation operation was attempted on a database that did not allow mutations.'
+export const IDB_VERSION = 9
 // TODO method for migration, remove indexed
-class IndexedDB {
+export class IndexedDB {
   constructor({ stores, dbName }) {
     this.dbExists = false
     this.isBlocked = false
@@ -220,5 +220,3 @@ class IndexedDB {
     }
   }
 }
-
-module.exports = { IndexedDB }

@@ -1,4 +1,4 @@
-const poolAbi = [
+export const poolAbi = [
   {
     inputs: [
       {
@@ -1038,5 +1038,3 @@ const poolAbi = [
     type: "function",
   },
 ]
-
-module.exports = { poolAbi }

@@ -1,10 +1,10 @@
-const { ethers } = require('ethers')
-const { fetchJson } = require('@ethersproject/web')
-const { numbers } = require('./constants')
+import { ethers } from 'ethers'
+import { fetchJson } from 'ethers/lib/utils'
+import { numbers } from './constants'
 const defaultRetryAttempt = 0
-class ExtendedProvider extends ethers.providers.StaticJsonRpcProvider {
+export class ExtendedProvider extends ethers.providers.StaticJsonRpcProvider {
   constructor(url, network, fallbackRpcs) {
     super(url, network)
     this.fallbackRpcs = fallbackRpcs
@@ -84,5 +84,3 @@ class ExtendedProvider extends ethers.providers.StaticJsonRpcProvider {
   //   return (data?.includes(ERROR_DATA) || message?.includes(ERROR_MESSAGE)) && code === ERROR_CODE
   // }
 }
-
-module.exports = { ExtendedProvider }

@@ -1,6 +1,6 @@
-const ZERO_ELEMENT = 0
+export const ZERO_ELEMENT = 0
-function getBatches(array, batchSize) {
+export function getBatches(array, batchSize) {
   const batches = []
   while (array.length) {
     batches.push(array.splice(ZERO_ELEMENT, batchSize))
@@ -8,12 +8,6 @@ function getBatches(array, batchSize) {
   return batches
 }
-async function sleep(ms) {
+export async function sleep(ms) {
   return await new Promise((resolve) => setTimeout(resolve, ms))
 }
-
-module.exports = {
-  ZERO_ELEMENT,
-  getBatches,
-  sleep
-}

assets/services/zip.js — new file, 25 lines

@@ -0,0 +1,25 @@
import { zip, unzip } from 'fflate'
export function zipAsync(file) {
return new Promise((res, rej) => {
zip(file, { mtime: new Date('1/1/1980') }, (err, data) => {
if (err) {
rej(err);
return;
}
res(data);
});
});
}
export function unzipAsync(data) {
return new Promise((res, rej) => {
unzip(data, {}, (err, data) => {
if (err) {
rej(err);
return;
}
res(data);
});
});
}
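A minimal usage sketch of these helpers, mirroring the saveEvents/loadEvents pattern in assets/syncEvents.js below; it assumes the snippet lives next to assets/syncEvents.js, and the cache file name and event record are illustrative only, not part of this commit:

import { readFile, writeFile } from 'fs/promises'
import { zipAsync, unzipAsync } from './services/zip'

async function demo() {
  const name = 'commitments_100.json' // hypothetical cache name, used as the key inside the archive
  const events = [{ blockNumber: 19097755, index: 0 }] // placeholder event shape

  // Zip the JSON payload under its file name and write it to the static cache directory.
  const payload = await zipAsync({ [name]: new TextEncoder().encode(JSON.stringify(events, null, 2) + '\n') })
  await writeFile(`./static/${name}.zip`, payload)

  // Read it back by the same key and decode the JSON.
  const { [name]: content } = await unzipAsync(await readFile(`./static/${name}.zip`))
  console.log(JSON.parse(new TextDecoder().decode(content)))
}

demo()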

assets/syncEvents.js — new file, 285 lines

@@ -0,0 +1,285 @@
import path from 'path'
import { stat, readFile, writeFile } from 'fs/promises'
import { Contract, providers, utils } from 'ethers'
import { BatchEventsService } from './services/batch'
import { getAllAccounts, getAllCommitments, getAllNullifiers } from './services/graph'
import { POOL_CONTRACT, BRIDGE_HELPER, RPC_LIST, ChainId, CHAINS, numbers } from './services/constants'
import { zipAsync, unzipAsync } from './services/zip'
import { poolAbi } from './services/pool'
import { bridgeAbi } from './services/bridgeHelper'
const { getAddress } = utils
const { StaticJsonRpcProvider } = providers
const EVENT_PATH = './static'
async function existsAsync(fileOrDir) {
try {
await stat(fileOrDir);
return true;
} catch {
return false;
}
}
const getProvider = (chainId) => {
return new StaticJsonRpcProvider({ skipFetchSetup: true, url: RPC_LIST[chainId] }, chainId)
}
const getTornadoPool = (chainId, provider) => {
const TornadoPool = new Contract(POOL_CONTRACT[chainId], poolAbi, provider)
return {
TornadoPool,
BatchEventsService: new BatchEventsService({
provider,
contract: TornadoPool
})
}
}
const getBridgeHelper = (chainId, provider) => {
const BridgeHelper = new Contract(BRIDGE_HELPER[chainId], bridgeAbi, provider)
return {
BridgeHelper,
BridgeEventsService: new BatchEventsService({
provider,
contract: BridgeHelper
})
}
}
const loadEvents = async (fileName, deployedBlock) => {
fileName = fileName.toLowerCase()
const filePath = path.join(EVENT_PATH, fileName + '.zip')
if (!(await existsAsync(filePath))) {
return {
events: [],
lastBlock: deployedBlock
}
}
try {
const data = await readFile(filePath)
const { [fileName]: content } = await unzipAsync(data)
const events = JSON.parse(new TextDecoder().decode(content))
const lastBlock = events && Array.isArray(events) && events[events.length - 1]
? events[events.length - 1].blockNumber
: deployedBlock
return {
events,
lastBlock
}
} catch {
return {
events: [],
lastBlock: deployedBlock
}
}
}
const saveEvents = async (fileName, events) => {
fileName = fileName.toLowerCase()
const filePath = path.join(EVENT_PATH, fileName + '.zip')
const payload = await zipAsync({
[fileName]: new TextEncoder().encode(JSON.stringify(events, null, 2) + '\n')
})
await writeFile(filePath, payload)
}
const syncAccounts = async (chainId, BatchEventsService) => {
const fileName = `accounts_${chainId}.json`
console.log(`Syncing ${fileName}`)
const cachedEvents = await loadEvents(fileName, CHAINS[chainId].deployBlock)
const events = [...cachedEvents.events]
let fromBlock = cachedEvents.lastBlock + numbers.ONE
console.log({
cachedEvents: events.length,
cachedBlock: fromBlock
})
const { events: graphEvents, lastSyncBlock } = await getAllAccounts({
fromBlock,
chainId
})
console.log({
graphEvents: graphEvents.length,
graphBlock: lastSyncBlock
})
if (lastSyncBlock) {
events.push(...graphEvents)
fromBlock = lastSyncBlock
}
let nodeEvents = await BatchEventsService.getBatchEvents({
fromBlock,
type: 'PublicKey'
})
console.log({
nodeEvents: nodeEvents.length,
nodeBlock: nodeEvents && nodeEvents[nodeEvents.length - 1] ? nodeEvents[nodeEvents.length - 1].blockNumber : undefined
})
if (nodeEvents && nodeEvents.length) {
nodeEvents = nodeEvents.map(({ blockNumber, args }) => ({
key: args.key,
owner: getAddress(args.owner),
blockNumber,
}))
events.push(...nodeEvents)
}
await saveEvents(fileName, events)
}
const syncCommitments = async (chainId, BatchEventsService) => {
const fileName = `commitments_${chainId}.json`
console.log(`Syncing ${fileName}`)
const cachedEvents = await loadEvents(fileName, CHAINS[chainId].deployBlock)
const events = [...cachedEvents.events]
let fromBlock = cachedEvents.lastBlock + numbers.ONE
console.log({
cachedEvents: events.length,
cachedBlock: fromBlock
})
const { events: graphEvents, lastSyncBlock } = await getAllCommitments({
fromBlock,
chainId
})
console.log({
graphEvents: graphEvents.length,
graphBlock: lastSyncBlock
})
if (lastSyncBlock) {
events.push(...graphEvents)
fromBlock = lastSyncBlock
}
let nodeEvents = await BatchEventsService.getBatchEvents({
fromBlock,
type: 'NewCommitment'
})
console.log({
nodeEvents: nodeEvents.length,
nodeBlock: nodeEvents && nodeEvents[nodeEvents.length - 1] ? nodeEvents[nodeEvents.length - 1].blockNumber : undefined
})
if (nodeEvents && nodeEvents.length) {
nodeEvents = nodeEvents.map(({ blockNumber, transactionHash, args }) => ({
blockNumber,
transactionHash,
index: Number(args.index),
commitment: args.commitment,
encryptedOutput: args.encryptedOutput,
}))
events.push(...nodeEvents)
}
await saveEvents(fileName, events)
}
const syncNullifiers = async (chainId, BatchEventsService) => {
const fileName = `nullifiers_${chainId}.json`
console.log(`Syncing ${fileName}`)
const cachedEvents = await loadEvents(fileName, CHAINS[chainId].deployBlock)
const events = [...cachedEvents.events]
let fromBlock = cachedEvents.lastBlock + numbers.ONE
console.log({
cachedEvents: events.length,
cachedBlock: fromBlock
})
const { events: graphEvents, lastSyncBlock } = await getAllNullifiers({
fromBlock,
chainId
})
console.log({
graphEvents: graphEvents.length,
graphBlock: lastSyncBlock
})
if (lastSyncBlock) {
events.push(...graphEvents)
fromBlock = lastSyncBlock
}
let nodeEvents = await BatchEventsService.getBatchEvents({
fromBlock,
type: 'NewNullifier'
})
console.log({
nodeEvents: nodeEvents.length,
nodeBlock: nodeEvents && nodeEvents[nodeEvents.length - 1] ? nodeEvents[nodeEvents.length - 1].blockNumber : undefined
})
if (nodeEvents && nodeEvents.length) {
nodeEvents = nodeEvents.map(({ blockNumber, transactionHash, args }) => ({
blockNumber,
transactionHash,
nullifier: args.nullifier,
}))
events.push(...nodeEvents)
}
await saveEvents(fileName, events)
}
const main = async () => {
const chainId = ChainId.XDAI
const ethChainId = ChainId.MAINNET
const provider = getProvider(chainId)
const ethProvider = getProvider(ethChainId)
const { BatchEventsService } = getTornadoPool(chainId, provider)
const { BridgeEventsService } = getBridgeHelper(ethChainId, ethProvider)
console.log(`Connected with ${chainId}: (block: ${await provider.getBlockNumber()})`)
console.log(`Connected with ${ethChainId}: (block: ${await ethProvider.getBlockNumber()})`)
await syncAccounts(ethChainId, BridgeEventsService)
await syncCommitments(chainId, BatchEventsService)
await syncNullifiers(chainId, BatchEventsService)
}
main()
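After the bundled script has run (the update:events entry added to package.json below runs webpack and then node ./syncEvents.cjs), a quick sanity check along these lines could read one of the generated caches back; chainId 100 matches the ChainId.XDAI default in main above, but the file name is otherwise just an example and the snippet is a sketch, not part of the commit:

import { readFile } from 'fs/promises'
import { unzipAsync } from './services/zip'

async function checkCache() {
  const name = 'commitments_100.json' // hypothetical: produced by syncCommitments for Gnosis
  const { [name]: content } = await unzipAsync(await readFile(`./static/${name}.zip`))
  const events = JSON.parse(new TextDecoder().decode(content))
  const last = events[events.length - 1]
  // Report how many commitments are cached and the block the cache reaches.
  console.log(`${events.length} cached commitments, last block ${last ? last.blockNumber : 'n/a'}`)
}

checkCache()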

@@ -14,7 +14,8 @@
     "generate": "yarn worker:compile && nuxt generate && yarn copyFile dist/404.html dist/ipfs-404.html",
     "prepare": "husky install",
     "ipfs:upload": "node --loader ts-node/esm ipfsUpload.ts",
-    "worker:compile": "webpack"
+    "worker:compile": "webpack",
+    "update:events": "webpack && node ./syncEvents.cjs"
   },
   "dependencies": {
     "@apollo/client": "^3.4.16",
@@ -73,6 +74,7 @@
     "eslint-plugin-prettier": "^3.4.0",
     "eslint-plugin-promise": "^5.1.0",
     "eslint-plugin-vue": "^7.16.0",
+    "fflate": "^0.8.2",
     "form-data": "^4.0.0",
     "husky": "^6.0.0",
     "lint-staged": "10.2.11",

static/accounts_1.json.zip — new binary file

Binary file not shown.

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

syncEvents.cjs — new file, 64403 lines

File diff suppressed because one or more lines are too long

@@ -1,4 +1,5 @@
 import path from 'path'
+import webpack from 'webpack'
 export default [
   {
@@ -16,5 +17,37 @@ export default [
       path: path.resolve('static'),
       filename: 'nullifier.worker.js',
     }
+  },
+  {
+    mode: 'production',
+    entry: './assets/syncEvents.js',
+    output: {
+      path: path.resolve('.'),
+      filename: 'syncEvents.cjs',
+    },
+    target: 'node',
+    plugins: [
+      new webpack.BannerPlugin({
+        banner: '#!/usr/bin/env node\n',
+        raw: true
+      })
+    ],
+    module: {
+      rules: [
+        {
+          test: /\.mjs$/,
+          include: /node_modules/,
+          type: 'javascript/auto'
+        }
+      ]
+    },
+    resolve: {
+      alias: {
+        'fflate': 'fflate/esm'
+      }
+    },
+    optimization: {
+      minimize: false,
+    }
   }
 ]

@@ -6322,6 +6322,11 @@ ffjavascript@^0.2.48:
     wasmcurves "0.2.2"
     web-worker "^1.2.0"
 
+fflate@^0.8.2:
+  version "0.8.2"
+  resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.8.2.tgz#fc8631f5347812ad6028bbe4a2308b2792aa1dea"
+  integrity sha512-cPJU47OaAoCbg0pBvzsgpTPhmhqI5eJjh/JIu8tPj5q+T7iLvW/JAYUqmE7KOB4R1ZyEhzBaIQpQpardBF5z8A==
+
 ffwasm@0.0.7:
   version "0.0.7"
   resolved "https://registry.yarnpkg.com/ffwasm/-/ffwasm-0.0.7.tgz#23bb9a3537ecc87c0f24fcfb3a9ddd0e86855fff"