static cache should be prefixed by network

Signed-off-by: AlienTornadosaurusHex <>
AlienTornadosaurusHex 2023-05-12 22:19:58 +00:00
parent e49e3e0c0d
commit ad0d1391dc
120 changed files with 81 additions and 43 deletions
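The change in one sentence: every static cache file (events, encrypted notes, tree slices, bloom filters) now carries the chain id in its name, and the list of supported chains moves into networkConfig instead of being hard-coded in each script. A minimal sketch of the naming pattern, with illustrative helper names and values that are not part of the commit:

// Illustrative only: how cached-event file names change in this commit.
const oldName = (type, currency, instance) => `${type}s_${currency}_${instance}.json`
const newName = (type, netId, currency, instance) => `${type}s_${netId}_${currency}_${instance}.json`

console.log(oldName('deposit', 'eth', '1'))      // deposits_eth_1.json
console.log(newName('deposit', '1', 'eth', '1')) // deposits_1_eth_1.json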

@@ -1,4 +1,6 @@
 export default {
+  enabledChains: ['1', '10', '56', '100', '137', '42161'],
+  chainsWithEncryptedNotes: ['1', '5', '56', '100', '137'],
   netId1: {
     rpcCallRetryAttempt: 15,
     gasPrices: {
@@ -22,6 +24,10 @@ export default {
       secureRPC: {
         name: 'SecureRPC',
         url: 'https://api.securerpc.com/v1'
+      },
+      llamaRPC: {
+        name: 'llamarpc',
+        url: 'https://eth.llamarpc.com'
       }
     },
     multicall: '0xeefba1e63905ef1d7acba5a8513c70307c1ce441',
@@ -297,6 +303,10 @@ export default {
     multicall: '0x842eC2c7D803033Edf55E478F461FC547Bc54EB2',
     echoContractAccount: '0xa75BF2815618872f155b7C4B0C81bF990f5245E4',
     rpcUrls: {
+      oneRpc: {
+        name: '1rpc',
+        url: 'https://1rpc.io/arb'
+      },
       blockPi: {
         name: 'BlockPi',
         url: 'https://arbitrum.blockpi.network/v1/rpc/public'
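With enabledChains and chainsWithEncryptedNotes exported from networkConfig, the maintenance scripts below can drop their own hard-coded chain arrays. A sketch of the shared validation pattern they now use (import path and argument position mirror the updated scripts and are assumed, not prescribed):

import networkConfig from '../networkConfig'

// Validate the chain id passed on the command line against the single source of truth.
const [, , , chain] = process.argv
const enabledChains = networkConfig.enabledChains

if (!enabledChains.includes(chain)) {
  throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
}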

@@ -1,24 +1,23 @@
 import networkConfig from '../networkConfig'
-import ABI from '../abis/Instance.abi.json'
-import { loadCachedEvents, getPastEvents } from './helpers'
+import { loadCachedEvents } from './helpers'

 const EVENTS_PATH = './static/events/'
-const enabledChains = ['1', '56', '100', '137']

-async function main() {
-  for (let network in enabledChains) {
-    const netId = enabledChains[network]
+function main() {
+  const enabledChains = networkConfig.enabledChains
+
+  for (const netId of enabledChains) {
     const config = networkConfig[`netId${netId}`]
     const { constants, tokens, nativeCurrency, deployedBlock } = config
     const CONTRACTS = tokens[nativeCurrency].instanceAddress

     console.log(`\n ::: ${netId} [${nativeCurrency.toUpperCase()}] :::`)

-    for (const [instance, _contract] of Object.entries(CONTRACTS)) {
+    for (const [instance, ] of Object.entries(CONTRACTS)) {
       console.log(`\n instanceDenomation - ${instance}`)

-      const withdrawalCachedEvents = await loadCachedEvents({
-        name: `withdrawals_${nativeCurrency}_${instance}.json`,
+      const withdrawalCachedEvents = loadCachedEvents({
+        name: `withdrawals_${netId}_${nativeCurrency}_${instance}.json`,
         directory: EVENTS_PATH,
         deployedBlock
       })
@@ -27,8 +26,8 @@ async function main() {
       console.log('cachedEvents count - ', withdrawalCachedEvents.events.length)
       console.log('lastBlock - ', withdrawalCachedEvents.lastBlock)

-      const depositCachedEvents = await loadCachedEvents({
-        name: `withdrawals_${nativeCurrency}_${instance}.json`,
+      const depositCachedEvents = loadCachedEvents({
+        name: `deposits_${netId}_${nativeCurrency}_${instance}.json`,
         directory: EVENTS_PATH,
         deployedBlock
       })
@@ -37,7 +36,7 @@ async function main() {
       console.log('cachedEvents count - ', depositCachedEvents.events.length)
       console.log('lastBlock - ', depositCachedEvents.lastBlock)

-      const notesCachedEvents = await loadCachedEvents({
+      const notesCachedEvents = loadCachedEvents({
         name: `encrypted_notes_${netId}.json`,
         directory: EVENTS_PATH,
         deployedBlock: constants.ENCRYPTED_NOTES_BLOCK
@@ -46,7 +45,6 @@ async function main() {
       console.log('- Notes')
       console.log('cachedEvents count - ', notesCachedEvents.events.length)
       console.log('lastBlock - ', notesCachedEvents.lastBlock)
     }
   }
 }

@@ -4,7 +4,7 @@ import Web3 from 'web3'

 import networkConfig from '../../networkConfig'

-export async function download({ name, directory, contentType }) {
+export function download({ name, directory }) {
   const path = `${directory}${name}.gz`.toLowerCase()
   const data = fs.readFileSync(path)
@@ -13,16 +13,16 @@ export async function download({ name, directory, contentType }) {
   return content
 }

-export async function loadCachedEvents({ name, directory, deployedBlock }) {
+export function loadCachedEvents({ name, directory, deployedBlock }) {
   try {
-    const module = await download({ contentType: 'string', directory, name })
+    const module = download({ contentType: 'string', directory, name })

     if (module) {
       const events = JSON.parse(module)

       return {
         events,
         lastBlock: events[events.length - 1].blockNumber
       }
     }
   } catch (err) {
@@ -67,6 +67,7 @@ export async function getPastEvents({ type, fromBlock, netId, events, contractAt
   }

   console.log(`Fetching ${type}, chainId - ${netId}`, `chunksCount - ${chunksCount}`)

   for (let i = 0; i < chunksCount; i++)
     try {
       await new Promise((resolve) => setTimeout(resolve, 200))

@@ -1,14 +1,11 @@
 import fs from 'fs'
 import zlib from 'zlib'

-export async function save(filePath) {
-  const directories = filePath.split('/')
-  const fileName = directories[directories.length - 1]
+export function save(filePath) {
   try {
     const data = fs.readFileSync(`${filePath}`)

-    const payload = await zlib.deflateSync(data, {
+    const payload = zlib.deflateSync(data, {
       level: zlib.constants.Z_BEST_COMPRESSION,
       strategy: zlib.constants.Z_FILTERED
     })
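Since download, loadCachedEvents and save only wrap synchronous calls (fs.readFileSync, JSON.parse, zlib.deflateSync), the async wrappers are dropped and callers read the return value directly. A usage sketch under the new signatures; the file name and deployedBlock value are illustrative:

import { loadCachedEvents } from './helpers'

// Synchronously load a gzipped cache file; the name now carries the netId prefix.
const cached = loadCachedEvents({
  name: 'deposits_1_eth_1.json', // hypothetical mainnet (netId 1) instance
  directory: './static/events/',
  deployedBlock: 0 // illustrative fallback
})

if (cached) {
  console.log('events -', cached.events.length, 'lastBlock -', cached.lastBlock)
}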

@@ -5,10 +5,10 @@ import { uniqBy } from 'lodash'

 import networkConfig from '../networkConfig'
 import ABI from '../abis/TornadoProxy.abi.json'
 import { getPastEvents, loadCachedEvents } from './helpers'

 const EVENTS_PATH = './static/events/'
-const enabledChains = ['1', '5', '56', '100', '137']

 async function saveEncryptedNote(netId) {
   const {
@@ -57,11 +57,15 @@ async function saveEncryptedNote(netId) {
   freshEvents = uniqBy(freshEvents, 'encryptedNote').sort((a, b) => b.blockNumber - a.blockNumber)

   const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'
   fs.writeFileSync(`${EVENTS_PATH}${name}`, eventsJson)
 }

 async function main() {
   const [, , , chain] = process.argv
+  const enabledChains = networkConfig.enabledChains
+
   if (!enabledChains.includes(chain)) {
     throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
   }

@@ -5,11 +5,11 @@ import { uniqBy } from 'lodash'

 import networkConfig from '../networkConfig'
 import ABI from '../abis/Instance.abi.json'
 import { loadCachedEvents, getPastEvents } from './helpers'

 const EVENTS_PATH = './static/events/'
 const EVENTS = ['Deposit', 'Withdrawal']
-const enabledChains = ['1', '56', '100', '137']

 async function main(type, netId) {
   const { tokens, nativeCurrency, deployedBlock } = networkConfig[`netId${netId}`]
@@ -17,7 +17,7 @@ async function main(type, netId) {
   for (const [instance, _contract] of Object.entries(CONTRACTS)) {
     const cachedEvents = await loadCachedEvents({
-      name: `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`,
+      name: `${type.toLowerCase()}s_${netId}_${nativeCurrency}_${instance}.json`,
       directory: EVENTS_PATH,
       deployedBlock
     })
@@ -71,17 +71,24 @@ async function main(type, netId) {
     }

     const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'
-    fs.writeFileSync(`${EVENTS_PATH}${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`, eventsJson)
+    fs.writeFileSync(
+      `${EVENTS_PATH}${type.toLowerCase()}s_${netId}_${nativeCurrency}_${instance}.json`,
+      eventsJson
+    )
   }
 }

 async function start() {
   const [, , , chain] = process.argv
+  const enabledChains = networkConfig.enabledChains
+
   if (!enabledChains.includes(chain)) {
     throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
   }

-  for await (const event of EVENTS) {
+  for (const event of EVENTS) {
     await main(event, chain)
   }
 }

@@ -1,6 +1,7 @@
 import 'dotenv/config'

 import fs from 'fs'
 import BloomFilter from 'bloomfilter.js'
 import { MerkleTree } from 'fixed-merkle-tree'
 import { buildMimcSponge } from 'circomlibjs'
@@ -14,7 +15,7 @@ const TREES_PATH = './static/trees/'
 const EVENTS_PATH = './static/events/'

 const EVENTS = ['deposit']
-const enabledChains = ['1', '56', '100', '137']

 let mimcHash

 const trees = {
@@ -22,8 +23,8 @@ const trees = {
   LEVELS: 20 // const from contract
 }

-function getName({ path, type, instance, format = '.json', currName = 'eth' }) {
-  return `${path}${type.toLowerCase()}s_${currName}_${instance}${format}`
+function getName({ path, type, netId, instance, format = '.json', currName = 'eth' }) {
+  return `${path}${type.toLowerCase()}s_${netId}_${currName}_${instance}${format}`
 }

 function createTreeZip(netId) {
@@ -36,6 +37,7 @@ function createTreeZip(netId) {
       const baseFilename = getName({
         type,
         instance,
+        netId,
         format: '',
         path: TREES_PATH,
         currName: currencyName.toLowerCase()
@@ -45,6 +47,7 @@ function createTreeZip(netId) {
       treesFolder.forEach((fileName) => {
         fileName = `${TREES_PATH}${fileName}`

         const isInstanceFile = !fileName.includes('.gz') && fileName.includes(baseFilename)

         if (isInstanceFile) {
@@ -67,6 +70,7 @@ async function createTree(netId) {
       const filePath = getName({
         type,
         instance,
+        netId,
         format: '',
         path: TREES_PATH,
         currName: currencyName.toLowerCase()
@@ -75,7 +79,7 @@ async function createTree(netId) {
       console.log('createTree', { type, instance })

       const { events } = await loadCachedEvents({
-        name: `${type}s_${nativeCurrency}_${instance}.json`,
+        name: `${type}s_${netId}_${nativeCurrency}_${instance}.json`,
         directory: EVENTS_PATH,
         deployedBlock
       })
@@ -118,10 +122,12 @@ async function createTree(netId) {
       }, [])

       const sliceJson = JSON.stringify(slice, null, 2) + '\n'
       fs.writeFileSync(`${filePath}_slice${index + 1}.json`, sliceJson)
     })

     const bloomCache = bloom.serialize()
     fs.writeFileSync(`${filePath}_bloom.json`, bloomCache)
   }
 }
@@ -137,13 +143,18 @@ async function initMimc() {

 async function main() {
   const [, , , chain] = process.argv
+  const enabledChains = networkConfig.enabledChains
+
   if (!enabledChains.includes(chain)) {
     throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
   }

   await initMimc()
   await createTree(chain)
-  await createTreeZip(chain)
+  createTreeZip(chain)
 }

 main()

@@ -1,6 +1,7 @@
 import { uniqBy } from 'lodash'

 import networkConfig from '../networkConfig'
 import { loadCachedEvents, save } from './helpers'

 const EVENTS_PATH = './static/events/'
@@ -9,19 +10,23 @@ const EVENTS = ['Deposit', 'Withdrawal']

 function updateEncrypted(netId) {
   try {
     const file = `${EVENTS_PATH}encrypted_notes_${netId}.json`

     save(file)
   } catch {}
 }

 async function updateCommon(netId) {
   const { nativeCurrency, tokens } = networkConfig[`netId${netId}`]

   console.log(Object.keys(tokens[nativeCurrency].instanceAddress))

   for await (const type of EVENTS) {
     for await (const instance of Object.keys(tokens[nativeCurrency].instanceAddress)) {
       console.warn('instance', instance)
-      const filename = `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`
+      const filename = `${type.toLowerCase()}s_${netId}_${nativeCurrency}_${instance}.json`
       const isSaved = save(`${EVENTS_PATH}${filename}`)

       if (isSaved) {
         try {
           await testCommon(netId, type, filename)
@@ -45,11 +50,13 @@ async function testCommon(netId, type, filename) {
   console.log('cachedEvents', cachedEvents.events.length, type)

   let events = cachedEvents.events

   if (type === 'Withdrawal') {
     events = uniqBy(cachedEvents.events, 'nullifierHash')
   } else if (type === 'Deposit') {
     events = cachedEvents.events.filter((e, index) => Number(e.leafIndex) === index)
   }

   if (events.length !== cachedEvents.events.length) {
     console.error('events.length', events.length)
     console.error('cachedEvents.events.length', cachedEvents.events.length)
@@ -58,10 +65,14 @@ async function testCommon(netId, type, filename) {
 }

 async function main() {
-  const NETWORKS = [1, 5, 56, 100, 137]
-
-  for await (const netId of NETWORKS) {
-    updateEncrypted(netId)
+  const enabledChains = networkConfig.enabledChains
+  const chainsWithEncryptedNotes = networkConfig.chainsWithEncryptedNotes
+
+  for (let i = 0; i < enabledChains.length; i++) {
+    const netId = enabledChains[i]
+
+    if (netId === chainsWithEncryptedNotes[i]) updateEncrypted(netId)
+
     await updateCommon(netId)
   }
 }

@@ -7,8 +7,6 @@ import InstanceABI from '@/abis/Instance.abi.json'
 import { CONTRACT_INSTANCES, eventsType, httpConfig } from '@/constants'
 import { sleep, flattenNArray, formatEvents, capitalizeFirstLetter } from '@/utils'

-const supportedCaches = ['1', '56', '100', '137']
-
 let store
 if (process.browser) {
   window.onNuxtReady(({ $store }) => {
@@ -21,7 +19,7 @@ class EventService {
     this.idb = window.$nuxt.$indexedDB(netId)

     const { nativeCurrency } = networkConfig[`netId${netId}`]
-    const hasCache = supportedCaches.includes(netId.toString())
+    const hasCache = networkConfig.enabledChains.includes(netId.toString())

     this.netId = netId
     this.amount = amount
@@ -35,7 +33,7 @@ class EventService {
   }

   getInstanceName(type) {
-    return `${type}s_${this.currency}_${this.amount}`
+    return `${type}s_${this.netId}_${this.currency}_${this.amount}`
   }

   updateEventProgress(percentage, type) {

@@ -16,18 +16,19 @@ class MerkleTreeService {
     this.instanceName = instanceName

     this.idb = window.$nuxt.$indexedDB(netId)

     this.bloomService = bloomService({
       netId,
       amount,
       commitment,
       instanceName,
       fileFolder: 'trees',
-      fileName: `deposits_${currency}_${amount}_bloom.json.gz`
+      fileName: `deposits_${netId}_${currency}_${amount}_bloom.json.gz`
     })
   }

   getFileName(partNumber = trees.PARTS_COUNT) {
-    return `trees/deposits_${this.currency}_${this.amount}_slice${partNumber}.json.gz`
+    return `trees/deposits_${this.netId}_${this.currency}_${this.amount}_slice${partNumber}.json.gz`
   }

   createTree({ events }) {
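On the client, the same convention flows through EventService and MerkleTreeService, so caches for different chains can no longer collide. A hypothetical example of the names the updated code resolves for a mainnet (netId 1) ETH instance; the denomination is illustrative:

// Illustrative names built with the updated getInstanceName / getFileName patterns.
const netId = 1
const currency = 'eth'
const amount = '1' // hypothetical denomination

console.log(`deposits_${netId}_${currency}_${amount}`)                      // events cache key
console.log(`trees/deposits_${netId}_${currency}_${amount}_slice1.json.gz`) // tree slice
console.log(`deposits_${netId}_${currency}_${amount}_bloom.json.gz`)        // bloom filter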

Some files were not shown because too many files have changed in this diff.