# PR1: classic-ui: Prefix static caches with network id and code quality #25
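Every static cache identifier touched by this PR — the gzipped event and tree files under static/, the IndexedDB store names, and the in-memory service keys — now carries the network id between the event type and the currency. A minimal sketch of the naming scheme; the `cacheName` helper below is illustrative and not part of the diff:

```js
// Illustrative only: mirrors the getName / getInstanceName pattern in this PR.
// Old scheme: `${type}s_${currency}_${amount}`
// New scheme: `${type}s_${netId}_${currency}_${amount}`
function cacheName(type, netId, currency, amount) {
  return `${type.toLowerCase()}s_${netId}_${currency}_${amount}`
}

console.log(cacheName('Withdrawal', 137, 'matic', 100000))
// -> 'withdrawals_137_matic_100000'
// matches static/events/withdrawals_137_matic_100000.json.gz renamed below
```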
@@ -25,7 +25,7 @@ export async function _encryptFormatTx({ dispatch, getters, rootGetters }, { eve
if (!instance) {
return acc
}
- const name = `${instance.amount}${instance.currency}`
+ const name = `${netId}${instance.amount}${instance.currency}`
if (!acc[name]) {
const service = eventsInterface.getService({ netId, ...instance })
acc[name] = { ...instance, service }
@@ -49,7 +49,7 @@ export async function _encryptFormatTx({ dispatch, getters, rootGetters }, { eve
if (!instance) {
return
}
- const { service } = instances[`${instance.amount}${instance.currency}`]
+ const { service } = instances[`${netId}${instance.amount}${instance.currency}`]
return getDeposit({ event, netId, service, instance })
})

@@ -1,3 +1,5 @@
+ export const enabledChains = ['1', '10', '56', '100', '137', '42161']
+ export const chainsWithEncryptedNotes = ['1', '5', '56', '100', '137']
export default {
netId1: {
rpcCallRetryAttempt: 15,
@@ -26,9 +28,14 @@ export default {
mevblockerRPC: {
name: 'MevblockerRPC',
url: 'https://rpc.mevblocker.io'
},
+ llamaRPC: {
+ name: 'llamarpc',
+ url: 'https://eth.llamarpc.com'
+ }
},
multicall: '0xeefba1e63905ef1d7acba5a8513c70307c1ce441',
routerContract: '0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b',
registryContract: '0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2',
echoContractAccount: '0x9B27DD5Bb15d42DC224FCD0B7caEbBe16161Df42',
aggregatorContract: '0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49',
@@ -297,6 +304,10 @@ export default {
name: 'Tornado RPC',
url: 'https://arbitrum-one.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607'
},
+ oneRpc: {
+ name: '1rpc',
+ url: 'https://1rpc.io/arb'
+ },
Arbitrum: {
name: 'Arbitrum RPC',
url: 'https://arb1.arbitrum.io/rpc'

@@ -264,24 +264,24 @@ export default async (ctx, inject) => {
Object.keys(tokens[token].instanceAddress).forEach((amount) => {
if (nativeCurrency === token && netId === 1) {
stores.push({
- name: `stringify_bloom_${token}_${amount}`,
+ name: `stringify_bloom_${netId}_${token}_${amount}`,
keyPath: 'hashBloom'
})
}

stores.push(
{
- name: `deposits_${token}_${amount}`,
+ name: `deposits_${netId}_${token}_${amount}`,
keyPath: 'leafIndex', // the key by which it refers to the object must be in all instances of the storage
indexes: DEPOSIT_INDEXES
},
{
- name: `withdrawals_${token}_${amount}`,
+ name: `withdrawals_${netId}_${token}_${amount}`,
keyPath: 'blockNumber',
indexes: WITHDRAWAL_INDEXES
},
{
- name: `stringify_tree_${token}_${amount}`,
+ name: `stringify_tree_${netId}_${token}_${amount}`,
keyPath: 'hashTree'
}
)

@@ -1,24 +1,21 @@
- import networkConfig from '../networkConfig'
- import ABI from '../abis/Instance.abi.json'
- import { loadCachedEvents, getPastEvents } from './helpers'
+ import networkConfig, { enabledChains } from '../networkConfig'
+ import { loadCachedEvents } from './helpers'

const EVENTS_PATH = './static/events/'
- const enabledChains = ['1', '56', '100', '137' ]

- async function main() {
- for (let network in enabledChains) {
- const netId = enabledChains[network]
+ function main() {
+ for (const netId of enabledChains) {
const config = networkConfig[`netId${netId}`]
const { constants, tokens, nativeCurrency, deployedBlock } = config
const CONTRACTS = tokens[nativeCurrency].instanceAddress

console.log(`\n ::: ${netId} [${nativeCurrency.toUpperCase()}] :::`)

- for (const [instance, _contract] of Object.entries(CONTRACTS)) {
+ for (const [instance] of Object.entries(CONTRACTS)) {
console.log(`\n instanceDenomation - ${instance}`)

- const withdrawalCachedEvents = await loadCachedEvents({
- name: `withdrawals_${nativeCurrency}_${instance}.json`,
+ const withdrawalCachedEvents = loadCachedEvents({
+ name: `withdrawals_${netId}_${nativeCurrency}_${instance}.json`,
directory: EVENTS_PATH,
deployedBlock
})
@@ -27,8 +24,8 @@ async function main() {
console.log('cachedEvents count - ', withdrawalCachedEvents.events.length)
console.log('lastBlock - ', withdrawalCachedEvents.lastBlock)

- const depositCachedEvents = await loadCachedEvents({
- name: `withdrawals_${nativeCurrency}_${instance}.json`,
+ const depositCachedEvents = loadCachedEvents({
+ name: `deposits_${netId}_${nativeCurrency}_${instance}.json`,
directory: EVENTS_PATH,
deployedBlock
})
@@ -37,7 +34,7 @@ async function main() {
console.log('cachedEvents count - ', depositCachedEvents.events.length)
console.log('lastBlock - ', depositCachedEvents.lastBlock)

- const notesCachedEvents = await loadCachedEvents({
+ const notesCachedEvents = loadCachedEvents({
name: `encrypted_notes_${netId}.json`,
directory: EVENTS_PATH,
deployedBlock: constants.ENCRYPTED_NOTES_BLOCK
@@ -46,7 +43,6 @@ async function main() {
console.log('- Notes')
console.log('cachedEvents count - ', notesCachedEvents.events.length)
console.log('lastBlock - ', notesCachedEvents.lastBlock)

}
}
}

@@ -4,18 +4,18 @@ import Web3 from 'web3'

import networkConfig from '../../networkConfig'

- export async function download({ name, directory, contentType }) {
+ export function download({ name, directory }) {
const path = `${directory}${name}.gz`.toLowerCase()

- const data = fs.readFileSync(path)
+ const data = fs.readFileSync(path, { flag: 'as+' })
const content = zlib.inflateSync(data)

return content
}

- export async function loadCachedEvents({ name, directory, deployedBlock }) {
+ export function loadCachedEvents({ name, directory, deployedBlock }) {
try {
- const module = await download({ contentType: 'string', directory, name })
+ const module = download({ contentType: 'string', directory, name })

if (module) {
const events = JSON.parse(module)
@@ -67,6 +67,7 @@ export async function getPastEvents({ type, fromBlock, netId, events, contractAt
}

console.log(`Fetching ${type}, chainId - ${netId}`, `chunksCount - ${chunksCount}`)

for (let i = 0; i < chunksCount; i++)
try {
await new Promise((resolve) => setTimeout(resolve, 200))

@@ -1,14 +1,11 @@
import fs from 'fs'
import zlib from 'zlib'

- export async function save(filePath) {
- const directories = filePath.split('/')
- const fileName = directories[directories.length - 1]
-
+ export function save(filePath) {
try {
const data = fs.readFileSync(`${filePath}`)

- const payload = await zlib.deflateSync(data, {
+ const payload = zlib.deflateSync(data, {
level: zlib.constants.Z_BEST_COMPRESSION,
strategy: zlib.constants.Z_FILTERED
})

@@ -3,12 +3,12 @@ import 'dotenv/config'
import fs from 'fs'
import { uniqBy } from 'lodash'

- import networkConfig from '../networkConfig'
+ import networkConfig, { enabledChains } from '../networkConfig'
import ABI from '../abis/TornadoProxy.abi.json'

import { getPastEvents, loadCachedEvents } from './helpers'

const EVENTS_PATH = './static/events/'
- const enabledChains = ['1', '5', '56', '100', '137']

async function saveEncryptedNote(netId) {
const {
@@ -23,7 +23,7 @@ async function saveEncryptedNote(netId) {
let encryptedEvents = []
const name = `encrypted_notes_${netId}.json`

- const cachedEvents = await loadCachedEvents({
+ const cachedEvents = loadCachedEvents({
name,
directory: EVENTS_PATH,
deployedBlock: constants.ENCRYPTED_NOTES_BLOCK
@@ -57,11 +57,13 @@ async function saveEncryptedNote(netId) {
freshEvents = uniqBy(freshEvents, 'encryptedNote').sort((a, b) => b.blockNumber - a.blockNumber)

const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'

fs.writeFileSync(`${EVENTS_PATH}${name}`, eventsJson)
}

async function main() {
const [, , , chain] = process.argv

if (!enabledChains.includes(chain)) {
throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
}

@@ -3,41 +3,25 @@ import 'dotenv/config'
import fs from 'fs'
import { uniqBy } from 'lodash'

- import networkConfig from '../networkConfig'
+ import networkConfig, { enabledChains } from '../networkConfig'
import ABI from '../abis/Instance.abi.json'

import { loadCachedEvents, getPastEvents } from './helpers'

const EVENTS_PATH = './static/events/'
- const EVENTS = ['Deposit', 'Withdrawal']
- const enabledChains = ['1', '56', '100', '137']

- async function main(type, netId) {
- const { tokens, nativeCurrency, deployedBlock } = networkConfig[`netId${netId}`]
- const CONTRACTS = tokens[nativeCurrency].instanceAddress
+ function parseArg(netId, tokenOrEvent) {
+ const { tokens } = networkConfig[`netId${netId}`]
+ const keys = Object.keys(tokens)
+ if (tokenOrEvent !== undefined) {
+ const lower = tokenOrEvent.toLowerCase()
+ return keys.includes(lower)
+ ? { token: lower }
+ : { event: lower[0].toUpperCase() + lower.slice(1).toLowerCase() }
+ } else return undefined
+ }

- for (const [instance, _contract] of Object.entries(CONTRACTS)) {
- const cachedEvents = await loadCachedEvents({
- name: `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`,
- directory: EVENTS_PATH,
- deployedBlock
- })
-
- console.log('Update events for', instance, nativeCurrency.toUpperCase(), `${type.toLowerCase()}s`)
- console.log('cachedEvents count - ', cachedEvents.events.length)
- console.log('lastBlock - ', cachedEvents.lastBlock)
-
- let events = []
-
- events = await getPastEvents({
- type,
- netId,
- events,
- contractAttrs: [ABI, _contract],
- fromBlock: cachedEvents.lastBlock + 1
- })
-
- if (type === 'Deposit') {
- events = events.map(({ blockNumber, transactionHash, returnValues }) => {
+ function parseDepositEvent({ blockNumber, transactionHash, returnValues }) {
const { commitment, leafIndex, timestamp } = returnValues
return {
timestamp,
@@ -46,11 +30,9 @@ async function main(type, netId) {
transactionHash,
leafIndex: Number(leafIndex)
}
- })
}

- if (type === 'Withdrawal') {
- events = events.map(({ blockNumber, transactionHash, returnValues }) => {
+ function parseWithdrawalEvent({ blockNumber, transactionHash, returnValues }) {
const { nullifierHash, to, fee } = returnValues
return {
to,
@@ -59,31 +41,82 @@ async function main(type, netId) {
nullifierHash,
transactionHash
}
}

+ function filterWithdrawalEvents(events) {
+ return uniqBy(events, 'nullifierHash').sort((a, b) => a.blockNumber - b.blockNumber)
+ }

+ function filterDepositEvents(events) {
+ return events.filter((e, index) => Number(e.leafIndex) === index)
+ }

+ async function main(netId, chosenToken, chosenEvent) {
+ const { tokens, deployedBlock } = networkConfig[`netId${netId}`]

+ const tokenSymbols = chosenToken !== undefined ? [chosenToken] : Object.keys(tokens)
+ const eventNames = chosenEvent !== undefined ? [chosenEvent] : ['Deposit', 'Withdrawal']

+ for (const eventName of eventNames) {
+ // Get the parser that we need
+ const parser = eventName === 'Deposit' ? parseDepositEvent : parseWithdrawalEvent
+ // Get the filter that we need
+ const filter = eventName === 'Deposit' ? filterDepositEvents : filterWithdrawalEvents

+ for (const tokenSymbol of tokenSymbols) {
+ // Now load the denominations and address
+ const instanceData = Object.entries(tokens[tokenSymbol].instanceAddress)

+ // And now sync
+ for (const data of instanceData) {
+ const denom = data[0]
+ const address = data[1]

+ // Now load cached events
+ const cachedEvents = loadCachedEvents({
+ name: `${eventName.toLowerCase()}s_${netId}_${tokenSymbol}_${denom}.json`,
+ directory: EVENTS_PATH,
+ deployedBlock
+ })

+ console.log('Update events for', denom, tokenSymbol.toUpperCase(), `${eventName.toLowerCase()}s`)
+ console.log('cachedEvents count - ', cachedEvents.events.length)
+ console.log('lastBlock - ', cachedEvents.lastBlock)

+ let events = await getPastEvents({
+ type: eventName,
+ fromBlock: cachedEvents.lastBlock + 1,
+ netId: netId,
+ events: [],
+ contractAttrs: [ABI, address]
+ })

+ events = filter(cachedEvents.events.concat(events.map(parser)))

+ fs.writeFileSync(
+ `${EVENTS_PATH}${eventName.toLowerCase()}s_${netId}_${tokenSymbol}_${denom}.json`,
+ JSON.stringify(events, null, 2) + '\n'
+ )
+ }

- let freshEvents = cachedEvents.events.concat(events)

- if (type === 'Withdrawal') {
- freshEvents = uniqBy(freshEvents, 'nullifierHash').sort((a, b) => a.blockNumber - b.blockNumber)
- } else {
- freshEvents = freshEvents.filter((e, index) => Number(e.leafIndex) === index)
- }

- const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'
- fs.writeFileSync(`${EVENTS_PATH}${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`, eventsJson)
}
}

+ /**
+  * @param netId ID of the network for which event(s) should be synced.
+  * @param tokenOrEvent Optional token or event.
+  * @param eventOrToken Optional token or event. Overwrites the former option.
+  */
async function start() {
- const [, , , chain] = process.argv
- if (!enabledChains.includes(chain)) {
+ const [, , , netId, tokenOrEvent, eventOrToken] = process.argv

+ const args = { ...parseArg(netId, tokenOrEvent), ...parseArg(netId, eventOrToken) }

+ if (!enabledChains.includes(netId)) {
throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
}

- for await (const event of EVENTS) {
- await main(event, chain)
- }
+ await main(netId, args.token, args.event)
}

start()

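The reworked updateEvents script now takes the chain id plus an optional token and/or event name in either order; parseArg decides which is which by checking the configured token symbols. A minimal, self-contained sketch of that normalization — the sample tokens object and simplified helper below are illustrative, not taken from this diff:

```js
// Illustrative sketch of the argument normalization added above.
// `tokens` stands in for networkConfig[`netId${netId}`].tokens.
const tokens = { eth: {}, dai: {} }

function parseArg(tokenOrEvent) {
  if (tokenOrEvent === undefined) return undefined
  const lower = tokenOrEvent.toLowerCase()
  return Object.keys(tokens).includes(lower)
    ? { token: lower }
    : { event: lower[0].toUpperCase() + lower.slice(1).toLowerCase() }
}

// Either argument order works, e.g. "… 1 eth deposit" or "… 1 deposit eth":
console.log({ ...parseArg('eth'), ...parseArg('deposit') }) // { token: 'eth', event: 'Deposit' }
console.log({ ...parseArg('withdrawal') })                  // { event: 'Withdrawal' }
```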
@@ -1,11 +1,12 @@
import 'dotenv/config'

import fs from 'fs'

import BloomFilter from 'bloomfilter.js'
import { MerkleTree } from 'fixed-merkle-tree'
import { buildMimcSponge } from 'circomlibjs'

- import networkConfig from '../networkConfig'
+ import networkConfig, { enabledChains } from '../networkConfig'

import { loadCachedEvents, save } from './helpers'

@@ -14,7 +15,7 @@ const TREES_PATH = './static/trees/'
const EVENTS_PATH = './static/events/'

const EVENTS = ['deposit']
- const enabledChains = ['1', '56', '100', '137' ]

let mimcHash

const trees = {
@@ -22,8 +23,8 @@ const trees = {
LEVELS: 20 // const from contract
}

- function getName({ path, type, instance, format = '.json', currName = 'eth' }) {
- return `${path}${type.toLowerCase()}s_${currName}_${instance}${format}`
+ function getName({ path, type, netId, instance, format = '.json', currName = 'eth' }) {
+ return `${path}${type.toLowerCase()}s_${netId}_${currName}_${instance}${format}`
}

function createTreeZip(netId) {
@@ -36,6 +37,7 @@ function createTreeZip(netId) {
const baseFilename = getName({
type,
instance,
+ netId,
format: '',
path: TREES_PATH,
currName: currencyName.toLowerCase()
@@ -45,6 +47,7 @@ function createTreeZip(netId) {

treesFolder.forEach((fileName) => {
fileName = `${TREES_PATH}${fileName}`

const isInstanceFile = !fileName.includes('.gz') && fileName.includes(baseFilename)

if (isInstanceFile) {
@@ -67,6 +70,7 @@ async function createTree(netId) {
const filePath = getName({
type,
instance,
+ netId,
format: '',
path: TREES_PATH,
currName: currencyName.toLowerCase()
@@ -74,8 +78,8 @@ async function createTree(netId) {

console.log('createTree', { type, instance })

- const { events } = await loadCachedEvents({
- name: `${type}s_${nativeCurrency}_${instance}.json`,
+ const { events } = loadCachedEvents({
+ name: `${type}s_${netId}_${nativeCurrency}_${instance}.json`,
directory: EVENTS_PATH,
deployedBlock
})
@@ -118,10 +122,12 @@ async function createTree(netId) {
}, [])

const sliceJson = JSON.stringify(slice, null, 2) + '\n'

fs.writeFileSync(`${filePath}_slice${index + 1}.json`, sliceJson)
})

const bloomCache = bloom.serialize()

fs.writeFileSync(`${filePath}_bloom.json`, bloomCache)
}
}
@@ -137,13 +143,16 @@ async function initMimc() {

async function main() {
const [, , , chain] = process.argv

if (!enabledChains.includes(chain)) {
throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
}

await initMimc()

await createTree(chain)
- await createTreeZip(chain)

+ createTreeZip(chain)
}

main()

@@ -1,6 +1,7 @@
import { uniqBy } from 'lodash'

- import networkConfig from '../networkConfig'
+ import networkConfig, { enabledChains, chainsWithEncryptedNotes } from '../networkConfig'

import { loadCachedEvents, save } from './helpers'

const EVENTS_PATH = './static/events/'
@@ -9,22 +10,26 @@ const EVENTS = ['Deposit', 'Withdrawal']
function updateEncrypted(netId) {
try {
const file = `${EVENTS_PATH}encrypted_notes_${netId}.json`

save(file)
} catch {}
}

async function updateCommon(netId) {
const { nativeCurrency, tokens } = networkConfig[`netId${netId}`]

console.log(Object.keys(tokens[nativeCurrency].instanceAddress))

for await (const type of EVENTS) {
for await (const instance of Object.keys(tokens[nativeCurrency].instanceAddress)) {
console.warn('instance', instance)
- const filename = `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`
+ const filename = `${type.toLowerCase()}s_${netId}_${nativeCurrency}_${instance}.json`

const isSaved = save(`${EVENTS_PATH}${filename}`)

if (isSaved) {
try {
- await testCommon(netId, type, filename)
+ testCommon(netId, type, filename)
} catch (err) {
console.error(err.message)
}
@@ -33,10 +38,10 @@ async function updateCommon(netId) {
}
}

- async function testCommon(netId, type, filename) {
+ function testCommon(netId, type, filename) {
const { deployedBlock } = networkConfig[`netId${netId}`]

- const cachedEvents = await loadCachedEvents({
+ const cachedEvents = loadCachedEvents({
name: filename,
directory: EVENTS_PATH,
deployedBlock
@@ -45,11 +50,13 @@ async function testCommon(netId, type, filename) {
console.log('cachedEvents', cachedEvents.events.length, type)

let events = cachedEvents.events

if (type === 'Withdrawal') {
events = uniqBy(cachedEvents.events, 'nullifierHash')
} else if (type === 'Deposit') {
events = cachedEvents.events.filter((e, index) => Number(e.leafIndex) === index)
}

if (events.length !== cachedEvents.events.length) {
console.error('events.length', events.length)
console.error('cachedEvents.events.length', cachedEvents.events.length)
@@ -58,10 +65,11 @@ async function testCommon(netId, type, filename) {
}

async function main() {
- const NETWORKS = [1, 5, 56, 100, 137 ]
+ for (let i = 0; i < enabledChains.length; i++) {
+ const netId = enabledChains[i]

+ if (netId === chainsWithEncryptedNotes[i]) updateEncrypted(netId)

- for await (const netId of NETWORKS) {
- updateEncrypted(netId)
await updateCommon(netId)
}
}

@@ -2,13 +2,11 @@ import Web3 from 'web3'

import graph from '@/services/graph'
import { download } from '@/store/snark'
- import networkConfig from '@/networkConfig'
+ import networkConfig, { enabledChains } from '@/networkConfig'
import InstanceABI from '@/abis/Instance.abi.json'
import { CONTRACT_INSTANCES, eventsType, httpConfig } from '@/constants'
import { sleep, flattenNArray, formatEvents, capitalizeFirstLetter } from '@/utils'

- const supportedCaches = ['1', '56', '100', '137']

let store
if (process.browser) {
window.onNuxtReady(({ $store }) => {
@@ -21,7 +19,7 @@ class EventService {
this.idb = window.$nuxt.$indexedDB(netId)

const { nativeCurrency } = networkConfig[`netId${netId}`]
- const hasCache = supportedCaches.includes(netId.toString())
+ const hasCache = enabledChains.includes(netId.toString())

this.netId = netId
this.amount = amount
@@ -35,7 +33,7 @@ class EventService {
}

getInstanceName(type) {
- return `${type}s_${this.currency}_${this.amount}`
+ return `${type}s_${this.netId}_${this.currency}_${this.amount}`
}

updateEventProgress(percentage, type) {
@@ -466,7 +464,7 @@ class EventsFactory {
}

getService = (payload) => {
- const instanceName = `${payload.currency}_${payload.amount}`
+ const instanceName = `${payload.netId}_${payload.currency}_${payload.amount}`

if (this.instances.has(instanceName)) {
return this.instances.get(instanceName)

@@ -16,18 +16,19 @@ class MerkleTreeService {
this.instanceName = instanceName

this.idb = window.$nuxt.$indexedDB(netId)

this.bloomService = bloomService({
netId,
amount,
commitment,
instanceName,
fileFolder: 'trees',
- fileName: `deposits_${currency}_${amount}_bloom.json.gz`
+ fileName: `deposits_${netId}_${currency}_${amount}_bloom.json.gz`
})
}

getFileName(partNumber = trees.PARTS_COUNT) {
- return `trees/deposits_${this.currency}_${this.amount}_slice${partNumber}.json.gz`
+ return `trees/deposits_${this.netId}_${this.currency}_${this.amount}_slice${partNumber}.json.gz`
}

createTree({ events }) {
@@ -185,7 +186,7 @@ class TreesFactory {
instances = new Map()

getService = (payload) => {
- const instanceName = `${payload.currency}_${payload.amount}`
+ const instanceName = `${payload.netId}_${payload.currency}_${payload.amount}`
if (this.instances.has(instanceName)) {
return this.instances.get(instanceName)
}

Binary cache files renamed to include the network id (contents not shown):
static/events/withdrawals_matic_100000.json.gz → static/events/withdrawals_137_matic_100000.json.gz
static/trees/deposits_xdai_10000_bloom.json.gz → static/trees/deposits_100_xdai_10000_bloom.json.gz
static/trees/deposits_xdai_1000_slice1.json.gz → static/trees/deposits_100_xdai_1000_slice1.json.gz
static/trees/deposits_xdai_1000_slice2.json.gz → static/trees/deposits_100_xdai_1000_slice2.json.gz
static/trees/deposits_xdai_1000_slice3.json.gz → static/trees/deposits_100_xdai_1000_slice3.json.gz
static/trees/deposits_xdai_1000_slice4.json.gz → static/trees/deposits_100_xdai_1000_slice4.json.gz
static/trees/deposits_matic_1000_bloom.json.gz → static/trees/deposits_137_matic_1000_bloom.json.gz
static/trees/deposits_matic_100_slice1.json.gz → static/trees/deposits_137_matic_100_slice1.json.gz
static/trees/deposits_matic_100_slice2.json.gz → static/trees/deposits_137_matic_100_slice2.json.gz
static/trees/deposits_matic_100_slice3.json.gz → static/trees/deposits_137_matic_100_slice3.json.gz
static/trees/deposits_matic_100_slice4.json.gz → static/trees/deposits_137_matic_100_slice4.json.gz

Some files were not shown because too many files have changed in this diff.