🎨 improved scripts for updating events and trees

This commit is contained in:
Pasha8914 2022-06-08 20:45:37 +10:00 committed by Danil Kovtonyuk
parent 8fbbe4c67b
commit f0e38035e7
55 changed files with 437 additions and 477 deletions

View File

@ -28,3 +28,31 @@ For detailed explanation on how things work, checkout [Nuxt.js docs](https://nux
## Audit ## Audit
[TornadoCash_Сlassic_dApp_audit_Decurity.pdf](https://tornado.cash/audits/TornadoCash_Сlassic_dApp_audit_Decurity.pdf) [TornadoCash_Сlassic_dApp_audit_Decurity.pdf](https://tornado.cash/audits/TornadoCash_Сlassic_dApp_audit_Decurity.pdf)
## Update cached files
- To update deposit and withdrawal events, use `yarn update:events {chainId}`
- To update encrypted notes, use `yarn update:encrypted {chainId}`
- To update the merkle tree, use `yarn update:tree {chainId}`
#### NOTE!
After updating cached files, do not forget to run `yarn update:zip`
### Example for Ethereum Mainnet:
```
yarn update:events 1
yarn update:encrypted 1
yarn update:tree 1
yarn update:zip
```
### Example for Binance Smart Chain:
```
yarn update:events 56
yarn update:zip
```

View File

@ -50,10 +50,10 @@ export const addressType = { type: 'string', pattern: '^0x[a-fA-F0-9]{40}$' }
// TODO auto update in yarn updateCache // TODO auto update in yarn updateCache
export const cachedEventsLength = { export const cachedEventsLength = {
mainnet: { mainnet: {
ENCRYPTED_NOTES: 14550 ENCRYPTED_NOTES: 16890
}, },
goerli: { goerli: {
ENCRYPTED_NOTES: 1350 ENCRYPTED_NOTES: 1366
} }
} }

View File

@ -10,13 +10,13 @@
"dev": "NODE_OPTIONS='--max-old-space-size=8192' nuxt", "dev": "NODE_OPTIONS='--max-old-space-size=8192' nuxt",
"build": "nuxt build", "build": "nuxt build",
"start": "nuxt start", "start": "nuxt start",
"update:encrypted": "node --experimental-json-modules updateEncryptedEvents.js", "update:zip": "node -r esm scripts/updateZip.js",
"update:common": "node --experimental-json-modules updateEvents.js", "update:events": "node -r esm scripts/updateEvents.js --network",
"update:zip": "node updateZip.js", "update:encrypted": "node -r esm scripts/updateEncryptedEvents.js --network",
"updateCache": "yarn update:encrypted && yarn update:common && yarn update:zip", "update:tree": "node -r esm scripts/updateTree.js --network",
"generate": "NODE_OPTIONS='--max-old-space-size=8192' nuxt generate && cp dist/404.html dist/ipfs-404.html", "generate": "NODE_OPTIONS='--max-old-space-size=8192' nuxt generate && cp dist/404.html dist/ipfs-404.html",
"ipfsUpload": "node ipfsUpload.js", "ipfsUpload": "node scripts/ipfsUpload.js",
"deploy-ipfs": "yarn generate && yarn ipfsUpload" "deploy:ipfs": "yarn generate && yarn ipfsUpload"
}, },
"dependencies": { "dependencies": {
"@apollo/client": "^3.3.20", "@apollo/client": "^3.3.20",
@ -43,6 +43,7 @@
"idb": "^6.0.0", "idb": "^6.0.0",
"jspdf": "^1.5.3", "jspdf": "^1.5.3",
"jszip": "^3.5.0", "jszip": "^3.5.0",
"lodash": "^4.17.21",
"node-fetch": "^2.6.1", "node-fetch": "^2.6.1",
"numbro": "^2.3.2", "numbro": "^2.3.2",
"nuxt": "2.14.7", "nuxt": "2.14.7",
@ -79,6 +80,7 @@
"eslint-plugin-promise": ">=4.2.1", "eslint-plugin-promise": ">=4.2.1",
"eslint-plugin-standard": ">=4.0.1", "eslint-plugin-standard": ">=4.0.1",
"eslint-plugin-vue": "^6.0.1", "eslint-plugin-vue": "^6.0.1",
"esm": "^3.2.25",
"jest": "^24.9.0", "jest": "^24.9.0",
"node-stream-zip": "^1.15.0", "node-stream-zip": "^1.15.0",
"nodemon": "^2.0.0", "nodemon": "^2.0.0",

View File

@ -0,0 +1,97 @@
import fs from 'fs'
import Jszip from 'jszip'
import Web3 from 'web3'
const jszip = new Jszip()
/**
 * Reads a zipped cache archive from disk and extracts the single file inside.
 *
 * @param {Object} opts
 * @param {string} opts.name - base file name (without the `.zip` suffix)
 * @param {string} opts.directory - directory holding the archive
 * @param {string} opts.contentType - jszip output type, e.g. 'string'
 * @returns {Promise<*>} the extracted file content
 */
export async function download({ name, directory, contentType }) {
  const archivePath = `${directory}${name}.zip`.toLowerCase()
  const rawData = fs.readFileSync(archivePath)
  const zip = await jszip.loadAsync(rawData)

  // The entry inside the archive is the path without the directory
  // prefix and without the trailing '.zip'.
  const entryName = archivePath
    .replace(directory, '')
    .slice(0, -4)
    .toLowerCase()

  const entry = zip.file(entryName)
  return await entry.async(contentType)
}
/**
 * Loads cached events from the zipped JSON archive.
 *
 * Returns `{ events, lastBlock }` where `lastBlock` is the highest block
 * seen in the cache (events may store it as `block` or `blockNumber`).
 * Falls back to `{ events: [], lastBlock: deployedBlock }` when the cache
 * is missing, unreadable, or empty, so callers can sync from deployment.
 *
 * Fixes over the previous version: the JSON payload is parsed once (it was
 * parsed twice), the newest block is found with a single O(n) pass instead
 * of sorting the whole array, and an empty/absent cache returns the
 * fallback object instead of `undefined` or crashing.
 */
export async function loadCachedEvents({ name, directory, deployedBlock }) {
  const fallback = { events: [], lastBlock: deployedBlock }
  try {
    const content = await download({ contentType: 'string', directory, name })
    if (!content) {
      return fallback
    }

    const events = JSON.parse(content)
    if (!Array.isArray(events) || events.length === 0) {
      return fallback
    }

    // Highest cached block is the resume point for incremental syncing.
    const lastBlock = events.reduce((max, e) => Math.max(max, e.block || e.blockNumber), 0)

    return { events, lastBlock }
  } catch (err) {
    console.error(`Method loadCachedEvents has error: ${err.message}`)
    return fallback
  }
}
// Fetches contract events of `type` from `fromBlock` up to the current chain
// head, splitting the range into chunks to respect RPC provider limits.
// `events` is the starting accumulator (already-known events); the combined
// array is returned.
export async function getPastEvents({ type, fromBlock, netId, events, rpcUrl, contractAttrs }) {
  let downloadedEvents = events
  const provider = new Web3.providers.HttpProvider(rpcUrl)
  const web3 = new Web3(provider)
  const contract = new web3.eth.Contract(...contractAttrs)
  const currentBlockNumber = await web3.eth.getBlockNumber()
  const blockDifference = Math.ceil(currentBlockNumber - fromBlock)
  // BSC (chainId 56) RPCs cap log queries to ~5000 blocks per request;
  // other networks just split the remaining range into ~20 chunks.
  const blockRange = Number(netId) === 56 ? 4950 : blockDifference / 20
  let chunksCount = blockDifference === 0 ? 1 : Math.ceil(blockDifference / blockRange)
  const chunkSize = Math.ceil(blockDifference / chunksCount)
  let toBlock = fromBlock + chunkSize
  if (fromBlock < currentBlockNumber) {
    if (toBlock >= currentBlockNumber) {
      // Remaining range fits in one request; clamp to the chain head.
      toBlock = currentBlockNumber
      chunksCount = 1
    }
    console.log(`Fetching ${type}, chainId - ${netId}`, `chunksCount - ${chunksCount}`)
    for (let i = 0; i < chunksCount; i++)
      try {
        // Short pause between requests to avoid RPC rate limiting.
        await new Promise((resolve) => setTimeout(resolve, 200))
        console.log(`fromBlock - ${fromBlock}`)
        console.log(`toBlock - ${toBlock}`)
        const eventsChunk = await contract.getPastEvents(type, { fromBlock, toBlock })
        if (eventsChunk) {
          downloadedEvents = downloadedEvents.concat(eventsChunk)
          console.log('downloaded events count - ', eventsChunk.length)
          console.log('____________________________________________')
        }
        fromBlock = toBlock
        toBlock += chunkSize
      } catch (err) {
        console.log('getPastEvents events', `chunk number - ${i}, has error: ${err.message}`)
        // On failure the window is not advanced; adding an extra iteration
        // retries the same [fromBlock, toBlock] range.
        chunksCount = chunksCount + 1
      }
  }
  return downloadedEvents
}

2
scripts/helpers/index.js Normal file
View File

@ -0,0 +1,2 @@
// Barrel file: re-exports the cache helpers so scripts can import from './helpers'.
export { download, loadCachedEvents, getPastEvents } from './download'
export { save } from './save'

17
scripts/helpers/save.js Normal file
View File

@ -0,0 +1,17 @@
import fs from 'fs'
import zipper from 'zip-local'
/**
 * Compresses `fileName` into `${fileName}.zip` and deletes the original.
 *
 * @param {string} fileName - path of the file to archive
 * @returns {boolean} true on success, false when zipping or deletion failed
 */
export function save(fileName) {
  try {
    zipper.sync
      .zip(fileName)
      .compress()
      .save(`${fileName}.zip`)

    // Only the compressed archive is kept; remove the raw file.
    fs.unlinkSync(fileName)
    return true
  } catch (err) {
    // Fix: report failures on stderr (was console.log).
    console.error('on save error', fileName, err.message)
    return false
  }
}

View File

@ -0,0 +1,79 @@
import 'dotenv/config'
import fs from 'fs'
import { uniqBy } from 'lodash'
import networkConfig from '../networkConfig'
import ABI from '../abis/TornadoProxy.abi.json'
import { getPastEvents, loadCachedEvents } from './helpers'
const EVENTS_PATH = './static/events/'
const enabledChains = ['1', '5', '56']
// Incrementally updates the cached EncryptedNote events for one chain:
// loads the existing cache, fetches only events after the last cached block
// from the proxy/router contract, deduplicates by note, and rewrites the
// JSON cache file under EVENTS_PATH.
async function saveEncryptedNote(netId) {
  const {
    constants,
    'tornado-proxy.contract.tornadocash.eth': tornadoProxy,
    'tornado-router.contract.tornadocash.eth': tornadoRouter,
    'tornado-proxy-light.contract.tornadocash.eth': lightProxy
  } = networkConfig[`netId${netId}`]
  // Default to the first configured RPC endpoint for this network.
  let [{ url: rpcUrl }] = Object.values(networkConfig[`netId${netId}`].rpcUrls)
  if (netId === '5') {
    // Goerli uses a dedicated Infura endpoint supplied via the environment.
    rpcUrl = `https://goerli.infura.io/v3/${process.env.INFURA_KEY}`
  }
  // Prefer the newest deployment: router, then proxy, then the light proxy.
  const contractAddress = tornadoRouter || tornadoProxy || lightProxy
  let encryptedEvents = []
  const name = `encrypted_notes_${netId}.json`
  const cachedEvents = await loadCachedEvents({
    name,
    directory: EVENTS_PATH,
    // Sync from the deployment block when there is no cache yet.
    deployedBlock: constants.ENCRYPTED_NOTES_BLOCK
  })
  console.log('cachedEvents', cachedEvents.events.length)
  encryptedEvents = await getPastEvents({
    netId,
    rpcUrl,
    type: 'EncryptedNote',
    events: encryptedEvents,
    fromBlock: cachedEvents.lastBlock + 1,
    contractAttrs: [ABI, contractAddress]
  })
  console.log('Encrypted note', netId, encryptedEvents.length)
  // Keep only events that actually carry a note, reduced to the compact
  // shape stored in the cache file.
  encryptedEvents = encryptedEvents.reduce((acc, curr) => {
    if (curr.returnValues.encryptedNote) {
      acc.push({
        txHash: curr.transactionHash,
        blockNumber: Number(curr.blockNumber),
        encryptedNote: curr.returnValues.encryptedNote
      })
    }
    return acc
  }, [])
  // Merge with the cache, drop duplicate notes, order newest block first.
  let freshEvents = cachedEvents.events.concat(encryptedEvents)
  freshEvents = uniqBy(freshEvents, 'encryptedNote').sort((a, b) => b.blockNumber - a.blockNumber)
  const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'
  fs.writeFileSync(`${EVENTS_PATH}${name}`, eventsJson)
}
/**
 * Entry point: validates the chain id passed on the CLI
 * (`yarn update:encrypted {chainId}`) and updates that chain's cache.
 * @throws {Error} when the chain id is not in `enabledChains`.
 */
async function main() {
  const [, , , chain] = process.argv
  if (!enabledChains.includes(chain)) {
    throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
  }
  await saveEncryptedNote(chain)
}

// Fix: without the catch, a rejection (e.g. invalid chain id) only produced
// an unhandled-rejection warning and exit code 0; fail the process instead.
main().catch((err) => {
  console.error(err)
  process.exit(1)
})

95
scripts/updateEvents.js Normal file
View File

@ -0,0 +1,95 @@
import 'dotenv/config'
import fs from 'fs'
import { uniqBy } from 'lodash'
import networkConfig from '../networkConfig'
import ABI from '../abis/Instance.abi.json'
import { loadCachedEvents, getPastEvents } from './helpers'
const EVENTS_PATH = './static/events/'
const EVENTS = ['Deposit', 'Withdrawal']
const enabledChains = ['1', '56']
// Incrementally updates the cached events of one `type` ('Deposit' or
// 'Withdrawal') for every native-currency instance on chain `netId`,
// then rewrites the per-instance JSON cache files.
async function main(type, netId) {
  const { tokens, nativeCurrency, deployedBlock } = networkConfig[`netId${netId}`]
  const CONTRACTS = tokens[nativeCurrency].instanceAddress
  for (const [instance, _contract] of Object.entries(CONTRACTS)) {
    const cachedEvents = await loadCachedEvents({
      name: `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`,
      directory: EVENTS_PATH,
      deployedBlock
    })
    console.log('cachedEvents count - ', cachedEvents.events.length)
    console.log('lastBlock - ', cachedEvents.lastBlock)
    // First configured RPC endpoint for this network.
    let [{ url: rpcUrl }] = Object.values(networkConfig[`netId${netId}`].rpcUrls)
    if (netId === '5') {
      // NOTE(review): enabledChains is ['1', '56'], so this Goerli branch
      // looks unreachable from start() — confirm before removing.
      rpcUrl = `https://goerli.infura.io/v3/${process.env.INFURA_KEY}`
    }
    let events = []
    // Fetch only events after the last cached block.
    events = await getPastEvents({
      type,
      netId,
      rpcUrl,
      events,
      contractAttrs: [ABI, _contract],
      fromBlock: cachedEvents.lastBlock + 1
    })
    // Reduce raw web3 events to the compact shape stored in the cache.
    if (type === 'Deposit') {
      events = events.map(({ blockNumber, transactionHash, returnValues }) => {
        const { commitment, leafIndex, timestamp } = returnValues
        return {
          timestamp,
          commitment,
          blockNumber,
          transactionHash,
          leafIndex: Number(leafIndex)
        }
      })
    }
    if (type === 'Withdrawal') {
      events = events.map(({ blockNumber, transactionHash, returnValues }) => {
        const { nullifierHash, to, fee } = returnValues
        return {
          to,
          fee,
          blockNumber,
          nullifierHash,
          transactionHash
        }
      })
    }
    let freshEvents = cachedEvents.events.concat(events)
    if (type === 'Withdrawal') {
      // Withdrawals: dedupe by nullifier hash, newest block first.
      freshEvents = uniqBy(freshEvents, 'nullifierHash').sort((a, b) => b.blockNumber - a.blockNumber)
    } else {
      // Deposits: keep only entries whose leafIndex equals their array
      // position, dropping duplicates and out-of-order entries in one pass.
      freshEvents = freshEvents.filter((e, index) => Number(e.leafIndex) === index)
    }
    const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'
    fs.writeFileSync(`${EVENTS_PATH}${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`, eventsJson)
  }
}
/**
 * Entry point: validates the CLI chain id (`yarn update:events {chainId}`)
 * and sequentially updates each event type for that chain.
 * @throws {Error} when the chain id is not in `enabledChains`.
 */
async function start() {
  const [, , , chain] = process.argv
  if (!enabledChains.includes(chain)) {
    throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
  }
  // EVENTS is a plain array — a regular sequential loop is the right tool
  // (was `for await`, which is meant for async iterables).
  for (const event of EVENTS) {
    await main(event, chain)
  }
}

// Fix: surface failures with a non-zero exit code instead of an
// unhandled-rejection warning.
start().catch((err) => {
  console.error(err)
  process.exit(1)
})

View File

@ -1,19 +1,20 @@
const fs = require('fs') import 'dotenv/config'
const Jszip = require('jszip')
const zipper = require('zip-local') import fs from 'fs'
const BloomFilter = require('bloomfilter.js') import BloomFilter from 'bloomfilter.js'
import { MerkleTree } from 'fixed-merkle-tree'
import { buildMimcSponge } from 'circomlibjs'
import networkConfig from '../networkConfig'
import { loadCachedEvents, save } from './helpers'
const TREES_FOLDER = 'static/trees'
const TREES_PATH = './static/trees/'
const EVENTS_PATH = './static/events/'
const jszip = new Jszip()
const EVENTS = ['deposit'] const EVENTS = ['deposit']
const enabledChains = ['1']
const { MerkleTree } = require('fixed-merkle-tree')
const { buildMimcSponge } = require('circomlibjs')
const networkConfig = require('./networkConfig')
const treesPath = './static/trees/'
const eventsPath = './static/events/'
let mimcHash let mimcHash
const trees = { const trees = {
@ -25,7 +26,6 @@ function getName({ path, type, instance, format = '.json', currName = 'eth' }) {
return `${path}${type.toLowerCase()}s_${currName}_${instance}${format}` return `${path}${type.toLowerCase()}s_${currName}_${instance}${format}`
} }
const TREES_FOLDER = 'static/trees'
function createTreeZip(netId) { function createTreeZip(netId) {
try { try {
const config = networkConfig[`netId${netId}`] const config = networkConfig[`netId${netId}`]
@ -37,23 +37,18 @@ function createTreeZip(netId) {
type, type,
instance, instance,
format: '', format: '',
path: treesPath, path: TREES_PATH,
currName: config.currencyName.toLowerCase() currName: config.currencyName.toLowerCase()
}) })
const treesFolder = fs.readdirSync(TREES_FOLDER) const treesFolder = fs.readdirSync(TREES_FOLDER)
treesFolder.forEach((fileName) => { treesFolder.forEach((fileName) => {
fileName = `${treesPath}${fileName}` fileName = `${TREES_PATH}${fileName}`
const isInstanceFile = !fileName.includes('.zip') && fileName.includes(baseFilename) const isInstanceFile = !fileName.includes('.zip') && fileName.includes(baseFilename)
if (isInstanceFile) { if (isInstanceFile) {
zipper.sync save(fileName)
.zip(`${fileName}`)
.compress()
.save(`${fileName}.zip`)
fs.unlinkSync(fileName)
} }
}) })
} }
@ -63,10 +58,10 @@ function createTreeZip(netId) {
async function createTree(netId) { async function createTree(netId) {
try { try {
const config = networkConfig[`netId${netId}`] const { currencyName, tokens, deployedBlock } = networkConfig[`netId${netId}`]
const currName = config.currencyName.toLowerCase() const currName = currencyName.toLowerCase()
const { instanceAddress: CONTRACTS } = config.tokens.eth const { instanceAddress: CONTRACTS } = tokens.eth
for (const type of EVENTS) { for (const type of EVENTS) {
for (const [instance] of Object.entries(CONTRACTS)) { for (const [instance] of Object.entries(CONTRACTS)) {
@ -75,12 +70,17 @@ async function createTree(netId) {
instance, instance,
currName, currName,
format: '', format: '',
path: treesPath path: TREES_PATH
}) })
console.log('createTree', { type, instance }) console.log('createTree', { type, instance })
const events = await loadCachedEvents({ type, amount: instance, currName }) const { events } = await loadCachedEvents({
name: `${type}s_${currName}_${instance}.json`,
directory: EVENTS_PATH,
deployedBlock
})
console.log('events', events.length) console.log('events', events.length)
const bloom = new BloomFilter(events.length) // to reduce the number of false positives const bloom = new BloomFilter(events.length) // to reduce the number of false positives
@ -88,7 +88,7 @@ async function createTree(netId) {
const eventsData = events.reduce( const eventsData = events.reduce(
(acc, { leafIndex, commitment, ...rest }, i) => { (acc, { leafIndex, commitment, ...rest }, i) => {
if (leafIndex !== i) { if (leafIndex !== i) {
throw new Error('leafIndex !== i', i, leafIndex) throw new Error(`leafIndex (${leafIndex}) !== i (${i})`)
} }
const leave = commitment.toString() const leave = commitment.toString()
@ -127,40 +127,7 @@ async function createTree(netId) {
} }
} }
} catch (e) { } catch (e) {
console.log(e.message) console.error(e.message)
}
}
async function download({ name, contentType }) {
const path = `${name}.zip`
const data = fs.readFileSync(path)
const zip = await jszip.loadAsync(data)
const file = zip.file(
path
.replace(eventsPath, '')
.slice(0, -4)
.toLowerCase()
)
const content = await file.async(contentType)
return content
}
async function loadCachedEvents({ type, amount, currName = 'eth', path = '' }) {
try {
const module = await download({
contentType: 'string',
name: path || getName({ path: eventsPath, type, instance: amount, currName })
})
if (module) {
return JSON.parse(module)
}
} catch (err) {
throw new Error(`Method loadCachedEvents has error: ${err.message}`)
} }
} }
@ -170,14 +137,14 @@ async function initMimc() {
} }
async function main() { async function main() {
const [, , , chain] = process.argv
if (!enabledChains.includes(chain)) {
throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
}
await initMimc() await initMimc()
const NETWORKS = [1] await createTree(chain)
await createTreeZip(chain)
for await (const netId of NETWORKS) {
await createTree(netId)
await createTreeZip(netId)
}
} }
main() main()

71
scripts/updateZip.js Normal file
View File

@ -0,0 +1,71 @@
import { uniqBy } from 'lodash'
import networkConfig from '../networkConfig'
import { loadCachedEvents, save } from './helpers'
const EVENTS_PATH = './static/events/'
const EVENTS = ['Deposit', 'Withdrawal']
/**
 * Zips the encrypted-notes cache file for one chain.
 * Missing files are tolerated — a warning is logged and execution continues.
 */
function updateEncrypted(netId) {
  try {
    const notesFile = `${EVENTS_PATH}encrypted_notes_${netId}.json`
    save(notesFile)
  } catch {
    console.warn('Not detected any events files for chainId - ', netId)
  }
}
/**
 * Zips every deposit/withdrawal cache file for the chain's native currency
 * and, when the archive was written, sanity-checks it via testCommon.
 */
async function updateCommon(netId) {
  const { nativeCurrency, tokens } = networkConfig[`netId${netId}`]
  console.log(Object.keys(tokens[nativeCurrency].instanceAddress))
  // Plain arrays — regular loops (was `for await`, meant for async iterables).
  for (const type of EVENTS) {
    for (const instance of Object.keys(tokens[nativeCurrency].instanceAddress)) {
      console.warn('instance', instance)
      const filename = `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`
      // Fix: the path previously interpolated a broken `$(unknown)`
      // placeholder instead of the computed filename, so the wrong
      // (non-existent) path was passed to save().
      const isSaved = save(`${EVENTS_PATH}${filename}`)
      if (isSaved) {
        try {
          await testCommon(netId, type, filename)
        } catch (err) {
          console.error(err.message)
        }
      }
    }
  }
}
/**
 * Re-reads the freshly zipped cache and verifies its integrity:
 * withdrawals must have unique nullifier hashes; deposit leafIndex values
 * must form a contiguous 0..n-1 sequence.
 * @throws {Error} when duplicate/invalid entries are detected.
 */
async function testCommon(netId, type, filename) {
  const { deployedBlock } = networkConfig[`netId${netId}`]

  const cachedEvents = await loadCachedEvents({
    name: filename,
    directory: EVENTS_PATH,
    deployedBlock
  })
  console.log('cachedEvents', cachedEvents.events.length, type)

  let events = cachedEvents.events
  if (type === 'Withdrawal') {
    events = uniqBy(cachedEvents.events, 'nullifierHash')
  } else if (type === 'Deposit') {
    events = cachedEvents.events.filter((e, index) => Number(e.leafIndex) === index)
  }

  if (events.length !== cachedEvents.events.length) {
    console.error('events.length', events.length)
    console.error('cachedEvents.events.length', cachedEvents.events.length)
    // Fixes: the message previously interpolated a broken `$(unknown)`
    // placeholder instead of the filename, and computed the dropped-entry
    // count with the operands reversed (always negative).
    throw new Error(
      `Duplicates was detected in ${filename} (${cachedEvents.events.length - events.length})`
    )
  }
}
/**
 * Entry point: re-zips and validates the event caches for every
 * supported network.
 */
async function main() {
  const NETWORKS = [1, 5, 56]
  // NETWORKS is a plain array — a regular loop is the right tool
  // (was `for await`, which is meant for async iterables).
  for (const netId of NETWORKS) {
    updateEncrypted(netId)
    await updateCommon(netId)
  }
}

// Fix: surface failures with a non-zero exit code instead of an
// unhandled-rejection warning.
main().catch((err) => {
  console.error(err)
  process.exit(1)
})

Binary file not shown.

View File

@ -451,7 +451,7 @@ const actions = {
let cachedEvents = await dispatch('getEncryptedEventsFromDb', { netId }) let cachedEvents = await dispatch('getEncryptedEventsFromDb', { netId })
const networksWithCache = [1, 5] const networksWithCache = [1, 5, 56]
const LENGTH_CACHE = const LENGTH_CACHE =
Number(netId) === 1 Number(netId) === 1

View File

@ -1,119 +0,0 @@
const fs = require('fs')
const Web3 = require('web3')
const Jszip = require('jszip')
const networkConfig = require('./networkConfig')
const MAIN_NET_RPC_URL = networkConfig.netId1.rpcUrls.Infura.url
const GOERLI_RPC_URL = networkConfig.netId5.rpcUrls.Alchemy.url
const ABI = require('./abis/TornadoProxy.abi.json')
const jszip = new Jszip()
function getWeb3(netId) {
const rpc = Number(netId) === 1 ? MAIN_NET_RPC_URL : GOERLI_RPC_URL
const provider = new Web3.providers.HttpProvider(rpc)
const web3 = new Web3(provider)
return web3
}
async function download({ name, contentType }) {
const path = `${name}.zip`
const data = fs.readFileSync(path)
const zip = await jszip.loadAsync(data)
const file = zip.file(path.replace('./static/events/', '').slice(0, -4))
const content = await file.async(contentType)
return content
}
async function loadCachedEvents(file) {
try {
const module = await download({
contentType: 'string',
name: file
})
if (module) {
const events = JSON.parse(module)
const lastEvent = events[events.length - 1]
const lastBlock = lastEvent.block || lastEvent.blockNumber
return {
events,
lastBlock
}
}
} catch (err) {
throw new Error(`Method loadCachedEvents has error: ${err.message}`)
}
}
async function saveEncryptedNote(netId) {
const web3 = getWeb3(netId)
const {
'tornado-proxy.contract.tornadocash.eth': tornadoProxy,
'tornado-router.contract.tornadocash.eth': tornadoRouter
} = networkConfig[`netId${netId}`]
const contractAddress = tornadoRouter || tornadoProxy
const contract = new web3.eth.Contract(ABI, contractAddress)
const currentBlockNumber = await web3.eth.getBlockNumber()
const file = `./static/events/encrypted_notes_${netId}.json`
let encryptedEvents = []
const cachedEvents = await loadCachedEvents(file)
console.log('cachedEvents', cachedEvents.events.length)
const startBlock = cachedEvents.lastBlock + 1
const NUMBER_PARTS = 20
const part = parseInt((currentBlockNumber - startBlock) / NUMBER_PARTS)
let fromBlock = startBlock
let toBlock = startBlock + part
for (let i = 0; i <= NUMBER_PARTS; i++) {
const partOfEvents = await contract.getPastEvents('EncryptedNote', {
toBlock,
fromBlock
})
if (partOfEvents) {
encryptedEvents = encryptedEvents.concat(partOfEvents)
}
fromBlock = toBlock
toBlock += part
}
console.log('Encrypted note', netId, encryptedEvents.length)
encryptedEvents = encryptedEvents
.filter((e) => e.returnValues.encryptedNote)
.map((item) => {
return {
txHash: item.transactionHash,
blockNumber: Number(item.blockNumber),
encryptedNote: item.returnValues.encryptedNote
}
})
const eventsJson = JSON.stringify(cachedEvents.events.concat(encryptedEvents), null, 2) + '\n'
fs.writeFileSync(file, eventsJson)
}
async function main() {
const NETWORKS = [1]
for await (const netId of NETWORKS) {
await saveEncryptedNote(netId)
}
}
main()

View File

@ -1,166 +0,0 @@
const fs = require('fs')
const Web3 = require('web3')
const Jszip = require('jszip')
const networkConfig = require('./networkConfig')
const ABI = require('./abis/Instance.abi.json')
const EVENTS = ['Deposit', 'Withdrawal']
const CHAINS = [56]
const jszip = new Jszip()
async function download({ name, contentType }) {
const path = `${name}.zip`
const data = fs.readFileSync(path)
const zip = await jszip.loadAsync(data)
const file = zip.file(
path
.replace('./static/events/', '')
.slice(0, -4)
.toLowerCase()
)
const content = await file.async(contentType)
return content
}
async function loadCachedEvents({ type, amount, nativeCurrency, deployedBlock }) {
try {
const module = await download({
contentType: 'string',
name: `./static/events/${type}s_${nativeCurrency}_${amount}.json`
})
if (module) {
const events = JSON.parse(module)
return {
events,
lastBlock: events[events.length - 1].blockNumber
}
}
} catch (err) {
console.error(`Method loadCachedEvents has error: ${err.message}`)
return {
events: [],
lastBlock: deployedBlock
}
}
}
async function main(type, netId) {
const { tokens, nativeCurrency, deployedBlock } = networkConfig[`netId${netId}`]
const CONTRACTS = tokens[nativeCurrency].instanceAddress
for (const [instance, _contract] of Object.entries(CONTRACTS)) {
const cachedEvents = await loadCachedEvents({ type, amount: instance, nativeCurrency, deployedBlock })
console.log('cachedEvents', cachedEvents.events.length)
let startBlock = cachedEvents.lastBlock + 1
console.log('startBlock', startBlock)
const rpcUrl =
netId === 1
? networkConfig[`netId${netId}`].rpcUrls.Infura.url
: networkConfig[`netId${netId}`].rpcUrls.publicRpc3.url
const provider = new Web3.providers.HttpProvider(rpcUrl)
const web3 = new Web3(provider)
const contract = new web3.eth.Contract(ABI, _contract)
let events = []
console.log(netId)
if (netId === 56) {
const blockRange = 4950
const currentBlockNumber = await web3.eth.getBlockNumber()
const blockDifference = Math.ceil(currentBlockNumber - startBlock)
let numberParts = blockDifference === 0 ? 1 : Math.ceil(blockDifference / blockRange)
const part = Math.ceil(blockDifference / numberParts)
console.log('numberParts', numberParts)
let toBlock = startBlock + part
if (startBlock < currentBlockNumber) {
if (toBlock >= currentBlockNumber) {
toBlock = 'latest'
numberParts = 1
}
for (let i = 0; i < numberParts; i++) {
try {
await new Promise((resolve) => setTimeout(resolve, 200))
console.log({ startBlock, toBlock })
const partOfEvents = await contract.getPastEvents(type, {
fromBlock: startBlock,
toBlock
})
if (partOfEvents) {
events = events.concat(partOfEvents)
console.log({
events: events.length
})
}
startBlock = toBlock
toBlock += part
} catch {
numberParts = numberParts + 1
}
}
}
} else {
events = await contract.getPastEvents(type, {
fromBlock: startBlock,
toBlock: 'latest'
})
}
console.log('events', events.length)
if (type === 'Deposit') {
events = events.map(({ blockNumber, transactionHash, returnValues }) => {
const { commitment, leafIndex, timestamp } = returnValues
return {
blockNumber,
transactionHash,
commitment,
leafIndex: Number(leafIndex),
timestamp
}
})
}
if (type === 'Withdrawal') {
events = events.map(({ blockNumber, transactionHash, returnValues }) => {
const { nullifierHash, to, fee } = returnValues
return {
blockNumber,
transactionHash,
nullifierHash,
to,
fee
}
})
}
const eventsJson = JSON.stringify(cachedEvents.events.concat(events), null, 2) + '\n'
fs.writeFileSync(`./static/events/${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`, eventsJson)
}
}
async function start() {
for await (const chain of CHAINS) {
for await (const event of EVENTS) {
await main(event, chain)
}
}
}
start()

View File

@ -1,113 +0,0 @@
const fs = require('fs')
const Jszip = require('jszip')
const zipper = require('zip-local')
const jszip = new Jszip()
const EVENTS = ['Deposit', 'Withdrawal']
const networkConfig = require('./networkConfig')
function updateEncryptedNote(netId) {
try {
const file = `./static/events/encrypted_notes_${netId}.json`
zipper.sync
.zip(file)
.compress()
.save(`${file}.zip`)
fs.unlinkSync(file)
} catch {}
}
function updateCommon(netId) {
try {
const CURRENCY = networkConfig[`netId${netId}`].nativeCurrency
const CONTRACTS = networkConfig[`netId${netId}`].tokens[CURRENCY].instanceAddress
for (const type of EVENTS) {
for (const [instance] of Object.entries(CONTRACTS)) {
const file = `./static/events/${type.toLowerCase()}s_${CURRENCY}_${instance}.json`
zipper.sync
.zip(file)
.compress()
.save(`${file}.zip`)
fs.unlinkSync(file)
}
}
} catch {}
}
async function download({ name, contentType }) {
const path = `${name}.zip`
const data = fs.readFileSync(path)
const zip = await jszip.loadAsync(data)
const file = zip.file(
path
.replace('./static/events/', '')
.slice(0, -4)
.toLowerCase()
)
const content = await file.async(contentType)
return content
}
async function loadCachedEvents({ type, amount, CURRENCY, path = '' }) {
try {
const module = await download({
contentType: 'string',
name: path || `./static/events/${type}s_${CURRENCY}_${amount}.json`
})
if (module) {
const events = JSON.parse(module)
return {
events,
lastBlock: events[events.length - 1].blockNumber
}
}
} catch (err) {
throw new Error(`Method loadCachedEvents has error: ${err.message}`)
}
}
async function testCommon(netId) {
for (const type of EVENTS) {
if (type === 'Withdrawal') {
return
}
const CURRENCY = networkConfig[`netId${netId}`].nativeCurrency
const CONTRACTS = networkConfig[`netId${netId}`].tokens[CURRENCY].instanceAddress
for (const [instance, _contract] of Object.entries(CONTRACTS)) {
console.log('update', { type, instance, contract: _contract })
const cachedEvents = await loadCachedEvents({ type, amount: instance, CURRENCY })
console.log('cachedEvents', cachedEvents.events.length)
cachedEvents.events.forEach((e, index) => {
if (Number(e.leafIndex) !== index) {
throw new Error(index)
}
})
}
}
}
async function main() {
const NETWORKS = [1, 5, 56]
for await (const netId of NETWORKS) {
await updateEncryptedNote(netId)
await updateCommon(netId)
await testCommon(netId)
}
}
main()

View File

@ -10485,7 +10485,7 @@ lodash@^4.15.0, lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A== integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
lodash@^4.17.12: lodash@^4.17.12, lodash@^4.17.21:
version "4.17.21" version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==