Use cached events from frontend and workers

Tornado Contrib 2024-05-08 18:57:13 +00:00
parent 8e84cd651c
commit 8fcb9ed387
Signed by: tornadocontrib
GPG Key ID: 60B4DF1A076C64B1
6 changed files with 143 additions and 7 deletions

@@ -2,6 +2,12 @@
 ## Build Setup
 
+If you use the latest Node.js version, you should set the NODE_OPTIONS environment variable:
+
+```bash
+export NODE_OPTIONS="--openssl-legacy-provider"
+```
+
 ```bash
 # install dependencies
 $ yarn install

@@ -11,6 +11,7 @@ import { ExtendedProvider } from './services/provider'
 import { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST, workerEvents, numbers } from './services/constants'
 import { sleep } from './services/utilities'
 import { poolAbi } from './services/pool'
+import { downloadEvents } from './services/downloadEvents'
 
 const getProviderWithSigner = (chainId) => {
   return new ExtendedProvider(RPC_LIST[chainId], chainId, FALLBACK_RPC_LIST[chainId])
@@ -165,6 +166,14 @@ const getCommitments = async ({ withCache, lastSyncBlock }) => {
       return { commitmentEvents: cachedEvents }
     }
     blockFrom = newBlockFrom > currentBlock ? currentBlock : newBlockFrom
+  } else {
+    const downloadedEvents = await downloadEvents(`commitments_${self.chainId}.json`, blockFrom)
+
+    if (downloadedEvents.events.length) {
+      cachedEvents.push(...downloadedEvents.events)
+      blockFrom = downloadedEvents.lastBlock
+    }
   }
 
   const commitmentEvents = await getCommitmentBatch({ blockFrom, blockTo: currentBlock, cachedEvents, withCache })

@@ -8,6 +8,7 @@ import { ExtendedProvider } from './services/provider'
 import { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST, workerEvents, numbers } from './services/constants'
 import { sleep } from './services/utilities'
 import { poolAbi } from './services/pool'
+import { downloadEvents } from './services/downloadEvents'
 
 const getProviderWithSigner = (chainId) => {
   return new ExtendedProvider(RPC_LIST[chainId], chainId, FALLBACK_RPC_LIST[chainId])
@@ -120,6 +121,14 @@ const getCachedEvents = async () => {
       return { blockFrom, cachedEvents }
     }
     blockFrom = newBlockFrom > currentBlock ? currentBlock : newBlockFrom
+  } else {
+    const downloadedEvents = await downloadEvents(`nullifiers_${self.chainId}.json`, blockFrom)
+
+    if (downloadedEvents.events.length) {
+      cachedEvents.push(...downloadedEvents.events)
+      blockFrom = downloadedEvents.lastBlock
+    }
   }
 
   return { blockFrom, cachedEvents }

@@ -0,0 +1,37 @@
import { unzipAsync } from './zip'

export async function downloadEvents(fileName, deployedBlock) {
  fileName = fileName.toLowerCase()

  // Derive the archive URL prefix by dropping the trailing segment (7 chars) of the webpack public path
  // @ts-ignore
  const prefix = __webpack_public_path__.slice(0, -7)

  try {
    const resp = await fetch(`${prefix}/${fileName}.zip`, {
      method: 'GET',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
      }
    })

    const arrayBuffer = await resp.arrayBuffer()

    const { [fileName]: content } = await unzipAsync(new Uint8Array(arrayBuffer))

    const events = JSON.parse(new TextDecoder().decode(content))

    // Resume syncing from the last packaged event, or from the deployed block if the archive is empty
    const lastBlock = events && Array.isArray(events) && events[events.length - 1]
      ? events[events.length - 1].blockNumber
      : deployedBlock

    return {
      events,
      lastBlock
    }
  } catch {
    return {
      events: [],
      lastBlock: deployedBlock
    }
  }
}
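
The helper above assumes a specific archive layout: a zip whose single entry is named exactly like the lowercase JSON file it asks for, containing a plain array of events. A minimal sketch of producing such an archive with fflate; the script name, file paths, and build-time context are assumptions, not part of this commit:

```ts
import { zipSync, strToU8 } from 'fflate'
import { writeFileSync } from 'fs'

// Hypothetical build-time helper: package cached events into the zip layout
// that downloadEvents expects (entry name == lowercase JSON file name).
function packEvents(fileName: string, events: { blockNumber: number }[]) {
  const name = fileName.toLowerCase()

  // The entry key must match the name that downloadEvents destructures after unzipping
  const archive = zipSync({ [name]: strToU8(JSON.stringify(events)) })

  // Served as e.g. static/commitments_1.json.zip next to the app bundle (path is an assumption)
  writeFileSync(`static/${name}.zip`, archive)
}
```

Because the JSON is just the array of events, `downloadEvents` can read `blockNumber` off the last element to know where to resume syncing.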

@@ -7,6 +7,7 @@ import { isEmpty, sleep, toChecksumAddress } from '@/utilities'
 import { getBridgeHelper, getBridgeProxy, getAmbBridge } from '@/contracts'
 import { EventsClass, GetAffirmationParams, GetRelayedMessageParams, SaveEventsParams } from './@types'
+import { downloadEvents } from './load'
 
 export * from './batch'
@@ -33,19 +34,31 @@ class EventAggregator implements EventsClass {
         storeName: `${IndexDBStores.ACCOUNT_EVENTS}_${chainId}`,
       })
 
+      const newEvents = []
+
       if (cachedEvents?.length) {
         const [latestEvent] = cachedEvents.slice(-numbers.ONE)
         blockFrom = Number(latestEvent.blockNumber) + numbers.ONE
+      } else {
+        const downloadedEvents = await downloadEvents(`accounts_${chainId}.json`, blockFrom)
+
+        if (downloadedEvents.events.length) {
+          newEvents.push(...downloadedEvents.events)
+          blockFrom = downloadedEvents.lastBlock
+        }
       }
 
       const { events: graphEvents, lastSyncBlock } = await getAllAccounts({ fromBlock: blockFrom, chainId })
-      const [account] = graphEvents.filter((e: { key: string }) => e.key === publicKey)
+      newEvents.push(...graphEvents)
+
+      const [account] = newEvents.filter((e: { key: string }) => e.key === publicKey)
 
       if (account) {
         this.saveEvents({
           chainId,
-          events: graphEvents,
+          events: newEvents,
           storeName: IndexDBStores.ACCOUNT_EVENTS,
         })
 
         return account.owner
@@ -68,7 +81,7 @@ class EventAggregator implements EventsClass {
         }
       })
 
-      const newEvents = graphEvents.concat(accountEvents)
+      newEvents.push(...accountEvents)
 
       this.saveEvents({
         chainId,
@@ -76,7 +89,7 @@ class EventAggregator implements EventsClass {
         storeName: IndexDBStores.ACCOUNT_EVENTS,
       })
 
-      const events = cachedEvents.concat(newEvents).filter((e: { key: string }) => e.key === publicKey)
+      const events = newEvents.filter((e: { key: string }) => e.key === publicKey)
 
       if (isEmpty(events)) {
         return undefined
@@ -87,6 +100,7 @@ class EventAggregator implements EventsClass {
       return event.owner
     } catch (err) {
+      console.log(err)
       return undefined
     }
   }
@@ -113,19 +127,30 @@ class EventAggregator implements EventsClass {
         storeName: `${IndexDBStores.ACCOUNT_EVENTS}_${chainId}`,
       })
 
+      const newEvents = []
+
       if (cachedEvents?.length) {
         const [latestEvent] = cachedEvents.slice(-numbers.ONE)
         blockFrom = Number(latestEvent.blockNumber) + numbers.ONE
+      } else {
+        const downloadedEvents = await downloadEvents(`accounts_${chainId}.json`, blockFrom)
+
+        if (downloadedEvents.events.length) {
+          newEvents.push(...downloadedEvents.events)
+          blockFrom = downloadedEvents.lastBlock
+        }
       }
 
       const { events: graphEvents, lastSyncBlock } = await getAllAccounts({ fromBlock: blockFrom, chainId })
+      newEvents.push(...graphEvents)
 
-      const [account] = graphEvents.filter((e: { owner: string }) => toChecksumAddress(e.owner) === toChecksumAddress(address))
+      const [account] = newEvents.filter((e: { owner: string }) => toChecksumAddress(e.owner) === toChecksumAddress(address))
 
       if (account) {
         this.saveEvents({
           chainId,
-          events: graphEvents,
+          events: newEvents,
           storeName: IndexDBStores.ACCOUNT_EVENTS,
         })
 
         return account.key
@@ -148,7 +173,7 @@ class EventAggregator implements EventsClass {
         }
       })
 
-      const newEvents = graphEvents.concat(accountEvents)
+      newEvents.push(...accountEvents)
 
       this.saveEvents({
         chainId,
@@ -169,6 +194,7 @@ class EventAggregator implements EventsClass {
       return event.key
     } catch (err) {
+      console.log(err)
       return undefined
     }
   }
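
Taken together, the EventAggregator changes establish a three-step lookup order for account events. A condensed sketch of that flow follows; `collectAccountEvents` is hypothetical, and the cache contents plus subgraph fetcher are passed in as parameters because their real signatures are not shown in this diff:

```ts
import { downloadEvents } from './load'

type AccountEvent = { blockNumber: number; key: string; owner: string }

// Condensed sketch of the lookup order introduced here:
// 1. resume after the newest IndexedDB-cached event, or
// 2. seed from the bundled accounts_<chainId>.json.zip archive, then
// 3. fetch only the remaining blocks from the subgraph.
async function collectAccountEvents(
  chainId: number,
  deployedBlock: number,
  cachedEvents: AccountEvent[],
  getAllAccounts: (params: { fromBlock: number; chainId: number }) => Promise<{ events: AccountEvent[] }>
) {
  let blockFrom = deployedBlock
  const newEvents: AccountEvent[] = []

  if (cachedEvents.length) {
    // Continue from the block after the newest cached event
    blockFrom = Number(cachedEvents[cachedEvents.length - 1].blockNumber) + 1
  } else {
    // No local cache yet: seed it from the pre-packaged archive
    const downloaded = await downloadEvents(`accounts_${chainId}.json`, blockFrom)

    if (downloaded.events.length) {
      newEvents.push(...downloaded.events)
      blockFrom = downloaded.lastBlock
    }
  }

  // Only the tail that neither the cache nor the archive covers hits the subgraph
  const { events: graphEvents } = await getAllAccounts({ fromBlock: blockFrom, chainId })
  newEvents.push(...graphEvents)

  return { blockFrom, newEvents }
}
```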

services/events/load.ts Normal file

@@ -0,0 +1,49 @@
import { unzip } from 'fflate'

export function unzipAsync(data: Uint8Array) {
  return new Promise((res, rej) => {
    unzip(data, {}, (err, data) => {
      if (err) {
        rej(err);
        return;
      }
      res(data);
    });
  });
}

export async function downloadEvents(fileName: string, deployedBlock: number) {
  fileName = fileName.toLowerCase()

  // Derive the archive URL prefix by dropping the trailing segment (7 chars) of the webpack public path
  // @ts-ignore
  const prefix = __webpack_public_path__.slice(0, -7)

  try {
    const resp = await fetch(`${prefix}/${fileName}.zip`, {
      method: 'GET',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
      }
    })

    const arrayBuffer = await resp.arrayBuffer()

    const { [fileName]: content } = (await unzipAsync(new Uint8Array(arrayBuffer))) as any

    const events = JSON.parse(new TextDecoder().decode(content))

    // Resume syncing from the last packaged event, or from the deployed block if the archive is empty
    const lastBlock = events && Array.isArray(events) && events[events.length - 1]
      ? events[events.length - 1].blockNumber
      : deployedBlock

    return {
      events,
      lastBlock
    }
  } catch {
    return {
      events: [],
      lastBlock: deployedBlock
    }
  }
}
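
As a quick illustration of the contract: `downloadEvents` catches network and parse errors, so a caller can always use `lastBlock` as the next block to sync from. The `@/services/events/load` import path and the placeholder block number below are assumptions for the sake of the example:

```ts
import { downloadEvents } from '@/services/events/load'

async function showContract() {
  const deployedBlock = 1_000_000 // placeholder, not a real deployment block

  // Missing or corrupted archive => events: [], lastBlock: deployedBlock
  const { events, lastBlock } = await downloadEvents('accounts_1.json', deployedBlock)

  console.log(`loaded ${Array.isArray(events) ? events.length : 0} pre-packaged events, continue syncing from block ${lastBlock}`)
}
```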