Use Subgraph & Batched Events #2
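This change makes the event workers sync from the pool subgraph first and only fall back to the node for whatever the subgraph has not indexed yet: NewCommitment and NewNullifier events are pulled through the new graph service up to its lastSyncBlock, and the remaining block range is fetched with batched event queries via BatchEventsService. It also adds assets/services/graph (an Apollo client plus paginated getAllCommitments / getAllNullifiers helpers and their GraphQL queries), points the mainnet and relayer-registry subgraph endpoints at tornadocash-rpc.com, and fixes the existing getNullifiers service to return data.nullifiers instead of data.commitments.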
@@ -8,6 +8,7 @@ const { decrypt } = require('eth-sig-util')
 
 const { IndexedDB } = require('./services/idb')
 const { BatchEventsService } = require('./services/batch')
+const { getAllCommitments } = require('./services/graph')
 const { ExtendedProvider } = require('./services/provider')
 const { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST, workerEvents, numbers } = require('./services/constants')
 const { sleep } = require('./services/utilities')
@@ -70,21 +71,52 @@ const setTornadoPool = (chainId, provider) => {
 }
 
 const getCommitmentBatch = async ({ blockFrom, blockTo, cachedEvents, withCache }) => {
-  const events = await self.BatchEventsService.getBatchEvents({
-    fromBlock: blockFrom,
-    toBlock: blockTo,
-    type: 'NewCommitment'
-  })
-
-  const commitmentEvents = events.map(({ blockNumber, transactionHash, args }) => ({
-    blockNumber,
-    transactionHash,
-    index: Number(args.index),
-    commitment: args.commitment,
-    encryptedOutput: args.encryptedOutput,
-  }))
-
-  return commitmentEvents.filter((el) => {
+  const events = []
+
+  let { events: graphEvents, lastSyncBlock } = await getAllCommitments({ fromBlock: blockFrom, chainId })
+
+  if (lastSyncBlock) {
+    graphEvents = graphEvents.map(({ blockNumber, transactionHash, index, commitment, encryptedOutput }) => ({
+      blockNumber,
+      transactionHash,
+      index: Number(index),
+      commitment,
+      encryptedOutput,
+    }))
+
+    console.log({
+      graphEvents
+    })
+
+    events.push(...graphEvents)
+    blockFrom = lastSyncBlock + numbers.ONE
+  }
+
+  if (!blockTo || blockTo > blockFrom) {
+    let nodeEvents = await self.BatchEventsService.getBatchEvents({
+      fromBlock: blockFrom,
+      toBlock: blockTo,
+      type: 'NewCommitment'
+    })
+
+    if (nodeEvents && nodeEvents.length) {
+      nodeEvents = nodeEvents.map(({ blockNumber, transactionHash, args }) => ({
+        blockNumber,
+        transactionHash,
+        index: Number(args.index),
+        commitment: args.commitment,
+        encryptedOutput: args.encryptedOutput,
+      }))
+
+      console.log({
+        nodeEvents
+      })
+
+      events.push(...nodeEvents)
+    }
+  }
+
+  return events.filter((el) => {
     if (!withCache && cachedEvents && cachedEvents.length) {
       return cachedEvents.find((cached) => {
         return el.transactionHash === cached.transactionHash && el.index === cached.index
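The worker code above, like the nullifier worker below, relies on self.BatchEventsService.getBatchEvents from ./services/batch, whose implementation is not part of the hunks shown here. As a rough mental model only, a chunked event fetcher along these lines would satisfy the calls above; the class shape, chunk size and queryFilter usage are assumptions for illustration, not the actual service:

// Hypothetical sketch of a chunked event fetcher (ethers v5 style); not the real ./services/batch.
class BatchEventsService {
  constructor({ provider, contract, chunkSize = 10000 }) {
    this.provider = provider
    this.contract = contract
    this.chunkSize = chunkSize
  }

  async getBatchEvents({ fromBlock, toBlock, type }) {
    // Resolve the upper bound once so every chunk is queried against the same head block.
    const latest = toBlock || (await this.provider.getBlockNumber())
    const events = []

    // Walk the range in fixed-size block windows so a single log query never
    // covers more blocks than the RPC endpoint is willing to scan.
    for (let start = fromBlock; start <= latest; start += this.chunkSize) {
      const end = Math.min(start + this.chunkSize - 1, latest)
      const chunk = await this.contract.queryFilter(this.contract.filters[type](), start, end)
      events.push(...chunk)
    }

    return events
  }
}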
@@ -4,6 +4,7 @@ const { BigNumber, Contract } = require('ethers')
 
 const { IndexedDB } = require('./services/idb')
 const { BatchEventsService } = require('./services/batch')
+const { getAllNullifiers } = require('./services/graph')
 const { ExtendedProvider } = require('./services/provider')
 const { POOL_CONTRACT, RPC_LIST, FALLBACK_RPC_LIST, workerEvents, numbers } = require('./services/constants')
 const { sleep } = require('./services/utilities')
@@ -127,16 +128,41 @@ const getCachedEvents = async () => {
 
 const getNullifiers = async (blockFrom) => {
   try {
-    const events = await self.BatchEventsService.getBatchEvents({
-      fromBlock: blockFrom,
-      type: 'NewNullifier'
-    })
-
-    return events.map(({ blockNumber, transactionHash, args }) => ({
-      blockNumber,
-      transactionHash,
-      nullifier: args.nullifier,
-    }))
+    const events = []
+
+    let { events: graphEvents, lastSyncBlock } = await getAllNullifiers({ fromBlock: blockFrom, chainId })
+
+    if (lastSyncBlock) {
+      console.log({
+        graphEvents
+      })
+
+      events.push(...graphEvents)
+      blockFrom = lastSyncBlock + numbers.ONE
+    }
+
+    if (!blockTo || blockTo > blockFrom) {
+      let nodeEvents = await self.BatchEventsService.getBatchEvents({
+        fromBlock: blockFrom,
+        type: 'NewNullifier'
+      })
+
+      if (nodeEvents && nodeEvents.length) {
+        nodeEvents = nodeEvents.map(({ blockNumber, transactionHash, args }) => ({
+          blockNumber,
+          transactionHash,
+          nullifier: args.nullifier,
+        }))
+
+        console.log({
+          nodeEvents
+        })
+
+        events.push(...nodeEvents)
+      }
+    }
+
+    return events
   } catch (err) {
     console.error('getNullifiers', err.message)
     return []
@@ -131,6 +131,7 @@ const numbers = {
 }
 
 module.exports = {
   ChainId,
   POOL_CONTRACT,
   RPC_LIST,
   FALLBACK_RPC_LIST,
assets/services/graph/index.js (new file, 191 lines)
@@ -0,0 +1,191 @@
const { isEmpty } = require('lodash')
const { ApolloClient, InMemoryCache, gql } = require('@apollo/client/core')

const { GET_COMMITMENT, GET_NULLIFIER } = require('./queries')
const { ChainId, numbers } = require('../constants')

const first = 1000
const breakLength = 900

const CHAIN_GRAPH_URLS = {
  [ChainId.BSC]: 'https://api.thegraph.com/subgraphs/name/dan1kov/bsc-tornado-pool-subgraph',
  [ChainId.MAINNET]: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/mainnet-tornado-pool-subgraph',
  [ChainId.XDAI]: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/gnosis-tornado-nova-subgraph',
}

const link = (operation) => {
  const { chainId } = operation.getContext()
  return CHAIN_GRAPH_URLS[chainId]
}

const client = new ApolloClient({
  uri: link,
  cache: new InMemoryCache(),
})

async function getCommitments({ fromBlock, chainId }) {
  const { data } = await client.query({
    context: {
      chainId,
    },
    query: gql(GET_COMMITMENT),
    variables: { first, fromBlock },
  })

  if (!data) {
    return {
      results: [],
      lastSyncBlock: data._meta.block.number
    }
  }

  return {
    results: data.commitments,
    lastSyncBlock: data._meta.block.number
  }
}

async function getAllCommitments({ fromBlock, chainId }) {
  try {
    let commitments = []
    let lastSyncBlock

    while (true) {
      let { results, lastSyncBlock: lastBlock } = await getCommitments({ fromBlock, chainId })

      lastSyncBlock = lastBlock

      if (isEmpty(results)) {
        break
      }

      if (results.length < breakLength) {
        commitments = commitments.concat(results)
        break
      }

      const [lastEvent] = results.slice(-numbers.ONE)

      results = results.filter((e) => e.blockNumber !== lastEvent.blockNumber)
      fromBlock = Number(lastEvent.blockNumber)

      commitments = commitments.concat(results)
    }

    if (!commitments) {
      return {
        lastSyncBlock,
        events: [],
      }
    }

    const data = commitments.map((e) => ({
      index: Number(e.index),
      commitment: e.commitment,
      blockNumber: Number(e.blockNumber),
      encryptedOutput: e.encryptedOutput,
      transactionHash: e.transactionHash
    }))

    const [lastEvent] = data.slice(-numbers.ONE)

    return {
      events: data,
      lastSyncBlock: (lastEvent && lastEvent.blockNumber > lastSyncBlock)
        ? lastEvent.blockNumber + numbers.ONE
        : lastSyncBlock,
    }
  } catch (err) {
    console.log('Error from getAllCommitments')
    console.log(err)
    return {
      lastSyncBlock: '',
      events: [],
    }
  }
}

async function getNullifiers({ fromBlock, chainId }) {
  const { data } = await client.query({
    context: {
      chainId,
    },
    query: gql(GET_NULLIFIER),
    variables: { first, fromBlock },
  })

  if (!data) {
    return {
      results: [],
      lastSyncBlock: data._meta.block.number
    }
  }

  return {
    results: data.nullifiers,
    lastSyncBlock: data._meta.block.number
  }
}

async function getAllNullifiers({ fromBlock, chainId }) {
  try {
    let nullifiers = []
    let lastSyncBlock

    while (true) {
      let { results, lastSyncBlock: lastBlock } = await getNullifiers({ fromBlock, chainId })

      lastSyncBlock = lastBlock

      if (isEmpty(results)) {
        break
      }

      if (results.length < breakLength) {
        nullifiers = nullifiers.concat(results)
        break
      }

      const [lastEvent] = results.slice(-numbers.ONE)

      results = results.filter((e) => e.blockNumber !== lastEvent.blockNumber)
      fromBlock = Number(lastEvent.blockNumber)

      nullifiers = nullifiers.concat(results)
    }

    if (!nullifiers) {
      return {
        lastSyncBlock,
        events: [],
      }
    }

    const data = nullifiers.map((e) => ({
      nullifier: e.nullifier,
      blockNumber: Number(e.blockNumber),
      transactionHash: e.transactionHash
    }))

    const [lastEvent] = data.slice(-numbers.ONE)

    return {
      events: data,
      lastSyncBlock: (lastEvent && lastEvent.blockNumber > lastSyncBlock)
        ? lastEvent.blockNumber + numbers.ONE
        : lastSyncBlock,
    }
  } catch (err) {
    console.log('Error from getAllNullifiers')
    console.log(err)
    return {
      lastSyncBlock: '',
      events: [],
    }
  }
}

module.exports = {
  getAllCommitments,
  getAllNullifiers
}
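getAllCommitments and getAllNullifiers page through the subgraph first = 1000 rows at a time, ordered by blockNumber. When a page comes back with at least breakLength (900) rows, the helper drops the rows belonging to the page's last block and restarts the query from that block number (blockNumber_gte), so events that share a block are never split across pages or lost. Callers get the mapped events plus a lastSyncBlock they can hand to the node-based fallback. A minimal consumer sketch; the chainId and fromBlock values are only examples:

// Example consumer of the new graph service; run from the repo root.
const { getAllCommitments } = require('./assets/services/graph')

async function main() {
  const { events, lastSyncBlock } = await getAllCommitments({ fromBlock: 0, chainId: 1 })

  console.log(`fetched ${events.length} commitments, subgraph synced to block ${lastSyncBlock}`)
  // Anything newer than lastSyncBlock still has to come from the node,
  // which is what the workers above do with BatchEventsService.
}

main()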
assets/services/graph/queries.js (new file, 39 lines)
@@ -0,0 +1,39 @@
const GET_COMMITMENT = `
  query getCommitment($first: Int, $fromBlock: Int) {
    commitments(first: $first, orderBy: blockNumber, orderDirection: asc, where: {
      blockNumber_gte: $fromBlock
    }) {
      index
      commitment
      blockNumber
      encryptedOutput
      transactionHash
    }
    _meta {
      block {
        number
      }
      hasIndexingErrors
    }
  }
`

const GET_NULLIFIER = `
  query getNullifier($first: Int, $fromBlock: Int) {
    nullifiers(first: $first, orderBy: blockNumber, orderDirection: asc, where: {
      blockNumber_gte: $fromBlock
    }) {
      nullifier
      blockNumber
      transactionHash
    }
    _meta {
      block {
        number
      }
      hasIndexingErrors
    }
  }
`

module.exports = { GET_COMMITMENT, GET_NULLIFIER }
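Because the queries are exported as plain strings, they are easy to smoke-test outside the app. A quick sketch using Node 18+ fetch against the mainnet endpoint configured above; the fromBlock value is arbitrary and this is not part of the change itself:

// Manual check of GET_COMMITMENT against the subgraph.
const { GET_COMMITMENT } = require('./assets/services/graph/queries')

const url = 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/mainnet-tornado-pool-subgraph'

async function main() {
  const res = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ query: GET_COMMITMENT, variables: { first: 5, fromBlock: 0 } }),
  })

  const { data } = await res.json()
  console.log(data.commitments, data._meta.block.number)
}

main()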
@@ -18,7 +18,8 @@ const link = (operation: Operation) => {
 
 const CHAIN_GRAPH_URLS: { [chainId in ChainId]: string } = {
   [ChainId.BSC]: 'https://api.thegraph.com/subgraphs/name/dan1kov/bsc-tornado-pool-subgraph',
-  [ChainId.MAINNET]: 'https://api.thegraph.com/subgraphs/name/tornadocash/mainnet-tornado-pool-subgraph',
+  [ChainId.MAINNET]: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/mainnet-tornado-pool-subgraph',
+  [ChainId.XDAI]: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/gnosis-tornado-nova-subgraph',
 }
 
 const client = new ApolloClient({
@@ -27,7 +28,7 @@ const client = new ApolloClient({
 })
 
 const registryClient = new ApolloClient({
-  uri: 'https://api.thegraph.com/subgraphs/name/tornadocash/tornado-relayer-registry',
+  uri: 'https://tornadocash-rpc.com/subgraphs/name/tornadocash/tornado-relayer-registry',
   cache: new InMemoryCache(),
 })
 
@@ -272,7 +273,7 @@ export async function getNullifiers({ fromBlock, chainId }: Params): Promise<{
   }
 
   return {
-    results: data.commitments,
+    results: data.nullifiers,
     lastSyncBlock: data._meta.block.number
   }
 }
File diff suppressed because one or more lines are too long