Cached tree generation

Tornado Contrib 2024-04-27 13:55:15 +00:00
parent 183dc5ca60
commit a2ea239ea8
Signed by: tornadocontrib
GPG Key ID: 60B4DF1A076C64B1
10 changed files with 253 additions and 32 deletions

@@ -58,6 +58,7 @@
"@tornado/snarkjs": "0.1.20",
"@tornado/websnark": "0.0.4",
"ajv": "^8.12.0",
"bloomfilter.js": "^1.0.2",
"bn.js": "^5.2.1",
"circomlibjs": "0.1.7",
"cross-fetch": "^4.0.0",

@@ -82,6 +82,7 @@ import {
NoteAccount,
parseRecoveryKey,
getSupportedInstances,
TreeCache,
} from './services';
const DEFAULT_GAS_LIMIT = 600_000;
@@ -98,6 +99,7 @@ const MERKLE_WORKER_PATH =
// Where we should backup notes and save events
const USER_DIR = process.env.USER_DIR || '.';
const SAVED_DIR = path.join(USER_DIR, './events');
const SAVED_TREE_DIR = path.join(USER_DIR, './trees');
const CIRCUIT_PATH = path.join(__dirname, '../static/tornado.json');
const KEY_PATH = path.join(__dirname, '../static/tornadoProvingKey.bin');
@@ -1278,6 +1280,7 @@ export function tornadoProgram() {
tornadoSubgraph,
registrySubgraph,
tokens,
nativeCurrency,
routerContract,
echoContract,
registryContract,
@@ -1396,20 +1399,31 @@ export function tornadoProgram() {
merkleWorkerPath: MERKLE_WORKER_PATH,
});
const depositEvents = (await depositsService.updateEvents()).events;
const treeCache = new TreeCache({
netId,
amount,
currency,
userDirectory: SAVED_TREE_DIR,
});
const depositEvents = (await depositsService.updateEvents()).events as DepositsEvents[];
// If we have MERKLE_WORKER_PATH, run the worker in the background; otherwise resolve it here
const depositTreePromise = await (async () => {
if (MERKLE_WORKER_PATH) {
return () => merkleTreeService.verifyTree(depositEvents as DepositsEvents[]) as Promise<MerkleTree>;
return () => merkleTreeService.verifyTree(depositEvents) as Promise<MerkleTree>;
}
return (await merkleTreeService.verifyTree(depositEvents as DepositsEvents[])) as MerkleTree;
return (await merkleTreeService.verifyTree(depositEvents)) as MerkleTree;
})();
await Promise.all([
withdrawalsService.updateEvents(),
const [tree] = await Promise.all([
typeof depositTreePromise === 'function' ? depositTreePromise() : depositTreePromise,
withdrawalsService.updateEvents(),
]);
if (nativeCurrency === currency) {
await treeCache.createTree(depositEvents, tree);
}
}
}
}
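
For reference, with the wiring above a native-currency instance should end up with cache files like the following under SAVED_TREE_DIR (a sketch assuming netId 1, currency eth, amount 0.1 and the default PARTS_COUNT of 4; the names come from TreeCache.getInstanceName in the new file below, and saveUserFile stores both the raw file and a zipped copy):

trees/deposits_1_eth_0.1_slice1.json  (+ .json.zip)
trees/deposits_1_eth_0.1_slice2.json  (+ .json.zip)
trees/deposits_1_eth_0.1_slice3.json  (+ .json.zip)
trees/deposits_1_eth_0.1_slice4.json  (+ .json.zip)
trees/deposits_1_eth_0.1_bloom.json   (+ .json.zip)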

@@ -37,22 +37,21 @@ export function unzipAsync(data: Uint8Array): Promise<Unzipped> {
});
}
export async function saveEvents<T extends MinimalEvents>({
name,
export async function saveUserFile({
fileName,
userDirectory,
events,
dataString,
}: {
name: string;
fileName: string;
userDirectory: string;
events: T[];
dataString: string;
}) {
const fileName = `${name}.json`.toLowerCase();
fileName = fileName.toLowerCase();
const filePath = path.join(userDirectory, fileName);
const stringEvents = JSON.stringify(events, null, 2) + '\n';
const payload = await zipAsync({
[fileName]: new TextEncoder().encode(stringEvents),
[fileName]: new TextEncoder().encode(dataString),
});
if (!(await existsAsync(userDirectory))) {
@@ -60,7 +59,7 @@ export async function saveEvents<T extends MinimalEvents>({
}
await writeFile(filePath + '.zip', payload);
await writeFile(filePath, stringEvents);
await writeFile(filePath, dataString);
}
export async function loadSavedEvents<T extends MinimalEvents>({

@@ -1,7 +1,7 @@
import Table from 'cli-table3';
import moment from 'moment';
import { BatchBlockOnProgress, BatchEventOnProgress } from '../batch';
import { saveEvents, loadSavedEvents, loadCachedEvents } from '../data';
import { saveUserFile, loadSavedEvents, loadCachedEvents } from '../data';
import {
BaseDepositsService,
BaseEncryptedNotesService,
@@ -184,10 +184,10 @@ export class NodeDepositsService extends BaseDepositsService {
console.log(eventTable.toString() + '\n');
if (this.userDirectory) {
await saveEvents<DepositsEvents | WithdrawalsEvents>({
name: instanceName,
await saveUserFile({
fileName: instanceName + '.json',
userDirectory: this.userDirectory,
events,
dataString: JSON.stringify(events, null, 2) + '\n',
});
}
}
@@ -329,10 +329,10 @@ export class NodeEchoService extends BaseEchoService {
console.log(eventTable.toString() + '\n');
if (this.userDirectory) {
await saveEvents<EchoEvents>({
name: instanceName,
await saveUserFile({
fileName: instanceName + '.json',
userDirectory: this.userDirectory,
events,
dataString: JSON.stringify(events, null, 2) + '\n',
});
}
}
@@ -474,10 +474,10 @@ export class NodeEncryptedNotesService extends BaseEncryptedNotesService {
console.log(eventTable.toString() + '\n');
if (this.userDirectory) {
await saveEvents<EncryptedNotesEvents>({
name: instanceName,
await saveUserFile({
fileName: instanceName + '.json',
userDirectory: this.userDirectory,
events,
dataString: JSON.stringify(events, null, 2) + '\n',
});
}
}
@@ -625,10 +625,10 @@ export class NodeGovernanceService extends BaseGovernanceService {
console.log(eventTable.toString() + '\n');
if (this.userDirectory) {
await saveEvents<BaseGovernanceEventTypes>({
name: instanceName,
await saveUserFile({
fileName: instanceName + '.json',
userDirectory: this.userDirectory,
events,
dataString: JSON.stringify(events, null, 2) + '\n',
});
}
}
@@ -770,10 +770,10 @@ export class NodeRegistryService extends BaseRegistryService {
console.log(eventTable.toString() + '\n');
if (this.userDirectory) {
await saveEvents<RegistersEvents>({
name: instanceName,
await saveUserFile({
fileName: instanceName + '.json',
userDirectory: this.userDirectory,
events,
dataString: JSON.stringify(events, null, 2) + '\n',
});
}
}

@@ -16,5 +16,6 @@ export * from './prices';
export * from './providers';
export * from './relayerClient';
export * from './tokens';
export * from './treeCache';
export * from './utils';
export * from './websnark';

@@ -1,5 +1,5 @@
import { Worker as NodeWorker } from 'worker_threads';
import { MerkleTree, Element } from '@tornado/fixed-merkle-tree';
import { MerkleTree, PartialMerkleTree, Element, TreeEdge } from '@tornado/fixed-merkle-tree';
import type { Tornado } from '@tornado/contracts';
import { isNode, toFixedHex } from './utils';
import { mimc } from './mimc';
@@ -113,6 +113,69 @@ export class MerkleTreeService {
});
}
async createPartialTree({ edge, elements }: { edge: TreeEdge; elements: Element[] }) {
const { hash: hashFunction } = await mimc.getHash();
if (this.merkleWorkerPath) {
console.log('Using merkleWorker\n');
try {
if (isNode) {
const merkleWorkerPromise = new Promise((resolve, reject) => {
const worker = new NodeWorker(this.merkleWorkerPath as string, {
workerData: {
merkleTreeHeight: this.merkleTreeHeight,
edge,
elements,
zeroElement: this.emptyElement,
},
});
worker.on('message', resolve);
worker.on('error', reject);
worker.on('exit', (code) => {
if (code !== 0) {
reject(new Error(`Worker stopped with exit code ${code}`));
}
});
}) as Promise<string>;
return PartialMerkleTree.deserialize(JSON.parse(await merkleWorkerPromise), hashFunction);
} else {
const merkleWorkerPromise = new Promise((resolve, reject) => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const worker = new (Worker as any)(this.merkleWorkerPath);
worker.onmessage = (e: { data: string }) => {
resolve(e.data);
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
worker.onerror = (e: any) => {
reject(e);
};
worker.postMessage({
merkleTreeHeight: this.merkleTreeHeight,
edge,
elements,
zeroElement: this.emptyElement,
});
}) as Promise<string>;
return PartialMerkleTree.deserialize(JSON.parse(await merkleWorkerPromise), hashFunction);
}
} catch (err) {
console.log('merkleWorker failed, falling back to synchronous merkle tree');
console.log(err);
}
}
return new PartialMerkleTree(this.merkleTreeHeight, edge, elements, {
zeroElement: this.emptyElement,
hashFunction,
});
}
async verifyTree(events: DepositsEvents[]) {
console.log(
`\nCreating deposit tree for ${this.netId} ${this.amount} ${this.currency.toUpperCase()} would take a while\n`,
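
The new createPartialTree above is the read-side counterpart of the cache written by TreeCache in the new file below: a saved slice already carries the edge and elements produced by fixed-merkle-tree's getTreeSlices, so a partial tree can be rebuilt without replaying every deposit event. A minimal sketch of that round trip, assuming the slice file written by saveUserFile and that MerkleTreeService is exported from './services' (the loader itself is not part of this commit):

import { readFile } from 'fs/promises';
import { TreeEdge, Element } from '@tornado/fixed-merkle-tree';
import { MerkleTreeService } from './services';

// Hypothetical helper: rebuild a partial tree from one cached slice file.
async function loadPartialTreeFromSlice(merkleTreeService: MerkleTreeService, sliceFile: string) {
  const { edge, elements } = JSON.parse(await readFile(sliceFile, { encoding: 'utf8' })) as {
    edge: TreeEdge;
    elements: Element[];
  };
  // Runs inside the merkle worker when merkleWorkerPath is set, otherwise synchronously.
  return merkleTreeService.createPartialTree({ edge, elements });
}

// e.g. const partialTree = await loadPartialTreeFromSlice(merkleTreeService, './trees/deposits_1_eth_0.1_slice4.json');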

src/services/treeCache.ts (new file, 112 lines)

@@ -0,0 +1,112 @@
/**
* Create tree cache file from node.js
*
* Only works for node.js, modified from https://github.com/tornadocash/tornado-classic-ui/blob/master/scripts/updateTree.js
*/
import { MerkleTree } from '@tornado/fixed-merkle-tree';
import BloomFilter from 'bloomfilter.js';
import { saveUserFile } from './data';
import { DepositsEvents } from './events';
export interface TreeCacheConstructor {
netId: number | string;
amount: string;
currency: string;
userDirectory: string;
PARTS_COUNT?: number;
LEAVES?: number;
zeroElement?: string;
}
export interface treeMetadata {
blockNumber: number;
logIndex: number;
transactionHash: string;
timestamp: number;
from: string;
leafIndex: number;
}
export class TreeCache {
netId: number | string;
amount: string;
currency: string;
userDirectory: string;
PARTS_COUNT: number;
constructor({ netId, amount, currency, userDirectory, PARTS_COUNT = 4 }: TreeCacheConstructor) {
this.netId = netId;
this.amount = amount;
this.currency = currency;
this.userDirectory = userDirectory;
this.PARTS_COUNT = PARTS_COUNT;
}
getInstanceName(): string {
return `deposits_${this.netId}_${this.currency}_${this.amount}`;
}
async createTree(events: DepositsEvents[], tree: MerkleTree) {
const bloom = new BloomFilter(events.length);
console.log(`Creating cached tree for ${this.getInstanceName()}\n`);
// events indexed by commitment
const eventsData = events.reduce(
(acc, { leafIndex, commitment, ...rest }, i) => {
if (leafIndex !== i) {
throw new Error(`leafIndex (${leafIndex}) !== i (${i})`);
}
acc[commitment] = { ...rest, leafIndex };
return acc;
},
{} as { [key in string]: treeMetadata },
);
const slices = tree.getTreeSlices(this.PARTS_COUNT);
await Promise.all(
slices.map(async (slice, index) => {
const metadata = slice.elements.reduce((acc, curr) => {
if (index < this.PARTS_COUNT - 1) {
bloom.add(curr);
}
acc.push(eventsData[curr]);
return acc;
}, [] as treeMetadata[]);
const dataString =
JSON.stringify(
{
...slice,
metadata,
},
null,
2,
) + '\n';
const fileName = `${this.getInstanceName()}_slice${index + 1}.json`;
await saveUserFile({
fileName,
userDirectory: this.userDirectory,
dataString,
});
}),
);
const dataString = bloom.serialize() + '\n';
const fileName = `${this.getInstanceName()}_bloom.json`;
await saveUserFile({
fileName,
userDirectory: this.userDirectory,
dataString,
});
}
}
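
On the read side (also not part of this commit), the bloom file alone can tell whether a commitment belongs to one of the first PARTS_COUNT - 1 slices, since only those leaves are added to the filter above; a miss means the commitment is either in the freshest slice or unknown to the cache. A rough sketch using the bloomfilter.js typings declared below; the file path and the instance-method deserialize call are assumptions:

import { readFile } from 'fs/promises';
import BloomFilter from 'bloomfilter.js';

async function commitmentInEarlySlices(bloomFile: string, commitment: string): Promise<boolean> {
  const serialized = (await readFile(bloomFile, { encoding: 'utf8' })).trim();
  // deserialize is declared as an instance method, so construct a throwaway filter first.
  const bloom = new BloomFilter(1).deserialize(serialized);
  // true: the commitment was added from slices 1..PARTS_COUNT-1 (modulo bloom false positives)
  return bloom.test(commitment);
}

// e.g. await commitmentInEarlySlices('./trees/deposits_1_eth_0.1_bloom.json', deposit.commitment);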

src/types/bloomfilter.js.d.ts (new vendored file, 25 lines)

@@ -0,0 +1,25 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
declare module 'bloomfilter.js' {
export default class BloomFilter {
m: number;
k: number;
size: number;
bitview: any;
constructor(n: number, false_postive_tolerance?: number);
calculateHash(x: number, m: number, i: number): number;
test(data: any): boolean;
add(data: any): void;
bytelength(): number;
view(): Uint8Array;
serialize(): string;
deserialize(serialized: string): BloomFilter;
}
}

@@ -2,6 +2,7 @@
"compilerOptions": {
"typeRoots": [
"./node_modules/@types",
"./src/types",
],
/* Visit https://aka.ms/tsconfig to read more about this file */

@@ -1596,6 +1596,11 @@ blakejs@^1.1.0:
resolved "https://registry.yarnpkg.com/blakejs/-/blakejs-1.2.1.tgz#5057e4206eadb4a97f7c0b6e197a505042fc3814"
integrity sha512-QXUSXI3QVc/gJME0dBpXrag1kbzOqCjCX8/b54ntNyW6sjtoqxqRk3LTmXzaJoh71zMsDCjM+47jS7XiwN/+fQ==
bloomfilter.js@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/bloomfilter.js/-/bloomfilter.js-1.0.2.tgz#63449e4b055dc08e5e4db75367d48cc0a395e704"
integrity sha512-x3SG+7/NlT5m6hHy1GCerNoWm38kxWZeUIsBs1LaMwnTLM0hidmGalhAfXH07DtP3s9QAp+JAQagpgVIxtUl9g==
bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.9:
version "4.12.0"
resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88"