forked from tornadocash/tornado-cli
Reimport node.js specific services from core
This commit is contained in:
parent
0995dcbd70
commit
5fc4b15786
@ -2,7 +2,7 @@
|
||||
|
||||
<img src="./logo2.png">
|
||||
|
||||
# Tornado CLI
|
||||
# Tornado CLI (@tornado/cli)
|
||||
|
||||
Modern Toolsets for [Privacy Pools](https://www.forbes.com/sites/tomerniv/2023/09/07/privacy-pools-bridging-the-gap-between-blockchain-and-regulatory-compliance) on Ethereum
|
||||
|
||||
|
14554
dist/cli.js
vendored
14554
dist/cli.js
vendored
File diff suppressed because it is too large
Load Diff
24
dist/services/data.d.ts
vendored
Normal file
24
dist/services/data.d.ts
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
import { AsyncZippable, Unzipped } from 'fflate';
|
||||
import { BaseEvents, MinimalEvents } from '@tornado/core';
|
||||
export declare function existsAsync(fileOrDir: string): Promise<boolean>;
|
||||
export declare function zipAsync(file: AsyncZippable): Promise<Uint8Array>;
|
||||
export declare function unzipAsync(data: Uint8Array): Promise<Unzipped>;
|
||||
export declare function saveUserFile({ fileName, userDirectory, dataString, }: {
|
||||
fileName: string;
|
||||
userDirectory: string;
|
||||
dataString: string;
|
||||
}): Promise<void>;
|
||||
export declare function loadSavedEvents<T extends MinimalEvents>({ name, userDirectory, deployedBlock, }: {
|
||||
name: string;
|
||||
userDirectory: string;
|
||||
deployedBlock: number;
|
||||
}): Promise<BaseEvents<T>>;
|
||||
export declare function download({ name, cacheDirectory }: {
|
||||
name: string;
|
||||
cacheDirectory: string;
|
||||
}): Promise<string>;
|
||||
export declare function loadCachedEvents<T extends MinimalEvents>({ name, cacheDirectory, deployedBlock, }: {
|
||||
name: string;
|
||||
cacheDirectory: string;
|
||||
deployedBlock: number;
|
||||
}): Promise<BaseEvents<T>>;
|
4
dist/services/index.d.ts
vendored
Normal file
4
dist/services/index.d.ts
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
export * from './data';
|
||||
export * from './nodeEvents';
|
||||
export * from './parser';
|
||||
export * from './treeCache';
|
75
dist/services/nodeEvents.d.ts
vendored
Normal file
75
dist/services/nodeEvents.d.ts
vendored
Normal file
@ -0,0 +1,75 @@
|
||||
import { BatchBlockOnProgress, BatchEventOnProgress, BaseTornadoService, BaseEncryptedNotesService, BaseGovernanceService, BaseRegistryService, BaseTornadoServiceConstructor, BaseEncryptedNotesServiceConstructor, BaseGovernanceServiceConstructor, BaseRegistryServiceConstructor, BaseEchoServiceConstructor, BaseEchoService } from '@tornado/core';
|
||||
import type { BaseEvents, DepositsEvents, WithdrawalsEvents, EncryptedNotesEvents, RegistersEvents, AllGovernanceEvents, EchoEvents } from '@tornado/core';
|
||||
export type NodeTornadoServiceConstructor = BaseTornadoServiceConstructor & {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
};
|
||||
export declare class NodeTornadoService extends BaseTornadoService {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
constructor({ netId, provider, graphApi, subgraphName, Tornado, type, amount, currency, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeTornadoServiceConstructor);
|
||||
updateEventProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
updateTransactionProgress({ currentIndex, totalIndex }: Parameters<BatchBlockOnProgress>[0]): void;
|
||||
updateBlockProgress({ currentIndex, totalIndex }: Parameters<BatchBlockOnProgress>[0]): void;
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
getEventsFromDB(): Promise<BaseEvents<DepositsEvents | WithdrawalsEvents>>;
|
||||
getEventsFromCache(): Promise<BaseEvents<DepositsEvents | WithdrawalsEvents>>;
|
||||
saveEvents({ events, lastBlock }: BaseEvents<DepositsEvents | WithdrawalsEvents>): Promise<void>;
|
||||
}
|
||||
export type NodeEchoServiceConstructor = BaseEchoServiceConstructor & {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
};
|
||||
export declare class NodeEchoService extends BaseEchoService {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
constructor({ netId, provider, graphApi, subgraphName, Echoer, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeEchoServiceConstructor);
|
||||
updateEventProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
getEventsFromDB(): Promise<BaseEvents<EchoEvents>>;
|
||||
getEventsFromCache(): Promise<BaseEvents<EchoEvents>>;
|
||||
saveEvents({ events, lastBlock }: BaseEvents<EchoEvents>): Promise<void>;
|
||||
}
|
||||
export type NodeEncryptedNotesServiceConstructor = BaseEncryptedNotesServiceConstructor & {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
};
|
||||
export declare class NodeEncryptedNotesService extends BaseEncryptedNotesService {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
constructor({ netId, provider, graphApi, subgraphName, Router, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeEncryptedNotesServiceConstructor);
|
||||
updateEventProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
getEventsFromDB(): Promise<BaseEvents<EncryptedNotesEvents>>;
|
||||
getEventsFromCache(): Promise<BaseEvents<EncryptedNotesEvents>>;
|
||||
saveEvents({ events, lastBlock }: BaseEvents<EncryptedNotesEvents>): Promise<void>;
|
||||
}
|
||||
export type NodeGovernanceServiceConstructor = BaseGovernanceServiceConstructor & {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
};
|
||||
export declare class NodeGovernanceService extends BaseGovernanceService {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
constructor({ netId, provider, graphApi, subgraphName, Governance, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeGovernanceServiceConstructor);
|
||||
updateEventProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
updateTransactionProgress({ currentIndex, totalIndex }: Parameters<BatchBlockOnProgress>[0]): void;
|
||||
getEventsFromDB(): Promise<BaseEvents<AllGovernanceEvents>>;
|
||||
getEventsFromCache(): Promise<BaseEvents<AllGovernanceEvents>>;
|
||||
saveEvents({ events, lastBlock }: BaseEvents<AllGovernanceEvents>): Promise<void>;
|
||||
}
|
||||
export type NodeRegistryServiceConstructor = BaseRegistryServiceConstructor & {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
};
|
||||
export declare class NodeRegistryService extends BaseRegistryService {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
constructor({ netId, provider, graphApi, subgraphName, RelayerRegistry, deployedBlock, fetchDataOptions, cacheDirectory, userDirectory, }: NodeRegistryServiceConstructor);
|
||||
updateEventProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
getEventsFromDB(): Promise<BaseEvents<RegistersEvents>>;
|
||||
getEventsFromCache(): Promise<BaseEvents<RegistersEvents>>;
|
||||
saveEvents({ events, lastBlock }: BaseEvents<RegistersEvents>): Promise<void>;
|
||||
}
|
10
dist/services/parser.d.ts
vendored
Normal file
10
dist/services/parser.d.ts
vendored
Normal file
@ -0,0 +1,10 @@
|
||||
export declare function parseNumber(value?: string | number): number;
|
||||
export declare function parseUrl(value?: string): string;
|
||||
export declare function parseRelayer(value?: string): string;
|
||||
export declare function parseAddress(value?: string): string;
|
||||
export declare function parseMnemonic(value?: string): string;
|
||||
export declare function parseKey(value?: string): string;
|
||||
/**
|
||||
* Recovery key shouldn't have a 0x prefix (Also this is how the UI generates)
|
||||
*/
|
||||
export declare function parseRecoveryKey(value?: string): string;
|
35
dist/services/treeCache.d.ts
vendored
Normal file
35
dist/services/treeCache.d.ts
vendored
Normal file
@ -0,0 +1,35 @@
|
||||
/**
|
||||
* Create tree cache file from node.js
|
||||
*
|
||||
* Only works for node.js, modified from https://github.com/tornadocash/tornado-classic-ui/blob/master/scripts/updateTree.js
|
||||
*/
|
||||
import { MerkleTree } from '@tornado/fixed-merkle-tree';
|
||||
import { DepositsEvents } from '@tornado/core';
|
||||
import type { NetIdType } from '@tornado/core';
|
||||
export interface TreeCacheConstructor {
|
||||
netId: NetIdType;
|
||||
amount: string;
|
||||
currency: string;
|
||||
userDirectory: string;
|
||||
PARTS_COUNT?: number;
|
||||
LEAVES?: number;
|
||||
zeroElement?: string;
|
||||
}
|
||||
export interface treeMetadata {
|
||||
blockNumber: number;
|
||||
logIndex: number;
|
||||
transactionHash: string;
|
||||
timestamp: number;
|
||||
from: string;
|
||||
leafIndex: number;
|
||||
}
|
||||
export declare class TreeCache {
|
||||
netId: NetIdType;
|
||||
amount: string;
|
||||
currency: string;
|
||||
userDirectory: string;
|
||||
PARTS_COUNT: number;
|
||||
constructor({ netId, amount, currency, userDirectory, PARTS_COUNT }: TreeCacheConstructor);
|
||||
getInstanceName(): string;
|
||||
createTree(events: DepositsEvents[], tree: MerkleTree): Promise<void>;
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
{
|
||||
"name": "tornado-cli",
|
||||
"name": "@tornado/cli",
|
||||
"version": "1.0.3-alpha",
|
||||
"description": "Modern Toolsets for Privacy Pools on Ethereum",
|
||||
"main": "./dist/cli.js",
|
||||
@ -53,7 +53,7 @@
|
||||
"@colors/colors": "1.5.0",
|
||||
"@metamask/eth-sig-util": "^7.0.1",
|
||||
"@tornado/contracts": "1.0.0",
|
||||
"@tornado/core": "git+https://git.tornado.ws/tornadocontrib/tornado-core.git#b5f57e20ee7de42c4af88fb417d887672a8d3582",
|
||||
"@tornado/core": "git+https://git.tornado.ws/tornadocontrib/tornado-core.git#4fde41b10ce601bcf687e2e8b93785f86237ac6c",
|
||||
"@tornado/fixed-merkle-tree": "0.7.3",
|
||||
"@tornado/snarkjs": "0.1.20",
|
||||
"@tornado/websnark": "0.0.4",
|
||||
|
@ -37,12 +37,6 @@ import {
|
||||
Multicall__factory,
|
||||
OffchainOracle__factory,
|
||||
OvmGasPriceOracle__factory,
|
||||
parseUrl,
|
||||
parseRelayer,
|
||||
parseNumber,
|
||||
parseMnemonic,
|
||||
parseKey,
|
||||
parseAddress,
|
||||
getProviderOptions,
|
||||
getProviderWithNetId,
|
||||
getTokenBalances,
|
||||
@ -50,19 +44,14 @@ import {
|
||||
TornadoVoidSigner,
|
||||
tokenBalances,
|
||||
Deposit,
|
||||
NodeTornadoService,
|
||||
DepositsEvents,
|
||||
WithdrawalsEvents,
|
||||
Relayer,
|
||||
RelayerInfo,
|
||||
RelayerError,
|
||||
NodeRegistryService,
|
||||
TornadoFeeOracle,
|
||||
TokenPriceOracle,
|
||||
calculateSnarkProof,
|
||||
NodeEchoService,
|
||||
NodeEncryptedNotesService,
|
||||
NodeGovernanceService,
|
||||
RelayerClient,
|
||||
MerkleTreeService,
|
||||
multicall,
|
||||
@ -78,12 +67,27 @@ import {
|
||||
enabledChains,
|
||||
substring,
|
||||
NoteAccount,
|
||||
parseRecoveryKey,
|
||||
getSupportedInstances,
|
||||
TreeCache,
|
||||
initGroth16,
|
||||
} from '@tornado/core';
|
||||
import * as packageJson from '../package.json';
|
||||
import {
|
||||
parseUrl,
|
||||
parseRelayer,
|
||||
parseNumber,
|
||||
parseMnemonic,
|
||||
parseKey,
|
||||
parseAddress,
|
||||
parseRecoveryKey,
|
||||
NodeTornadoService,
|
||||
NodeRegistryService,
|
||||
NodeEchoService,
|
||||
NodeEncryptedNotesService,
|
||||
NodeGovernanceService,
|
||||
TreeCache,
|
||||
} from './services';
|
||||
|
||||
const EXEC_NAME = 'tornado-cli';
|
||||
|
||||
/**
|
||||
* Static variables, shouldn't be modified by env unless you know what they are doing
|
||||
@ -448,11 +452,11 @@ export async function programSendTransaction({
|
||||
}
|
||||
|
||||
export function tornadoProgram() {
|
||||
const { name, version, description } = packageJson as packageJson;
|
||||
const { version, description } = packageJson as packageJson;
|
||||
|
||||
const program = new Command();
|
||||
|
||||
program.name(name).version(version).description(description);
|
||||
program.name(EXEC_NAME).version(version).description(description);
|
||||
|
||||
program
|
||||
.command('create')
|
||||
|
146
src/services/data.ts
Normal file
146
src/services/data.ts
Normal file
@ -0,0 +1,146 @@
|
||||
import path from 'path';
|
||||
import { stat, mkdir, readFile, writeFile } from 'fs/promises';
|
||||
import { zip, unzip, AsyncZippable, Unzipped } from 'fflate';
|
||||
import { BaseEvents, MinimalEvents } from '@tornado/core';
|
||||
|
||||
export async function existsAsync(fileOrDir: string): Promise<boolean> {
|
||||
try {
|
||||
await stat(fileOrDir);
|
||||
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export function zipAsync(file: AsyncZippable): Promise<Uint8Array> {
|
||||
return new Promise((res, rej) => {
|
||||
zip(file, { mtime: new Date('1/1/1980') }, (err, data) => {
|
||||
if (err) {
|
||||
rej(err);
|
||||
return;
|
||||
}
|
||||
res(data);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export function unzipAsync(data: Uint8Array): Promise<Unzipped> {
|
||||
return new Promise((res, rej) => {
|
||||
unzip(data, {}, (err, data) => {
|
||||
if (err) {
|
||||
rej(err);
|
||||
return;
|
||||
}
|
||||
res(data);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function saveUserFile({
|
||||
fileName,
|
||||
userDirectory,
|
||||
dataString,
|
||||
}: {
|
||||
fileName: string;
|
||||
userDirectory: string;
|
||||
dataString: string;
|
||||
}) {
|
||||
fileName = fileName.toLowerCase();
|
||||
|
||||
const filePath = path.join(userDirectory, fileName);
|
||||
|
||||
const payload = await zipAsync({
|
||||
[fileName]: new TextEncoder().encode(dataString),
|
||||
});
|
||||
|
||||
if (!(await existsAsync(userDirectory))) {
|
||||
await mkdir(userDirectory, { recursive: true });
|
||||
}
|
||||
|
||||
await writeFile(filePath + '.zip', payload);
|
||||
await writeFile(filePath, dataString);
|
||||
}
|
||||
|
||||
export async function loadSavedEvents<T extends MinimalEvents>({
|
||||
name,
|
||||
userDirectory,
|
||||
deployedBlock,
|
||||
}: {
|
||||
name: string;
|
||||
userDirectory: string;
|
||||
deployedBlock: number;
|
||||
}): Promise<BaseEvents<T>> {
|
||||
const filePath = path.join(userDirectory, `${name}.json`.toLowerCase());
|
||||
|
||||
if (!(await existsAsync(filePath))) {
|
||||
return {
|
||||
events: [] as T[],
|
||||
lastBlock: null,
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const events = JSON.parse(await readFile(filePath, { encoding: 'utf8' })) as T[];
|
||||
|
||||
return {
|
||||
events,
|
||||
lastBlock: events && events.length ? events[events.length - 1].blockNumber : deployedBlock,
|
||||
};
|
||||
} catch (err) {
|
||||
console.log('Method loadSavedEvents has error');
|
||||
console.log(err);
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: deployedBlock,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export async function download({ name, cacheDirectory }: { name: string; cacheDirectory: string }) {
|
||||
const fileName = `${name}.json`.toLowerCase();
|
||||
const zipName = `${fileName}.zip`;
|
||||
const zipPath = path.join(cacheDirectory, zipName);
|
||||
|
||||
const data = await readFile(zipPath);
|
||||
const { [fileName]: content } = await unzipAsync(data);
|
||||
|
||||
return new TextDecoder().decode(content);
|
||||
}
|
||||
|
||||
export async function loadCachedEvents<T extends MinimalEvents>({
|
||||
name,
|
||||
cacheDirectory,
|
||||
deployedBlock,
|
||||
}: {
|
||||
name: string;
|
||||
cacheDirectory: string;
|
||||
deployedBlock: number;
|
||||
}): Promise<BaseEvents<T>> {
|
||||
try {
|
||||
const module = await download({ cacheDirectory, name });
|
||||
|
||||
if (module) {
|
||||
const events = JSON.parse(module);
|
||||
|
||||
const lastBlock = events && events.length ? events[events.length - 1].blockNumber : deployedBlock;
|
||||
|
||||
return {
|
||||
events,
|
||||
lastBlock,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: deployedBlock,
|
||||
};
|
||||
} catch (err) {
|
||||
console.log('Method loadCachedEvents has error');
|
||||
console.log(err);
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: deployedBlock,
|
||||
};
|
||||
}
|
||||
}
|
4
src/services/index.ts
Normal file
4
src/services/index.ts
Normal file
@ -0,0 +1,4 @@
|
||||
export * from './data';
|
||||
export * from './nodeEvents';
|
||||
export * from './parser';
|
||||
export * from './treeCache';
|
781
src/services/nodeEvents.ts
Normal file
781
src/services/nodeEvents.ts
Normal file
@ -0,0 +1,781 @@
|
||||
import Table from 'cli-table3';
|
||||
import moment from 'moment';
|
||||
import {
|
||||
BatchBlockOnProgress,
|
||||
BatchEventOnProgress,
|
||||
BaseTornadoService,
|
||||
BaseEncryptedNotesService,
|
||||
BaseGovernanceService,
|
||||
BaseRegistryService,
|
||||
BaseTornadoServiceConstructor,
|
||||
BaseEncryptedNotesServiceConstructor,
|
||||
BaseGovernanceServiceConstructor,
|
||||
BaseRegistryServiceConstructor,
|
||||
BaseEchoServiceConstructor,
|
||||
BaseEchoService,
|
||||
} from '@tornado/core';
|
||||
import type {
|
||||
BaseEvents,
|
||||
DepositsEvents,
|
||||
WithdrawalsEvents,
|
||||
EncryptedNotesEvents,
|
||||
RegistersEvents,
|
||||
AllGovernanceEvents,
|
||||
EchoEvents,
|
||||
} from '@tornado/core';
|
||||
import { saveUserFile, loadSavedEvents, loadCachedEvents } from './data';
|
||||
|
||||
export type NodeTornadoServiceConstructor = BaseTornadoServiceConstructor & {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
};
|
||||
|
||||
export class NodeTornadoService extends BaseTornadoService {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
|
||||
constructor({
|
||||
netId,
|
||||
provider,
|
||||
graphApi,
|
||||
subgraphName,
|
||||
Tornado,
|
||||
type,
|
||||
amount,
|
||||
currency,
|
||||
deployedBlock,
|
||||
fetchDataOptions,
|
||||
cacheDirectory,
|
||||
userDirectory,
|
||||
}: NodeTornadoServiceConstructor) {
|
||||
super({
|
||||
netId,
|
||||
provider,
|
||||
graphApi,
|
||||
subgraphName,
|
||||
Tornado,
|
||||
type,
|
||||
amount,
|
||||
currency,
|
||||
deployedBlock,
|
||||
fetchDataOptions,
|
||||
});
|
||||
|
||||
this.cacheDirectory = cacheDirectory;
|
||||
this.userDirectory = userDirectory;
|
||||
}
|
||||
|
||||
updateEventProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {
|
||||
if (toBlock) {
|
||||
console.log(`fromBlock - ${fromBlock}`);
|
||||
console.log(`toBlock - ${toBlock}`);
|
||||
|
||||
if (count) {
|
||||
console.log(`downloaded ${type} events count - ${count}`);
|
||||
console.log('____________________________________________');
|
||||
console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updateTransactionProgress({ currentIndex, totalIndex }: Parameters<BatchBlockOnProgress>[0]) {
|
||||
if (totalIndex) {
|
||||
console.log(`Fetched ${currentIndex} deposit txs of ${totalIndex}`);
|
||||
}
|
||||
}
|
||||
|
||||
updateBlockProgress({ currentIndex, totalIndex }: Parameters<BatchBlockOnProgress>[0]) {
|
||||
if (totalIndex) {
|
||||
console.log(`Fetched ${currentIndex} withdrawal blocks of ${totalIndex}`);
|
||||
}
|
||||
}
|
||||
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {
|
||||
if (toBlock) {
|
||||
console.log(`fromBlock - ${fromBlock}`);
|
||||
console.log(`toBlock - ${toBlock}`);
|
||||
|
||||
if (count) {
|
||||
console.log(`downloaded ${type} events from graph node count - ${count}`);
|
||||
console.log('____________________________________________');
|
||||
console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async getEventsFromDB() {
|
||||
if (!this.userDirectory) {
|
||||
console.log(
|
||||
'Updating events for',
|
||||
this.amount,
|
||||
this.currency.toUpperCase(),
|
||||
`${this.getType().toLowerCase()}s\n`,
|
||||
);
|
||||
console.log(`savedEvents count - ${0}`);
|
||||
console.log(`savedEvents lastBlock - ${this.deployedBlock}\n`);
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: this.deployedBlock,
|
||||
};
|
||||
}
|
||||
|
||||
const savedEvents = await loadSavedEvents<DepositsEvents | WithdrawalsEvents>({
|
||||
name: this.getInstanceName(),
|
||||
userDirectory: this.userDirectory,
|
||||
deployedBlock: this.deployedBlock,
|
||||
});
|
||||
|
||||
console.log('Updating events for', this.amount, this.currency.toUpperCase(), `${this.getType().toLowerCase()}s\n`);
|
||||
console.log(`savedEvents count - ${savedEvents.events.length}`);
|
||||
console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}\n`);
|
||||
|
||||
return savedEvents;
|
||||
}
|
||||
|
||||
async getEventsFromCache() {
|
||||
if (!this.cacheDirectory) {
|
||||
console.log(`cachedEvents count - ${0}`);
|
||||
console.log(`cachedEvents lastBlock - ${this.deployedBlock}\n`);
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: this.deployedBlock,
|
||||
};
|
||||
}
|
||||
|
||||
const cachedEvents = await loadCachedEvents<DepositsEvents | WithdrawalsEvents>({
|
||||
name: this.getInstanceName(),
|
||||
cacheDirectory: this.cacheDirectory,
|
||||
deployedBlock: this.deployedBlock,
|
||||
});
|
||||
|
||||
console.log(`cachedEvents count - ${cachedEvents.events.length}`);
|
||||
console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}\n`);
|
||||
|
||||
return cachedEvents;
|
||||
}
|
||||
|
||||
async saveEvents({ events, lastBlock }: BaseEvents<DepositsEvents | WithdrawalsEvents>) {
|
||||
const instanceName = this.getInstanceName();
|
||||
|
||||
console.log('\ntotalEvents count - ', events.length);
|
||||
console.log(
|
||||
`totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}\n`,
|
||||
);
|
||||
|
||||
const eventTable = new Table();
|
||||
|
||||
eventTable.push(
|
||||
[{ colSpan: 2, content: `${this.getType()}s`, hAlign: 'center' }],
|
||||
['Instance', `${this.netId} chain ${this.amount} ${this.currency.toUpperCase()}`],
|
||||
['Anonymity set', `${events.length} equal user ${this.getType().toLowerCase()}s`],
|
||||
[{ colSpan: 2, content: `Latest ${this.getType().toLowerCase()}s` }],
|
||||
...events
|
||||
.slice(events.length - 10)
|
||||
.reverse()
|
||||
.map(({ timestamp }, index) => {
|
||||
const eventIndex = events.length - index;
|
||||
const eventTime = moment.unix(timestamp).fromNow();
|
||||
|
||||
return [eventIndex, eventTime];
|
||||
}),
|
||||
);
|
||||
|
||||
console.log(eventTable.toString() + '\n');
|
||||
|
||||
if (this.userDirectory) {
|
||||
await saveUserFile({
|
||||
fileName: instanceName + '.json',
|
||||
userDirectory: this.userDirectory,
|
||||
dataString: JSON.stringify(events, null, 2) + '\n',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export type NodeEchoServiceConstructor = BaseEchoServiceConstructor & {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
};
|
||||
|
||||
export class NodeEchoService extends BaseEchoService {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
|
||||
constructor({
|
||||
netId,
|
||||
provider,
|
||||
graphApi,
|
||||
subgraphName,
|
||||
Echoer,
|
||||
deployedBlock,
|
||||
fetchDataOptions,
|
||||
cacheDirectory,
|
||||
userDirectory,
|
||||
}: NodeEchoServiceConstructor) {
|
||||
super({
|
||||
netId,
|
||||
provider,
|
||||
graphApi,
|
||||
subgraphName,
|
||||
Echoer,
|
||||
deployedBlock,
|
||||
fetchDataOptions,
|
||||
});
|
||||
|
||||
this.cacheDirectory = cacheDirectory;
|
||||
this.userDirectory = userDirectory;
|
||||
}
|
||||
|
||||
updateEventProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {
|
||||
if (toBlock) {
|
||||
console.log(`fromBlock - ${fromBlock}`);
|
||||
console.log(`toBlock - ${toBlock}`);
|
||||
|
||||
if (count) {
|
||||
console.log(`downloaded ${type} events count - ${count}`);
|
||||
console.log('____________________________________________');
|
||||
console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {
|
||||
if (toBlock) {
|
||||
console.log(`fromBlock - ${fromBlock}`);
|
||||
console.log(`toBlock - ${toBlock}`);
|
||||
|
||||
if (count) {
|
||||
console.log(`downloaded ${type} events from graph node count - ${count}`);
|
||||
console.log('____________________________________________');
|
||||
console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async getEventsFromDB() {
|
||||
if (!this.userDirectory) {
|
||||
console.log(`Updating events for ${this.netId} chain echo events\n`);
|
||||
console.log(`savedEvents count - ${0}`);
|
||||
console.log(`savedEvents lastBlock - ${this.deployedBlock}\n`);
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: this.deployedBlock,
|
||||
};
|
||||
}
|
||||
|
||||
const savedEvents = await loadSavedEvents<EchoEvents>({
|
||||
name: this.getInstanceName(),
|
||||
userDirectory: this.userDirectory,
|
||||
deployedBlock: this.deployedBlock,
|
||||
});
|
||||
|
||||
console.log(`Updating events for ${this.netId} chain echo events\n`);
|
||||
console.log(`savedEvents count - ${savedEvents.events.length}`);
|
||||
console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}\n`);
|
||||
|
||||
return savedEvents;
|
||||
}
|
||||
|
||||
async getEventsFromCache() {
|
||||
if (!this.cacheDirectory) {
|
||||
console.log(`cachedEvents count - ${0}`);
|
||||
console.log(`cachedEvents lastBlock - ${this.deployedBlock}\n`);
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: this.deployedBlock,
|
||||
};
|
||||
}
|
||||
|
||||
const cachedEvents = await loadCachedEvents<EchoEvents>({
|
||||
name: this.getInstanceName(),
|
||||
cacheDirectory: this.cacheDirectory,
|
||||
deployedBlock: this.deployedBlock,
|
||||
});
|
||||
|
||||
console.log(`cachedEvents count - ${cachedEvents.events.length}`);
|
||||
console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}\n`);
|
||||
|
||||
return cachedEvents;
|
||||
}
|
||||
|
||||
async saveEvents({ events, lastBlock }: BaseEvents<EchoEvents>) {
|
||||
const instanceName = this.getInstanceName();
|
||||
|
||||
console.log('\ntotalEvents count - ', events.length);
|
||||
console.log(
|
||||
`totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}\n`,
|
||||
);
|
||||
|
||||
const eventTable = new Table();
|
||||
|
||||
eventTable.push(
|
||||
[{ colSpan: 2, content: 'Echo Accounts', hAlign: 'center' }],
|
||||
['Network', `${this.netId} chain`],
|
||||
['Events', `${events.length} events`],
|
||||
[{ colSpan: 2, content: 'Latest events' }],
|
||||
...events
|
||||
.slice(events.length - 10)
|
||||
.reverse()
|
||||
.map(({ blockNumber }, index) => {
|
||||
const eventIndex = events.length - index;
|
||||
|
||||
return [eventIndex, blockNumber];
|
||||
}),
|
||||
);
|
||||
|
||||
console.log(eventTable.toString() + '\n');
|
||||
|
||||
if (this.userDirectory) {
|
||||
await saveUserFile({
|
||||
fileName: instanceName + '.json',
|
||||
userDirectory: this.userDirectory,
|
||||
dataString: JSON.stringify(events, null, 2) + '\n',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export type NodeEncryptedNotesServiceConstructor = BaseEncryptedNotesServiceConstructor & {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
};
|
||||
|
||||
export class NodeEncryptedNotesService extends BaseEncryptedNotesService {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
|
||||
constructor({
|
||||
netId,
|
||||
provider,
|
||||
graphApi,
|
||||
subgraphName,
|
||||
Router,
|
||||
deployedBlock,
|
||||
fetchDataOptions,
|
||||
cacheDirectory,
|
||||
userDirectory,
|
||||
}: NodeEncryptedNotesServiceConstructor) {
|
||||
super({
|
||||
netId,
|
||||
provider,
|
||||
graphApi,
|
||||
subgraphName,
|
||||
Router,
|
||||
deployedBlock,
|
||||
fetchDataOptions,
|
||||
});
|
||||
|
||||
this.cacheDirectory = cacheDirectory;
|
||||
this.userDirectory = userDirectory;
|
||||
}
|
||||
|
||||
updateEventProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {
|
||||
if (toBlock) {
|
||||
console.log(`fromBlock - ${fromBlock}`);
|
||||
console.log(`toBlock - ${toBlock}`);
|
||||
|
||||
if (count) {
|
||||
console.log(`downloaded ${type} events count - ${count}`);
|
||||
console.log('____________________________________________');
|
||||
console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {
|
||||
if (toBlock) {
|
||||
console.log(`fromBlock - ${fromBlock}`);
|
||||
console.log(`toBlock - ${toBlock}`);
|
||||
|
||||
if (count) {
|
||||
console.log(`downloaded ${type} events from graph node count - ${count}`);
|
||||
console.log('____________________________________________');
|
||||
console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async getEventsFromDB() {
|
||||
if (!this.userDirectory) {
|
||||
console.log(`Updating events for ${this.netId} chain encrypted events\n`);
|
||||
console.log(`savedEvents count - ${0}`);
|
||||
console.log(`savedEvents lastBlock - ${this.deployedBlock}\n`);
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: this.deployedBlock,
|
||||
};
|
||||
}
|
||||
|
||||
const savedEvents = await loadSavedEvents<EncryptedNotesEvents>({
|
||||
name: this.getInstanceName(),
|
||||
userDirectory: this.userDirectory,
|
||||
deployedBlock: this.deployedBlock,
|
||||
});
|
||||
|
||||
console.log(`Updating events for ${this.netId} chain encrypted events\n`);
|
||||
console.log(`savedEvents count - ${savedEvents.events.length}`);
|
||||
console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}\n`);
|
||||
|
||||
return savedEvents;
|
||||
}
|
||||
|
||||
async getEventsFromCache() {
|
||||
if (!this.cacheDirectory) {
|
||||
console.log(`cachedEvents count - ${0}`);
|
||||
console.log(`cachedEvents lastBlock - ${this.deployedBlock}\n`);
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: this.deployedBlock,
|
||||
};
|
||||
}
|
||||
|
||||
const cachedEvents = await loadCachedEvents<EncryptedNotesEvents>({
|
||||
name: this.getInstanceName(),
|
||||
cacheDirectory: this.cacheDirectory,
|
||||
deployedBlock: this.deployedBlock,
|
||||
});
|
||||
|
||||
console.log(`cachedEvents count - ${cachedEvents.events.length}`);
|
||||
console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}\n`);
|
||||
|
||||
return cachedEvents;
|
||||
}
|
||||
|
||||
/**
 * Persists encrypted-note events to the user directory (when configured)
 * and prints a summary table of the most recent events to the console.
 */
async saveEvents({ events, lastBlock }: BaseEvents<EncryptedNotesEvents>) {
  const lastEvent = events[events.length - 1];

  console.log('\ntotalEvents count - ', events.length);
  console.log(`totalEvents lastBlock - ${lastEvent ? lastEvent.blockNumber : lastBlock}\n`);

  // Newest-first rows for (at most) the last 10 events; the first column is
  // the 1-based position of the event within the full list.
  const latestRows = events
    .slice(-10)
    .reverse()
    .map(({ blockNumber }, i) => [events.length - i, blockNumber]);

  const summaryTable = new Table();

  summaryTable.push(
    [{ colSpan: 2, content: 'Encrypted Notes', hAlign: 'center' }],
    ['Network', `${this.netId} chain`],
    ['Events', `${events.length} events`],
    [{ colSpan: 2, content: 'Latest events' }],
    ...latestRows,
  );

  console.log(summaryTable.toString() + '\n');

  if (this.userDirectory) {
    await saveUserFile({
      fileName: this.getInstanceName() + '.json',
      userDirectory: this.userDirectory,
      dataString: JSON.stringify(events, null, 2) + '\n',
    });
  }
}
|
||||
}
|
||||
|
||||
/**
 * Constructor options for NodeGovernanceService: the base governance options
 * plus optional on-disk locations for the read-only event cache and the
 * user-writable event store.
 */
export type NodeGovernanceServiceConstructor = BaseGovernanceServiceConstructor & {
  cacheDirectory?: string;
  userDirectory?: string;
};
|
||||
|
||||
export class NodeGovernanceService extends BaseGovernanceService {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
|
||||
constructor({
|
||||
netId,
|
||||
provider,
|
||||
graphApi,
|
||||
subgraphName,
|
||||
Governance,
|
||||
deployedBlock,
|
||||
fetchDataOptions,
|
||||
cacheDirectory,
|
||||
userDirectory,
|
||||
}: NodeGovernanceServiceConstructor) {
|
||||
super({
|
||||
netId,
|
||||
provider,
|
||||
graphApi,
|
||||
subgraphName,
|
||||
Governance,
|
||||
deployedBlock,
|
||||
fetchDataOptions,
|
||||
});
|
||||
|
||||
this.cacheDirectory = cacheDirectory;
|
||||
this.userDirectory = userDirectory;
|
||||
}
|
||||
|
||||
updateEventProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {
|
||||
if (toBlock) {
|
||||
console.log(`fromBlock - ${fromBlock}`);
|
||||
console.log(`toBlock - ${toBlock}`);
|
||||
|
||||
if (count) {
|
||||
console.log(`downloaded ${type} events count - ${count}`);
|
||||
console.log('____________________________________________');
|
||||
console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {
|
||||
if (toBlock) {
|
||||
console.log(`fromBlock - ${fromBlock}`);
|
||||
console.log(`toBlock - ${toBlock}`);
|
||||
|
||||
if (count) {
|
||||
console.log(`downloaded ${type} events from graph node count - ${count}`);
|
||||
console.log('____________________________________________');
|
||||
console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updateTransactionProgress({ currentIndex, totalIndex }: Parameters<BatchBlockOnProgress>[0]) {
|
||||
if (totalIndex) {
|
||||
console.log(`Fetched ${currentIndex} governance txs of ${totalIndex}`);
|
||||
}
|
||||
}
|
||||
|
||||
async getEventsFromDB() {
|
||||
if (!this.userDirectory) {
|
||||
console.log(`Updating events for ${this.netId} chain governance events\n`);
|
||||
console.log(`savedEvents count - ${0}`);
|
||||
console.log(`savedEvents lastBlock - ${this.deployedBlock}\n`);
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: this.deployedBlock,
|
||||
};
|
||||
}
|
||||
|
||||
const savedEvents = await loadSavedEvents<AllGovernanceEvents>({
|
||||
name: this.getInstanceName(),
|
||||
userDirectory: this.userDirectory,
|
||||
deployedBlock: this.deployedBlock,
|
||||
});
|
||||
|
||||
console.log(`Updating events for ${this.netId} chain governance events\n`);
|
||||
console.log(`savedEvents count - ${savedEvents.events.length}`);
|
||||
console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}\n`);
|
||||
|
||||
return savedEvents;
|
||||
}
|
||||
|
||||
async getEventsFromCache() {
|
||||
if (!this.cacheDirectory) {
|
||||
console.log(`cachedEvents count - ${0}`);
|
||||
console.log(`cachedEvents lastBlock - ${this.deployedBlock}\n`);
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: this.deployedBlock,
|
||||
};
|
||||
}
|
||||
|
||||
const cachedEvents = await loadCachedEvents<AllGovernanceEvents>({
|
||||
name: this.getInstanceName(),
|
||||
cacheDirectory: this.cacheDirectory,
|
||||
deployedBlock: this.deployedBlock,
|
||||
});
|
||||
|
||||
console.log(`cachedEvents count - ${cachedEvents.events.length}`);
|
||||
console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}\n`);
|
||||
|
||||
return cachedEvents;
|
||||
}
|
||||
|
||||
async saveEvents({ events, lastBlock }: BaseEvents<AllGovernanceEvents>) {
|
||||
const instanceName = this.getInstanceName();
|
||||
|
||||
console.log('\ntotalEvents count - ', events.length);
|
||||
console.log(
|
||||
`totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}\n`,
|
||||
);
|
||||
|
||||
const eventTable = new Table();
|
||||
|
||||
eventTable.push(
|
||||
[{ colSpan: 2, content: 'Governance Events', hAlign: 'center' }],
|
||||
['Network', `${this.netId} chain`],
|
||||
['Events', `${events.length} events`],
|
||||
[{ colSpan: 2, content: 'Latest events' }],
|
||||
...events
|
||||
.slice(events.length - 10)
|
||||
.reverse()
|
||||
.map(({ blockNumber }, index) => {
|
||||
const eventIndex = events.length - index;
|
||||
|
||||
return [eventIndex, blockNumber];
|
||||
}),
|
||||
);
|
||||
|
||||
console.log(eventTable.toString() + '\n');
|
||||
|
||||
if (this.userDirectory) {
|
||||
await saveUserFile({
|
||||
fileName: instanceName + '.json',
|
||||
userDirectory: this.userDirectory,
|
||||
dataString: JSON.stringify(events, null, 2) + '\n',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Constructor options for NodeRegistryService: the base registry options
 * plus optional on-disk locations for the read-only event cache and the
 * user-writable event store.
 */
export type NodeRegistryServiceConstructor = BaseRegistryServiceConstructor & {
  cacheDirectory?: string;
  userDirectory?: string;
};
|
||||
|
||||
export class NodeRegistryService extends BaseRegistryService {
|
||||
cacheDirectory?: string;
|
||||
userDirectory?: string;
|
||||
|
||||
constructor({
|
||||
netId,
|
||||
provider,
|
||||
graphApi,
|
||||
subgraphName,
|
||||
RelayerRegistry,
|
||||
deployedBlock,
|
||||
fetchDataOptions,
|
||||
cacheDirectory,
|
||||
userDirectory,
|
||||
}: NodeRegistryServiceConstructor) {
|
||||
super({
|
||||
netId,
|
||||
provider,
|
||||
graphApi,
|
||||
subgraphName,
|
||||
RelayerRegistry,
|
||||
deployedBlock,
|
||||
fetchDataOptions,
|
||||
});
|
||||
|
||||
this.cacheDirectory = cacheDirectory;
|
||||
this.userDirectory = userDirectory;
|
||||
}
|
||||
|
||||
updateEventProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {
|
||||
if (toBlock) {
|
||||
console.log(`fromBlock - ${fromBlock}`);
|
||||
console.log(`toBlock - ${toBlock}`);
|
||||
|
||||
if (count) {
|
||||
console.log(`downloaded ${type} events count - ${count}`);
|
||||
console.log('____________________________________________');
|
||||
console.log(`Fetched ${type} events from ${fromBlock} to ${toBlock}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {
|
||||
if (toBlock) {
|
||||
console.log(`fromBlock - ${fromBlock}`);
|
||||
console.log(`toBlock - ${toBlock}`);
|
||||
|
||||
if (count) {
|
||||
console.log(`downloaded ${type} events from graph node count - ${count}`);
|
||||
console.log('____________________________________________');
|
||||
console.log(`Fetched ${type} events from graph node ${fromBlock} to ${toBlock}\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async getEventsFromDB() {
|
||||
if (!this.userDirectory) {
|
||||
console.log(`Updating events for ${this.netId} chain registry events\n`);
|
||||
console.log(`savedEvents count - ${0}`);
|
||||
console.log(`savedEvents lastBlock - ${this.deployedBlock}\n`);
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: this.deployedBlock,
|
||||
};
|
||||
}
|
||||
|
||||
const savedEvents = await loadSavedEvents<RegistersEvents>({
|
||||
name: this.getInstanceName(),
|
||||
userDirectory: this.userDirectory,
|
||||
deployedBlock: this.deployedBlock,
|
||||
});
|
||||
|
||||
console.log(`Updating events for ${this.netId} chain registry events\n`);
|
||||
console.log(`savedEvents count - ${savedEvents.events.length}`);
|
||||
console.log(`savedEvents lastBlock - ${savedEvents.lastBlock}\n`);
|
||||
|
||||
return savedEvents;
|
||||
}
|
||||
|
||||
async getEventsFromCache() {
|
||||
if (!this.cacheDirectory) {
|
||||
console.log(`cachedEvents count - ${0}`);
|
||||
console.log(`cachedEvents lastBlock - ${this.deployedBlock}\n`);
|
||||
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: this.deployedBlock,
|
||||
};
|
||||
}
|
||||
|
||||
const cachedEvents = await loadCachedEvents<RegistersEvents>({
|
||||
name: this.getInstanceName(),
|
||||
cacheDirectory: this.cacheDirectory,
|
||||
deployedBlock: this.deployedBlock,
|
||||
});
|
||||
|
||||
console.log(`cachedEvents count - ${cachedEvents.events.length}`);
|
||||
console.log(`cachedEvents lastBlock - ${cachedEvents.lastBlock}\n`);
|
||||
|
||||
return cachedEvents;
|
||||
}
|
||||
|
||||
async saveEvents({ events, lastBlock }: BaseEvents<RegistersEvents>) {
|
||||
const instanceName = this.getInstanceName();
|
||||
|
||||
console.log('\ntotalEvents count - ', events.length);
|
||||
console.log(
|
||||
`totalEvents lastBlock - ${events[events.length - 1] ? events[events.length - 1].blockNumber : lastBlock}\n`,
|
||||
);
|
||||
|
||||
const eventTable = new Table();
|
||||
|
||||
eventTable.push(
|
||||
[{ colSpan: 2, content: 'Registered Relayers', hAlign: 'center' }],
|
||||
['Network', `${this.netId} chain`],
|
||||
['Events', `${events.length} events`],
|
||||
[{ colSpan: 2, content: 'Latest events' }],
|
||||
...events
|
||||
.slice(events.length - 10)
|
||||
.reverse()
|
||||
.map(({ blockNumber }, index) => {
|
||||
const eventIndex = events.length - index;
|
||||
|
||||
return [eventIndex, blockNumber];
|
||||
}),
|
||||
);
|
||||
|
||||
console.log(eventTable.toString() + '\n');
|
||||
|
||||
if (this.userDirectory) {
|
||||
await saveUserFile({
|
||||
fileName: instanceName + '.json',
|
||||
userDirectory: this.userDirectory,
|
||||
dataString: JSON.stringify(events, null, 2) + '\n',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
77
src/services/parser.ts
Normal file
77
src/services/parser.ts
Normal file
@ -0,0 +1,77 @@
|
||||
import { InvalidArgumentError } from 'commander';
|
||||
import { computeAddress, getAddress, Mnemonic } from 'ethers';
|
||||
import { validateUrl } from '@tornado/core';
|
||||
|
||||
export function parseNumber(value?: string | number): number {
|
||||
if (!value || isNaN(Number(value))) {
|
||||
throw new InvalidArgumentError('Invalid Number');
|
||||
}
|
||||
return Number(value);
|
||||
}
|
||||
|
||||
export function parseUrl(value?: string): string {
|
||||
if (!value || !validateUrl(value, ['http:', 'https:'])) {
|
||||
throw new InvalidArgumentError('Invalid URL');
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
export function parseRelayer(value?: string): string {
|
||||
if (!value || !(value.endsWith('.eth') || validateUrl(value, ['http:', 'https:']))) {
|
||||
throw new InvalidArgumentError('Invalid Relayer ETH address or URL');
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
export function parseAddress(value?: string): string {
|
||||
if (!value) {
|
||||
throw new InvalidArgumentError('Invalid Address');
|
||||
}
|
||||
try {
|
||||
return getAddress(value);
|
||||
} catch {
|
||||
throw new InvalidArgumentError('Invalid Address');
|
||||
}
|
||||
}
|
||||
|
||||
export function parseMnemonic(value?: string): string {
|
||||
if (!value) {
|
||||
throw new InvalidArgumentError('Invalid Mnemonic');
|
||||
}
|
||||
try {
|
||||
Mnemonic.fromPhrase(value);
|
||||
} catch {
|
||||
throw new InvalidArgumentError('Invalid Mnemonic');
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
export function parseKey(value?: string): string {
|
||||
if (!value) {
|
||||
throw new InvalidArgumentError('Invalid Private Key');
|
||||
}
|
||||
if (value.length === 64) {
|
||||
value = '0x' + value;
|
||||
}
|
||||
try {
|
||||
computeAddress(value);
|
||||
} catch {
|
||||
throw new InvalidArgumentError('Invalid Private Key');
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Recovery key shouldn't have a 0x prefix (Also this is how the UI generates)
|
||||
*/
|
||||
export function parseRecoveryKey(value?: string): string {
|
||||
if (!value) {
|
||||
throw new InvalidArgumentError('Invalid Recovery Key');
|
||||
}
|
||||
try {
|
||||
computeAddress('0x' + value);
|
||||
} catch {
|
||||
throw new InvalidArgumentError('Invalid Recovery Key');
|
||||
}
|
||||
return value;
|
||||
}
|
113
src/services/treeCache.ts
Normal file
113
src/services/treeCache.ts
Normal file
@ -0,0 +1,113 @@
|
||||
/**
|
||||
* Create tree cache file from node.js
|
||||
*
|
||||
* Only works for node.js, modified from https://github.com/tornadocash/tornado-classic-ui/blob/master/scripts/updateTree.js
|
||||
*/
|
||||
import { MerkleTree } from '@tornado/fixed-merkle-tree';
|
||||
import BloomFilter from 'bloomfilter.js';
|
||||
import { DepositsEvents } from '@tornado/core';
|
||||
import type { NetIdType } from '@tornado/core';
|
||||
import { saveUserFile } from './data';
|
||||
|
||||
/**
 * Options for TreeCache.
 *
 * netId / amount / currency identify the deposit instance whose tree is
 * cached; userDirectory is where the generated slice and bloom files are
 * written; PARTS_COUNT is how many slices the tree is split into.
 * LEAVES and zeroElement are declared here but not read by the current
 * constructor — NOTE(review): confirm whether they are still needed.
 */
export interface TreeCacheConstructor {
  netId: NetIdType;
  amount: string;
  currency: string;
  userDirectory: string;
  PARTS_COUNT?: number;
  LEAVES?: number;
  zeroElement?: string;
}
|
||||
|
||||
/**
 * Per-deposit metadata stored alongside each cached tree slice;
 * entries are keyed by deposit commitment in TreeCache.createTree.
 */
export interface treeMetadata {
  blockNumber: number;
  logIndex: number;
  transactionHash: string;
  timestamp: number;
  from: string;
  // Position of the deposit's commitment leaf within the Merkle tree.
  leafIndex: number;
}
|
||||
|
||||
/**
 * Builds on-disk cache files for a deposit Merkle tree: PARTS_COUNT slice
 * files (tree slice + per-leaf metadata) plus one serialized bloom-filter
 * file, all written to userDirectory.
 */
export class TreeCache {
  netId: NetIdType;
  amount: string;
  currency: string;
  userDirectory: string;

  // Number of slices the tree is split into (default 4).
  PARTS_COUNT: number;

  constructor({ netId, amount, currency, userDirectory, PARTS_COUNT = 4 }: TreeCacheConstructor) {
    this.netId = netId;
    this.amount = amount;
    this.currency = currency;
    this.userDirectory = userDirectory;

    this.PARTS_COUNT = PARTS_COUNT;
  }

  // Base name shared by all cache files for this instance,
  // e.g. "deposits_1_eth_0.1".
  getInstanceName(): string {
    return `deposits_${this.netId}_${this.currency}_${this.amount}`;
  }

  /**
   * Writes the slice and bloom cache files for the given deposit events
   * and their Merkle tree.
   *
   * Requires events to be complete and ordered: event i must have
   * leafIndex === i, otherwise an Error is thrown.
   */
  async createTree(events: DepositsEvents[], tree: MerkleTree) {
    const bloom = new BloomFilter(events.length);

    console.log(`Creating cached tree for ${this.getInstanceName()}\n`);

    // events indexed by commitment
    const eventsData = events.reduce(
      (acc, { leafIndex, commitment, ...rest }, i) => {
        if (leafIndex !== i) {
          throw new Error(`leafIndex (${leafIndex}) !== i (${i})`);
        }

        acc[commitment] = { ...rest, leafIndex };

        return acc;
      },
      {} as { [key in string]: treeMetadata },
    );

    const slices = tree.getTreeSlices(this.PARTS_COUNT);

    // Write each slice file in parallel; each slice carries the metadata of
    // its own elements, looked up by commitment.
    await Promise.all(
      slices.map(async (slice, index) => {
        const metadata = slice.elements.reduce((acc, curr) => {
          // Elements of every slice except the last are added to the bloom
          // filter — presumably because the final slice is always fetched in
          // full by consumers; TODO confirm against the UI's cache reader.
          if (index < this.PARTS_COUNT - 1) {
            bloom.add(curr);
          }
          acc.push(eventsData[curr]);
          return acc;
        }, [] as treeMetadata[]);

        const dataString =
          JSON.stringify(
            {
              ...slice,
              metadata,
            },
            null,
            2,
          ) + '\n';

        // Slice files are 1-based: "<instance>_slice1.json" … "_sliceN.json".
        const fileName = `${this.getInstanceName()}_slice${index + 1}.json`;

        await saveUserFile({
          fileName,
          userDirectory: this.userDirectory,
          dataString,
        });
      }),
    );

    // Serialize the bloom filter after all slices have contributed to it.
    const dataString = bloom.serialize() + '\n';

    const fileName = `${this.getInstanceName()}_bloom.json`;

    await saveUserFile({
      fileName,
      userDirectory: this.userDirectory,
      dataString,
    });
  }
}
|
25
src/types/bloomfilter.js.d.ts
vendored
Normal file
25
src/types/bloomfilter.js.d.ts
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
// Minimal ambient typings for the untyped 'bloomfilter.js' npm package.
// Shapes mirror the upstream implementation; 'any' is kept where the
// library itself is untyped.
declare module 'bloomfilter.js' {
  export default class BloomFilter {
    // m / k / size / bitview are internal filter state exposed by the
    // library — presumably bit count, hash count, capacity and backing
    // storage; confirm against the upstream source before relying on them.
    m: number;
    k: number;
    size: number;
    bitview: any;

    // n: expected number of elements; false_postive_tolerance: optional
    // tolerance (parameter name spelling follows the upstream library).
    constructor(n: number, false_postive_tolerance?: number);

    calculateHash(x: number, m: number, i: number): number;

    // Membership test for a previously add()-ed value.
    test(data: any): boolean;

    add(data: any): void;

    bytelength(): number;

    view(): Uint8Array;

    // Serialize to / reconstruct from a string (used for the on-disk cache).
    serialize(): string;

    deserialize(serialized: string): BloomFilter;
  }
}
|
15
yarn.lock
15
yarn.lock
@ -771,9 +771,9 @@
|
||||
"@openzeppelin/contracts-v3" "npm:@openzeppelin/contracts@3.2.0-rc.0"
|
||||
ethers "^6.4.0"
|
||||
|
||||
"@tornado/core@git+https://git.tornado.ws/tornadocontrib/tornado-core.git#b5f57e20ee7de42c4af88fb417d887672a8d3582":
|
||||
version "1.0.0"
|
||||
resolved "git+https://git.tornado.ws/tornadocontrib/tornado-core.git#b5f57e20ee7de42c4af88fb417d887672a8d3582"
|
||||
"@tornado/core@git+https://git.tornado.ws/tornadocontrib/tornado-core.git#4fde41b10ce601bcf687e2e8b93785f86237ac6c":
|
||||
version "1.0.1"
|
||||
resolved "git+https://git.tornado.ws/tornadocontrib/tornado-core.git#4fde41b10ce601bcf687e2e8b93785f86237ac6c"
|
||||
dependencies:
|
||||
"@metamask/eth-sig-util" "^7.0.1"
|
||||
"@tornado/contracts" "^1.0.0"
|
||||
@ -781,21 +781,12 @@
|
||||
"@tornado/snarkjs" "^0.1.20"
|
||||
"@tornado/websnark" "^0.0.4"
|
||||
ajv "^8.12.0"
|
||||
bloomfilter.js "^1.0.2"
|
||||
bn.js "^5.2.1"
|
||||
circomlibjs "0.1.7"
|
||||
cross-fetch "^4.0.0"
|
||||
ethers "^6.4.0"
|
||||
ffjavascript "0.2.48"
|
||||
fflate "^0.8.2"
|
||||
optionalDependencies:
|
||||
"@colors/colors" "1.5.0"
|
||||
cli-table3 "^0.6.4"
|
||||
commander "^12.0.0"
|
||||
http-proxy-agent "^7.0.2"
|
||||
https-proxy-agent "^7.0.4"
|
||||
moment "^2.30.1"
|
||||
socks-proxy-agent "^8.0.3"
|
||||
|
||||
"@tornado/fixed-merkle-tree@0.7.3", "@tornado/fixed-merkle-tree@^0.7.3":
|
||||
version "0.7.3"
|
||||
|
Loading…
Reference in New Issue
Block a user