Compare commits
2 Commits
f411159f15
...
8041bd7f78
| Author | SHA1 | Date | |
|---|---|---|---|
| 8041bd7f78 | |||
| acb7aa72a1 |
@ -33,7 +33,6 @@ module.exports = {
|
||||
'error',
|
||||
{
|
||||
tabWidth: 4,
|
||||
printWidth: 120,
|
||||
singleQuote: true,
|
||||
},
|
||||
],
|
||||
|
||||
4
dist/batch.d.ts
vendored
4
dist/batch.d.ts
vendored
@ -80,7 +80,7 @@ export declare class BatchEventsService {
|
||||
retryMax: number;
|
||||
retryOn: number;
|
||||
constructor({ provider, contract, onProgress, concurrencySize, blocksPerRequest, shouldRetry, retryMax, retryOn, }: BatchEventServiceConstructor);
|
||||
getPastEvents({ fromBlock, toBlock, type }: EventInput): Promise<EventLog[]>;
|
||||
getPastEvents({ fromBlock, toBlock, type, }: EventInput): Promise<EventLog[]>;
|
||||
createBatchRequest(batchArray: EventInput[]): Promise<EventLog[]>[];
|
||||
getBatchEvents({ fromBlock, toBlock, type }: EventInput): Promise<EventLog[]>;
|
||||
getBatchEvents({ fromBlock, toBlock, type, }: EventInput): Promise<EventLog[]>;
|
||||
}
|
||||
|
||||
4
dist/deposits.d.ts
vendored
4
dist/deposits.d.ts
vendored
@ -30,7 +30,7 @@ export interface parsedInvoiceExec extends DepositType {
|
||||
}
|
||||
export declare function parseNote(noteString: string): parsedNoteExec | undefined;
|
||||
export declare function parseInvoice(invoiceString: string): parsedInvoiceExec | undefined;
|
||||
export declare function createDeposit({ nullifier, secret }: createDepositParams): Promise<createDepositObject>;
|
||||
export declare function createDeposit({ nullifier, secret, }: createDepositParams): Promise<createDepositObject>;
|
||||
export interface DepositConstructor {
|
||||
currency: string;
|
||||
amount: string;
|
||||
@ -56,7 +56,7 @@ export declare class Deposit {
|
||||
nullifierHex: string;
|
||||
constructor({ currency, amount, netId, nullifier, secret, note, noteHex, invoice, commitmentHex, nullifierHex, }: DepositConstructor);
|
||||
toString(): string;
|
||||
static createNote({ currency, amount, netId, nullifier, secret }: createNoteParams): Promise<Deposit>;
|
||||
static createNote({ currency, amount, netId, nullifier, secret, }: createNoteParams): Promise<Deposit>;
|
||||
static parseNote(noteString: string): Promise<Deposit>;
|
||||
}
|
||||
export declare class Invoice {
|
||||
|
||||
2
dist/encryptedNotes.d.ts
vendored
2
dist/encryptedNotes.d.ts
vendored
@ -10,7 +10,7 @@ export interface DecryptedNotes {
|
||||
address: string;
|
||||
noteHex: string;
|
||||
}
|
||||
export declare function packEncryptedMessage({ nonce, ephemPublicKey, ciphertext }: EthEncryptedData): string;
|
||||
export declare function packEncryptedMessage({ nonce, ephemPublicKey, ciphertext, }: EthEncryptedData): string;
|
||||
export declare function unpackEncryptedMessage(encryptedMessage: string): EthEncryptedData & {
|
||||
messageBuff: string;
|
||||
};
|
||||
|
||||
14
dist/events/base.d.ts
vendored
14
dist/events/base.d.ts
vendored
@ -50,10 +50,10 @@ export declare class BaseEventsService<EventType extends MinimalEvents> {
|
||||
getTovarishType(): string;
|
||||
getGraphMethod(): string;
|
||||
getGraphParams(): BaseGraphParams;
|
||||
updateEventProgress({ percentage, type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
updateBlockProgress({ percentage, currentIndex, totalIndex }: Parameters<BatchBlockOnProgress>[0]): void;
|
||||
updateTransactionProgress({ percentage, currentIndex, totalIndex }: Parameters<BatchBlockOnProgress>[0]): void;
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchGraphOnProgress>[0]): void;
|
||||
updateEventProgress({ percentage, type, fromBlock, toBlock, count, }: Parameters<BatchEventOnProgress>[0]): void;
|
||||
updateBlockProgress({ percentage, currentIndex, totalIndex, }: Parameters<BatchBlockOnProgress>[0]): void;
|
||||
updateTransactionProgress({ percentage, currentIndex, totalIndex, }: Parameters<BatchBlockOnProgress>[0]): void;
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count, }: Parameters<BatchGraphOnProgress>[0]): void;
|
||||
formatEvents(events: EventLog[]): Promise<EventType[]>;
|
||||
/**
|
||||
* Get saved or cached events
|
||||
@ -75,7 +75,7 @@ export declare class BaseEventsService<EventType extends MinimalEvents> {
|
||||
fromBlock: number;
|
||||
toBlock?: number;
|
||||
}): Promise<BaseEvents<EventType>>;
|
||||
getLatestEvents({ fromBlock }: {
|
||||
getLatestEvents({ fromBlock, }: {
|
||||
fromBlock: number;
|
||||
}): Promise<BaseEvents<EventType>>;
|
||||
validateEvents<S>({ events, lastBlock, hasNewEvents, }: BaseEvents<EventType> & {
|
||||
@ -132,7 +132,7 @@ export declare class BaseEchoService extends BaseEventsService<EchoEvents> {
|
||||
getInstanceName(): string;
|
||||
getGraphMethod(): string;
|
||||
formatEvents(events: EventLog[]): Promise<EchoEvents[]>;
|
||||
getEventsFromGraph({ fromBlock }: {
|
||||
getEventsFromGraph({ fromBlock, }: {
|
||||
fromBlock: number;
|
||||
}): Promise<BaseEvents<EchoEvents>>;
|
||||
}
|
||||
@ -180,7 +180,7 @@ export declare class BaseGovernanceService extends BaseEventsService<AllGovernan
|
||||
getTovarishType(): string;
|
||||
getGraphMethod(): string;
|
||||
formatEvents(events: EventLog[]): Promise<AllGovernanceEvents[]>;
|
||||
getEventsFromGraph({ fromBlock }: {
|
||||
getEventsFromGraph({ fromBlock, }: {
|
||||
fromBlock: number;
|
||||
}): Promise<BaseEvents<AllGovernanceEvents>>;
|
||||
getAllProposals(): Promise<GovernanceProposals[]>;
|
||||
|
||||
2
dist/events/db.d.ts
vendored
2
dist/events/db.d.ts
vendored
@ -28,7 +28,7 @@ export declare class DBTornadoService extends BaseTornadoService {
|
||||
constructor(params: DBTornadoServiceConstructor);
|
||||
getEventsFromDB(): Promise<BaseEvents<DepositsEvents | WithdrawalsEvents>>;
|
||||
getEventsFromCache(): Promise<CachedEvents<DepositsEvents | WithdrawalsEvents>>;
|
||||
saveEvents({ events, lastBlock }: BaseEvents<DepositsEvents | WithdrawalsEvents>): Promise<void>;
|
||||
saveEvents({ events, lastBlock, }: BaseEvents<DepositsEvents | WithdrawalsEvents>): Promise<void>;
|
||||
}
|
||||
export interface DBEchoServiceConstructor extends BaseEchoServiceConstructor {
|
||||
staticUrl: string;
|
||||
|
||||
2
dist/graphql/index.d.ts
vendored
2
dist/graphql/index.d.ts
vendored
@ -57,7 +57,7 @@ export interface getMetaReturns {
|
||||
lastSyncBlock: null | number;
|
||||
hasIndexingErrors: null | boolean;
|
||||
}
|
||||
export declare function getMeta({ graphApi, subgraphName, fetchDataOptions }: getMetaParams): Promise<getMetaReturns>;
|
||||
export declare function getMeta({ graphApi, subgraphName, fetchDataOptions, }: getMetaParams): Promise<getMetaReturns>;
|
||||
export interface GraphRegisters {
|
||||
relayers: {
|
||||
id: string;
|
||||
|
||||
8
dist/idb.d.ts
vendored
8
dist/idb.d.ts
vendored
@ -35,16 +35,16 @@ export declare class IndexedDB {
|
||||
key?: string;
|
||||
count?: number;
|
||||
}): Promise<T>;
|
||||
getItem<T>({ storeName, key }: {
|
||||
getItem<T>({ storeName, key, }: {
|
||||
storeName: string;
|
||||
key: string;
|
||||
}): Promise<T | undefined>;
|
||||
addItem({ storeName, data, key }: {
|
||||
addItem({ storeName, data, key, }: {
|
||||
storeName: string;
|
||||
data: any;
|
||||
key: string;
|
||||
}): Promise<void>;
|
||||
putItem({ storeName, data, key }: {
|
||||
putItem({ storeName, data, key, }: {
|
||||
storeName: string;
|
||||
data: any;
|
||||
key?: string;
|
||||
@ -62,7 +62,7 @@ export declare class IndexedDB {
|
||||
getValue<T>(key: string): Promise<T | undefined>;
|
||||
setValue(key: string, data: any): Promise<void>;
|
||||
delValue(key: string): Promise<void>;
|
||||
clearStore({ storeName, mode }: {
|
||||
clearStore({ storeName, mode, }: {
|
||||
storeName: string;
|
||||
mode: IDBTransactionMode;
|
||||
}): Promise<void>;
|
||||
|
||||
880
dist/index.js
vendored
880
dist/index.js
vendored
File diff suppressed because it is too large
Load Diff
880
dist/index.mjs
vendored
880
dist/index.mjs
vendored
File diff suppressed because it is too large
Load Diff
2
dist/merkleTree.d.ts
vendored
2
dist/merkleTree.d.ts
vendored
@ -22,7 +22,7 @@ export declare class MerkleTreeService {
|
||||
merkleWorkerPath?: string;
|
||||
constructor({ netId, amount, currency, Tornado, commitmentHex, merkleTreeHeight, emptyElement, merkleWorkerPath, }: MerkleTreeConstructor);
|
||||
createTree(events: Element[]): Promise<MerkleTree>;
|
||||
createPartialTree({ edge, elements }: {
|
||||
createPartialTree({ edge, elements, }: {
|
||||
edge: TreeEdge;
|
||||
elements: Element[];
|
||||
}): Promise<PartialMerkleTree>;
|
||||
|
||||
30
dist/merkleTreeWorker.js
vendored
30
dist/merkleTreeWorker.js
vendored
@ -1814,7 +1814,9 @@ class Mimc {
|
||||
}
|
||||
async initMimc() {
|
||||
this.sponge = await buildMimcSponge();
|
||||
this.hash = (left, right) => this.sponge?.F.toString(this.sponge?.multiHash([BigInt(left), BigInt(right)]));
|
||||
this.hash = (left, right) => this.sponge?.F.toString(
|
||||
this.sponge?.multiHash([BigInt(left), BigInt(right)])
|
||||
);
|
||||
}
|
||||
async getHash() {
|
||||
await this.mimcPromise;
|
||||
@ -1835,18 +1837,27 @@ async function nodePostWork() {
|
||||
const { hash: hashFunction } = await mimc.getHash();
|
||||
const { merkleTreeHeight, edge, elements, zeroElement } = workerThreads.workerData;
|
||||
if (edge) {
|
||||
const merkleTree2 = new libExports.PartialMerkleTree(merkleTreeHeight, edge, elements, {
|
||||
const merkleTree2 = new libExports.PartialMerkleTree(
|
||||
merkleTreeHeight,
|
||||
edge,
|
||||
elements,
|
||||
{
|
||||
zeroElement,
|
||||
hashFunction
|
||||
});
|
||||
workerThreads.parentPort.postMessage(merkleTree2.toString());
|
||||
}
|
||||
);
|
||||
workerThreads.parentPort.postMessage(
|
||||
merkleTree2.toString()
|
||||
);
|
||||
return;
|
||||
}
|
||||
const merkleTree = new libExports.MerkleTree(merkleTreeHeight, elements, {
|
||||
zeroElement,
|
||||
hashFunction
|
||||
});
|
||||
workerThreads.parentPort.postMessage(merkleTree.toString());
|
||||
workerThreads.parentPort.postMessage(
|
||||
merkleTree.toString()
|
||||
);
|
||||
}
|
||||
if (isNode && workerThreads) {
|
||||
nodePostWork();
|
||||
@ -1861,10 +1872,15 @@ if (isNode && workerThreads) {
|
||||
const { hash: hashFunction } = await mimc.getHash();
|
||||
const { merkleTreeHeight, edge, elements, zeroElement } = data;
|
||||
if (edge) {
|
||||
const merkleTree2 = new libExports.PartialMerkleTree(merkleTreeHeight, edge, elements, {
|
||||
const merkleTree2 = new libExports.PartialMerkleTree(
|
||||
merkleTreeHeight,
|
||||
edge,
|
||||
elements,
|
||||
{
|
||||
zeroElement,
|
||||
hashFunction
|
||||
});
|
||||
}
|
||||
);
|
||||
postMessage(merkleTree2.toString());
|
||||
return;
|
||||
}
|
||||
|
||||
50
dist/merkleTreeWorker.umd.js
vendored
50
dist/merkleTreeWorker.umd.js
vendored
@ -101986,7 +101986,9 @@ class Mimc {
|
||||
}
|
||||
async initMimc() {
|
||||
this.sponge = await mimcsponge_buildMimcSponge();
|
||||
this.hash = (left, right) => this.sponge?.F.toString(this.sponge?.multiHash([BigInt(left), BigInt(right)]));
|
||||
this.hash = (left, right) => this.sponge?.F.toString(
|
||||
this.sponge?.multiHash([BigInt(left), BigInt(right)])
|
||||
);
|
||||
}
|
||||
async getHash() {
|
||||
await this.mimcPromise;
|
||||
@ -102011,7 +102013,9 @@ BigInt.prototype.toJSON = function() {
|
||||
};
|
||||
const isNode = !process.browser && typeof globalThis.window === "undefined";
|
||||
const utils_crypto = isNode ? crypto_browserify.webcrypto : globalThis.crypto;
|
||||
const chunk = (arr, size) => [...Array(Math.ceil(arr.length / size))].map((_, i) => arr.slice(size * i, size + size * i));
|
||||
const chunk = (arr, size) => [...Array(Math.ceil(arr.length / size))].map(
|
||||
(_, i) => arr.slice(size * i, size + size * i)
|
||||
);
|
||||
function utils_sleep(ms) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
@ -102039,7 +102043,9 @@ function bufferToBytes(b) {
|
||||
return new Uint8Array(b.buffer);
|
||||
}
|
||||
function bytesToBase64(bytes) {
|
||||
return btoa(bytes.reduce((data, byte) => data + String.fromCharCode(byte), ""));
|
||||
return btoa(
|
||||
bytes.reduce((data, byte) => data + String.fromCharCode(byte), "")
|
||||
);
|
||||
}
|
||||
function base64ToBytes(base64) {
|
||||
return Uint8Array.from(atob(base64), (c) => c.charCodeAt(0));
|
||||
@ -102054,7 +102060,11 @@ function hexToBytes(hexString) {
|
||||
if (hexString.length % 2 !== 0) {
|
||||
hexString = "0" + hexString;
|
||||
}
|
||||
return Uint8Array.from(hexString.match(/.{1,2}/g).map((byte) => parseInt(byte, 16)));
|
||||
return Uint8Array.from(
|
||||
hexString.match(/.{1,2}/g).map(
|
||||
(byte) => parseInt(byte, 16)
|
||||
)
|
||||
);
|
||||
}
|
||||
function bytesToBN(bytes) {
|
||||
return BigInt(bytesToHex(bytes));
|
||||
@ -102067,7 +102077,11 @@ function bnToBytes(bigint) {
|
||||
if (hexString.length % 2 !== 0) {
|
||||
hexString = "0" + hexString;
|
||||
}
|
||||
return Uint8Array.from(hexString.match(/.{1,2}/g).map((byte) => parseInt(byte, 16)));
|
||||
return Uint8Array.from(
|
||||
hexString.match(/.{1,2}/g).map(
|
||||
(byte) => parseInt(byte, 16)
|
||||
)
|
||||
);
|
||||
}
|
||||
function leBuff2Int(bytes) {
|
||||
return new BN(bytes, 16, "le");
|
||||
@ -102128,18 +102142,27 @@ async function nodePostWork() {
|
||||
const { hash: hashFunction } = await mimc.getHash();
|
||||
const { merkleTreeHeight, edge, elements, zeroElement } = (worker_threads_ignored_default()).workerData;
|
||||
if (edge) {
|
||||
const merkleTree2 = new lib.PartialMerkleTree(merkleTreeHeight, edge, elements, {
|
||||
const merkleTree2 = new lib.PartialMerkleTree(
|
||||
merkleTreeHeight,
|
||||
edge,
|
||||
elements,
|
||||
{
|
||||
zeroElement,
|
||||
hashFunction
|
||||
});
|
||||
worker_threads_ignored_default().parentPort.postMessage(merkleTree2.toString());
|
||||
}
|
||||
);
|
||||
worker_threads_ignored_default().parentPort.postMessage(
|
||||
merkleTree2.toString()
|
||||
);
|
||||
return;
|
||||
}
|
||||
const merkleTree = new lib.MerkleTree(merkleTreeHeight, elements, {
|
||||
zeroElement,
|
||||
hashFunction
|
||||
});
|
||||
worker_threads_ignored_default().parentPort.postMessage(merkleTree.toString());
|
||||
worker_threads_ignored_default().parentPort.postMessage(
|
||||
merkleTree.toString()
|
||||
);
|
||||
}
|
||||
if (isNode && (worker_threads_ignored_default())) {
|
||||
nodePostWork();
|
||||
@ -102154,10 +102177,15 @@ if (isNode && (worker_threads_ignored_default())) {
|
||||
const { hash: hashFunction } = await mimc.getHash();
|
||||
const { merkleTreeHeight, edge, elements, zeroElement } = data;
|
||||
if (edge) {
|
||||
const merkleTree2 = new lib.PartialMerkleTree(merkleTreeHeight, edge, elements, {
|
||||
const merkleTree2 = new lib.PartialMerkleTree(
|
||||
merkleTreeHeight,
|
||||
edge,
|
||||
elements,
|
||||
{
|
||||
zeroElement,
|
||||
hashFunction
|
||||
});
|
||||
}
|
||||
);
|
||||
postMessage(merkleTree2.toString());
|
||||
return;
|
||||
}
|
||||
|
||||
6
dist/providers.d.ts
vendored
6
dist/providers.d.ts
vendored
@ -50,7 +50,7 @@ export declare class TornadoWallet extends Wallet {
|
||||
gasLimitBump: number;
|
||||
gasFailover: boolean;
|
||||
bumpNonce: boolean;
|
||||
constructor(key: string | SigningKey, provider?: Provider, { gasPriceBump, gasLimitBump, gasFailover, bumpNonce }?: TornadoWalletOptions);
|
||||
constructor(key: string | SigningKey, provider?: Provider, { gasPriceBump, gasLimitBump, gasFailover, bumpNonce, }?: TornadoWalletOptions);
|
||||
static fromMnemonic(mneomnic: string, provider: Provider, index?: number, options?: TornadoWalletOptions): TornadoWallet;
|
||||
populateTransaction(tx: TransactionRequest): Promise<import("ethers").TransactionLike<string>>;
|
||||
}
|
||||
@ -60,7 +60,7 @@ export declare class TornadoVoidSigner extends VoidSigner {
|
||||
gasLimitBump: number;
|
||||
gasFailover: boolean;
|
||||
bumpNonce: boolean;
|
||||
constructor(address: string, provider?: Provider, { gasPriceBump, gasLimitBump, gasFailover, bumpNonce }?: TornadoWalletOptions);
|
||||
constructor(address: string, provider?: Provider, { gasPriceBump, gasLimitBump, gasFailover, bumpNonce, }?: TornadoWalletOptions);
|
||||
populateTransaction(tx: TransactionRequest): Promise<import("ethers").TransactionLike<string>>;
|
||||
}
|
||||
export declare class TornadoRpcSigner extends JsonRpcSigner {
|
||||
@ -69,7 +69,7 @@ export declare class TornadoRpcSigner extends JsonRpcSigner {
|
||||
gasLimitBump: number;
|
||||
gasFailover: boolean;
|
||||
bumpNonce: boolean;
|
||||
constructor(provider: JsonRpcApiProvider, address: string, { gasPriceBump, gasLimitBump, gasFailover, bumpNonce }?: TornadoWalletOptions);
|
||||
constructor(provider: JsonRpcApiProvider, address: string, { gasPriceBump, gasLimitBump, gasFailover, bumpNonce, }?: TornadoWalletOptions);
|
||||
sendUncheckedTransaction(tx: TransactionRequest): Promise<string>;
|
||||
}
|
||||
export type connectWalletFunc = (...args: any[]) => Promise<void>;
|
||||
|
||||
2
dist/relayerClient.d.ts
vendored
2
dist/relayerClient.d.ts
vendored
@ -109,7 +109,7 @@ export function isRelayerUpdated(relayerVersion: string, netId: NetIdType) {
|
||||
return isUpdatedMajor && (Number(patch) >= 5 || netId !== NetId.MAINNET); // Patch checking - also backwards compatibility for Mainnet
|
||||
}
|
||||
**/
|
||||
export declare function calculateScore({ stakeBalance, tornadoServiceFee }: RelayerInfo): bigint;
|
||||
export declare function calculateScore({ stakeBalance, tornadoServiceFee, }: RelayerInfo): bigint;
|
||||
export declare function getWeightRandom(weightsScores: bigint[], random: bigint): number;
|
||||
export interface RelayerInstanceList {
|
||||
[key: string]: {
|
||||
|
||||
880
dist/tornado.umd.js
vendored
880
dist/tornado.umd.js
vendored
File diff suppressed because it is too large
Load Diff
90
src/batch.ts
90
src/batch.ts
@ -1,4 +1,12 @@
|
||||
import type { Provider, BlockTag, Block, TransactionResponse, BaseContract, ContractEventName, EventLog } from 'ethers';
|
||||
import type {
|
||||
Provider,
|
||||
BlockTag,
|
||||
Block,
|
||||
TransactionResponse,
|
||||
BaseContract,
|
||||
ContractEventName,
|
||||
EventLog,
|
||||
} from 'ethers';
|
||||
import { chunk, sleep } from './utils';
|
||||
|
||||
export interface BatchBlockServiceConstructor {
|
||||
@ -72,9 +80,14 @@ export class BatchBlockService {
|
||||
let err;
|
||||
|
||||
// eslint-disable-next-line no-unmodified-loop-condition
|
||||
while ((!this.shouldRetry && retries === 0) || (this.shouldRetry && retries < this.retryMax)) {
|
||||
while (
|
||||
(!this.shouldRetry && retries === 0) ||
|
||||
(this.shouldRetry && retries < this.retryMax)
|
||||
) {
|
||||
try {
|
||||
return await Promise.all(blocks.map((b) => this.getBlock(b)));
|
||||
return await Promise.all(
|
||||
blocks.map((b) => this.getBlock(b)),
|
||||
);
|
||||
} catch (e) {
|
||||
retries++;
|
||||
err = e;
|
||||
@ -93,8 +106,15 @@ export class BatchBlockService {
|
||||
let blockCount = 0;
|
||||
const results: Block[] = [];
|
||||
|
||||
for (const chunks of chunk(blocks, this.concurrencySize * this.batchSize)) {
|
||||
const chunksResult = (await Promise.all(this.createBatchRequest(chunk(chunks, this.batchSize)))).flat();
|
||||
for (const chunks of chunk(
|
||||
blocks,
|
||||
this.concurrencySize * this.batchSize,
|
||||
)) {
|
||||
const chunksResult = (
|
||||
await Promise.all(
|
||||
this.createBatchRequest(chunk(chunks, this.batchSize)),
|
||||
)
|
||||
).flat();
|
||||
|
||||
results.push(...chunksResult);
|
||||
|
||||
@ -153,7 +173,9 @@ export class BatchTransactionService {
|
||||
return txObject;
|
||||
}
|
||||
|
||||
createBatchRequest(batchArray: string[][]): Promise<TransactionResponse[]>[] {
|
||||
createBatchRequest(
|
||||
batchArray: string[][],
|
||||
): Promise<TransactionResponse[]>[] {
|
||||
return batchArray.map(async (txs: string[], index: number) => {
|
||||
await sleep(20 * index);
|
||||
|
||||
@ -162,9 +184,14 @@ export class BatchTransactionService {
|
||||
let err;
|
||||
|
||||
// eslint-disable-next-line no-unmodified-loop-condition
|
||||
while ((!this.shouldRetry && retries === 0) || (this.shouldRetry && retries < this.retryMax)) {
|
||||
while (
|
||||
(!this.shouldRetry && retries === 0) ||
|
||||
(this.shouldRetry && retries < this.retryMax)
|
||||
) {
|
||||
try {
|
||||
return await Promise.all(txs.map((tx) => this.getTransaction(tx)));
|
||||
return await Promise.all(
|
||||
txs.map((tx) => this.getTransaction(tx)),
|
||||
);
|
||||
} catch (e) {
|
||||
retries++;
|
||||
err = e;
|
||||
@ -183,8 +210,15 @@ export class BatchTransactionService {
|
||||
let txCount = 0;
|
||||
const results = [];
|
||||
|
||||
for (const chunks of chunk(txs, this.concurrencySize * this.batchSize)) {
|
||||
const chunksResult = (await Promise.all(this.createBatchRequest(chunk(chunks, this.batchSize)))).flat();
|
||||
for (const chunks of chunk(
|
||||
txs,
|
||||
this.concurrencySize * this.batchSize,
|
||||
)) {
|
||||
const chunksResult = (
|
||||
await Promise.all(
|
||||
this.createBatchRequest(chunk(chunks, this.batchSize)),
|
||||
)
|
||||
).flat();
|
||||
|
||||
results.push(...chunksResult);
|
||||
|
||||
@ -267,14 +301,25 @@ export class BatchEventsService {
|
||||
this.retryOn = retryOn;
|
||||
}
|
||||
|
||||
async getPastEvents({ fromBlock, toBlock, type }: EventInput): Promise<EventLog[]> {
|
||||
async getPastEvents({
|
||||
fromBlock,
|
||||
toBlock,
|
||||
type,
|
||||
}: EventInput): Promise<EventLog[]> {
|
||||
let err;
|
||||
let retries = 0;
|
||||
|
||||
// eslint-disable-next-line no-unmodified-loop-condition
|
||||
while ((!this.shouldRetry && retries === 0) || (this.shouldRetry && retries < this.retryMax)) {
|
||||
while (
|
||||
(!this.shouldRetry && retries === 0) ||
|
||||
(this.shouldRetry && retries < this.retryMax)
|
||||
) {
|
||||
try {
|
||||
return (await this.contract.queryFilter(type, fromBlock, toBlock)) as EventLog[];
|
||||
return (await this.contract.queryFilter(
|
||||
type,
|
||||
fromBlock,
|
||||
toBlock,
|
||||
)) as EventLog[];
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
} catch (e: any) {
|
||||
err = e;
|
||||
@ -283,7 +328,9 @@ export class BatchEventsService {
|
||||
// If provider.getBlockNumber returned last block that isn't accepted (happened on Avalanche/Gnosis),
|
||||
// get events to last accepted block
|
||||
if (e.message.includes('after last accepted block')) {
|
||||
const acceptedBlock = parseInt(e.message.split('after last accepted block ')[1]);
|
||||
const acceptedBlock = parseInt(
|
||||
e.message.split('after last accepted block ')[1],
|
||||
);
|
||||
toBlock = acceptedBlock;
|
||||
}
|
||||
|
||||
@ -303,7 +350,11 @@ export class BatchEventsService {
|
||||
});
|
||||
}
|
||||
|
||||
async getBatchEvents({ fromBlock, toBlock, type = '*' }: EventInput): Promise<EventLog[]> {
|
||||
async getBatchEvents({
|
||||
fromBlock,
|
||||
toBlock,
|
||||
type = '*',
|
||||
}: EventInput): Promise<EventLog[]> {
|
||||
if (!toBlock) {
|
||||
toBlock = await this.provider.getBlockNumber();
|
||||
}
|
||||
@ -311,7 +362,10 @@ export class BatchEventsService {
|
||||
const eventsToSync = [];
|
||||
|
||||
for (let i = fromBlock; i < toBlock; i += this.blocksPerRequest) {
|
||||
const j = i + this.blocksPerRequest - 1 > toBlock ? toBlock : i + this.blocksPerRequest - 1;
|
||||
const j =
|
||||
i + this.blocksPerRequest - 1 > toBlock
|
||||
? toBlock
|
||||
: i + this.blocksPerRequest - 1;
|
||||
|
||||
eventsToSync.push({ fromBlock: i, toBlock: j, type });
|
||||
}
|
||||
@ -324,7 +378,9 @@ export class BatchEventsService {
|
||||
for (const chunk of eventChunk) {
|
||||
chunkCount++;
|
||||
|
||||
const fetchedEvents = (await Promise.all(this.createBatchRequest(chunk))).flat();
|
||||
const fetchedEvents = (
|
||||
await Promise.all(this.createBatchRequest(chunk))
|
||||
).flat();
|
||||
events.push(...fetchedEvents);
|
||||
|
||||
if (typeof this.onProgress === 'function') {
|
||||
|
||||
@ -1,4 +1,11 @@
|
||||
import { bnToBytes, bytesToBN, leBuff2Int, leInt2Buff, rBigInt, toFixedHex } from './utils';
|
||||
import {
|
||||
bnToBytes,
|
||||
bytesToBN,
|
||||
leBuff2Int,
|
||||
leInt2Buff,
|
||||
rBigInt,
|
||||
toFixedHex,
|
||||
} from './utils';
|
||||
import { buffPedersenHash } from './pedersen';
|
||||
import type { NetIdType } from './networkConfig';
|
||||
|
||||
@ -38,13 +45,15 @@ export interface parsedInvoiceExec extends DepositType {
|
||||
}
|
||||
|
||||
export function parseNote(noteString: string): parsedNoteExec | undefined {
|
||||
const noteRegex = /tornado-(?<currency>\w+)-(?<amount>[\d.]+)-(?<netId>\d+)-0x(?<noteHex>[0-9a-fA-F]{124})/g;
|
||||
const noteRegex =
|
||||
/tornado-(?<currency>\w+)-(?<amount>[\d.]+)-(?<netId>\d+)-0x(?<noteHex>[0-9a-fA-F]{124})/g;
|
||||
const match = noteRegex.exec(noteString);
|
||||
if (!match) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { currency, amount, netId, noteHex } = match.groups as unknown as parsedNoteExec;
|
||||
const { currency, amount, netId, noteHex } =
|
||||
match.groups as unknown as parsedNoteExec;
|
||||
|
||||
return {
|
||||
currency: currency.toLowerCase(),
|
||||
@ -55,7 +64,9 @@ export function parseNote(noteString: string): parsedNoteExec | undefined {
|
||||
};
|
||||
}
|
||||
|
||||
export function parseInvoice(invoiceString: string): parsedInvoiceExec | undefined {
|
||||
export function parseInvoice(
|
||||
invoiceString: string,
|
||||
): parsedInvoiceExec | undefined {
|
||||
const invoiceRegex =
|
||||
/tornadoInvoice-(?<currency>\w+)-(?<amount>[\d.]+)-(?<netId>\d+)-0x(?<commitmentHex>[0-9a-fA-F]{64})/g;
|
||||
const match = invoiceRegex.exec(invoiceString);
|
||||
@ -63,7 +74,8 @@ export function parseInvoice(invoiceString: string): parsedInvoiceExec | undefin
|
||||
return;
|
||||
}
|
||||
|
||||
const { currency, amount, netId, commitmentHex } = match.groups as unknown as parsedInvoiceExec;
|
||||
const { currency, amount, netId, commitmentHex } =
|
||||
match.groups as unknown as parsedInvoiceExec;
|
||||
|
||||
return {
|
||||
currency: currency.toLowerCase(),
|
||||
@ -74,8 +86,14 @@ export function parseInvoice(invoiceString: string): parsedInvoiceExec | undefin
|
||||
};
|
||||
}
|
||||
|
||||
export async function createDeposit({ nullifier, secret }: createDepositParams): Promise<createDepositObject> {
|
||||
const preimage = new Uint8Array([...leInt2Buff(nullifier), ...leInt2Buff(secret)]);
|
||||
export async function createDeposit({
|
||||
nullifier,
|
||||
secret,
|
||||
}: createDepositParams): Promise<createDepositObject> {
|
||||
const preimage = new Uint8Array([
|
||||
...leInt2Buff(nullifier),
|
||||
...leInt2Buff(secret),
|
||||
]);
|
||||
const noteHex = toFixedHex(bytesToBN(preimage), 62);
|
||||
const commitment = BigInt(await buffPedersenHash(preimage));
|
||||
const commitmentHex = toFixedHex(commitment);
|
||||
@ -166,7 +184,13 @@ export class Deposit {
|
||||
);
|
||||
}
|
||||
|
||||
static async createNote({ currency, amount, netId, nullifier, secret }: createNoteParams): Promise<Deposit> {
|
||||
static async createNote({
|
||||
currency,
|
||||
amount,
|
||||
netId,
|
||||
nullifier,
|
||||
secret,
|
||||
}: createNoteParams): Promise<Deposit> {
|
||||
if (!nullifier) {
|
||||
nullifier = rBigInt(31);
|
||||
}
|
||||
@ -202,7 +226,13 @@ export class Deposit {
|
||||
throw new Error('The note has invalid format');
|
||||
}
|
||||
|
||||
const { currency, amount, netId, note, noteHex: parsedNoteHex } = parsedNote;
|
||||
const {
|
||||
currency,
|
||||
amount,
|
||||
netId,
|
||||
note,
|
||||
noteHex: parsedNoteHex,
|
||||
} = parsedNote;
|
||||
|
||||
const bytes = bnToBytes(parsedNoteHex);
|
||||
const nullifier = BigInt(leBuff2Int(bytes.slice(0, 31)).toString());
|
||||
@ -246,7 +276,8 @@ export class Invoice {
|
||||
throw new Error('The invoice has invalid format');
|
||||
}
|
||||
|
||||
const { currency, amount, netId, invoice, commitmentHex } = parsedInvoice;
|
||||
const { currency, amount, netId, invoice, commitmentHex } =
|
||||
parsedInvoice;
|
||||
|
||||
this.currency = currency;
|
||||
this.amount = amount;
|
||||
|
||||
@ -1,6 +1,25 @@
|
||||
import { getEncryptionPublicKey, encrypt, decrypt, EthEncryptedData } from '@metamask/eth-sig-util';
|
||||
import { JsonRpcApiProvider, Signer, Wallet, computeAddress, getAddress } from 'ethers';
|
||||
import { base64ToBytes, bytesToBase64, bytesToHex, hexToBytes, toFixedHex, concatBytes, rHex } from './utils';
|
||||
import {
|
||||
getEncryptionPublicKey,
|
||||
encrypt,
|
||||
decrypt,
|
||||
EthEncryptedData,
|
||||
} from '@metamask/eth-sig-util';
|
||||
import {
|
||||
JsonRpcApiProvider,
|
||||
Signer,
|
||||
Wallet,
|
||||
computeAddress,
|
||||
getAddress,
|
||||
} from 'ethers';
|
||||
import {
|
||||
base64ToBytes,
|
||||
bytesToBase64,
|
||||
bytesToHex,
|
||||
hexToBytes,
|
||||
toFixedHex,
|
||||
concatBytes,
|
||||
rHex,
|
||||
} from './utils';
|
||||
import { EchoEvents, EncryptedNotesEvents } from './events';
|
||||
|
||||
export interface NoteToEncrypt {
|
||||
@ -14,12 +33,23 @@ export interface DecryptedNotes {
|
||||
noteHex: string;
|
||||
}
|
||||
|
||||
export function packEncryptedMessage({ nonce, ephemPublicKey, ciphertext }: EthEncryptedData) {
|
||||
export function packEncryptedMessage({
|
||||
nonce,
|
||||
ephemPublicKey,
|
||||
ciphertext,
|
||||
}: EthEncryptedData) {
|
||||
const nonceBuf = toFixedHex(bytesToHex(base64ToBytes(nonce)), 24);
|
||||
const ephemPublicKeyBuf = toFixedHex(bytesToHex(base64ToBytes(ephemPublicKey)), 32);
|
||||
const ephemPublicKeyBuf = toFixedHex(
|
||||
bytesToHex(base64ToBytes(ephemPublicKey)),
|
||||
32,
|
||||
);
|
||||
const ciphertextBuf = bytesToHex(base64ToBytes(ciphertext));
|
||||
|
||||
const messageBuff = concatBytes(hexToBytes(nonceBuf), hexToBytes(ephemPublicKeyBuf), hexToBytes(ciphertextBuf));
|
||||
const messageBuff = concatBytes(
|
||||
hexToBytes(nonceBuf),
|
||||
hexToBytes(ephemPublicKeyBuf),
|
||||
hexToBytes(ciphertextBuf),
|
||||
);
|
||||
|
||||
return bytesToHex(messageBuff);
|
||||
}
|
||||
@ -75,7 +105,10 @@ export class NoteAccount {
|
||||
static async getSignerPublicKey(signer: Signer | Wallet) {
|
||||
if ((signer as Wallet).privateKey) {
|
||||
const wallet = signer as Wallet;
|
||||
const privateKey = wallet.privateKey.slice(0, 2) === '0x' ? wallet.privateKey.slice(2) : wallet.privateKey;
|
||||
const privateKey =
|
||||
wallet.privateKey.slice(0, 2) === '0x'
|
||||
? wallet.privateKey.slice(2)
|
||||
: wallet.privateKey;
|
||||
|
||||
// Should return base64 encoded public key
|
||||
return getEncryptionPublicKey(privateKey);
|
||||
@ -113,8 +146,13 @@ export class NoteAccount {
|
||||
/**
|
||||
* Decrypt Echoer backuped note encryption account with private keys
|
||||
*/
|
||||
static async decryptSignerNoteAccounts(signer: Signer | Wallet, events: EchoEvents[]): Promise<NoteAccount[]> {
|
||||
const signerAddress = (signer as (Signer & { address: string }) | Wallet).address;
|
||||
static async decryptSignerNoteAccounts(
|
||||
signer: Signer | Wallet,
|
||||
events: EchoEvents[],
|
||||
): Promise<NoteAccount[]> {
|
||||
const signerAddress = (
|
||||
signer as (Signer & { address: string }) | Wallet
|
||||
).address;
|
||||
|
||||
const decryptedEvents = [];
|
||||
|
||||
@ -124,21 +162,26 @@ export class NoteAccount {
|
||||
}
|
||||
|
||||
try {
|
||||
const unpackedMessage = unpackEncryptedMessage(event.encryptedAccount);
|
||||
const unpackedMessage = unpackEncryptedMessage(
|
||||
event.encryptedAccount,
|
||||
);
|
||||
|
||||
let recoveryKey;
|
||||
|
||||
if ((signer as Wallet).privateKey) {
|
||||
const wallet = signer as Wallet;
|
||||
const privateKey =
|
||||
wallet.privateKey.slice(0, 2) === '0x' ? wallet.privateKey.slice(2) : wallet.privateKey;
|
||||
wallet.privateKey.slice(0, 2) === '0x'
|
||||
? wallet.privateKey.slice(2)
|
||||
: wallet.privateKey;
|
||||
|
||||
recoveryKey = decrypt({
|
||||
encryptedData: unpackedMessage,
|
||||
privateKey,
|
||||
});
|
||||
} else {
|
||||
const { version, nonce, ephemPublicKey, ciphertext } = unpackedMessage;
|
||||
const { version, nonce, ephemPublicKey, ciphertext } =
|
||||
unpackedMessage;
|
||||
|
||||
const unpackedBuffer = bytesToHex(
|
||||
new TextEncoder().encode(
|
||||
@ -153,7 +196,10 @@ export class NoteAccount {
|
||||
|
||||
const provider = signer.provider as JsonRpcApiProvider;
|
||||
|
||||
recoveryKey = await provider.send('eth_decrypt', [unpackedBuffer, signerAddress]);
|
||||
recoveryKey = await provider.send('eth_decrypt', [
|
||||
unpackedBuffer,
|
||||
signerAddress,
|
||||
]);
|
||||
}
|
||||
|
||||
decryptedEvents.push(
|
||||
@ -176,7 +222,9 @@ export class NoteAccount {
|
||||
|
||||
for (const event of events) {
|
||||
try {
|
||||
const unpackedMessage = unpackEncryptedMessage(event.encryptedNote);
|
||||
const unpackedMessage = unpackEncryptedMessage(
|
||||
event.encryptedNote,
|
||||
);
|
||||
|
||||
const [address, noteHex] = decrypt({
|
||||
encryptedData: unpackedMessage,
|
||||
|
||||
48
src/ens.ts
48
src/ens.ts
@ -1,4 +1,10 @@
|
||||
import { namehash, EnsResolver, AbstractProvider, keccak256, Signer } from 'ethers';
|
||||
import {
|
||||
namehash,
|
||||
EnsResolver,
|
||||
AbstractProvider,
|
||||
keccak256,
|
||||
Signer,
|
||||
} from 'ethers';
|
||||
|
||||
import {
|
||||
ENSNameWrapper,
|
||||
@ -24,7 +30,10 @@ export function labelhash(label: string) {
|
||||
if (!label) {
|
||||
return bytesToHex(new Uint8Array(32).fill(0));
|
||||
}
|
||||
return encodedLabelToLabelhash(label) || keccak256(new TextEncoder().encode(label));
|
||||
return (
|
||||
encodedLabelToLabelhash(label) ||
|
||||
keccak256(new TextEncoder().encode(label))
|
||||
);
|
||||
}
|
||||
|
||||
export function makeLabelNodeAndParent(name: string) {
|
||||
@ -75,11 +84,21 @@ export class ENSUtils {
|
||||
async getContracts() {
|
||||
const { chainId } = await this.provider.getNetwork();
|
||||
|
||||
const { ensRegistry, ensPublicResolver, ensNameWrapper } = EnsContracts[Number(chainId)];
|
||||
const { ensRegistry, ensPublicResolver, ensNameWrapper } =
|
||||
EnsContracts[Number(chainId)];
|
||||
|
||||
this.ENSRegistry = ENSRegistry__factory.connect(ensRegistry, this.provider);
|
||||
this.ENSResolver = ENSResolver__factory.connect(ensPublicResolver, this.provider);
|
||||
this.ENSNameWrapper = ENSNameWrapper__factory.connect(ensNameWrapper, this.provider);
|
||||
this.ENSRegistry = ENSRegistry__factory.connect(
|
||||
ensRegistry,
|
||||
this.provider,
|
||||
);
|
||||
this.ENSResolver = ENSResolver__factory.connect(
|
||||
ensPublicResolver,
|
||||
this.provider,
|
||||
);
|
||||
this.ENSNameWrapper = ENSNameWrapper__factory.connect(
|
||||
ensNameWrapper,
|
||||
this.provider,
|
||||
);
|
||||
}
|
||||
|
||||
async getOwner(name: string) {
|
||||
@ -98,7 +117,9 @@ export class ENSUtils {
|
||||
|
||||
const owner = (signer as unknown as { address: string }).address;
|
||||
|
||||
const nameWrapper = (this.ENSNameWrapper as ENSNameWrapper).connect(signer);
|
||||
const nameWrapper = (this.ENSNameWrapper as ENSNameWrapper).connect(
|
||||
signer,
|
||||
);
|
||||
|
||||
const { labelhash } = makeLabelNodeAndParent(name);
|
||||
|
||||
@ -118,12 +139,21 @@ export class ENSUtils {
|
||||
|
||||
const { labelhash, parentNode } = makeLabelNodeAndParent(name);
|
||||
|
||||
return registry.setSubnodeRecord(parentNode, labelhash, owner, resolver.target, BigInt(0));
|
||||
return registry.setSubnodeRecord(
|
||||
parentNode,
|
||||
labelhash,
|
||||
owner,
|
||||
resolver.target,
|
||||
BigInt(0),
|
||||
);
|
||||
}
|
||||
|
||||
// https://github.com/ensdomains/ensjs/blob/main/packages/ensjs/src/functions/wallet/setTextRecord.ts
|
||||
async setText(signer: Signer, name: string, key: string, value: string) {
|
||||
const resolver = ENSResolver__factory.connect((await this.getResolver(name))?.address as string, signer);
|
||||
const resolver = ENSResolver__factory.connect(
|
||||
(await this.getResolver(name))?.address as string,
|
||||
signer,
|
||||
);
|
||||
|
||||
return resolver.setText(namehash(name), key, value);
|
||||
}
|
||||
|
||||
@ -34,7 +34,11 @@ import {
|
||||
} from '../batch';
|
||||
|
||||
import { fetchData, fetchDataOptions } from '../providers';
|
||||
import { enabledChains, type NetIdType, type SubdomainMap } from '../networkConfig';
|
||||
import {
|
||||
enabledChains,
|
||||
type NetIdType,
|
||||
type SubdomainMap,
|
||||
} from '../networkConfig';
|
||||
import { RelayerParams, MIN_STAKE_BALANCE } from '../relayerClient';
|
||||
import type { TovarishClient } from '../tovarishClient';
|
||||
|
||||
@ -159,18 +163,39 @@ export class BaseEventsService<EventType extends MinimalEvents> {
|
||||
}
|
||||
|
||||
/* eslint-disable @typescript-eslint/no-unused-vars */
|
||||
updateEventProgress({ percentage, type, fromBlock, toBlock, count }: Parameters<BatchEventOnProgress>[0]) {}
|
||||
updateEventProgress({
|
||||
percentage,
|
||||
type,
|
||||
fromBlock,
|
||||
toBlock,
|
||||
count,
|
||||
}: Parameters<BatchEventOnProgress>[0]) {}
|
||||
|
||||
updateBlockProgress({ percentage, currentIndex, totalIndex }: Parameters<BatchBlockOnProgress>[0]) {}
|
||||
updateBlockProgress({
|
||||
percentage,
|
||||
currentIndex,
|
||||
totalIndex,
|
||||
}: Parameters<BatchBlockOnProgress>[0]) {}
|
||||
|
||||
updateTransactionProgress({ percentage, currentIndex, totalIndex }: Parameters<BatchBlockOnProgress>[0]) {}
|
||||
updateTransactionProgress({
|
||||
percentage,
|
||||
currentIndex,
|
||||
totalIndex,
|
||||
}: Parameters<BatchBlockOnProgress>[0]) {}
|
||||
|
||||
updateGraphProgress({ type, fromBlock, toBlock, count }: Parameters<BatchGraphOnProgress>[0]) {}
|
||||
updateGraphProgress({
|
||||
type,
|
||||
fromBlock,
|
||||
toBlock,
|
||||
count,
|
||||
}: Parameters<BatchGraphOnProgress>[0]) {}
|
||||
/* eslint-enable @typescript-eslint/no-unused-vars */
|
||||
|
||||
async formatEvents(events: EventLog[]): Promise<EventType[]> {
|
||||
// eslint-disable-next-line no-return-await
|
||||
return await new Promise((resolve) => resolve(events as unknown as EventType[]));
|
||||
return await new Promise((resolve) =>
|
||||
resolve(events as unknown as EventType[]),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -195,7 +220,9 @@ export class BaseEventsService<EventType extends MinimalEvents> {
|
||||
};
|
||||
}
|
||||
|
||||
async getSavedEvents(): Promise<BaseEvents<EventType> | CachedEvents<EventType>> {
|
||||
async getSavedEvents(): Promise<
|
||||
BaseEvents<EventType> | CachedEvents<EventType>
|
||||
> {
|
||||
let dbEvents = await this.getEventsFromDB();
|
||||
|
||||
if (!dbEvents.lastBlock) {
|
||||
@ -224,7 +251,9 @@ export class BaseEventsService<EventType extends MinimalEvents> {
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const { events, lastSyncBlock } = (await (graph as any)[methodName || this.getGraphMethod()]({
|
||||
const { events, lastSyncBlock } = (await (graph as any)[
|
||||
methodName || this.getGraphMethod()
|
||||
]({
|
||||
fromBlock,
|
||||
...this.getGraphParams(),
|
||||
})) as BaseGraphEvents<EventType>;
|
||||
@ -276,9 +305,17 @@ export class BaseEventsService<EventType extends MinimalEvents> {
|
||||
}
|
||||
}
|
||||
|
||||
async getLatestEvents({ fromBlock }: { fromBlock: number }): Promise<BaseEvents<EventType>> {
|
||||
if (this.tovarishClient?.selectedRelayer && ![DEPOSIT, WITHDRAWAL].includes(this.type.toLowerCase())) {
|
||||
const { events, lastSyncBlock: lastBlock } = await this.tovarishClient.getEvents<EventType>({
|
||||
async getLatestEvents({
|
||||
fromBlock,
|
||||
}: {
|
||||
fromBlock: number;
|
||||
}): Promise<BaseEvents<EventType>> {
|
||||
if (
|
||||
this.tovarishClient?.selectedRelayer &&
|
||||
![DEPOSIT, WITHDRAWAL].includes(this.type.toLowerCase())
|
||||
) {
|
||||
const { events, lastSyncBlock: lastBlock } =
|
||||
await this.tovarishClient.getEvents<EventType>({
|
||||
type: this.getTovarishType(),
|
||||
fromBlock,
|
||||
});
|
||||
@ -291,7 +328,9 @@ export class BaseEventsService<EventType extends MinimalEvents> {
|
||||
|
||||
const graphEvents = await this.getEventsFromGraph({ fromBlock });
|
||||
const lastSyncBlock =
|
||||
graphEvents.lastBlock && graphEvents.lastBlock >= fromBlock ? graphEvents.lastBlock : fromBlock;
|
||||
graphEvents.lastBlock && graphEvents.lastBlock >= fromBlock
|
||||
? graphEvents.lastBlock
|
||||
: fromBlock;
|
||||
const rpcEvents = await this.getEventsFromRpc({
|
||||
fromBlock: lastSyncBlock,
|
||||
});
|
||||
@ -335,7 +374,10 @@ export class BaseEventsService<EventType extends MinimalEvents> {
|
||||
|
||||
const eventSet = new Set();
|
||||
|
||||
const allEvents: EventType[] = [...savedEvents.events, ...newEvents.events]
|
||||
const allEvents: EventType[] = [
|
||||
...savedEvents.events,
|
||||
...newEvents.events,
|
||||
]
|
||||
.sort((a, b) => {
|
||||
if (a.blockNumber === b.blockNumber) {
|
||||
return a.logIndex - b.logIndex;
|
||||
@ -349,7 +391,8 @@ export class BaseEventsService<EventType extends MinimalEvents> {
|
||||
return !hasEvent;
|
||||
});
|
||||
|
||||
const lastBlock = newEvents.lastBlock || allEvents[allEvents.length - 1]?.blockNumber;
|
||||
const lastBlock =
|
||||
newEvents.lastBlock || allEvents[allEvents.length - 1]?.blockNumber;
|
||||
|
||||
const validateResult = await this.validateEvents<S>({
|
||||
events: allEvents,
|
||||
@ -358,7 +401,10 @@ export class BaseEventsService<EventType extends MinimalEvents> {
|
||||
});
|
||||
|
||||
// If the events are loaded from cache or we have found new events, save them
|
||||
if ((savedEvents as CachedEvents<EventType>).fromCache || newEvents.events.length) {
|
||||
if (
|
||||
(savedEvents as CachedEvents<EventType>).fromCache ||
|
||||
newEvents.events.length
|
||||
) {
|
||||
await this.saveEvents({ events: allEvents, lastBlock });
|
||||
}
|
||||
|
||||
@ -370,7 +416,8 @@ export class BaseEventsService<EventType extends MinimalEvents> {
|
||||
}
|
||||
}
|
||||
|
||||
export interface BaseTornadoServiceConstructor extends Omit<BaseEventsServiceConstructor, 'contract'> {
|
||||
export interface BaseTornadoServiceConstructor
|
||||
extends Omit<BaseEventsServiceConstructor, 'contract'> {
|
||||
Tornado: Tornado;
|
||||
amount: string;
|
||||
currency: string;
|
||||
@ -383,7 +430,9 @@ export interface DepositsGraphParams extends BaseGraphParams {
|
||||
currency: string;
|
||||
}
|
||||
|
||||
export class BaseTornadoService extends BaseEventsService<DepositsEvents | WithdrawalsEvents> {
|
||||
export class BaseTornadoService extends BaseEventsService<
|
||||
DepositsEvents | WithdrawalsEvents
|
||||
> {
|
||||
amount: string;
|
||||
currency: string;
|
||||
|
||||
@ -393,7 +442,14 @@ export class BaseTornadoService extends BaseEventsService<DepositsEvents | Withd
|
||||
batchBlockService: BatchBlockService;
|
||||
|
||||
constructor(serviceConstructor: BaseTornadoServiceConstructor) {
|
||||
const { Tornado: contract, amount, currency, provider, optionalTree, merkleTreeService } = serviceConstructor;
|
||||
const {
|
||||
Tornado: contract,
|
||||
amount,
|
||||
currency,
|
||||
provider,
|
||||
optionalTree,
|
||||
merkleTreeService,
|
||||
} = serviceConstructor;
|
||||
|
||||
super({
|
||||
...serviceConstructor,
|
||||
@ -436,10 +492,13 @@ export class BaseTornadoService extends BaseEventsService<DepositsEvents | Withd
|
||||
};
|
||||
}
|
||||
|
||||
async formatEvents(events: EventLog[]): Promise<(DepositsEvents | WithdrawalsEvents)[]> {
|
||||
async formatEvents(
|
||||
events: EventLog[],
|
||||
): Promise<(DepositsEvents | WithdrawalsEvents)[]> {
|
||||
const type = this.getType().toLowerCase();
|
||||
if (type === DEPOSIT) {
|
||||
const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args }) => {
|
||||
const formattedEvents = events.map(
|
||||
({ blockNumber, index: logIndex, transactionHash, args }) => {
|
||||
const { commitment, leafIndex, timestamp } = args;
|
||||
|
||||
return {
|
||||
@ -450,14 +509,23 @@ export class BaseTornadoService extends BaseEventsService<DepositsEvents | Withd
|
||||
leafIndex: Number(leafIndex),
|
||||
timestamp: Number(timestamp),
|
||||
};
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
const txs = await this.batchTransactionService.getBatchTransactions([
|
||||
...new Set(formattedEvents.map(({ transactionHash }) => transactionHash)),
|
||||
]);
|
||||
const txs = await this.batchTransactionService.getBatchTransactions(
|
||||
[
|
||||
...new Set(
|
||||
formattedEvents.map(
|
||||
({ transactionHash }) => transactionHash,
|
||||
),
|
||||
),
|
||||
],
|
||||
);
|
||||
|
||||
return formattedEvents.map((event) => {
|
||||
const { from } = txs.find(({ hash }) => hash === event.transactionHash) as TransactionResponse;
|
||||
const { from } = txs.find(
|
||||
({ hash }) => hash === event.transactionHash,
|
||||
) as TransactionResponse;
|
||||
|
||||
return {
|
||||
...event,
|
||||
@ -465,7 +533,8 @@ export class BaseTornadoService extends BaseEventsService<DepositsEvents | Withd
|
||||
};
|
||||
});
|
||||
} else {
|
||||
const formattedEvents = events.map(({ blockNumber, index: logIndex, transactionHash, args }) => {
|
||||
const formattedEvents = events.map(
|
||||
({ blockNumber, index: logIndex, transactionHash, args }) => {
|
||||
const { nullifierHash, to, fee } = args;
|
||||
|
||||
return {
|
||||
@ -476,14 +545,19 @@ export class BaseTornadoService extends BaseEventsService<DepositsEvents | Withd
|
||||
to: getAddress(to),
|
||||
fee: String(fee),
|
||||
};
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
const blocks = await this.batchBlockService.getBatchBlocks([
|
||||
...new Set(formattedEvents.map(({ blockNumber }) => blockNumber)),
|
||||
...new Set(
|
||||
formattedEvents.map(({ blockNumber }) => blockNumber),
|
||||
),
|
||||
]);
|
||||
|
||||
return formattedEvents.map((event) => {
|
||||
const { timestamp } = blocks.find(({ number }) => number === event.blockNumber) as Block;
|
||||
const { timestamp } = blocks.find(
|
||||
({ number }) => number === event.blockNumber,
|
||||
) as Block;
|
||||
|
||||
return {
|
||||
...event,
|
||||
@ -509,8 +583,13 @@ export class BaseTornadoService extends BaseEventsService<DepositsEvents | Withd
|
||||
throw new Error(errMsg);
|
||||
}
|
||||
|
||||
if (this.merkleTreeService && (!this.optionalTree || hasNewEvents)) {
|
||||
return (await this.merkleTreeService.verifyTree(depositEvents)) as S;
|
||||
if (
|
||||
this.merkleTreeService &&
|
||||
(!this.optionalTree || hasNewEvents)
|
||||
) {
|
||||
return (await this.merkleTreeService.verifyTree(
|
||||
depositEvents,
|
||||
)) as S;
|
||||
}
|
||||
}
|
||||
|
||||
@ -523,7 +602,8 @@ export class BaseTornadoService extends BaseEventsService<DepositsEvents | Withd
|
||||
fromBlock: number;
|
||||
}): Promise<BaseEvents<DepositsEvents | WithdrawalsEvents>> {
|
||||
if (this.tovarishClient?.selectedRelayer) {
|
||||
const { events, lastSyncBlock: lastBlock } = await this.tovarishClient.getEvents<
|
||||
const { events, lastSyncBlock: lastBlock } =
|
||||
await this.tovarishClient.getEvents<
|
||||
DepositsEvents | WithdrawalsEvents
|
||||
>({
|
||||
type: this.getTovarishType(),
|
||||
@ -542,7 +622,8 @@ export class BaseTornadoService extends BaseEventsService<DepositsEvents | Withd
|
||||
}
|
||||
}
|
||||
|
||||
export interface BaseEchoServiceConstructor extends Omit<BaseEventsServiceConstructor, 'contract' | 'type'> {
|
||||
export interface BaseEchoServiceConstructor
|
||||
extends Omit<BaseEventsServiceConstructor, 'contract' | 'type'> {
|
||||
Echoer: Echoer;
|
||||
}
|
||||
|
||||
@ -585,7 +666,11 @@ export class BaseEchoService extends BaseEventsService<EchoEvents> {
|
||||
.filter((e) => e) as EchoEvents[];
|
||||
}
|
||||
|
||||
async getEventsFromGraph({ fromBlock }: { fromBlock: number }): Promise<BaseEvents<EchoEvents>> {
|
||||
async getEventsFromGraph({
|
||||
fromBlock,
|
||||
}: {
|
||||
fromBlock: number;
|
||||
}): Promise<BaseEvents<EchoEvents>> {
|
||||
// TheGraph doesn't support our batch sync due to missing blockNumber field
|
||||
if (!this.graphApi || this.graphApi.includes('api.thegraph.com')) {
|
||||
return {
|
||||
@ -598,7 +683,8 @@ export class BaseEchoService extends BaseEventsService<EchoEvents> {
|
||||
}
|
||||
}
|
||||
|
||||
export interface BaseEncryptedNotesServiceConstructor extends Omit<BaseEventsServiceConstructor, 'contract' | 'type'> {
|
||||
export interface BaseEncryptedNotesServiceConstructor
|
||||
extends Omit<BaseEventsServiceConstructor, 'contract' | 'type'> {
|
||||
Router: TornadoRouter | TornadoProxyLight;
|
||||
}
|
||||
|
||||
@ -657,12 +743,16 @@ export const proposalState: { [key: string]: string } = {
|
||||
6: 'Expired',
|
||||
};
|
||||
|
||||
function parseDescription(id: number, text: string): { title: string; description: string } {
|
||||
function parseDescription(
|
||||
id: number,
|
||||
text: string,
|
||||
): { title: string; description: string } {
|
||||
switch (id) {
|
||||
case 1:
|
||||
return {
|
||||
title: text,
|
||||
description: 'See: https://torn.community/t/proposal-1-enable-torn-transfers/38',
|
||||
description:
|
||||
'See: https://torn.community/t/proposal-1-enable-torn-transfers/38',
|
||||
};
|
||||
case 10:
|
||||
text = text.replace('\n', '\\n\\n');
|
||||
@ -705,10 +795,16 @@ function parseDescription(id: number, text: string): { title: string; descriptio
|
||||
};
|
||||
}
|
||||
|
||||
function parseComment(Governance: Governance, calldata: string): { contact: string; message: string } {
|
||||
function parseComment(
|
||||
Governance: Governance,
|
||||
calldata: string,
|
||||
): { contact: string; message: string } {
|
||||
try {
|
||||
const methodLength = 4;
|
||||
const result = abiCoder.decode(['address[]', 'uint256', 'bool'], dataSlice(calldata, methodLength));
|
||||
const result = abiCoder.decode(
|
||||
['address[]', 'uint256', 'bool'],
|
||||
dataSlice(calldata, methodLength),
|
||||
);
|
||||
const data = Governance.interface.encodeFunctionData(
|
||||
// @ts-expect-error encodeFunctionData is broken lol
|
||||
'castDelegatedVote',
|
||||
@ -716,7 +812,10 @@ function parseComment(Governance: Governance, calldata: string): { contact: stri
|
||||
);
|
||||
const length = dataLength(data);
|
||||
|
||||
const str: string = abiCoder.decode(['string'], dataSlice(calldata, length))[0];
|
||||
const str: string = abiCoder.decode(
|
||||
['string'],
|
||||
dataSlice(calldata, length),
|
||||
)[0];
|
||||
const [contact, message] = JSON.parse(str) as string[];
|
||||
|
||||
return {
|
||||
@ -749,7 +848,8 @@ export interface GovernanceVotes extends GovernanceVotedEvents {
|
||||
voterName?: string;
|
||||
}
|
||||
|
||||
export interface BaseGovernanceServiceConstructor extends Omit<BaseEventsServiceConstructor, 'contract' | 'type'> {
|
||||
export interface BaseGovernanceServiceConstructor
|
||||
extends Omit<BaseEventsServiceConstructor, 'contract' | 'type'> {
|
||||
Governance: Governance;
|
||||
Aggregator: Aggregator;
|
||||
ReverseRecords: ReverseRecords;
|
||||
@ -763,7 +863,8 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
|
||||
batchTransactionService: BatchTransactionService;
|
||||
|
||||
constructor(serviceConstructor: BaseGovernanceServiceConstructor) {
|
||||
const { Governance, Aggregator, ReverseRecords, provider } = serviceConstructor;
|
||||
const { Governance, Aggregator, ReverseRecords, provider } =
|
||||
serviceConstructor;
|
||||
|
||||
super({
|
||||
...serviceConstructor,
|
||||
@ -799,7 +900,14 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
|
||||
const delegatedEvents: GovernanceDelegatedEvents[] = [];
|
||||
const undelegatedEvents: GovernanceUndelegatedEvents[] = [];
|
||||
|
||||
events.forEach(({ blockNumber, index: logIndex, transactionHash, args, eventName: event }) => {
|
||||
events.forEach(
|
||||
({
|
||||
blockNumber,
|
||||
index: logIndex,
|
||||
transactionHash,
|
||||
args,
|
||||
eventName: event,
|
||||
}) => {
|
||||
const eventObjects = {
|
||||
blockNumber,
|
||||
logIndex,
|
||||
@ -808,7 +916,14 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
|
||||
};
|
||||
|
||||
if (event === 'ProposalCreated') {
|
||||
const { id, proposer, target, startTime, endTime, description } = args;
|
||||
const {
|
||||
id,
|
||||
proposer,
|
||||
target,
|
||||
startTime,
|
||||
endTime,
|
||||
description,
|
||||
} = args;
|
||||
|
||||
proposalEvents.push({
|
||||
...eventObjects,
|
||||
@ -854,18 +969,27 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
|
||||
delegateFrom,
|
||||
});
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (votedEvents.length) {
|
||||
this.updateTransactionProgress({ percentage: 0 });
|
||||
|
||||
const txs = await this.batchTransactionService.getBatchTransactions([
|
||||
...new Set(votedEvents.map(({ transactionHash }) => transactionHash)),
|
||||
]);
|
||||
const txs = await this.batchTransactionService.getBatchTransactions(
|
||||
[
|
||||
...new Set(
|
||||
votedEvents.map(
|
||||
({ transactionHash }) => transactionHash,
|
||||
),
|
||||
),
|
||||
],
|
||||
);
|
||||
|
||||
votedEvents.forEach((event, index) => {
|
||||
// eslint-disable-next-line prefer-const
|
||||
let { data: input, from } = txs.find((t) => t.hash === event.transactionHash) as TransactionResponse;
|
||||
let { data: input, from } = txs.find(
|
||||
(t) => t.hash === event.transactionHash,
|
||||
) as TransactionResponse;
|
||||
|
||||
// Filter spammy txs
|
||||
if (!input || input.length > 2048) {
|
||||
@ -877,12 +1001,25 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
|
||||
});
|
||||
}
|
||||
|
||||
return [...proposalEvents, ...votedEvents, ...delegatedEvents, ...undelegatedEvents];
|
||||
return [
|
||||
...proposalEvents,
|
||||
...votedEvents,
|
||||
...delegatedEvents,
|
||||
...undelegatedEvents,
|
||||
];
|
||||
}
|
||||
|
||||
async getEventsFromGraph({ fromBlock }: { fromBlock: number }): Promise<BaseEvents<AllGovernanceEvents>> {
|
||||
async getEventsFromGraph({
|
||||
fromBlock,
|
||||
}: {
|
||||
fromBlock: number;
|
||||
}): Promise<BaseEvents<AllGovernanceEvents>> {
|
||||
// TheGraph doesn't support governance subgraphs
|
||||
if (!this.graphApi || !this.subgraphName || this.graphApi.includes('api.thegraph.com')) {
|
||||
if (
|
||||
!this.graphApi ||
|
||||
!this.subgraphName ||
|
||||
this.graphApi.includes('api.thegraph.com')
|
||||
) {
|
||||
return {
|
||||
events: [],
|
||||
lastBlock: fromBlock,
|
||||
@ -895,11 +1032,16 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
|
||||
async getAllProposals(): Promise<GovernanceProposals[]> {
|
||||
const { events } = await this.updateEvents();
|
||||
|
||||
const proposalEvents = events.filter((e) => e.event === 'ProposalCreated') as GovernanceProposalCreatedEvents[];
|
||||
const proposalEvents = events.filter(
|
||||
(e) => e.event === 'ProposalCreated',
|
||||
) as GovernanceProposalCreatedEvents[];
|
||||
|
||||
const allProposers = [...new Set(proposalEvents.map((e) => [e.proposer]).flat())];
|
||||
const allProposers = [
|
||||
...new Set(proposalEvents.map((e) => [e.proposer]).flat()),
|
||||
];
|
||||
|
||||
const [QUORUM_VOTES, proposalStatus, proposerNameRecords] = await Promise.all([
|
||||
const [QUORUM_VOTES, proposalStatus, proposerNameRecords] =
|
||||
await Promise.all([
|
||||
this.Governance.QUORUM_VOTES(),
|
||||
this.Aggregator.getAllProposals(this.Governance.target),
|
||||
this.ReverseRecords.getNames(allProposers),
|
||||
@ -920,11 +1062,16 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
|
||||
|
||||
const status = proposalStatus[index];
|
||||
|
||||
const { forVotes, againstVotes, executed, extended, state } = status;
|
||||
const { forVotes, againstVotes, executed, extended, state } =
|
||||
status;
|
||||
|
||||
const { title, description } = parseDescription(id, text);
|
||||
|
||||
const quorum = ((Number(forVotes + againstVotes) / Number(QUORUM_VOTES)) * 100).toFixed(0) + '%';
|
||||
const quorum =
|
||||
(
|
||||
(Number(forVotes + againstVotes) / Number(QUORUM_VOTES)) *
|
||||
100
|
||||
).toFixed(0) + '%';
|
||||
|
||||
return {
|
||||
...event,
|
||||
@@ -945,10 +1092,14 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
const { events } = await this.getSavedEvents();

const votedEvents = events.filter(
(e) => e.event === 'Voted' && (e as GovernanceVotedEvents).proposalId === proposalId,
(e) =>
e.event === 'Voted' &&
(e as GovernanceVotedEvents).proposalId === proposalId,
) as GovernanceVotedEvents[];

const allVoters = [...new Set(votedEvents.map((e) => [e.from, e.voter]).flat())];
const allVoters = [
...new Set(votedEvents.map((e) => [e.from, e.voter]).flat()),
];

const names = await this.ReverseRecords.getNames(allVoters);

@@ -965,7 +1116,10 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
const votes = votedEvents.map((event) => {
const { from, voter } = event;

const { contact, message } = parseComment(this.Governance, event.input);
const { contact, message } = parseComment(
this.Governance,
event.input,
);

return {
...event,
@@ -983,11 +1137,20 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
const { events } = await this.getSavedEvents();

const delegatedAccs = events
.filter((e) => e.event === 'Delegated' && (e as GovernanceDelegatedEvents).delegateTo === ethAccount)
.filter(
(e) =>
e.event === 'Delegated' &&
(e as GovernanceDelegatedEvents).delegateTo === ethAccount,
)
.map((e) => (e as GovernanceDelegatedEvents).account);

const undelegatedAccs = events
.filter((e) => e.event === 'Undelegated' && (e as GovernanceUndelegatedEvents).delegateFrom === ethAccount)
.filter(
(e) =>
e.event === 'Undelegated' &&
(e as GovernanceUndelegatedEvents).delegateFrom ===
ethAccount,
)
.map((e) => (e as GovernanceUndelegatedEvents).account);

const undel = [...undelegatedAccs];
@@ -1027,20 +1190,28 @@ export class BaseGovernanceService extends BaseEventsService<AllGovernanceEvents
}
}

export async function getTovarishNetworks(registryService: BaseRegistryService, relayers: CachedRelayerInfo[]) {
export async function getTovarishNetworks(
registryService: BaseRegistryService,
relayers: CachedRelayerInfo[],
) {
await Promise.all(
relayers
.filter((r) => r.tovarishHost)
.map(async (relayer) => {
try {
relayer.tovarishNetworks = await fetchData(relayer.tovarishHost as string, {
relayer.tovarishNetworks = await fetchData(
relayer.tovarishHost as string,
{
...registryService.fetchDataOptions,
headers: {
'Content-Type': 'application/json',
},
timeout: 30000,
maxRetry: registryService.fetchDataOptions?.torPort ? 2 : 0,
});
maxRetry: registryService.fetchDataOptions?.torPort
? 2
: 0,
},
);
} catch {
// Ignore error and disable relayer
relayer.tovarishNetworks = [];
@@ -1085,7 +1256,8 @@ export interface CachedRelayers {
fromCache?: boolean;
}

export interface BaseRegistryServiceConstructor extends Omit<BaseEventsServiceConstructor, 'contract' | 'type'> {
export interface BaseRegistryServiceConstructor
extends Omit<BaseEventsServiceConstructor, 'contract' | 'type'> {
RelayerRegistry: RelayerRegistry;
Aggregator: Aggregator;
relayerEnsSubdomains: SubdomainMap;
@@ -1097,7 +1269,11 @@ export class BaseRegistryService extends BaseEventsService<RegistersEvents> {
updateInterval: number;

constructor(serviceConstructor: BaseRegistryServiceConstructor) {
const { RelayerRegistry: contract, Aggregator, relayerEnsSubdomains } = serviceConstructor;
const {
RelayerRegistry: contract,
Aggregator,
relayerEnsSubdomains,
} = serviceConstructor;

super({
...serviceConstructor,
@@ -1125,7 +1301,8 @@ export class BaseRegistryService extends BaseEventsService<RegistersEvents> {
}

async formatEvents(events: EventLog[]) {
return events.map(({ blockNumber, index: logIndex, transactionHash, args }) => {
return events.map(
({ blockNumber, index: logIndex, transactionHash, args }) => {
const eventObjects = {
blockNumber,
logIndex,
@@ -1137,7 +1314,8 @@ export class BaseRegistryService extends BaseEventsService<RegistersEvents> {
ensName: args.ensName,
relayerAddress: args.relayerAddress,
};
});
},
);
}

/**
@@ -1188,20 +1366,30 @@ export class BaseRegistryService extends BaseEventsService<RegistersEvents> {
return false;
});

const relayerNameHashes = uniqueRegisters.map((r) => namehash(r.ensName));
const relayerNameHashes = uniqueRegisters.map((r) =>
namehash(r.ensName),
);

const [relayersData, timestamp] = await Promise.all([
this.Aggregator.relayersData.staticCall(relayerNameHashes, subdomains.concat('tovarish-relayer')),
this.Aggregator.relayersData.staticCall(
relayerNameHashes,
subdomains.concat('tovarish-relayer'),
),
this.provider.getBlock(lastBlock).then((b) => Number(b?.timestamp)),
]);

const relayers = relayersData
.map(({ owner, balance: stakeBalance, records, isRegistered }, index) => {
.map(
(
{ owner, balance: stakeBalance, records, isRegistered },
index,
) => {
const { ensName, relayerAddress } = uniqueRegisters[index];

let tovarishHost = undefined;

const hostnames = records.reduce((acc, record, recordIndex) => {
const hostnames = records.reduce(
(acc, record, recordIndex) => {
if (record) {
// tovarish-relayer.relayer.eth
if (recordIndex === records.length - 1) {
@@ -1209,27 +1397,42 @@ export class BaseRegistryService extends BaseEventsService<RegistersEvents> {
return acc;
}

acc[Number(Object.keys(this.relayerEnsSubdomains)[recordIndex])] = record;
acc[
Number(
Object.keys(this.relayerEnsSubdomains)[
recordIndex
],
)
] = record;
}
return acc;
}, {} as SubdomainMap);
},
{} as SubdomainMap,
);

const hasMinBalance = stakeBalance >= MIN_STAKE_BALANCE;

const preCondition = Object.keys(hostnames).length && isRegistered && hasMinBalance;
const preCondition =
Object.keys(hostnames).length &&
isRegistered &&
hasMinBalance;

if (preCondition) {
return {
ensName,
relayerAddress: owner,
registeredAddress: owner !== relayerAddress ? relayerAddress : undefined,
registeredAddress:
owner !== relayerAddress
? relayerAddress
: undefined,
isRegistered,
stakeBalance: formatEther(stakeBalance),
hostnames,
tovarishHost,
} as CachedRelayerInfo;
}
})
},
)
.filter((r) => r) as CachedRelayerInfo[];

await getTovarishNetworks(this, relayers);
@@ -1256,14 +1459,19 @@ export class BaseRegistryService extends BaseEventsService<RegistersEvents> {
*/
async updateRelayers(): Promise<CachedRelayers> {
// eslint-disable-next-line prefer-const
let { lastBlock, timestamp, relayers, fromCache } = await this.getSavedRelayers();
let { lastBlock, timestamp, relayers, fromCache } =
await this.getSavedRelayers();

let shouldSave = fromCache ?? false;

if (!relayers.length || timestamp + this.updateInterval < Math.floor(Date.now() / 1000)) {
if (
!relayers.length ||
timestamp + this.updateInterval < Math.floor(Date.now() / 1000)
) {
console.log('\nUpdating relayers from registry\n');

({ lastBlock, timestamp, relayers } = await this.getLatestRelayers());
({ lastBlock, timestamp, relayers } =
await this.getLatestRelayers());

shouldSave = true;
}

@@ -152,7 +152,8 @@ export async function loadRemoteEvents<T extends MinimalEvents>({
}
}

export interface DBTornadoServiceConstructor extends BaseTornadoServiceConstructor {
export interface DBTornadoServiceConstructor
extends BaseTornadoServiceConstructor {
staticUrl: string;
idb: IndexedDB;
}
@@ -186,7 +187,10 @@ export class DBTornadoService extends BaseTornadoService {
});
}

async saveEvents({ events, lastBlock }: BaseEvents<DepositsEvents | WithdrawalsEvents>) {
async saveEvents({
events,
lastBlock,
}: BaseEvents<DepositsEvents | WithdrawalsEvents>) {
await saveDBEvents<DepositsEvents | WithdrawalsEvents>({
idb: this.idb,
instanceName: this.getInstanceName(),
@@ -240,7 +244,8 @@ export class DBEchoService extends BaseEchoService {
}
}

export interface DBEncryptedNotesServiceConstructor extends BaseEncryptedNotesServiceConstructor {
export interface DBEncryptedNotesServiceConstructor
extends BaseEncryptedNotesServiceConstructor {
staticUrl: string;
idb: IndexedDB;
}
@@ -284,7 +289,8 @@ export class DBEncryptedNotesService extends BaseEncryptedNotesService {
}
}

export interface DBGovernanceServiceConstructor extends BaseGovernanceServiceConstructor {
export interface DBGovernanceServiceConstructor
extends BaseGovernanceServiceConstructor {
staticUrl: string;
idb: IndexedDB;
}
@@ -328,7 +334,8 @@ export class DBGovernanceService extends BaseGovernanceService {
}
}

export interface DBRegistryServiceConstructor extends BaseRegistryServiceConstructor {
export interface DBRegistryServiceConstructor
extends BaseRegistryServiceConstructor {
staticUrl: string;
idb: IndexedDB;
}
src/fees.ts
@ -20,7 +20,10 @@ export function convertETHToTokenAmount(
|
||||
tokenDecimals: number = 18,
|
||||
): bigint {
|
||||
const tokenDecimalsMultiplier = BigInt(10 ** Number(tokenDecimals));
|
||||
return (BigInt(amountInWei) * tokenDecimalsMultiplier) / BigInt(tokenPriceInWei);
|
||||
return (
|
||||
(BigInt(amountInWei) * tokenDecimalsMultiplier) /
|
||||
BigInt(tokenPriceInWei)
|
||||
);
|
||||
}
|
||||
|
||||
export interface RelayerFeeParams {
|
||||
@ -40,7 +43,10 @@ export class TornadoFeeOracle {
|
||||
provider: JsonRpcApiProvider;
|
||||
ovmGasPriceOracle?: OvmGasPriceOracle;
|
||||
|
||||
constructor(provider: JsonRpcApiProvider, ovmGasPriceOracle?: OvmGasPriceOracle) {
|
||||
constructor(
|
||||
provider: JsonRpcApiProvider,
|
||||
ovmGasPriceOracle?: OvmGasPriceOracle,
|
||||
) {
|
||||
this.provider = provider;
|
||||
|
||||
if (ovmGasPriceOracle) {
|
||||
@ -67,14 +73,21 @@ export class TornadoFeeOracle {
|
||||
})(),
|
||||
(async () => {
|
||||
try {
|
||||
return BigInt(await this.provider.send('eth_maxPriorityFeePerGas', []));
|
||||
return BigInt(
|
||||
await this.provider.send(
|
||||
'eth_maxPriorityFeePerGas',
|
||||
[],
|
||||
),
|
||||
);
|
||||
} catch {
|
||||
return BigInt(0);
|
||||
}
|
||||
})(),
|
||||
]);
|
||||
|
||||
return block?.baseFeePerGas ? (block.baseFeePerGas * BigInt(15)) / BigInt(10) + getPriorityFee : getGasPrice;
|
||||
return block?.baseFeePerGas
|
||||
? (block.baseFeePerGas * BigInt(15)) / BigInt(10) + getPriorityFee
|
||||
: getGasPrice;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -100,7 +113,9 @@ export class TornadoFeeOracle {
|
||||
};
|
||||
}
|
||||
|
||||
return this.ovmGasPriceOracle.getL1Fee.staticCall(Transaction.from(tx).unsignedSerialized);
|
||||
return this.ovmGasPriceOracle.getL1Fee.staticCall(
|
||||
Transaction.from(tx).unsignedSerialized,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -109,14 +124,25 @@ export class TornadoFeeOracle {
|
||||
* Using 30 gwei for default but it is recommended to supply cached gasPrice value from the UI
|
||||
*/
|
||||
defaultEthRefund(gasPrice?: BigNumberish, gasLimit?: BigNumberish): bigint {
|
||||
return (gasPrice ? BigInt(gasPrice) : parseUnits('30', 'gwei')) * BigInt(gasLimit || 1_000_000);
|
||||
return (
|
||||
(gasPrice ? BigInt(gasPrice) : parseUnits('30', 'gwei')) *
|
||||
BigInt(gasLimit || 1_000_000)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates token amount for required ethRefund purchases required to calculate fees
|
||||
*/
|
||||
calculateTokenAmount(ethRefund: BigNumberish, tokenPriceInEth: BigNumberish, tokenDecimals?: number): bigint {
|
||||
return convertETHToTokenAmount(ethRefund, tokenPriceInEth, tokenDecimals);
|
||||
calculateTokenAmount(
|
||||
ethRefund: BigNumberish,
|
||||
tokenPriceInEth: BigNumberish,
|
||||
tokenDecimals?: number,
|
||||
): bigint {
|
||||
return convertETHToTokenAmount(
|
||||
ethRefund,
|
||||
tokenPriceInEth,
|
||||
tokenDecimals,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -137,17 +163,29 @@ export class TornadoFeeOracle {
|
||||
}: RelayerFeeParams): bigint {
|
||||
const gasCosts = BigInt(gasPrice) * BigInt(gasLimit) + BigInt(l1Fee);
|
||||
|
||||
const relayerFee = (BigInt(denomination) * BigInt(Math.floor(10000 * relayerFeePercent))) / BigInt(10000 * 100);
|
||||
const relayerFee =
|
||||
(BigInt(denomination) *
|
||||
BigInt(Math.floor(10000 * relayerFeePercent))) /
|
||||
BigInt(10000 * 100);
|
||||
|
||||
if (isEth) {
|
||||
// Add 20% premium
|
||||
return ((gasCosts + relayerFee) * BigInt(premiumPercent ? 100 + premiumPercent : 100)) / BigInt(100);
|
||||
return (
|
||||
((gasCosts + relayerFee) *
|
||||
BigInt(premiumPercent ? 100 + premiumPercent : 100)) /
|
||||
BigInt(100)
|
||||
);
|
||||
}
|
||||
|
||||
const feeInEth = gasCosts + BigInt(ethRefund);
|
||||
|
||||
return (
|
||||
((convertETHToTokenAmount(feeInEth, tokenPriceInWei, tokenDecimals) + relayerFee) *
|
||||
((convertETHToTokenAmount(
|
||||
feeInEth,
|
||||
tokenPriceInWei,
|
||||
tokenDecimals,
|
||||
) +
|
||||
relayerFee) *
|
||||
BigInt(premiumPercent ? 100 + premiumPercent : 100)) /
|
||||
BigInt(100)
|
||||
);
|
||||
|
||||
@ -147,7 +147,9 @@ export async function getStatistic({
|
||||
return {
|
||||
events,
|
||||
lastSyncBlock:
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock,
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock
|
||||
? lastEvent.blockNumber + 1
|
||||
: lastSyncBlock,
|
||||
};
|
||||
} catch (err) {
|
||||
console.log('Error from getStatistic query');
|
||||
@ -179,7 +181,11 @@ export interface getMetaReturns {
|
||||
hasIndexingErrors: null | boolean;
|
||||
}
|
||||
|
||||
export async function getMeta({ graphApi, subgraphName, fetchDataOptions }: getMetaParams): Promise<getMetaReturns> {
|
||||
export async function getMeta({
|
||||
graphApi,
|
||||
subgraphName,
|
||||
fetchDataOptions,
|
||||
}: getMetaParams): Promise<getMetaReturns> {
|
||||
try {
|
||||
const {
|
||||
_meta: {
|
||||
@ -297,7 +303,10 @@ export async function getAllRegisters({
|
||||
break;
|
||||
}
|
||||
|
||||
result = result.filter(({ blockRegistration }) => blockRegistration !== lastEvent.blockRegistration);
|
||||
result = result.filter(
|
||||
({ blockRegistration }) =>
|
||||
blockRegistration !== lastEvent.blockRegistration,
|
||||
);
|
||||
fromBlock = Number(lastEvent.blockRegistration);
|
||||
|
||||
events.push(...result);
|
||||
@ -310,7 +319,8 @@ export async function getAllRegisters({
|
||||
};
|
||||
}
|
||||
|
||||
const result = events.map(({ id, address, ensName, blockRegistration }) => {
|
||||
const result = events.map(
|
||||
({ id, address, ensName, blockRegistration }) => {
|
||||
const [transactionHash, logIndex] = id.split('-');
|
||||
|
||||
return {
|
||||
@ -320,7 +330,8 @@ export async function getAllRegisters({
|
||||
ensName,
|
||||
relayerAddress: getAddress(address),
|
||||
};
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
return {
|
||||
events: result,
|
||||
@ -435,7 +446,9 @@ export async function getAllDeposits({
|
||||
break;
|
||||
}
|
||||
|
||||
result = result.filter(({ blockNumber }) => blockNumber !== lastEvent.blockNumber);
|
||||
result = result.filter(
|
||||
({ blockNumber }) => blockNumber !== lastEvent.blockNumber,
|
||||
);
|
||||
fromBlock = Number(lastEvent.blockNumber);
|
||||
|
||||
events.push(...result);
|
||||
@ -448,7 +461,8 @@ export async function getAllDeposits({
|
||||
};
|
||||
}
|
||||
|
||||
const result = events.map(({ id, blockNumber, commitment, index, timestamp, from }) => {
|
||||
const result = events.map(
|
||||
({ id, blockNumber, commitment, index, timestamp, from }) => {
|
||||
const [transactionHash, logIndex] = id.split('-');
|
||||
|
||||
return {
|
||||
@ -460,14 +474,17 @@ export async function getAllDeposits({
|
||||
timestamp: Number(timestamp),
|
||||
from: getAddress(from),
|
||||
};
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
const [lastEvent] = result.slice(-1);
|
||||
|
||||
return {
|
||||
events: result,
|
||||
lastSyncBlock:
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock,
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock
|
||||
? lastEvent.blockNumber + 1
|
||||
: lastSyncBlock,
|
||||
};
|
||||
} catch (err) {
|
||||
console.log('Error from getAllDeposits query');
|
||||
@ -581,7 +598,9 @@ export async function getAllWithdrawals({
|
||||
break;
|
||||
}
|
||||
|
||||
result = result.filter(({ blockNumber }) => blockNumber !== lastEvent.blockNumber);
|
||||
result = result.filter(
|
||||
({ blockNumber }) => blockNumber !== lastEvent.blockNumber,
|
||||
);
|
||||
fromBlock = Number(lastEvent.blockNumber);
|
||||
|
||||
events.push(...result);
|
||||
@ -594,7 +613,8 @@ export async function getAllWithdrawals({
|
||||
};
|
||||
}
|
||||
|
||||
const result = events.map(({ id, blockNumber, nullifier, to, fee, timestamp }) => {
|
||||
const result = events.map(
|
||||
({ id, blockNumber, nullifier, to, fee, timestamp }) => {
|
||||
const [transactionHash, logIndex] = id.split('-');
|
||||
|
||||
return {
|
||||
@ -606,14 +626,17 @@ export async function getAllWithdrawals({
|
||||
fee,
|
||||
timestamp: Number(timestamp),
|
||||
};
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
const [lastEvent] = result.slice(-1);
|
||||
|
||||
return {
|
||||
events: result,
|
||||
lastSyncBlock:
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock,
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock
|
||||
? lastEvent.blockNumber + 1
|
||||
: lastSyncBlock,
|
||||
};
|
||||
} catch (err) {
|
||||
console.log('Error from getAllWithdrawals query');
|
||||
@ -783,7 +806,9 @@ export async function getAllGraphEchoEvents({
|
||||
break;
|
||||
}
|
||||
|
||||
result = result.filter(({ blockNumber }) => blockNumber !== lastEvent.blockNumber);
|
||||
result = result.filter(
|
||||
({ blockNumber }) => blockNumber !== lastEvent.blockNumber,
|
||||
);
|
||||
fromBlock = Number(lastEvent.blockNumber);
|
||||
|
||||
events.push(...result);
|
||||
@ -813,7 +838,9 @@ export async function getAllGraphEchoEvents({
|
||||
return {
|
||||
events: result,
|
||||
lastSyncBlock:
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock,
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock
|
||||
? lastEvent.blockNumber + 1
|
||||
: lastSyncBlock,
|
||||
};
|
||||
} catch (err) {
|
||||
console.log('Error from getAllGraphEchoEvents query');
|
||||
@ -915,7 +942,9 @@ export async function getAllEncryptedNotes({
|
||||
break;
|
||||
}
|
||||
|
||||
result = result.filter(({ blockNumber }) => blockNumber !== lastEvent.blockNumber);
|
||||
result = result.filter(
|
||||
({ blockNumber }) => blockNumber !== lastEvent.blockNumber,
|
||||
);
|
||||
fromBlock = Number(lastEvent.blockNumber);
|
||||
|
||||
events.push(...result);
|
||||
@ -940,7 +969,9 @@ export async function getAllEncryptedNotes({
|
||||
return {
|
||||
events: result,
|
||||
lastSyncBlock:
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock,
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock
|
||||
? lastEvent.blockNumber + 1
|
||||
: lastSyncBlock,
|
||||
};
|
||||
} catch (err) {
|
||||
console.log('Error from getAllEncryptedNotes query');
|
||||
@ -1054,13 +1085,18 @@ export async function getAllGovernanceEvents({
|
||||
|
||||
lastSyncBlock = currentBlock;
|
||||
|
||||
const eventsLength = proposals.length + votes.length + delegates.length + undelegates.length;
|
||||
const eventsLength =
|
||||
proposals.length +
|
||||
votes.length +
|
||||
delegates.length +
|
||||
undelegates.length;
|
||||
|
||||
if (eventsLength === 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
const formattedProposals: GovernanceProposalCreatedEvents[] = proposals.map(
|
||||
const formattedProposals: GovernanceProposalCreatedEvents[] =
|
||||
proposals.map(
|
||||
({
|
||||
blockNumber,
|
||||
logIndex,
|
||||
@ -1088,7 +1124,17 @@ export async function getAllGovernanceEvents({
|
||||
);
|
||||
|
||||
const formattedVotes: GovernanceVotedEvents[] = votes.map(
|
||||
({ blockNumber, logIndex, transactionHash, proposalId, voter, support, votes, from, input }) => {
|
||||
({
|
||||
blockNumber,
|
||||
logIndex,
|
||||
transactionHash,
|
||||
proposalId,
|
||||
voter,
|
||||
support,
|
||||
votes,
|
||||
from,
|
||||
input,
|
||||
}) => {
|
||||
// Filter spammy txs
|
||||
if (!input || input.length > 2048) {
|
||||
input = '';
|
||||
@ -1109,8 +1155,15 @@ export async function getAllGovernanceEvents({
|
||||
},
|
||||
);
|
||||
|
||||
const formattedDelegates: GovernanceDelegatedEvents[] = delegates.map(
|
||||
({ blockNumber, logIndex, transactionHash, account, delegateTo }) => {
|
||||
const formattedDelegates: GovernanceDelegatedEvents[] =
|
||||
delegates.map(
|
||||
({
|
||||
blockNumber,
|
||||
logIndex,
|
||||
transactionHash,
|
||||
account,
|
||||
delegateTo,
|
||||
}) => {
|
||||
return {
|
||||
blockNumber: Number(blockNumber),
|
||||
logIndex: Number(logIndex),
|
||||
@ -1122,8 +1175,15 @@ export async function getAllGovernanceEvents({
|
||||
},
|
||||
);
|
||||
|
||||
const formattedUndelegates: GovernanceUndelegatedEvents[] = undelegates.map(
|
||||
({ blockNumber, logIndex, transactionHash, account, delegateFrom }) => {
|
||||
const formattedUndelegates: GovernanceUndelegatedEvents[] =
|
||||
undelegates.map(
|
||||
({
|
||||
blockNumber,
|
||||
logIndex,
|
||||
transactionHash,
|
||||
account,
|
||||
delegateFrom,
|
||||
}) => {
|
||||
return {
|
||||
blockNumber: Number(blockNumber),
|
||||
logIndex: Number(logIndex),
|
||||
@ -1164,7 +1224,9 @@ export async function getAllGovernanceEvents({
|
||||
});
|
||||
}
|
||||
|
||||
formattedEvents = formattedEvents.filter(({ blockNumber }) => blockNumber !== lastEvent.blockNumber);
|
||||
formattedEvents = formattedEvents.filter(
|
||||
({ blockNumber }) => blockNumber !== lastEvent.blockNumber,
|
||||
);
|
||||
|
||||
fromBlock = Number(lastEvent.blockNumber);
|
||||
|
||||
@ -1176,7 +1238,9 @@ export async function getAllGovernanceEvents({
|
||||
return {
|
||||
events: result,
|
||||
lastSyncBlock:
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock ? lastEvent.blockNumber + 1 : lastSyncBlock,
|
||||
lastEvent && lastEvent.blockNumber >= lastSyncBlock
|
||||
? lastEvent.blockNumber + 1
|
||||
: lastSyncBlock,
|
||||
};
|
||||
} catch (err) {
|
||||
console.log('Error from getAllGovernance query');
|
||||
src/idb.ts
@ -2,7 +2,8 @@
|
||||
import { openDB, deleteDB, OpenDBCallbacks, IDBPDatabase } from 'idb';
|
||||
import { getConfig, NetIdType } from './networkConfig';
|
||||
|
||||
export const INDEX_DB_ERROR = 'A mutation operation was attempted on a database that did not allow mutations.';
|
||||
export const INDEX_DB_ERROR =
|
||||
'A mutation operation was attempted on a database that did not allow mutations.';
|
||||
|
||||
export interface IDBIndex {
|
||||
name: string;
|
||||
@ -39,7 +40,8 @@ export class IndexedDB {
|
||||
db.deleteObjectStore(value);
|
||||
});
|
||||
|
||||
[{ name: 'keyval' }, ...(stores || [])].forEach(({ name, keyPath, indexes }) => {
|
||||
[{ name: 'keyval' }, ...(stores || [])].forEach(
|
||||
({ name, keyPath, indexes }) => {
|
||||
const store = db.createObjectStore(name, {
|
||||
keyPath,
|
||||
autoIncrement: true,
|
||||
@ -50,7 +52,8 @@ export class IndexedDB {
|
||||
store.createIndex(name, name, { unique });
|
||||
});
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
@ -135,13 +138,24 @@ export class IndexedDB {
|
||||
}
|
||||
|
||||
try {
|
||||
return (await this.db.getAllFromIndex(storeName, indexName, key, count)) as T;
|
||||
return (await this.db.getAllFromIndex(
|
||||
storeName,
|
||||
indexName,
|
||||
key,
|
||||
count,
|
||||
)) as T;
|
||||
} catch (err: any) {
|
||||
throw new Error(`Method getAllFromIndex has error: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
async getItem<T>({ storeName, key }: { storeName: string; key: string }): Promise<T | undefined> {
|
||||
async getItem<T>({
|
||||
storeName,
|
||||
key,
|
||||
}: {
|
||||
storeName: string;
|
||||
key: string;
|
||||
}): Promise<T | undefined> {
|
||||
await this.initDB();
|
||||
|
||||
if (!this.db) {
|
||||
@ -157,7 +171,15 @@ export class IndexedDB {
|
||||
}
|
||||
}
|
||||
|
||||
async addItem({ storeName, data, key = '' }: { storeName: string; data: any; key: string }) {
|
||||
async addItem({
|
||||
storeName,
|
||||
data,
|
||||
key = '',
|
||||
}: {
|
||||
storeName: string;
|
||||
data: any;
|
||||
key: string;
|
||||
}) {
|
||||
await this.initDB();
|
||||
|
||||
if (!this.db) {
|
||||
@ -176,7 +198,15 @@ export class IndexedDB {
|
||||
}
|
||||
}
|
||||
|
||||
async putItem({ storeName, data, key }: { storeName: string; data: any; key?: string }) {
|
||||
async putItem({
|
||||
storeName,
|
||||
data,
|
||||
key,
|
||||
}: {
|
||||
storeName: string;
|
||||
data: any;
|
||||
key?: string;
|
||||
}) {
|
||||
await this.initDB();
|
||||
|
||||
if (!this.db) {
|
||||
@ -239,7 +269,13 @@ export class IndexedDB {
|
||||
return this.deleteItem({ storeName: 'keyval', key });
|
||||
}
|
||||
|
||||
async clearStore({ storeName, mode = 'readwrite' }: { storeName: string; mode: IDBTransactionMode }) {
|
||||
async clearStore({
|
||||
storeName,
|
||||
mode = 'readwrite',
|
||||
}: {
|
||||
storeName: string;
|
||||
mode: IDBTransactionMode;
|
||||
}) {
|
||||
await this.initDB();
|
||||
|
||||
if (!this.db) {
|
||||
@ -273,10 +309,17 @@ export class IndexedDB {
|
||||
try {
|
||||
const tx = this.db.transaction(storeName, mode);
|
||||
|
||||
await (tx.objectStore(storeName).add as (value: any, key?: any) => Promise<any>)(data);
|
||||
await (
|
||||
tx.objectStore(storeName).add as (
|
||||
value: any,
|
||||
key?: any,
|
||||
) => Promise<any>
|
||||
)(data);
|
||||
await tx.done;
|
||||
} catch (err: any) {
|
||||
throw new Error(`Method createTransactions has error: ${err.message}`);
|
||||
throw new Error(
|
||||
`Method createTransactions has error: ${err.message}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -302,11 +345,15 @@ export class IndexedDB {
|
||||
|
||||
for (const item of data) {
|
||||
if (item) {
|
||||
await (tx.store.put as (value: any, key?: any) => Promise<any>)({ ...item, ...index });
|
||||
await (
|
||||
tx.store.put as (value: any, key?: any) => Promise<any>
|
||||
)({ ...item, ...index });
|
||||
}
|
||||
}
|
||||
} catch (err: any) {
|
||||
throw new Error(`Method createMultipleTransactions has error: ${err.message}`);
|
||||
throw new Error(
|
||||
`Method createMultipleTransactions has error: ${err.message}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -364,7 +411,8 @@ export async function getIndexedDB(netId?: NetIdType) {
|
||||
|
||||
const config = getConfig(netId);
|
||||
|
||||
const { tokens, nativeCurrency, registryContract, governanceContract } = config;
|
||||
const { tokens, nativeCurrency, registryContract, governanceContract } =
|
||||
config;
|
||||
|
||||
const stores = [...defaultState];
|
||||
|
||||
|
||||
@ -1,5 +1,10 @@
|
||||
import { Worker as NodeWorker } from 'worker_threads';
|
||||
import { MerkleTree, PartialMerkleTree, Element, TreeEdge } from '@tornado/fixed-merkle-tree';
|
||||
import {
|
||||
MerkleTree,
|
||||
PartialMerkleTree,
|
||||
Element,
|
||||
TreeEdge,
|
||||
} from '@tornado/fixed-merkle-tree';
|
||||
import type { Tornado } from '@tornado/contracts';
|
||||
import { isNode, toFixedHex } from './utils';
|
||||
import { mimc } from './mimc';
|
||||
@ -61,28 +66,43 @@ export class MerkleTreeService {
|
||||
|
||||
try {
|
||||
if (isNode) {
|
||||
const merkleWorkerPromise = new Promise((resolve, reject) => {
|
||||
const worker = new NodeWorker(this.merkleWorkerPath as string, {
|
||||
const merkleWorkerPromise = new Promise(
|
||||
(resolve, reject) => {
|
||||
const worker = new NodeWorker(
|
||||
this.merkleWorkerPath as string,
|
||||
{
|
||||
workerData: {
|
||||
merkleTreeHeight: this.merkleTreeHeight,
|
||||
elements: events,
|
||||
zeroElement: this.emptyElement,
|
||||
},
|
||||
});
|
||||
},
|
||||
);
|
||||
worker.on('message', resolve);
|
||||
worker.on('error', reject);
|
||||
worker.on('exit', (code) => {
|
||||
if (code !== 0) {
|
||||
reject(new Error(`Worker stopped with exit code ${code}`));
|
||||
reject(
|
||||
new Error(
|
||||
`Worker stopped with exit code ${code}`,
|
||||
),
|
||||
);
|
||||
}
|
||||
});
|
||||
}) as Promise<string>;
|
||||
},
|
||||
) as Promise<string>;
|
||||
|
||||
return MerkleTree.deserialize(JSON.parse(await merkleWorkerPromise), hashFunction);
|
||||
return MerkleTree.deserialize(
|
||||
JSON.parse(await merkleWorkerPromise),
|
||||
hashFunction,
|
||||
);
|
||||
} else {
|
||||
const merkleWorkerPromise = new Promise((resolve, reject) => {
|
||||
const merkleWorkerPromise = new Promise(
|
||||
(resolve, reject) => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const worker = new (Worker as any)(this.merkleWorkerPath);
|
||||
const worker = new (Worker as any)(
|
||||
this.merkleWorkerPath,
|
||||
);
|
||||
|
||||
worker.onmessage = (e: { data: string }) => {
|
||||
resolve(e.data);
|
||||
@ -98,12 +118,18 @@ export class MerkleTreeService {
|
||||
elements: events,
|
||||
zeroElement: this.emptyElement,
|
||||
});
|
||||
}) as Promise<string>;
|
||||
},
|
||||
) as Promise<string>;
|
||||
|
||||
return MerkleTree.deserialize(JSON.parse(await merkleWorkerPromise), hashFunction);
|
||||
return MerkleTree.deserialize(
|
||||
JSON.parse(await merkleWorkerPromise),
|
||||
hashFunction,
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
console.log('merkleWorker failed, falling back to synchronous merkle tree');
|
||||
console.log(
|
||||
'merkleWorker failed, falling back to synchronous merkle tree',
|
||||
);
|
||||
console.log(err);
|
||||
}
|
||||
}
|
||||
@ -114,7 +140,13 @@ export class MerkleTreeService {
|
||||
});
|
||||
}
|
||||
|
||||
async createPartialTree({ edge, elements }: { edge: TreeEdge; elements: Element[] }) {
|
||||
async createPartialTree({
|
||||
edge,
|
||||
elements,
|
||||
}: {
|
||||
edge: TreeEdge;
|
||||
elements: Element[];
|
||||
}) {
|
||||
const { hash: hashFunction } = await mimc.getHash();
|
||||
|
||||
if (this.merkleWorkerPath) {
|
||||
@ -122,29 +154,44 @@ export class MerkleTreeService {
|
||||
|
||||
try {
|
||||
if (isNode) {
|
||||
const merkleWorkerPromise = new Promise((resolve, reject) => {
|
||||
const worker = new NodeWorker(this.merkleWorkerPath as string, {
|
||||
const merkleWorkerPromise = new Promise(
|
||||
(resolve, reject) => {
|
||||
const worker = new NodeWorker(
|
||||
this.merkleWorkerPath as string,
|
||||
{
|
||||
workerData: {
|
||||
merkleTreeHeight: this.merkleTreeHeight,
|
||||
edge,
|
||||
elements,
|
||||
zeroElement: this.emptyElement,
|
||||
},
|
||||
});
|
||||
},
|
||||
);
|
||||
worker.on('message', resolve);
|
||||
worker.on('error', reject);
|
||||
worker.on('exit', (code) => {
|
||||
if (code !== 0) {
|
||||
reject(new Error(`Worker stopped with exit code ${code}`));
|
||||
reject(
|
||||
new Error(
|
||||
`Worker stopped with exit code ${code}`,
|
||||
),
|
||||
);
|
||||
}
|
||||
});
|
||||
}) as Promise<string>;
|
||||
},
|
||||
) as Promise<string>;
|
||||
|
||||
return PartialMerkleTree.deserialize(JSON.parse(await merkleWorkerPromise), hashFunction);
|
||||
return PartialMerkleTree.deserialize(
|
||||
JSON.parse(await merkleWorkerPromise),
|
||||
hashFunction,
|
||||
);
|
||||
} else {
|
||||
const merkleWorkerPromise = new Promise((resolve, reject) => {
|
||||
const merkleWorkerPromise = new Promise(
|
||||
(resolve, reject) => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const worker = new (Worker as any)(this.merkleWorkerPath);
|
||||
const worker = new (Worker as any)(
|
||||
this.merkleWorkerPath,
|
||||
);
|
||||
|
||||
worker.onmessage = (e: { data: string }) => {
|
||||
resolve(e.data);
|
||||
@ -161,12 +208,18 @@ export class MerkleTreeService {
|
||||
elements,
|
||||
zeroElement: this.emptyElement,
|
||||
});
|
||||
}) as Promise<string>;
|
||||
},
|
||||
) as Promise<string>;
|
||||
|
||||
return PartialMerkleTree.deserialize(JSON.parse(await merkleWorkerPromise), hashFunction);
|
||||
return PartialMerkleTree.deserialize(
|
||||
JSON.parse(await merkleWorkerPromise),
|
||||
hashFunction,
|
||||
);
|
||||
}
|
||||
} catch (err) {
|
||||
console.log('merkleWorker failed, falling back to synchronous merkle tree');
|
||||
console.log(
|
||||
'merkleWorker failed, falling back to synchronous merkle tree',
|
||||
);
|
||||
console.log(err);
|
||||
}
|
||||
}
|
||||
@ -184,9 +237,13 @@ export class MerkleTreeService {
|
||||
|
||||
const timeStart = Date.now();
|
||||
|
||||
const tree = await this.createTree(events.map(({ commitment }) => commitment));
|
||||
const tree = await this.createTree(
|
||||
events.map(({ commitment }) => commitment),
|
||||
);
|
||||
|
||||
const isKnownRoot = await this.Tornado.isKnownRoot(toFixedHex(BigInt(tree.root)));
|
||||
const isKnownRoot = await this.Tornado.isKnownRoot(
|
||||
toFixedHex(BigInt(tree.root)),
|
||||
);
|
||||
|
||||
if (!isKnownRoot) {
|
||||
const errMsg = `Deposit Event ${this.netId} ${this.amount} ${this.currency} is invalid`;
|
||||
|
||||
@ -1,6 +1,11 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
import workerThreads from 'worker_threads';
|
||||
import { MerkleTree, Element, TreeEdge, PartialMerkleTree } from '@tornado/fixed-merkle-tree';
|
||||
import {
|
||||
MerkleTree,
|
||||
Element,
|
||||
TreeEdge,
|
||||
PartialMerkleTree,
|
||||
} from '@tornado/fixed-merkle-tree';
|
||||
import { mimc } from './mimc';
|
||||
import { isNode } from './utils';
|
||||
|
||||
@ -13,15 +18,23 @@ interface WorkData {
|
||||
|
||||
async function nodePostWork() {
|
||||
const { hash: hashFunction } = await mimc.getHash();
|
||||
const { merkleTreeHeight, edge, elements, zeroElement } = workerThreads.workerData as WorkData;
|
||||
const { merkleTreeHeight, edge, elements, zeroElement } =
|
||||
workerThreads.workerData as WorkData;
|
||||
|
||||
if (edge) {
|
||||
const merkleTree = new PartialMerkleTree(merkleTreeHeight, edge, elements, {
|
||||
const merkleTree = new PartialMerkleTree(
|
||||
merkleTreeHeight,
|
||||
edge,
|
||||
elements,
|
||||
{
|
||||
zeroElement,
|
||||
hashFunction,
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
(workerThreads.parentPort as workerThreads.MessagePort).postMessage(merkleTree.toString());
|
||||
(workerThreads.parentPort as workerThreads.MessagePort).postMessage(
|
||||
merkleTree.toString(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
@ -30,12 +43,18 @@ async function nodePostWork() {
|
||||
hashFunction,
|
||||
});
|
||||
|
||||
(workerThreads.parentPort as workerThreads.MessagePort).postMessage(merkleTree.toString());
|
||||
(workerThreads.parentPort as workerThreads.MessagePort).postMessage(
|
||||
merkleTree.toString(),
|
||||
);
|
||||
}
|
||||
|
||||
if (isNode && workerThreads) {
|
||||
nodePostWork();
|
||||
} else if (!isNode && typeof addEventListener === 'function' && typeof postMessage === 'function') {
|
||||
} else if (
|
||||
!isNode &&
|
||||
typeof addEventListener === 'function' &&
|
||||
typeof postMessage === 'function'
|
||||
) {
|
||||
addEventListener('message', async (e: any) => {
|
||||
let data;
|
||||
|
||||
@ -46,13 +65,19 @@ if (isNode && workerThreads) {
|
||||
}
|
||||
|
||||
const { hash: hashFunction } = await mimc.getHash();
|
||||
const { merkleTreeHeight, edge, elements, zeroElement } = data as WorkData;
|
||||
const { merkleTreeHeight, edge, elements, zeroElement } =
|
||||
data as WorkData;
|
||||
|
||||
if (edge) {
|
||||
const merkleTree = new PartialMerkleTree(merkleTreeHeight, edge, elements, {
|
||||
const merkleTree = new PartialMerkleTree(
|
||||
merkleTreeHeight,
|
||||
edge,
|
||||
elements,
|
||||
{
|
||||
zeroElement,
|
||||
hashFunction,
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
postMessage(merkleTree.toString());
|
||||
return;
|
||||
|
||||
@ -12,7 +12,10 @@ export class Mimc {
|
||||
|
||||
async initMimc() {
|
||||
this.sponge = await buildMimcSponge();
|
||||
this.hash = (left, right) => this.sponge?.F.toString(this.sponge?.multiHash([BigInt(left), BigInt(right)]));
|
||||
this.hash = (left, right) =>
|
||||
this.sponge?.F.toString(
|
||||
this.sponge?.multiHash([BigInt(left), BigInt(right)]),
|
||||
);
|
||||
}
|
||||
|
||||
async getHash() {
|
||||
|
||||
@ -14,7 +14,8 @@ export interface Call3 {
|
||||
export async function multicall(Multicall: Multicall, calls: Call3[]) {
|
||||
const calldata = calls.map((call) => {
|
||||
const target = (call.contract?.target || call.address) as string;
|
||||
const callInterface = (call.contract?.interface || call.interface) as Interface;
|
||||
const callInterface = (call.contract?.interface ||
|
||||
call.interface) as Interface;
|
||||
|
||||
return {
|
||||
target,
|
||||
@ -26,11 +27,18 @@ export async function multicall(Multicall: Multicall, calls: Call3[]) {
|
||||
const returnData = await Multicall.aggregate3.staticCall(calldata);
|
||||
|
||||
const res = returnData.map((call, i) => {
|
||||
const callInterface = (calls[i].contract?.interface || calls[i].interface) as Interface;
|
||||
const callInterface = (calls[i].contract?.interface ||
|
||||
calls[i].interface) as Interface;
|
||||
const [result, data] = call;
|
||||
const decodeResult =
|
||||
result && data && data !== '0x' ? callInterface.decodeFunctionResult(calls[i].name, data) : null;
|
||||
return !decodeResult ? null : decodeResult.length === 1 ? decodeResult[0] : decodeResult;
|
||||
result && data && data !== '0x'
|
||||
? callInterface.decodeFunctionResult(calls[i].name, data)
|
||||
: null;
|
||||
return !decodeResult
|
||||
? null
|
||||
: decodeResult.length === 1
|
||||
? decodeResult[0]
|
||||
: decodeResult;
|
||||
});
|
||||
|
||||
return res;
|
||||
|
||||
@ -122,7 +122,8 @@ export const defaultConfig: networkConfig = {
|
||||
currencyName: 'ETH',
|
||||
explorerUrl: 'https://etherscan.io',
|
||||
merkleTreeHeight: 20,
|
||||
emptyElement: '21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
emptyElement:
|
||||
'21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
networkName: 'Ethereum Mainnet',
|
||||
deployedBlock: 9116966,
|
||||
rpcUrls: {
|
||||
@ -258,7 +259,8 @@ export const defaultConfig: networkConfig = {
|
||||
currencyName: 'BNB',
|
||||
explorerUrl: 'https://bscscan.com',
|
||||
merkleTreeHeight: 20,
|
||||
emptyElement: '21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
emptyElement:
|
||||
'21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
networkName: 'Binance Smart Chain',
|
||||
deployedBlock: 8158799,
|
||||
stablecoin: '0x8AC76a51cc950d9822D68b83fE1Ad97B32Cd580d',
|
||||
@ -321,7 +323,8 @@ export const defaultConfig: networkConfig = {
|
||||
currencyName: 'MATIC',
|
||||
explorerUrl: 'https://polygonscan.com',
|
||||
merkleTreeHeight: 20,
|
||||
emptyElement: '21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
emptyElement:
|
||||
'21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
networkName: 'Polygon (Matic) Network',
|
||||
deployedBlock: 16257962,
|
||||
stablecoin: '0x3c499c542cEF5E3811e1192ce70d8cC03d5c3359',
|
||||
@ -372,7 +375,8 @@ export const defaultConfig: networkConfig = {
|
||||
currencyName: 'ETH',
|
||||
explorerUrl: 'https://optimistic.etherscan.io',
|
||||
merkleTreeHeight: 20,
|
||||
emptyElement: '21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
emptyElement:
|
||||
'21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
networkName: 'Optimism',
|
||||
deployedBlock: 2243689,
|
||||
stablecoin: '0x0b2C639c533813f4Aa9D7837CAf62653d097Ff85',
|
||||
@ -424,7 +428,8 @@ export const defaultConfig: networkConfig = {
|
||||
currencyName: 'ETH',
|
||||
explorerUrl: 'https://arbiscan.io',
|
||||
merkleTreeHeight: 20,
|
||||
emptyElement: '21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
emptyElement:
|
||||
'21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
networkName: 'Arbitrum One',
|
||||
deployedBlock: 3430648,
|
||||
stablecoin: '0xaf88d065e77c8cC2239327C5EDb3A432268e5831',
|
||||
@ -479,7 +484,8 @@ export const defaultConfig: networkConfig = {
|
||||
currencyName: 'xDAI',
|
||||
explorerUrl: 'https://gnosisscan.io',
|
||||
merkleTreeHeight: 20,
|
||||
emptyElement: '21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
emptyElement:
|
||||
'21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
networkName: 'Gnosis Chain',
|
||||
deployedBlock: 17754561,
|
||||
stablecoin: '0xDDAfbb505ad214D7b80b1f830fcCc89B60fb7A83',
|
||||
@ -530,7 +536,8 @@ export const defaultConfig: networkConfig = {
|
||||
currencyName: 'AVAX',
|
||||
explorerUrl: 'https://snowtrace.io',
|
||||
merkleTreeHeight: 20,
|
||||
emptyElement: '21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
emptyElement:
|
||||
'21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
networkName: 'Avalanche Mainnet',
|
||||
deployedBlock: 4429818,
|
||||
stablecoin: '0xB97EF9Ef8734C71904D8002F8b6Bc66Dd9c48a6E',
|
||||
@ -580,7 +587,8 @@ export const defaultConfig: networkConfig = {
|
||||
currencyName: 'SepoliaETH',
|
||||
explorerUrl: 'https://sepolia.etherscan.io',
|
||||
merkleTreeHeight: 20,
|
||||
emptyElement: '21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
emptyElement:
|
||||
'21663839004416932945382355908790599225266501822907911457504978515578255421292',
|
||||
networkName: 'Ethereum Sepolia',
|
||||
deployedBlock: 5594395,
|
||||
stablecoin: '0x1c7D4B196Cb0C7B01d743Fbc6116a902379C7238',
|
||||
@ -650,7 +658,9 @@ export const defaultConfig: networkConfig = {
|
||||
},
|
||||
};
|
||||
|
||||
export const enabledChains = Object.values(NetId).filter((n) => typeof n === 'number') as NetIdType[];
|
||||
export const enabledChains = Object.values(NetId).filter(
|
||||
(n) => typeof n === 'number',
|
||||
) as NetIdType[];
|
||||
|
||||
/**
|
||||
* Custom config object to extend default config
|
||||
|
||||
@ -16,7 +16,9 @@ export class Pedersen {
|
||||
|
||||
async unpackPoint(buffer: Uint8Array) {
|
||||
await this.pedersenPromise;
|
||||
return this.babyJub?.unpackPoint(this.pedersenHash?.hash(buffer) as Uint8Array);
|
||||
return this.babyJub?.unpackPoint(
|
||||
this.pedersenHash?.hash(buffer) as Uint8Array,
|
||||
);
|
||||
}
|
||||
|
||||
toStringBuffer(buffer: Uint8Array): string {
|
||||
|
||||
@ -1,4 +1,9 @@
|
||||
import { ERC20Permit, ERC20Mock, TORN, PermitTornado } from '@tornado/contracts';
|
||||
import {
|
||||
ERC20Permit,
|
||||
ERC20Mock,
|
||||
TORN,
|
||||
PermitTornado,
|
||||
} from '@tornado/contracts';
|
||||
import {
|
||||
BaseContract,
|
||||
MaxUint256,
|
||||
@ -100,7 +105,10 @@ export async function getPermitCommitmentsSignature({
|
||||
signer?: Signer;
|
||||
}) {
|
||||
const value = BigInt(commitments.length) * denomination;
|
||||
const commitmentsHash = solidityPackedKeccak256(['bytes32[]'], [commitments]);
|
||||
const commitmentsHash = solidityPackedKeccak256(
|
||||
['bytes32[]'],
|
||||
[commitments],
|
||||
);
|
||||
|
||||
return await getPermitSignature({
|
||||
Token,
|
||||
@ -191,7 +199,9 @@ export async function getPermit2Signature({
|
||||
|
||||
const hash = new TypedDataEncoder(types).hash(values);
|
||||
|
||||
const signature = Signature.from(await sigSigner.signTypedData(domain, types, values));
|
||||
const signature = Signature.from(
|
||||
await sigSigner.signTypedData(domain, types, values),
|
||||
);
|
||||
|
||||
return {
|
||||
domain,
|
||||
@ -216,7 +226,10 @@ export async function getPermit2CommitmentsSignature({
|
||||
signer?: Signer;
|
||||
}) {
|
||||
const value = BigInt(commitments.length) * denomination;
|
||||
const commitmentsHash = solidityPackedKeccak256(['bytes32[]'], [commitments]);
|
||||
const commitmentsHash = solidityPackedKeccak256(
|
||||
['bytes32[]'],
|
||||
[commitments],
|
||||
);
|
||||
|
||||
return await getPermit2Signature({
|
||||
Token,
|
||||
|
||||
@ -9,7 +9,11 @@ export class TokenPriceOracle {
|
||||
|
||||
fallbackPrice: bigint;
|
||||
|
||||
constructor(provider: Provider, multicall: Multicall, oracle?: OffchainOracle) {
|
||||
constructor(
|
||||
provider: Provider,
|
||||
multicall: Multicall,
|
||||
oracle?: OffchainOracle,
|
||||
) {
|
||||
this.provider = provider;
|
||||
this.multicall = multicall;
|
||||
this.oracle = oracle;
|
||||
@ -31,7 +35,10 @@ export class TokenPriceOracle {
|
||||
}
|
||||
|
||||
buildStable(stablecoinAddress: string): Call3[] {
|
||||
const stablecoin = ERC20__factory.connect(stablecoinAddress, this.provider);
|
||||
const stablecoin = ERC20__factory.connect(
|
||||
stablecoinAddress,
|
||||
this.provider,
|
||||
);
|
||||
|
||||
return [
|
||||
{
|
||||
@ -74,29 +81,44 @@ export class TokenPriceOracle {
|
||||
): Promise<bigint[]> {
|
||||
// setup mock price for testnets
|
||||
if (!this.oracle) {
|
||||
return new Promise((resolve) => resolve(tokens.map(() => this.fallbackPrice)));
|
||||
return new Promise((resolve) =>
|
||||
resolve(tokens.map(() => this.fallbackPrice)),
|
||||
);
|
||||
}
|
||||
|
||||
const prices = (await multicall(this.multicall, this.buildCalls(tokens))) as (bigint | null)[];
|
||||
const prices = (await multicall(
|
||||
this.multicall,
|
||||
this.buildCalls(tokens),
|
||||
)) as (bigint | null)[];
|
||||
|
||||
return prices.map((price, index) => {
|
||||
if (!price) {
|
||||
price = this.fallbackPrice;
|
||||
}
|
||||
return (price * BigInt(10 ** tokens[index].decimals)) / BigInt(10 ** 18);
|
||||
return (
|
||||
(price * BigInt(10 ** tokens[index].decimals)) /
|
||||
BigInt(10 ** 18)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
async fetchEthUSD(stablecoinAddress: string): Promise<number> {
|
||||
// setup mock price for testnets
|
||||
if (!this.oracle) {
|
||||
return new Promise((resolve) => resolve(10 ** 18 / Number(this.fallbackPrice)));
|
||||
return new Promise((resolve) =>
|
||||
resolve(10 ** 18 / Number(this.fallbackPrice)),
|
||||
);
|
||||
}
|
||||
|
||||
const [decimals, price] = await multicall(this.multicall, this.buildStable(stablecoinAddress));
|
||||
const [decimals, price] = await multicall(
|
||||
this.multicall,
|
||||
this.buildStable(stablecoinAddress),
|
||||
);
|
||||
|
||||
// eth wei price of usdc token
|
||||
const ethPrice = ((price || this.fallbackPrice) * BigInt(10n ** decimals)) / BigInt(10 ** 18);
|
||||
const ethPrice =
|
||||
((price || this.fallbackPrice) * BigInt(10n ** decimals)) /
|
||||
BigInt(10 ** 18);
|
||||
|
||||
return 1 / Number(formatEther(ethPrice));
|
||||
}
|
||||
src/providers.ts
@ -20,7 +20,12 @@ import {
|
||||
EnsPlugin,
|
||||
GasCostPlugin,
|
||||
} from 'ethers';
|
||||
import type { RequestInfo, RequestInit, Response, HeadersInit } from 'node-fetch';
|
||||
import type {
|
||||
RequestInfo,
|
||||
RequestInit,
|
||||
Response,
|
||||
HeadersInit,
|
||||
} from 'node-fetch';
|
||||
// Temporary workaround until @types/node-fetch is compatible with @types/node
|
||||
import type { AbortSignal as FetchAbortSignal } from 'node-fetch/externals';
|
||||
import { isNode, sleep } from './utils';
|
||||
@ -33,11 +38,15 @@ declare global {
|
||||
}
|
||||
|
||||
// Update this for every Tor Browser release
|
||||
export const defaultUserAgent = 'Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0';
|
||||
export const defaultUserAgent =
|
||||
'Mozilla/5.0 (Windows NT 10.0; rv:109.0) Gecko/20100101 Firefox/115.0';
|
||||
|
||||
export const fetch = crossFetch as unknown as nodeFetch;
|
||||
|
||||
export type nodeFetch = (url: RequestInfo, init?: RequestInit) => Promise<Response>;
|
||||
export type nodeFetch = (
|
||||
url: RequestInfo,
|
||||
init?: RequestInit,
|
||||
) => Promise<Response>;
|
||||
|
||||
export type fetchDataOptions = RequestInit & {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
@ -53,7 +62,9 @@ export type fetchDataOptions = RequestInit & {
|
||||
returnResponse?: boolean;
|
||||
};
|
||||
|
||||
export type NodeAgent = RequestOptions['agent'] | ((parsedUrl: URL) => RequestOptions['agent']);
|
||||
export type NodeAgent =
|
||||
| RequestOptions['agent']
|
||||
| ((parsedUrl: URL) => RequestOptions['agent']);
|
||||
|
||||
export function getHttpAgent({
|
||||
fetchUrl,
|
||||
@ -73,7 +84,9 @@ export function getHttpAgent({
|
||||
/* eslint-enable @typescript-eslint/no-require-imports */
|
||||
|
||||
if (torPort) {
|
||||
return new SocksProxyAgent(`socks5h://tor${retry}@127.0.0.1:${torPort}`);
|
||||
return new SocksProxyAgent(
|
||||
`socks5h://tor${retry}@127.0.0.1:${torPort}`,
|
||||
);
|
||||
}
|
||||
|
||||
if (!proxyUrl) {
|
||||
@ -82,7 +95,11 @@ export function getHttpAgent({
|
||||
|
||||
const isHttps = fetchUrl.includes('https://');
|
||||
|
||||
if (proxyUrl.includes('socks://') || proxyUrl.includes('socks4://') || proxyUrl.includes('socks5://')) {
|
||||
if (
|
||||
proxyUrl.includes('socks://') ||
|
||||
proxyUrl.includes('socks4://') ||
|
||||
proxyUrl.includes('socks5://')
|
||||
) {
|
||||
return new SocksProxyAgent(proxyUrl);
|
||||
}
|
||||
|
||||
@ -167,7 +184,9 @@ export async function fetchData(url: string, options: fetchDataOptions = {}) {
|
||||
}
|
||||
|
||||
if (!resp.ok) {
|
||||
const errMsg = `Request to ${url} failed with error code ${resp.status}:\n` + (await resp.text());
|
||||
const errMsg =
|
||||
`Request to ${url} failed with error code ${resp.status}:\n` +
|
||||
(await resp.text());
|
||||
throw new Error(errMsg);
|
||||
}
|
||||
|
||||
@ -262,7 +281,10 @@ export type getProviderOptions = fetchDataOptions & {
|
||||
pollingInterval?: number;
|
||||
};
|
||||
|
||||
export async function getProvider(rpcUrl: string, fetchOptions?: getProviderOptions): Promise<JsonRpcProvider> {
|
||||
export async function getProvider(
|
||||
rpcUrl: string,
|
||||
fetchOptions?: getProviderOptions,
|
||||
): Promise<JsonRpcProvider> {
|
||||
const fetchReq = new FetchRequest(rpcUrl);
|
||||
|
||||
fetchReq.getUrlFunc = fetchGetUrlFunc(fetchOptions);
|
||||
@ -322,7 +344,9 @@ export const populateTransaction = async (
|
||||
|
||||
const [feeData, nonce] = await Promise.all([
|
||||
tx.maxFeePerGas || tx.gasPrice ? undefined : provider.getFeeData(),
|
||||
tx.nonce ? undefined : provider.getTransactionCount(signer.address, 'pending'),
|
||||
tx.nonce
|
||||
? undefined
|
||||
: provider.getTransactionCount(signer.address, 'pending'),
|
||||
]);
|
||||
|
||||
if (feeData) {
|
||||
@ -332,7 +356,10 @@ export const populateTransaction = async (
|
||||
tx.type = 2;
|
||||
}
|
||||
|
||||
tx.maxFeePerGas = (feeData.maxFeePerGas * (BigInt(10000) + BigInt(signer.gasPriceBump))) / BigInt(10000);
|
||||
tx.maxFeePerGas =
|
||||
(feeData.maxFeePerGas *
|
||||
(BigInt(10000) + BigInt(signer.gasPriceBump))) /
|
||||
BigInt(10000);
|
||||
tx.maxPriorityFeePerGas = feeData.maxPriorityFeePerGas;
|
||||
delete tx.gasPrice;
|
||||
} else if (feeData.gasPrice) {
|
||||
@ -356,10 +383,14 @@ export const populateTransaction = async (
|
||||
tx.gasLimit =
|
||||
gasLimit === BigInt(21000)
|
||||
? gasLimit
|
||||
: (gasLimit * (BigInt(10000) + BigInt(signer.gasLimitBump))) / BigInt(10000);
|
||||
: (gasLimit *
|
||||
(BigInt(10000) + BigInt(signer.gasLimitBump))) /
|
||||
BigInt(10000);
|
||||
} catch (error) {
|
||||
if (signer.gasFailover) {
|
||||
console.log('populateTransaction: warning gas estimation failed falling back to 3M gas');
|
||||
console.log(
|
||||
'populateTransaction: warning gas estimation failed falling back to 3M gas',
|
||||
);
|
||||
// Gas failover
|
||||
tx.gasLimit = BigInt('3000000');
|
||||
} else {
|
||||
@ -387,7 +418,12 @@ export class TornadoWallet extends Wallet {
|
||||
constructor(
|
||||
key: string | SigningKey,
|
||||
provider?: Provider,
|
||||
{ gasPriceBump, gasLimitBump, gasFailover, bumpNonce }: TornadoWalletOptions = {},
|
||||
{
|
||||
gasPriceBump,
|
||||
gasLimitBump,
|
||||
gasFailover,
|
||||
bumpNonce,
|
||||
}: TornadoWalletOptions = {},
|
||||
) {
|
||||
super(key, provider);
|
||||
// 10% bump from the recommended fee
|
||||
@ -399,10 +435,23 @@ export class TornadoWallet extends Wallet {
|
||||
this.bumpNonce = bumpNonce ?? false;
|
||||
}
|
||||
|
||||
static fromMnemonic(mneomnic: string, provider: Provider, index = 0, options?: TornadoWalletOptions) {
|
||||
static fromMnemonic(
|
||||
mneomnic: string,
|
||||
provider: Provider,
|
||||
index = 0,
|
||||
options?: TornadoWalletOptions,
|
||||
) {
|
||||
const defaultPath = `m/44'/60'/0'/0/${index}`;
|
||||
const { privateKey } = HDNodeWallet.fromPhrase(mneomnic, undefined, defaultPath);
|
||||
return new TornadoWallet(privateKey as unknown as SigningKey, provider, options);
|
||||
const { privateKey } = HDNodeWallet.fromPhrase(
|
||||
mneomnic,
|
||||
undefined,
|
||||
defaultPath,
|
||||
);
|
||||
return new TornadoWallet(
|
||||
privateKey as unknown as SigningKey,
|
||||
provider,
|
||||
options,
|
||||
);
|
||||
}
|
||||
|
||||
async populateTransaction(tx: TransactionRequest) {
@ -422,7 +471,12 @@ export class TornadoVoidSigner extends VoidSigner {
constructor(
address: string,
provider?: Provider,
{ gasPriceBump, gasLimitBump, gasFailover, bumpNonce }: TornadoWalletOptions = {},
{
gasPriceBump,
gasLimitBump,
gasFailover,
bumpNonce,
}: TornadoWalletOptions = {},
) {
super(address, provider);
// 10% bump from the recommended fee
@ -451,7 +505,12 @@ export class TornadoRpcSigner extends JsonRpcSigner {
constructor(
provider: JsonRpcApiProvider,
address: string,
{ gasPriceBump, gasLimitBump, gasFailover, bumpNonce }: TornadoWalletOptions = {},
{
gasPriceBump,
gasLimitBump,
gasFailover,
bumpNonce,
}: TornadoWalletOptions = {},
) {
super(provider, address);
// 10% bump from the recommended fee
@ -464,7 +523,9 @@ export class TornadoRpcSigner extends JsonRpcSigner {
}

async sendUncheckedTransaction(tx: TransactionRequest) {
return super.sendUncheckedTransaction(await populateTransaction(this, tx));
return super.sendUncheckedTransaction(
await populateTransaction(this, tx),
);
}
}

@ -484,7 +545,11 @@ export interface TornadoBrowserProviderOptions extends TornadoWalletOptions {

export class TornadoBrowserProvider extends BrowserProvider {
options?: TornadoBrowserProviderOptions;
constructor(ethereum: Eip1193Provider, network?: Networkish, options?: TornadoBrowserProviderOptions) {
constructor(
ethereum: Eip1193Provider,
network?: Networkish,
options?: TornadoBrowserProviderOptions,
) {
super(ethereum, network);
this.options = options;
}
@ -501,15 +566,24 @@ export class TornadoBrowserProvider extends BrowserProvider {
}

if (this.options?.handleNetworkChanges) {
window?.ethereum?.on('chainChanged', this.options.handleNetworkChanges);
window?.ethereum?.on(
'chainChanged',
this.options.handleNetworkChanges,
);
}

if (this.options?.handleAccountChanges) {
window?.ethereum?.on('accountsChanged', this.options.handleAccountChanges);
window?.ethereum?.on(
'accountsChanged',
this.options.handleAccountChanges,
);
}

if (this.options?.handleAccountDisconnect) {
window?.ethereum?.on('disconnect', this.options.handleAccountDisconnect);
window?.ethereum?.on(
'disconnect',
this.options.handleAccountDisconnect,
);
}

return new TornadoRpcSigner(this, signerAddress, this.options);

@ -125,7 +125,10 @@ export function isRelayerUpdated(relayerVersion: string, netId: NetIdType) {
}
**/

export function calculateScore({ stakeBalance, tornadoServiceFee }: RelayerInfo) {
export function calculateScore({
stakeBalance,
tornadoServiceFee,
}: RelayerInfo) {
if (tornadoServiceFee < MIN_FEE) {
tornadoServiceFee = MIN_FEE;
} else if (tornadoServiceFee >= MAX_FEE) {
@ -134,9 +137,12 @@ export function calculateScore({ stakeBalance, tornadoServiceFee }: RelayerInfo)

const serviceFeeCoefficient = (tornadoServiceFee - MIN_FEE) ** 2;
const feeDiffCoefficient = 1 / (MAX_FEE - MIN_FEE) ** 2;
const coefficientsMultiplier = 1 - feeDiffCoefficient * serviceFeeCoefficient;
const coefficientsMultiplier =
1 - feeDiffCoefficient * serviceFeeCoefficient;

return BigInt(Math.floor(Number(stakeBalance || '0') * coefficientsMultiplier));
return BigInt(
Math.floor(Number(stakeBalance || '0') * coefficientsMultiplier),
);
}
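The scoring curve above is easier to see with numbers. MIN_FEE and MAX_FEE are defined outside this hunk, so the constants below are assumed purely for illustration, as is the behaviour of the truncated >= MAX_FEE branch; the rest mirrors the reflowed code:

// Standalone sketch of the scoring curve from calculateScore above.
// MIN_FEE / MAX_FEE are assumed values; the real constants live elsewhere in relayerClient.ts.
const MIN_FEE = 0.33;
const MAX_FEE = 0.53;

function scoreSketch(stakeBalance: string, tornadoServiceFee: number): bigint {
    if (tornadoServiceFee < MIN_FEE) {
        tornadoServiceFee = MIN_FEE;
    } else if (tornadoServiceFee >= MAX_FEE) {
        return BigInt(0); // assumed: fees at or above MAX_FEE score zero
    }
    const serviceFeeCoefficient = (tornadoServiceFee - MIN_FEE) ** 2;
    const feeDiffCoefficient = 1 / (MAX_FEE - MIN_FEE) ** 2;
    const coefficientsMultiplier = 1 - feeDiffCoefficient * serviceFeeCoefficient;
    return BigInt(Math.floor(Number(stakeBalance || '0') * coefficientsMultiplier));
}

// A relayer staking 1000 at the minimum fee keeps its full weight,
// while the same stake at a mid-range fee is discounted quadratically:
console.log(scoreSketch('1000', 0.33)); // 1000n
console.log(scoreSketch('1000', 0.43)); // 750n, since 1 - (0.1 / 0.2) ** 2 = 0.75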
export function getWeightRandom(weightsScores: bigint[], random: bigint) {
@ -221,13 +227,16 @@ export class RelayerClient {
const rawStatus = (await fetchData(`${url}status`, {
...this.fetchDataOptions,
headers: {
'Content-Type': 'application/json, application/x-www-form-urlencoded',
'Content-Type':
'application/json, application/x-www-form-urlencoded',
},
timeout: 30000,
maxRetry: this.fetchDataOptions?.torPort ? 2 : 0,
})) as object;

const statusValidator = ajv.compile(getStatusSchema(this.netId, this.config, this.tovarish));
const statusValidator = ajv.compile(
getStatusSchema(this.netId, this.config, this.tovarish),
);

if (!statusValidator(rawStatus)) {
throw new Error('Invalid status schema');
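For context, the statusValidator step above is plain Ajv usage: compile a JSON schema once, then call the resulting validator as a predicate. A minimal self-contained sketch with a toy schema (the real schema comes from getStatusSchema and is far stricter):

import Ajv from 'ajv';

// Toy stand-in for getStatusSchema(netId, config, tovarish); illustrative only.
const ajv = new Ajv();
const toyStatusSchema = {
    type: 'object',
    properties: { netId: { type: 'number' }, version: { type: 'string' } },
    required: ['netId', 'version'],
    additionalProperties: true,
};

const validate = ajv.compile(toyStatusSchema);
if (!validate({ netId: 1, version: '5.0.0' })) {
    throw new Error('Invalid status schema');
}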
@ -246,14 +255,22 @@ export class RelayerClient {
throw new Error('This relayer serves a different network');
}

if (relayerAddress && this.netId === NetId.MAINNET && status.rewardAccount !== relayerAddress) {
throw new Error('The Relayer reward address must match registered address');
if (
relayerAddress &&
this.netId === NetId.MAINNET &&
status.rewardAccount !== relayerAddress
) {
throw new Error(
'The Relayer reward address must match registered address',
);
}

return status;
}

async filterRelayer(relayer: CachedRelayerInfo): Promise<RelayerInfo | RelayerError | undefined> {
async filterRelayer(
relayer: CachedRelayerInfo,
): Promise<RelayerInfo | RelayerError | undefined> {
const hostname = relayer.hostnames[this.netId];
const { ensName, relayerAddress } = relayer;

@ -298,8 +315,11 @@ export class RelayerClient {
}> {
const invalidRelayers: RelayerError[] = [];

const validRelayers = (await Promise.all(relayers.map((relayer) => this.filterRelayer(relayer)))).filter(
(r) => {
const validRelayers = (
await Promise.all(
relayers.map((relayer) => this.filterRelayer(relayer)),
)
).filter((r) => {
if (!r) {
return false;
}
@ -308,8 +328,7 @@ export class RelayerClient {
return false;
}
return true;
},
) as RelayerInfo[];
}) as RelayerInfo[];

return {
validRelayers,
@ -323,7 +342,9 @@ export class RelayerClient {

async tornadoWithdraw(
{ contract, proof, args }: TornadoWithdrawParams,
callback?: (jobResp: RelayerTornadoWithdraw | RelayerTornadoJobs) => void,
callback?: (
jobResp: RelayerTornadoWithdraw | RelayerTornadoJobs,
) => void,
) {
const { url } = this.selectedRelayer as RelayerInfo;

@ -371,7 +392,10 @@ export class RelayerClient {

console.log(`Job submitted: ${jobUrl}\n`);

while (!relayerStatus || !['FAILED', 'CONFIRMED'].includes(relayerStatus)) {
while (
!relayerStatus ||
!['FAILED', 'CONFIRMED'].includes(relayerStatus)
) {
const jobResponse = await fetchData(jobUrl, {
...this.fetchDataOptions,
method: 'GET',
@ -391,18 +415,25 @@ export class RelayerClient {
throw new Error(errMsg);
}

const { status, txHash, confirmations, failedReason } = jobResponse as unknown as RelayerTornadoJobs;
const { status, txHash, confirmations, failedReason } =
jobResponse as unknown as RelayerTornadoJobs;

if (relayerStatus !== status) {
if (status === 'FAILED') {
const errMsg = `Job ${status}: ${jobUrl} failed reason: ${failedReason}`;
throw new Error(errMsg);
} else if (status === 'SENT') {
console.log(`Job ${status}: ${jobUrl}, txhash: ${txHash}\n`);
console.log(
`Job ${status}: ${jobUrl}, txhash: ${txHash}\n`,
);
} else if (status === 'MINED') {
console.log(`Job ${status}: ${jobUrl}, txhash: ${txHash}, confirmations: ${confirmations}\n`);
console.log(
`Job ${status}: ${jobUrl}, txhash: ${txHash}, confirmations: ${confirmations}\n`,
);
} else if (status === 'CONFIRMED') {
console.log(`Job ${status}: ${jobUrl}, txhash: ${txHash}, confirmations: ${confirmations}\n`);
console.log(
`Job ${status}: ${jobUrl}, txhash: ${txHash}, confirmations: ${confirmations}\n`,
);
} else {
console.log(`Job ${status}: ${jobUrl}\n`);
}
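The loop above polls the relayer's job endpoint until the job is confirmed or fails. Stripped of fetchData, retries and the callback plumbing, its overall shape is roughly the following sketch (plain fetch, with an assumed poll interval and an assumed response shape):

interface JobStatusSketch {
    status: 'QUEUED' | 'ACCEPTED' | 'SENT' | 'MINED' | 'CONFIRMED' | 'FAILED';
    txHash?: string;
    confirmations?: number;
    failedReason?: string;
}

async function waitForRelayerJob(jobUrl: string): Promise<JobStatusSketch> {
    // Poll until a terminal state; the real client also reports intermediate states via a callback.
    while (true) {
        const job = (await (await fetch(jobUrl)).json()) as JobStatusSketch;
        if (job.status === 'FAILED') {
            throw new Error(`Job FAILED: ${jobUrl} failed reason: ${job.failedReason}`);
        }
        if (job.status === 'CONFIRMED') {
            return job;
        }
        console.log(`Job ${job.status}: ${jobUrl}`);
        await new Promise((resolve) => setTimeout(resolve, 3000)); // assumed 3 s interval
    }
}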
@ -12,7 +12,9 @@ const baseEventsSchemaProperty = {
transactionHash: bytes32SchemaType,
} as const;

const baseEventsSchemaRequired = Object.keys(baseEventsSchemaProperty) as string[];
const baseEventsSchemaRequired = Object.keys(
baseEventsSchemaProperty,
) as string[];

export const governanceEventsSchema = {
type: 'array',
@ -74,7 +76,11 @@ export const governanceEventsSchema = {
account: addressSchemaType,
delegateTo: addressSchemaType,
},
required: [...baseEventsSchemaRequired, 'account', 'delegateTo'],
required: [
...baseEventsSchemaRequired,
'account',
'delegateTo',
],
additionalProperties: false,
},
{
@ -85,7 +91,11 @@ export const governanceEventsSchema = {
account: addressSchemaType,
delegateFrom: addressSchemaType,
},
required: [...baseEventsSchemaRequired, 'account', 'delegateFrom'],
required: [
...baseEventsSchemaRequired,
'account',
'delegateFrom',
],
additionalProperties: false,
},
],
@ -117,7 +127,13 @@ export const depositsEventsSchema = {
timestamp: { type: 'number' },
from: addressSchemaType,
},
required: [...baseEventsSchemaRequired, 'commitment', 'leafIndex', 'timestamp', 'from'],
required: [
...baseEventsSchemaRequired,
'commitment',
'leafIndex',
'timestamp',
'from',
],
additionalProperties: false,
},
} as const;
@ -133,7 +149,13 @@ export const withdrawalsEventsSchema = {
fee: bnSchemaType,
timestamp: { type: 'number' },
},
required: [...baseEventsSchemaRequired, 'nullifierHash', 'to', 'fee', 'timestamp'],
required: [
...baseEventsSchemaRequired,
'nullifierHash',
'to',
'fee',
'timestamp',
],
additionalProperties: false,
},
} as const;

@ -129,10 +129,22 @@ const statusSchema: statusSchema = {
onSyncEvents: { type: 'boolean' },
currentQueue: { type: 'number' },
},
required: ['rewardAccount', 'instances', 'netId', 'tornadoServiceFee', 'version', 'health', 'currentQueue'],
required: [
'rewardAccount',
'instances',
'netId',
'tornadoServiceFee',
'version',
'health',
'currentQueue',
],
};

export function getStatusSchema(netId: NetIdType, config: Config, tovarish: boolean) {
export function getStatusSchema(
netId: NetIdType,
config: Config,
tovarish: boolean,
) {
const { tokens, optionalTokens, disabledTokens, nativeCurrency } = config;

// deep copy schema
@ -140,7 +152,13 @@ export function getStatusSchema(netId: NetIdType, config: Config, tovarish: bool

const instances = Object.keys(tokens).reduce(
(acc: statusInstancesType, token) => {
const { instanceAddress, tokenAddress, symbol, decimals, optionalInstances = [] } = tokens[token];
const {
instanceAddress,
tokenAddress,
symbol,
decimals,
optionalInstances = [],
} = tokens[token];
const amounts = Object.keys(instanceAddress);

const instanceProperties: statusInstanceType = {
@ -160,7 +178,9 @@ export function getStatusSchema(netId: NetIdType, config: Config, tovarish: bool
},
{},
),
required: amounts.filter((amount) => !optionalInstances.includes(amount)),
required: amounts.filter(
(amount) => !optionalInstances.includes(amount),
),
},
decimals: { enum: [decimals] },
},
@ -178,7 +198,10 @@ export function getStatusSchema(netId: NetIdType, config: Config, tovarish: bool
}

acc.properties[token] = instanceProperties;
if (!optionalTokens?.includes(token) && !disabledTokens?.includes(token)) {
if (
!optionalTokens?.includes(token) &&
!disabledTokens?.includes(token)
) {
acc.required.push(token);
}
return acc;
@ -193,7 +216,10 @@ export function getStatusSchema(netId: NetIdType, config: Config, tovarish: bool
schema.properties.instances = instances;

const _tokens = Object.keys(tokens).filter(
(t) => t !== nativeCurrency && !config.optionalTokens?.includes(t) && !config.disabledTokens?.includes(t),
(t) =>
t !== nativeCurrency &&
!config.optionalTokens?.includes(t) &&
!config.disabledTokens?.includes(t),
);

if (netId === NetId.MAINNET) {
@ -203,10 +229,16 @@ export function getStatusSchema(netId: NetIdType, config: Config, tovarish: bool
if (_tokens.length) {
const ethPrices: statusEthPricesType = {
type: 'object',
properties: _tokens.reduce((acc: { [key in string]: typeof bnSchemaType }, token: string) => {
properties: _tokens.reduce(
(
acc: { [key in string]: typeof bnSchemaType },
token: string,
) => {
acc[token] = bnSchemaType;
return acc;
}, {}),
},
{},
),
required: _tokens,
};
schema.properties.ethPrices = ethPrices;
@ -214,7 +246,13 @@ export function getStatusSchema(netId: NetIdType, config: Config, tovarish: bool
}

if (tovarish) {
schema.required.push('gasPrices', 'latestBlock', 'latestBalance', 'syncStatus', 'onSyncEvents');
schema.required.push(
'gasPrices',
'latestBlock',
'latestBalance',
'syncStatus',
'onSyncEvents',
);
}

return schema;

@ -61,11 +61,15 @@ export async function getTokenBalances({

const ethResults = multicallResults[0];
const tokenResults = multicallResults.slice(1).length
? chunk(multicallResults.slice(1), tokenCalls.length / tokenAddresses.length)
? chunk(
multicallResults.slice(1),
tokenCalls.length / tokenAddresses.length,
)
: [];

const tokenBalances = tokenResults.map((tokenResult, index) => {
const [tokenBalance, tokenName, tokenSymbol, tokenDecimals] = tokenResult;
const [tokenBalance, tokenName, tokenSymbol, tokenDecimals] =
tokenResult;
const tokenAddress = tokenAddresses[index];

return {
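The chunking above regroups the flat multicall return into one row per token; the group size is just tokenCalls.length / tokenAddresses.length, i.e. the number of calls issued per token. A small worked example using the chunk helper from src/utils.ts, inlined here, with illustrative values only:

// Inline copy of the chunk helper from src/utils.ts.
const chunk = <T>(arr: T[], size: number): T[][] =>
    [...Array(Math.ceil(arr.length / size))].map((_, i) => arr.slice(size * i, size + size * i));

// Suppose two token addresses and four calls per token
// (balance, name, symbol, decimals), so the flat result has 8 entries.
const tokenAddresses = ['0xTokenA', '0xTokenB']; // placeholder addresses
const flatResults = ['balA', 'nameA', 'symA', 18, 'balB', 'nameB', 'symB', 6];

const perToken = chunk(flatResults, flatResults.length / tokenAddresses.length);
// perToken[0] -> ['balA', 'nameA', 'symA', 18]
// perToken[1] -> ['balB', 'nameB', 'symB', 6]
console.log(perToken);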
@ -138,7 +138,8 @@ export class TovarishClient extends RelayerClient {
const statusArray = (await fetchData(`${url}status`, {
...this.fetchDataOptions,
headers: {
'Content-Type': 'application/json, application/x-www-form-urlencoded',
'Content-Type':
'application/json, application/x-www-form-urlencoded',
},
timeout: 30000,
maxRetry: this.fetchDataOptions?.torPort ? 2 : 0,
@ -181,8 +182,14 @@ export class TovarishClient extends RelayerClient {
throw new Error('This relayer serves a different network');
}

if (relayerAddress && status.netId === NetId.MAINNET && status.rewardAccount !== relayerAddress) {
throw new Error('The Relayer reward address must match registered address');
if (
relayerAddress &&
status.netId === NetId.MAINNET &&
status.rewardAccount !== relayerAddress
) {
throw new Error(
'The Relayer reward address must match registered address',
);
}

if (!status.version.includes('tovarish')) {
@ -195,8 +202,11 @@ export class TovarishClient extends RelayerClient {
return tovarishStatus;
}

async filterRelayer(relayer: CachedRelayerInfo): Promise<TovarishInfo | RelayerError | undefined> {
const { ensName, relayerAddress, tovarishHost, tovarishNetworks } = relayer;
async filterRelayer(
relayer: CachedRelayerInfo,
): Promise<TovarishInfo | RelayerError | undefined> {
const { ensName, relayerAddress, tovarishHost, tovarishNetworks } =
relayer;

if (!tovarishHost || !tovarishNetworks?.includes(this.netId)) {
return;
@ -248,8 +258,11 @@ export class TovarishClient extends RelayerClient {
}> {
const invalidRelayers: RelayerError[] = [];

const validRelayers = (await Promise.all(relayers.map((relayer) => this.filterRelayer(relayer)))).filter(
(r) => {
const validRelayers = (
await Promise.all(
relayers.map((relayer) => this.filterRelayer(relayer)),
)
).filter((r) => {
if (!r) {
return false;
}
@ -258,8 +271,7 @@ export class TovarishClient extends RelayerClient {
return false;
}
return true;
},
) as TovarishInfo[];
}) as TovarishInfo[];

return {
validRelayers,
@ -294,7 +306,9 @@ export class TovarishClient extends RelayerClient {
ensName,
relayerAddress,
rewardAccount: getAddress(status.rewardAccount),
instances: getSupportedInstances(status.instances),
instances: getSupportedInstances(
status.instances,
),
stakeBalance: relayer.stakeBalance,
gasPrice: status.gasPrices?.fast,
ethPrices: status.ethPrices,
@ -344,7 +358,8 @@ export class TovarishClient extends RelayerClient {
// eslint-disable-next-line no-constant-condition
while (true) {
// eslint-disable-next-line prefer-const
let { events: fetchedEvents, lastSyncBlock: currentBlock } = (await fetchData(url, {
let { events: fetchedEvents, lastSyncBlock: currentBlock } =
(await fetchData(url, {
...this.fetchDataOptions,
method: 'POST',
headers: {
@ -391,7 +406,9 @@ export class TovarishClient extends RelayerClient {
break;
}

fetchedEvents = fetchedEvents.filter((e) => e.blockNumber !== lastEvent.blockNumber);
fetchedEvents = fetchedEvents.filter(
(e) => e.blockNumber !== lastEvent.blockNumber,
);
fromBlock = Number(lastEvent.blockNumber);

events.push(...fetchedEvents);
43
src/utils.ts
43
src/utils.ts
@ -16,10 +16,14 @@ export const isNode =
}
).browser && typeof globalThis.window === 'undefined';

export const crypto = isNode ? webcrypto : (globalThis.crypto as typeof webcrypto);
export const crypto = isNode
? webcrypto
: (globalThis.crypto as typeof webcrypto);

export const chunk = <T>(arr: T[], size: number): T[][] =>
[...Array(Math.ceil(arr.length / size))].map((_, i) => arr.slice(size * i, size + size * i));
[...Array(Math.ceil(arr.length / size))].map((_, i) =>
arr.slice(size * i, size + size * i),
);
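For reference, a quick usage sketch of the chunk helper above (values illustrative only):

// chunk splits an array into fixed-size groups; the last group may be shorter.
const chunk = <T>(arr: T[], size: number): T[][] =>
    [...Array(Math.ceil(arr.length / size))].map((_, i) => arr.slice(size * i, size + size * i));

console.log(chunk([1, 2, 3, 4, 5], 2)); // [[1, 2], [3, 4], [5]]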
export function sleep(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
@ -29,7 +33,9 @@ export function validateUrl(url: string, protocols?: string[]) {
try {
const parsedUrl = new URL(url);
if (protocols && protocols.length) {
return protocols.map((p) => p.toLowerCase()).includes(parsedUrl.protocol);
return protocols
.map((p) => p.toLowerCase())
.includes(parsedUrl.protocol);
}
return true;
} catch {
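One subtle point in validateUrl above: URL.protocol keeps the trailing colon, so the protocols whitelist has to include it too. A small usage sketch (helper inlined from the hunk above, with the truncated catch branch assumed to return false):

function validateUrl(url: string, protocols?: string[]) {
    try {
        const parsedUrl = new URL(url);
        if (protocols && protocols.length) {
            return protocols.map((p) => p.toLowerCase()).includes(parsedUrl.protocol);
        }
        return true;
    } catch {
        // assumed: invalid URLs simply fail validation
        return false;
    }
}

console.log(validateUrl('https://relayer.example', ['https:'])); // true
console.log(validateUrl('https://relayer.example', ['https']));  // false, note the missing colon
console.log(validateUrl('not a url'));                           // false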
@ -54,7 +60,9 @@ export function bufferToBytes(b: Buffer) {
}

export function bytesToBase64(bytes: Uint8Array) {
return btoa(bytes.reduce((data, byte) => data + String.fromCharCode(byte), ''));
return btoa(
bytes.reduce((data, byte) => data + String.fromCharCode(byte), ''),
);
}

export function base64ToBytes(base64: string) {
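bytesToBase64 above builds a binary string and hands it to btoa. The base64ToBytes body is cut off by the hunk boundary, so the decoder below is a plausible sketch rather than the library's exact code; together they round-trip a byte array:

function bytesToBase64(bytes: Uint8Array) {
    return btoa(bytes.reduce((data, byte) => data + String.fromCharCode(byte), ''));
}

// Assumed inverse for illustration; the real base64ToBytes implementation is not shown here.
function base64ToBytesSketch(base64: string) {
    return Uint8Array.from(atob(base64), (c) => c.charCodeAt(0));
}

const roundTrip = base64ToBytesSketch(bytesToBase64(new Uint8Array([1, 2, 255])));
console.log(roundTrip); // Uint8Array [1, 2, 255]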
@ -77,7 +85,11 @@ export function hexToBytes(hexString: string) {
if (hexString.length % 2 !== 0) {
hexString = '0' + hexString;
}
return Uint8Array.from((hexString.match(/.{1,2}/g) as string[]).map((byte) => parseInt(byte, 16)));
return Uint8Array.from(
(hexString.match(/.{1,2}/g) as string[]).map((byte) =>
parseInt(byte, 16),
),
);
}

// Convert BE encoded bytes (Buffer | Uint8Array) array to BigInt
@ -88,7 +100,8 @@ export function bytesToBN(bytes: Uint8Array) {
// Convert BigInt to BE encoded Uint8Array type
export function bnToBytes(bigint: bigint | string) {
// Parse bigint to hex string
let hexString: string = typeof bigint === 'bigint' ? bigint.toString(16) : bigint;
let hexString: string =
typeof bigint === 'bigint' ? bigint.toString(16) : bigint;
// Remove hex string prefix if exists
if (hexString.slice(0, 2) === '0x') {
hexString = hexString.slice(2);
@ -97,7 +110,11 @@ export function bnToBytes(bigint: bigint | string) {
if (hexString.length % 2 !== 0) {
hexString = '0' + hexString;
}
return Uint8Array.from((hexString.match(/.{1,2}/g) as string[]).map((byte) => parseInt(byte, 16)));
return Uint8Array.from(
(hexString.match(/.{1,2}/g) as string[]).map((byte) =>
parseInt(byte, 16),
),
);
}
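The pattern in hexToBytes and bnToBytes above is the same: left-pad to an even number of hex digits, split into two-character pairs, and parse each pair as one byte. A compact sketch assembled from the two hunks above (any elided comment lines between them omitted) shows what that produces:

function bnToBytes(bigint: bigint | string) {
    let hexString: string = typeof bigint === 'bigint' ? bigint.toString(16) : bigint;
    if (hexString.slice(0, 2) === '0x') {
        hexString = hexString.slice(2);
    }
    if (hexString.length % 2 !== 0) {
        hexString = '0' + hexString;
    }
    return Uint8Array.from((hexString.match(/.{1,2}/g) as string[]).map((byte) => parseInt(byte, 16)));
}

console.log(bnToBytes(4095n));        // Uint8Array [15, 255]  (0xfff padded to 0x0fff)
console.log(bnToBytes('0xdeadbeef')); // Uint8Array [222, 173, 190, 239]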
// Convert LE encoded bytes (Buffer | Uint8Array) array to BigInt
@ -152,7 +169,10 @@ export async function digest(bytes: Uint8Array, algo: string = 'SHA-384') {
return new Uint8Array(await crypto.subtle.digest(algo, bytes));
}

export function numberFormatter(num: string | number | bigint, digits: number = 3): string {
export function numberFormatter(
num: string | number | bigint,
digits: number = 3,
): string {
const lookup = [
{ value: 1, symbol: '' },
{ value: 1e3, symbol: 'K' },
@ -167,7 +187,12 @@ export function numberFormatter(num: string | number | bigint, digits: number =
.slice()
.reverse()
.find((item) => Number(num) >= item.value);
return item ? (Number(num) / item.value).toFixed(digits).replace(regexp, '').concat(item.symbol) : '0';
return item
? (Number(num) / item.value)
.toFixed(digits)
.replace(regexp, '')
.concat(item.symbol)
: '0';
}
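Only part of numberFormatter's lookup table and none of its regexp are visible in these hunks, so the trimmed-down sketch below fills those in with assumptions purely to show the intended behaviour (metric abbreviation with trailing zeros stripped):

// Hedged sketch mirroring the visible logic above; the lookup table and
// trailing-zero regexp here are assumptions, not the library's exact values.
function numberFormatterSketch(num: string | number | bigint, digits: number = 3): string {
    const lookup = [
        { value: 1, symbol: '' },
        { value: 1e3, symbol: 'K' },
        { value: 1e6, symbol: 'M' },
    ];
    const regexp = /\.0+$|(\.[0-9]*[1-9])0+$/; // assumed trailing-zero trimmer
    const item = lookup
        .slice()
        .reverse()
        .find((entry) => Number(num) >= entry.value);
    return item
        ? (Number(num) / item.value).toFixed(digits).replace(regexp, '$1').concat(item.symbol)
        : '0';
}

console.log(numberFormatterSketch(1250));    // '1.25K'
console.log(numberFormatterSketch(3400000)); // '3.4M'
console.log(numberFormatterSketch(0.5));     // '0'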
export function isHex(value: string) {

@ -69,7 +69,12 @@ export async function calculateSnarkProof(

console.log('Start generating SNARK proof', snarkInput);
console.time('SNARK proof time');
const proofData = await websnarkUtils.genWitnessAndProve(await groth16, snarkInput, circuit, provingKey);
const proofData = await websnarkUtils.genWitnessAndProve(
await groth16,
snarkInput,
circuit,
provingKey,
);
const proof = websnarkUtils.toSolidityInput(proofData).proof;
console.timeEnd('SNARK proof time');

@ -58,7 +58,9 @@ export async function downloadZip<T>({

const { [zipName]: content } = await unzipAsync(data);

console.log(`Downloaded ${url}${zipDigest ? ` ( Digest: ${zipDigest} )` : ''}`);
console.log(
`Downloaded ${url}${zipDigest ? ` ( Digest: ${zipDigest} )` : ''}`,
);

if (parseJson) {
return JSON.parse(new TextDecoder().decode(content)) as T;

@ -15,11 +15,17 @@ describe('./src/deposit.ts', function () {
const instanceFixture = async () => {
const [owner] = await getSigners();

const Hasher = (await (await deployHasher(owner)).wait())?.contractAddress as string;
const Hasher = (await (await deployHasher(owner)).wait())
?.contractAddress as string;

const Verifier = await new Verifier__factory(owner).deploy();

const Instance = await new ETHTornado__factory(owner).deploy(Verifier.target, Hasher, 1n, 20);
const Instance = await new ETHTornado__factory(owner).deploy(
Verifier.target,
Hasher,
1n,
20,
);

return { Instance };
};
@ -41,7 +47,9 @@ describe('./src/deposit.ts', function () {
value: 1n,
});

await expect(resp).to.emit(Instance, 'Deposit').withArgs(deposit.commitmentHex, 0, anyValue);
await expect(resp)
.to.emit(Instance, 'Deposit')
.withArgs(deposit.commitmentHex, 0, anyValue);

expect(await Instance.commitments(deposit.commitmentHex)).to.be.true;
});