before dangerous ops

Signed-off-by: T-Hax <>
This commit is contained in:
T-Hax 2023-05-06 23:59:38 +00:00
parent 4065d89909
commit 704648ed2c
41 changed files with 31021 additions and 460 deletions

@ -1,13 +0,0 @@
# RPC URLs
ETH_MAINNET_TEST_RPC=
# debug (debug events are logged to console)
DEBUG=
# use tor (torify tests)
TORIFY=
# tor port (regular = 9050, browser = 9150)
TOR_PORT=
# relayer DOMAIN (the example.xyz in https://example.xyz) for testing
TEST_RELAYER_DOMAIN=

@ -0,0 +1 @@
TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.

@ -47,13 +47,14 @@
"ganache": "^7.7.7", "ganache": "^7.7.7",
"mocha": "^10.2.0", "mocha": "^10.2.0",
"prettier": "^2.3.0", "prettier": "^2.3.0",
"rimraf": "^4.4.0", "rimraf": "^5.0.0",
"source-map-support": "^0.5.19", "source-map-support": "^0.5.19",
"ts-essentials": "^9.3.1", "ts-essentials": "^9.3.1",
"ts-node": "^10.9.1", "ts-node": "^10.9.1",
"tsc-alias": "^1.2.11", "tsc-alias": "^1.2.11",
"tsconfig-paths": "^4.1.2", "tsconfig-paths": "^4.1.2",
"typechain": "^8.1.1", "typechain": "^8.1.1",
"typedoc": "^0.24.6",
"typescript": "^5.0.4" "typescript": "^5.0.4"
}, },
"scripts": { "scripts": {
@ -65,7 +66,7 @@
"postversion": "git push --follow-tags && npm publish", "postversion": "git push --follow-tags && npm publish",
"lint": "eslint --ext ts,js --fix src", "lint": "eslint --ext ts,js --fix src",
"build-live": "tsc -w && tsc-alias -w", "build-live": "tsc -w && tsc-alias -w",
"clean": "rm -rf --interactive=never cache/*", "clean": "rm -rf --interactive=never docs/*",
"format": "prettier src/**/*.ts -w" "format": "prettier src/**/*.ts -w"
}, },
"files": [ "files": [

@ -70,23 +70,39 @@ export class Chain {
public signer?: Signer public signer?: Signer
public provider: Provider public provider: Provider
private _emptySigner: VoidSigner private _emptySigner: VoidSigner
public chainId?: number
public symbol?: string private _chainId?: number
private _symbol?: string
private _fetched: boolean
constructor(provider: Provider, signer?: Signer) { constructor(provider: Provider, signer?: Signer) {
this.provider = provider this.provider = provider
this.signer = signer this.signer = signer
this._emptySigner = new VoidSigner('0x' + randomBytes(20).toString('hex'), provider) this._emptySigner = new VoidSigner('0x' + randomBytes(20).toString('hex'), provider)
this._fetched = false
} }
async getChainId(): Promise<number> { async fetchChainData(): Promise<void> {
if (!this.chainId) this.chainId = (await this.provider.getNetwork()).chainId const network = await this.provider.getNetwork()
return this.chainId this._chainId = network.chainId
this._symbol = await Onchain.getNetworkSymbol(String(network.chainId))
this._fetched = true
} }
async getChainSymbol(): Promise<string> { private _propertiesFetched(parentCallName: string): void {
if (!this.symbol) this.symbol = await Onchain.getNetworkSymbol(String(await this.getChainId())) if (!this._fetched)
return this.symbol throw ErrorUtils.getError(
`Chain.${parentCallName}: properties must be fetched first with \`fetchProperties\`.`
)
}
get id(): number {
this._propertiesFetched('id')
return this._chainId!
}
get symbol(): string {
this._propertiesFetched('symbol')
return this._symbol!
} }
latestBlockNum(): Promise<number> { latestBlockNum(): Promise<number> {
@ -127,12 +143,12 @@ export class Chain {
): Promise<TransactionRequest> { ): Promise<TransactionRequest> {
if (callStruct[0].value) if (callStruct[0].value)
return await Multicall3Contract__factory.connect( return await Multicall3Contract__factory.connect(
await Onchain.getMulticall3Address(String(this.chainId)), await Onchain.getMulticall3Address(String(this.id)),
this.provider this.provider
).populateTransaction.aggregate3Value(callStruct as Array<Multicall3.Call3ValueStruct>) ).populateTransaction.aggregate3Value(callStruct as Array<Multicall3.Call3ValueStruct>)
return await Multicall3Contract__factory.connect( return await Multicall3Contract__factory.connect(
await Onchain.getMulticall3Address(String(this.chainId)), await Onchain.getMulticall3Address(String(this.id)),
this.provider this.provider
).populateTransaction.aggregate3(callStruct) ).populateTransaction.aggregate3(callStruct)
} }
@ -143,12 +159,12 @@ export class Chain {
if (this.signer) if (this.signer)
if (callStruct[0].value) if (callStruct[0].value)
return await Multicall3Contract__factory.connect( return await Multicall3Contract__factory.connect(
await Onchain.getMulticall3Address(String(this.chainId)), await Onchain.getMulticall3Address(String(this.id)),
this.signer this.signer
).aggregate3Value(callStruct as Array<Multicall3.Call3ValueStruct>) ).aggregate3Value(callStruct as Array<Multicall3.Call3ValueStruct>)
else { else {
return await Multicall3Contract__factory.connect( return await Multicall3Contract__factory.connect(
await Onchain.getMulticall3Address(String(this.chainId)), await Onchain.getMulticall3Address(String(this.id)),
this.provider this.provider
).aggregate3(callStruct) ).aggregate3(callStruct)
} }
@ -181,33 +197,30 @@ export namespace Contracts {
type Path = string type Path = string
const contractMap: Map<Path, BaseContract> = new Map<Path, BaseContract>() const contractMap: Map<Path, BaseContract> = new Map<Path, BaseContract>()
export async function getProxy( export function getProxy(network: string, signerOrProvider: Signer | Provider): TornadoProxy {
network: string,
signerOrProvider: Signer | Provider
): Promise<TornadoProxy> {
const key = `TornadoProxy${network}` const key = `TornadoProxy${network}`
if (!contractMap.has(key)) { if (!contractMap.has(key)) {
contractMap.set( contractMap.set(
key, key,
_getContract<TornadoProxy>('TornadoProxy', await Onchain.getProxyAddress(network), signerOrProvider) _getContract<TornadoProxy>('TornadoProxy', Onchain.getProxyAddressSync(network), signerOrProvider)
) )
} }
return contractMap.get(`TornadoProxy${network}`) as TornadoProxy return contractMap.get(`TornadoProxy${network}`) as TornadoProxy
} }
export async function getInstance( export function getInstance(
network: string, network: string,
token: string, token: string,
denomination: string, denomination: string,
signerOrProvider: Signer | Provider signerOrProvider: Signer | Provider
): Promise<TornadoInstance> { ): TornadoInstance {
const key = `TornadoInstance${network}${token}${denomination}` const key = `TornadoInstance${network}${token}${denomination}`
if (!contractMap.has(key)) { if (!contractMap.has(key)) {
contractMap.set( contractMap.set(
key, key,
_getContract<TornadoInstance>( _getContract<TornadoInstance>(
'TornadoInstance', 'TornadoInstance',
await Onchain.getInstanceAddress(network, token, denomination), Onchain.getInstanceAddressSync(network, token, denomination),
signerOrProvider signerOrProvider
) )
) )
@ -235,7 +248,8 @@ export namespace Contracts {
export abstract class Synchronizer extends EventEmitter { export abstract class Synchronizer extends EventEmitter {
async sync( async sync(
event: EventFilter, eventName: string,
filter: EventFilter,
contract: BaseContract, contract: BaseContract,
cache: Cache.Syncable<Docs.Base>, cache: Cache.Syncable<Docs.Base>,
options?: Options.Sync options?: Options.Sync
@ -301,11 +315,7 @@ export abstract class Synchronizer extends EventEmitter {
this.emit('sync', 'synced') this.emit('sync', 'synced')
// Immediately start listening if we're doing this // Immediately start listening if we're doing this
if (_options.listenForEvents) { if (_options.listenForEvents) this.listenForEvents(eventName, contract, filter, cache)
contract = contract.on(event, (...eventArgs) => {
this.emit(cache.name, 'received', cache.db.put(cache.buildDoc(eventArgs[eventArgs.length - 1])))
})
}
// Then wait for all pooler requests to resolve // Then wait for all pooler requests to resolve
let results = await cache.pooler!.all() let results = await cache.pooler!.all()
@ -325,5 +335,21 @@ export abstract class Synchronizer extends EventEmitter {
}) })
} }
listenForEvents(
name: string,
contract: BaseContract,
filter: EventFilter,
cache: Cache.Syncable<Docs.Base>
) {
contract.on(filter, (...eventArgs) => {
this.emit(name, cache.name, cache.db.put(cache.buildDoc(eventArgs[eventArgs.length - 1])))
})
}
clearListenerByIndex(contract: BaseContract, event: EventFilter, listenerIndex: number = 0): void {
const listeners = contract.listeners()
contract.off(event, listeners[listenerIndex])
}
protected abstract _populateSyncOptions(options?: Options.Sync): Promise<DeepRequired<Options.Sync>> protected abstract _populateSyncOptions(options?: Options.Sync): Promise<DeepRequired<Options.Sync>>
} }

@ -1,13 +0,0 @@
# RPC URLs
ETH_MAINNET_TEST_RPC=
# debug (debug events are logged to console)
DEBUG=
# use tor (torify tests)
TORIFY=
# tor port (regular = 9050, browser = 9150)
TOR_PORT=
# relayer DOMAIN (the example.xyz in https://example.xyz) for testing
TEST_RELAYER_DOMAIN=

@ -0,0 +1 @@
TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.

@ -49,13 +49,14 @@
"ganache": "^7.7.7", "ganache": "^7.7.7",
"mocha": "^10.2.0", "mocha": "^10.2.0",
"prettier": "^2.3.0", "prettier": "^2.3.0",
"rimraf": "^4.4.0", "rimraf": "^5.0.0",
"source-map-support": "^0.5.19", "source-map-support": "^0.5.19",
"ts-essentials": "^9.3.1", "ts-essentials": "^9.3.1",
"ts-node": "^10.9.1", "ts-node": "^10.9.1",
"tsc-alias": "^1.2.11", "tsc-alias": "^1.2.11",
"tsconfig-paths": "^4.1.2", "tsconfig-paths": "^4.1.2",
"typechain": "^8.1.1", "typechain": "^8.1.1",
"typedoc": "^0.24.6",
"typescript": "^5.0.4" "typescript": "^5.0.4"
}, },
"scripts": { "scripts": {
@ -67,7 +68,7 @@
"postversion": "git push --follow-tags && npm publish", "postversion": "git push --follow-tags && npm publish",
"lint": "eslint --ext ts,js --fix src", "lint": "eslint --ext ts,js --fix src",
"build-live": "tsc -w && tsc-alias -w", "build-live": "tsc -w && tsc-alias -w",
"clean": "rm -rf --interactive=never cache/*", "clean": "rm -rf --interactive=never docs/*",
"format": "prettier src/**/*.ts -w" "format": "prettier src/**/*.ts -w"
}, },
"files": [ "files": [

@ -10,7 +10,7 @@ import { ZKDepositData, InputFor } from '@tornado/sdk-crypto'
// External imports // External imports
import { TransactionRequest } from '@ethersproject/abstract-provider' import { TransactionRequest } from '@ethersproject/abstract-provider'
import { BigNumber, providers } from 'ethers' import { BigNumber, EventFilter, providers } from 'ethers'
import { parseUnits } from 'ethers/lib/utils' import { parseUnits } from 'ethers/lib/utils'
import { bigInt } from 'snarkjs' import { bigInt } from 'snarkjs'
@ -30,10 +30,6 @@ export namespace Options {
export interface Deposit { export interface Deposit {
depositsPerInstance?: Array<number> depositsPerInstance?: Array<number>
doNotPopulate?: boolean doNotPopulate?: boolean
backup?: {
invoices?: boolean
notes?: boolean
}
} }
export type Invoice = Deposit export type Invoice = Deposit
@ -128,58 +124,52 @@ export class WithdrawalCache extends Cache.Syncable<Docs.Withdrawal> {
type Provider = providers.Provider type Provider = providers.Provider
type BackupDepositDoc = {
network: string
denomination: string
token: string
invoice?: string
note?: string
}
type RelayerProperties = MarkOptional< type RelayerProperties = MarkOptional<
Pick<RelayerDataProperties, 'address' | 'serviceFeePercent' | 'prices'>, Pick<RelayerDataProperties, 'address' | 'serviceFeePercent' | 'prices'>,
'serviceFeePercent' | 'prices' 'serviceFeePercent' | 'prices'
> >
export class Core extends Synchronizer { export class Core extends Synchronizer {
chain: Chain private _mutex: AsyncUtils.SimpleMutex
caches: Map<string, Cache.Base<Docs.Base>> caches: Map<string, Cache.Base<Docs.Base>>
instances: Map<string, TornadoInstance> chain?: Chain
constructor(provider: providers.Provider) { constructor() {
super() super()
this.chain = new Chain(provider)
this.caches = new Map<string, Cache.Syncable<Docs.Base>>() this.caches = new Map<string, Cache.Syncable<Docs.Base>>()
this.instances = new Map<string, TornadoInstance>() this._mutex = new AsyncUtils.SimpleMutex()
} }
connect(provider: Provider): void { private _checkProvider(parentCallName: string): void {
this.chain.provider = provider try {
this.chain?.id
} catch (err) {
throw ErrorUtils.getError('Core.' + parentCallName + ': you must first connect a provider!')
}
} }
async getInstances( async connect(provider: Provider): Promise<void> {
keys: Array<{ token: string; denomination: number | string }> if (!this.chain) this.chain = new Chain(provider)
): Promise<Array<TornadoInstance>> { else this.chain.provider = provider
const chainId = await this.chain.getChainId() await this.chain.fetchChainData()
return Promise.all( }
keys.map((key) =>
Contracts.getInstance(String(chainId), key.token, String(key.denomination), this.chain.provider) getInstances(keys: Array<{ token: string; denomination: number | string }>): Array<TornadoInstance> {
) this._checkProvider('getInstances')
return keys.map((key) =>
Contracts.getInstance(String(this.chain!.id), key.token, String(key.denomination), this.chain!.provider)
) )
} }
async getInstance(token: string, denomination: number | string): Promise<TornadoInstance> { getInstance(token: string, denomination: number | string): TornadoInstance {
const chainId = String(await this.chain.getChainId()) this._checkProvider('getInstance')
token = token.toLowerCase() return this.loadInstance(this.chain!.id, token, denomination)
denomination = String(denomination)
if (this.instances.has(chainId + token + denomination))
return this.instances.get(chainId + token + denomination)!
else return Contracts.getInstance(chainId, token, denomination, this.chain.provider)
} }
async getProxy(): Promise<TornadoProxy> { getProxy(): TornadoProxy {
const chainId = await this.chain.getChainId() this._checkProvider('getProxy')
return Contracts.getProxy(String(chainId), this.chain.provider) return Contracts.getProxy(String(this.chain!.id), this.chain!.provider)
} }
async buildDepositProof( async buildDepositProof(
@ -215,6 +205,8 @@ export class Core extends Synchronizer {
zkDepositsData: Array<ZKDepositData>, zkDepositsData: Array<ZKDepositData>,
options?: Options.Core.BuildDepositProof options?: Options.Core.BuildDepositProof
): Promise<Array<Array<string>>> { ): Promise<Array<Array<string>>> {
this._checkProvider('buildDepositProofs')
// Extract commitments and nullifier hashes // Extract commitments and nullifier hashes
const hexCommitments: string[] = [] const hexCommitments: string[] = []
const hexNullifierHashes: string[] = [] const hexNullifierHashes: string[] = []
@ -237,8 +229,8 @@ export class Core extends Synchronizer {
}) })
// Determine cache name // Determine cache name
const lookupKeys = await Onchain.getInstanceLookupKeys(instance.address) const { network, token, denomination } = await Onchain.getInstanceLookupKeys(instance.address)
const name = 'Deposits' + (lookupKeys.network + lookupKeys.token + lookupKeys.denomination).toUpperCase() const name = 'Deposits' + (network + token + denomination).toUpperCase()
// Find all leaves & indices by reading from cache // Find all leaves & indices by reading from cache
const [leaves, leafIndices] = await this._findLeavesAndIndices(name, hexCommitments) const [leaves, leafIndices] = await this._findLeavesAndIndices(name, hexCommitments)
@ -305,20 +297,23 @@ export class Core extends Synchronizer {
// Rest of note invariant arguments // Rest of note invariant arguments
const inputsForProofs: InputFor.ZKProof[] = [] const inputsForProofs: InputFor.ZKProof[] = []
const gasPrice = options?.gasPrice ?? (await this.chain.getGasPrice()) const gasPrice = options?.gasPrice ?? (await this.chain!.getGasPrice())
const gasPriceCushion = options?.gasPrice ?? gasPrice.mul(10).div(100) const gasPriceCushion = options?.gasPrice ?? gasPrice.mul(10).div(100)
// In reality, if a manual withdraw is made, we don't differentiate it from a relayer withdraw // In reality, if a manual withdraw is made, we don't differentiate it from a relayer withdraw
// Since it is only serviceFee 0 AND without a token price, the function will not buy more tokens // Since it is only serviceFee 0 AND without a token price, the function will not buy more tokens
const serviceFeePercent = relayerProperties.serviceFeePercent ?? 0 const serviceFeePercent = relayerProperties.serviceFeePercent ?? 0
const tokenPrice = relayerProperties.prices?.get(lookupKeys.token) const tokenPrice = relayerProperties.prices?.get(token)
const decimals = BigNumber.from(10).pow( const decimals = BigNumber.from(10).pow(
options?.tokenDecimals ?? (await Onchain.getTokenDecimals(lookupKeys.network, lookupKeys.token)) options?.tokenDecimals ?? (await Onchain.getTokenDecimals(network, token))
) )
const toWithdraw = BigNumber.from(+lookupKeys.denomination * 10 ** lookupKeys.denomination.length)
const toWithdraw = BigNumber.from(+denomination * 10 ** denomination.length)
.mul(decimals) .mul(decimals)
.div(10 ** lookupKeys.denomination.length) .div(10 ** denomination.length)
const native = lookupKeys.token == (await this.chain.getChainSymbol())
const native = token == this.chain!.symbol
if (!tokenPrice && !native) if (!tokenPrice && !native)
throw ErrorUtils.getError( throw ErrorUtils.getError(
@ -403,6 +398,9 @@ export class Core extends Synchronizer {
const leaves: Array<string> = [] const leaves: Array<string> = []
const cache = this.loadCache<Cache.Base<Docs.Deposit>>(instanceName) const cache = this.loadCache<Cache.Base<Docs.Deposit>>(instanceName)
// Shallow copy so we can find indexes again for commitments
const commitmentsCopy = [...commitments]
const docs = await cache.db.allDocs() const docs = await cache.db.allDocs()
// If no docs in cache throw and stop // If no docs in cache throw and stop
@ -415,13 +413,25 @@ export class Core extends Synchronizer {
// Otherwise start looking for commitment leaf indices and also pick up all other leafs on the way // Otherwise start looking for commitment leaf indices and also pick up all other leafs on the way
for (const row of docs.rows) { for (const row of docs.rows) {
const [, leafIndex, loadedCommitment] = parseIndexableString(row.id) let index = -1
const index = commitments.findIndex((commitment) => commitment === loadedCommitment)
// If some commitment is found then add the leaf index and remove that commitment const [, leafIndex, loadedCommitment] = parseIndexableString(row.id)
// Search only if there is some left
if (commitments.length !== 0)
index = commitments.findIndex((commitment) => commitment === loadedCommitment)
// If some commitment is found then add the leaf index
if (index !== -1) { if (index !== -1) {
indices[index] = leafIndex // Add it there where we intended for it to be originally
indices[commitmentsCopy.findIndex((commitment) => commitment === loadedCommitment)] = leafIndex
commitments.splice(index, 1) commitments.splice(index, 1)
this.emit(
'debug',
`\nMatched commitment ${loadedCommitment} @ leaf index ${leafIndex}, leftover commitments:\n\n${commitments.join(
'\n'
)}\n`
)
} }
// In any case push every leaf // In any case push every leaf
@ -443,8 +453,7 @@ export class Core extends Synchronizer {
if (indexes) if (indexes)
for (let i = 0, len = rows.length; i < len; i++) { for (let i = 0, len = rows.length; i < len; i++) {
const [index, , ,] = parseIndexableString(rows[i].id)[0] docs.push(rows[indexes[i]].doc)
if (0 < indexes.findIndex(index)) docs.push(rows[i].doc)
} }
else docs = rows.map((row) => row.doc) else docs = rows.map((row) => row.doc)
@ -469,82 +478,141 @@ export class Core extends Synchronizer {
return this.parseNotes([note])[0] return this.parseNotes([note])[0]
} }
async createInvoice( clearListener(
instance: TornadoInstance, instance: TornadoInstance | string,
options?: Omit<Options.Core.Invoice, 'depositsPerInstance'> event: Function | number = 0,
): Promise<Transactions.Invoice> { listenerIndex: number = 0
let opts: Options.Core.Invoice = options ?? {} ): void {
opts.depositsPerInstance = [1] const _instance = this._resolveInstance(instance)
return (await this.createInvoices([instance], options))[0] const filter = this._resolveInstanceEvent(_instance, event)
this.clearListenerByIndex(
_instance,
this._instanceEventToFilter(filter, _instance.filters.Deposit),
listenerIndex
)
} }
async createInvoices( clearListeners(instance: TornadoInstance | string): void {
instances: Array<TornadoInstance>, this._resolveInstance(instance).removeAllListeners()
options?: Options.Core.Invoice
): Promise<Array<Transactions.Invoice>> {
if (!options) options = {}
if (!options.backup) options.backup = {}
options.backup.invoices = options.backup.invoices ?? true
options.backup.notes = options.backup.notes ?? true
options.doNotPopulate = options.doNotPopulate ?? true
return this.buildDepositTransactions(instances, options)
} }
async buildDepositTransaction( listenForDeposits(instance: TornadoInstance | string): void {
instance: TornadoInstance, this.listenForInstanceEvents(instance, 0)
options?: Options.Core.Deposit }
): Promise<Transactions.Deposit> {
listenForWithdrawals(instance: TornadoInstance | string): void {
this.listenForInstanceEvents(instance, 1)
}
listenForInstanceEvents(instance: TornadoInstance | string, event: Function | number = 0): void {
let _instance: TornadoInstance
let key: string
if (typeof instance !== 'string') {
const { network, token, denomination } = Onchain.getInstanceLookupKeysSync(instance.address)
_instance = this.loadInstance(network, token, denomination)
key = network + token + denomination
} else {
key = instance.toLowerCase()
_instance = this._resolveInstance(key)
}
const filter = this._resolveInstanceEvent(_instance!, event)
const isDeposit = filter == _instance.filters.Deposit
const cache = isDeposit
? this.loadDepositCache('Deposits' + key.toUpperCase())
: this.loadWithdrawalCache('Withdrawals' + key.toUpperCase())
this.listenForEvents(
isDeposit ? 'deposit' : 'withdrawal',
_instance!,
this._instanceEventToFilter(filter, _instance.filters.Deposit),
cache
)
}
private _instanceEventToFilter(event: Function, depositEvent: Function): EventFilter {
return event == depositEvent ? event(null, null, null) : event(null, null, null, null)
}
private _resolveInstanceEvent(instance: TornadoInstance, event: Function | number = 0): Function {
let filter: Function
if (typeof event === 'number') {
filter = event === 0 ? instance.filters.Deposit : instance.filters.Withdrawal
} else filter = event
return filter
}
private _resolveInstance(instance: TornadoInstance | string): TornadoInstance {
let _instance: TornadoInstance
if (typeof instance === 'string') {
instance = instance.toLowerCase()
const regexp = /([0-9]+)([a-z]+)([0-9.]+)/
const matches = instance.match(regexp)?.slice(1)
if (!matches || matches.length === 0)
throw ErrorUtils.getError('Core._resolveInstance: instance string key invalid.')
_instance = this.loadInstance(matches[0], matches[1], matches[2])
} else _instance = instance
return _instance
}
/**
* This is the main function to build a single Tornado Cash Classic deposit. An address need not be supplied because the returned note proves a deposit.
* @param instance The TornadoInstance for which to build transactions.
* @param options Whether or not to populate the transactions (only in the sense of encoding transaction data), and whether to backup notes and invoices. Defaults: `depositsPerInstance = [1], doNotPopulate = false, backup { notes = true, invoices = false }` Deposits per instance are hardcoded to 1, since we're doing a single transaction.
* @returns A promise which resolves to the created transaction.
*/
buildDepositTransaction(instance: TornadoInstance, options?: Options.Core.Deposit): Transactions.Deposit {
let opts: Options.Core.Deposit = options ?? {} let opts: Options.Core.Deposit = options ?? {}
opts.depositsPerInstance = [1] opts.depositsPerInstance = [1]
return (await this.buildDepositTransactions([instance], opts))[0] return this.buildDepositTransactions([instance], opts)[0]
} }
async buildDepositTransactions( /**
* This is the main function which is used to build Tornado Cash Classic deposit transactions. An address need not be supplied because the returned note proves a deposit.
* @param instances The TornadoInstance instances for which to build transactions.
* @param options The number of deposits per instance, whether or not to populate the transactions (only in the sense of encoding transaction data), and whether to backup notes and invoices. Defaults: `depositsPerInstance = [1]*instance_num, doNotPopulate = false, backup { notes = true, invoices = false }`
* @returns A promise which resolves to the created transactions.
* @todo TODO: Maybe this should be sync and deposit backups should be async somewhere else
*/
buildDepositTransactions(
instances: Array<TornadoInstance>, instances: Array<TornadoInstance>,
options?: Options.Core.Deposit options?: Options.Core.Deposit
): Promise<Array<Transactions.Deposit>> { ): Array<Transactions.Deposit> {
this._checkProvider('buildDepositTransactions')
const depositsPerInstance = options?.depositsPerInstance ?? new Array<number>(instances.length).fill(1) const depositsPerInstance = options?.depositsPerInstance ?? new Array<number>(instances.length).fill(1)
const doNotPopulate = options?.doNotPopulate ?? false const doNotPopulate = options?.doNotPopulate ?? false
const backupNotes = options?.backup?.notes ?? true
const backupInvoices = options?.backup?.invoices ?? false
if (depositsPerInstance.length != instances.length) if (depositsPerInstance.length != instances.length)
throw ErrorUtils.getError( throw ErrorUtils.getError(
'Core.buildDepositTx: number of deposit amount elements must equal the number of instances!' 'Core.buildDepositTx: number of deposit amount elements must equal the number of instances!'
) )
const notesToBackup: Array<BackupDepositDoc> = [] const chainId = this.chain!.id
const invoicesToBackup: Array<BackupDepositDoc> = []
const txs: Array<Transactions.Deposit> = []
const chainId = await this.chain.getChainId()
const proxy: TornadoProxy = await Contracts.getProxy(String(chainId), this.chain.provider) const proxy: TornadoProxy = Contracts.getProxy(String(chainId), this.chain!.provider)
const txs: Array<Transactions.Deposit> = []
for (let i = 0, nInstances = instances.length; i < nInstances; i++) { for (let i = 0, nInstances = instances.length; i < nInstances; i++) {
const lookupKeys = await Onchain.getInstanceLookupKeys(instances[i].address) const { network, token, denomination } = Onchain.getInstanceLookupKeysSync(instances[i].address)
const pathstring = lookupKeys.network + lookupKeys.token + lookupKeys.denomination const pathstring = network + token + denomination
for (let d = 0, nDeposits = depositsPerInstance[i]; d < nDeposits; d++) { for (let d = 0, nDeposits = depositsPerInstance[i]; d < nDeposits; d++) {
const deposit = Primitives.createDeposit() const deposit = Primitives.createDeposit()
const note = Primitives.createNote(deposit.preimage) const note = Primitives.createNote(deposit.preimage)
if (backupNotes)
notesToBackup.push({
network: lookupKeys.network,
denomination: lookupKeys.denomination,
token: lookupKeys.token,
note: note
})
if (backupInvoices)
invoicesToBackup.push({
network: lookupKeys.network,
denomination: lookupKeys.denomination,
token: lookupKeys.token,
invoice: deposit.hexCommitment
})
if (!doNotPopulate) { if (!doNotPopulate) {
txs.push({ txs.push({
request: { request: {
@ -554,7 +622,7 @@ export class Core extends Synchronizer {
deposit.hexCommitment, deposit.hexCommitment,
[] []
]), ]),
value: lookupKeys.token == 'eth' ? parseUnits(lookupKeys.denomination) : BigNumber.from(0) value: token == 'eth' ? parseUnits(denomination) : BigNumber.from(0)
}, },
note: pathstring + '_' + note, note: pathstring + '_' + note,
invoice: pathstring + '_' + deposit.hexCommitment invoice: pathstring + '_' + deposit.hexCommitment
@ -568,41 +636,67 @@ export class Core extends Synchronizer {
} }
} }
if (backupNotes)
await this._backupDepositData(this.loadCache<Cache.Base<Docs.Note>>('DepositNotes'), notesToBackup)
if (backupInvoices)
await this._backupDepositData(
this.loadCache<Cache.Base<Docs.Invoice>>('DepositInvoices'),
invoicesToBackup
)
return txs return txs
} }
async backupNote(instance: TornadoInstance, transaction: Transactions.Deposit): Promise<void> {
await this.backupNotes(instance, [transaction])
}
async backupInvoice(instance: TornadoInstance, transaction: Transactions.Deposit): Promise<void> {
await this.backupInvoices(instance, [transaction])
}
async backupNotes(instance: TornadoInstance, transactions: Array<Transactions.Deposit>): Promise<void> {
const { network, token, denomination } = await Onchain.getInstanceLookupKeys(instance.address)
await this._backupDepositData(
network,
token,
denomination,
transactions,
this.loadCache<Cache.Base<Docs.Note>>('DepositNotes')
)
}
async backupInvoices(instance: TornadoInstance, transactions: Array<Transactions.Deposit>): Promise<void> {
const { network, token, denomination } = await Onchain.getInstanceLookupKeys(instance.address)
await this._backupDepositData(
network,
token,
denomination,
transactions,
this.loadCache<Cache.Base<Docs.Invoice>>('DepositInvoices')
)
}
private async _backupDepositData<T extends Docs.Note | Docs.Invoice>( private async _backupDepositData<T extends Docs.Note | Docs.Invoice>(
cache: Cache.Base<T>, network: string,
backupData: Array<BackupDepositDoc> token: string,
denomination: string,
transactions: Array<Transactions.Deposit>,
cache: Cache.Base<T>
): Promise<void> { ): Promise<void> {
const notes = cache.name.length === 12 ? true : false
const name = notes ? 'notes' : 'invoices'
// We need a mutex here
const release = await this._mutex.acquire(name)
let id = +(await cache.db.info()).update_seq let id = +(await cache.db.info()).update_seq
await cache.db await cache.db
.bulkDocs( .bulkDocs(
backupData.map((entry) => { transactions.map((transaction) => {
if (entry.note) if (notes) return new Docs.Note(++id, network, token, denomination, transaction.note!)
return new Docs.Note(++id, entry.network, entry.token, entry.denomination, entry.note) else return new Docs.Invoice(++id, network, token, denomination, transaction.invoice!)
else if (entry.invoice)
return new Docs.Invoice(++id, entry.network, entry.token, entry.denomination, entry.invoice)
}) as Array<T> }) as Array<T>
) )
.catch((err) => { .catch((err) => {
throw ErrorUtils.ensureError(err) throw ErrorUtils.ensureError(err)
}) })
// TODO: Decide whether to close caches by default or not // Release
//await cache.close().catch((err) => { release()
// throw ErrorUtils.ensureError(err)
//})
} }
loadDepositCache(name: string, options?: Options.Sync): DepositCache { loadDepositCache(name: string, options?: Options.Sync): DepositCache {
@ -644,50 +738,57 @@ export class Core extends Synchronizer {
return this.caches.get(name) as C return this.caches.get(name) as C
} }
loadInstance(chainId: number | string, token: string, denomination: number | string): TornadoInstance {
token = token.toLowerCase()
return Contracts.getInstance('' + chainId, token, '' + denomination, this.chain!.provider)
}
async syncDeposits(instance: TornadoInstance, options?: Options.Sync): Promise<void> { async syncDeposits(instance: TornadoInstance, options?: Options.Sync): Promise<void> {
const lookupKeys = await Onchain.getInstanceLookupKeys(instance.address) this._checkProvider('syncDeposits')
const pathstring = lookupKeys.network + lookupKeys.token + lookupKeys.denomination
const { network, token, denomination } = await Onchain.getInstanceLookupKeys(instance.address)
const pathstring = network + token + denomination
options = options ?? {} options = options ?? {}
options.startBlock = await Onchain.getInstanceDeployBlockNum( options.startBlock = await Onchain.getInstanceDeployBlockNum(network, token, denomination)
lookupKeys.network,
lookupKeys.token,
lookupKeys.denomination
)
const populatedOptions = await this._populateSyncOptions(options) const populatedOptions = await this._populateSyncOptions(options)
const cache = this.loadDepositCache('Deposits' + pathstring.toUpperCase(), populatedOptions) const cache = this.loadDepositCache('Deposits' + pathstring.toUpperCase(), populatedOptions)
await this.sync(instance.filters.Deposit(null, null, null), instance, cache, populatedOptions) await this.sync('deposit', instance.filters.Deposit(null, null, null), instance, cache, populatedOptions)
if (!this.instances.has(pathstring)) this.instances.set(pathstring, instance)
if (!this.caches.has(cache.name)) this.caches.set(cache.name, cache) if (!this.caches.has(cache.name)) this.caches.set(cache.name, cache)
} }
async syncWithdrawals(instance: TornadoInstance, options?: Options.Sync): Promise<void> { async syncWithdrawals(instance: TornadoInstance, options?: Options.Sync): Promise<void> {
const lookupKeys = await Onchain.getInstanceLookupKeys(instance.address) this._checkProvider('syncWithdrawals')
const pathstring = lookupKeys.network + lookupKeys.token + lookupKeys.denomination
const { network, token, denomination } = await Onchain.getInstanceLookupKeys(instance.address)
const pathstring = network + token + denomination
options = options ?? {} options = options ?? {}
options.startBlock = await Onchain.getInstanceDeployBlockNum( options.startBlock = await Onchain.getInstanceDeployBlockNum(network, token, denomination)
lookupKeys.network,
lookupKeys.token,
lookupKeys.denomination
)
const populatedOptions = await this._populateSyncOptions(options) const populatedOptions = await this._populateSyncOptions(options)
const cache = this.loadWithdrawalCache('Withdrawals' + pathstring.toUpperCase(), populatedOptions) const cache = this.loadWithdrawalCache('Withdrawals' + pathstring.toUpperCase(), populatedOptions)
await this.sync(instance.filters.Withdrawal(null, null, null), instance, cache, populatedOptions) await this.sync(
'withdrawal',
instance.filters.Withdrawal(null, null, null),
instance,
cache,
populatedOptions
)
if (!this.instances.has(pathstring)) this.instances.set(pathstring, instance)
if (!this.caches.has(cache.name)) this.caches.set(cache.name, cache) if (!this.caches.has(cache.name)) this.caches.set(cache.name, cache)
} }
protected async _populateSyncOptions(options: Options.Sync): Promise<DeepRequired<Options.Sync>> { protected async _populateSyncOptions(options: Options.Sync): Promise<DeepRequired<Options.Sync>> {
if (!options.startBlock) throw ErrorUtils.getError('Core._populateSyncOptions: startBlock not set.') if (!options.startBlock) throw ErrorUtils.getError('Core._populateSyncOptions: startBlock not set.')
options.targetBlock = options.targetBlock ?? (await this.chain.latestBlockNum()) options.targetBlock = options.targetBlock ?? (await this.chain!.latestBlockNum())
options.blockDivisor = options.blockDivisor ?? 40 options.blockDivisor = options.blockDivisor ?? 40

@ -1,13 +0,0 @@
# RPC URLs
ETH_MAINNET_TEST_RPC=
# debug (debug events are logged to console)
DEBUG=
# use tor (torify tests)
TORIFY=
# tor port (regular = 9050, browser = 9150)
TOR_PORT=
# relayer DOMAIN (the example.xyz in https://example.xyz) for testing
TEST_RELAYER_DOMAIN=

@ -0,0 +1 @@
TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.

@ -43,12 +43,13 @@
"fs-extra": "^11.1.0", "fs-extra": "^11.1.0",
"mocha": "^10.2.0", "mocha": "^10.2.0",
"prettier": "^2.3.0", "prettier": "^2.3.0",
"rimraf": "^4.4.0", "rimraf": "^5.0.0",
"source-map-support": "^0.5.19", "source-map-support": "^0.5.19",
"ts-essentials": "^9.3.1", "ts-essentials": "^9.3.1",
"ts-node": "^10.9.1", "ts-node": "^10.9.1",
"tsc-alias": "^1.2.11", "tsc-alias": "^1.2.11",
"tsconfig-paths": "^4.1.2", "tsconfig-paths": "^4.1.2",
"typedoc": "^0.24.6",
"typescript": "^5.0.4" "typescript": "^5.0.4"
}, },
"scripts": { "scripts": {
@ -60,7 +61,7 @@
"postversion": "git push --follow-tags && npm publish", "postversion": "git push --follow-tags && npm publish",
"lint": "eslint --ext ts,js --fix src", "lint": "eslint --ext ts,js --fix src",
"build-live": "tsc -w && tsc-alias -w", "build-live": "tsc -w && tsc-alias -w",
"clean": "rm -rf --interactive=never cache/*", "clean": "rm -rf --interactive=never docs/*",
"format": "prettier src/*.ts -w" "format": "prettier src/*.ts -w"
}, },
"files": [ "files": [

@ -140,12 +140,12 @@ export type ZKDepositData = OutputOf.CreateDeposit
* (will be) contained within this namespace. * (will be) contained within this namespace.
*/ */
export namespace Setup { export namespace Setup {
export async function getProvingKey(): Promise<ArrayBufferLike> { export function getProvingKey(): ArrayBufferLike {
return (await Files.loadRaw('circuits/tornadoProvingKey.bin')).buffer return Files.loadRawSync('circuits/tornadoProvingKey.bin').buffer
} }
export async function getTornadoCircuit(): Promise<any> { export function getTornadoCircuit(): any {
return Json.load('circuits/tornado.json') return Json.loadSync('circuits/tornado.json')
} }
let cachedGroth16Prover: Groth16 | null = null let cachedGroth16Prover: Groth16 | null = null
@ -158,6 +158,11 @@ export namespace Setup {
if (!cachedGroth16Prover) cachedGroth16Prover = await buildGroth16(defaultParams) if (!cachedGroth16Prover) cachedGroth16Prover = await buildGroth16(defaultParams)
return cachedGroth16Prover return cachedGroth16Prover
} }
export function terminateGroth16(): void {
cachedGroth16Prover!.terminate()
cachedGroth16Prover = null
}
} }
export namespace Primitives { export namespace Primitives {
@ -212,8 +217,8 @@ export namespace Primitives {
export async function calcDepositProofs(inputs: Array<InputFor.ZKProof>): Promise<Array<Array<string>>> { export async function calcDepositProofs(inputs: Array<InputFor.ZKProof>): Promise<Array<Array<string>>> {
const proofs: string[][] = [] const proofs: string[][] = []
const groth16 = await Setup.getGroth16() const groth16 = await Setup.getGroth16()
const circuit = await Setup.getTornadoCircuit() const circuit = Setup.getTornadoCircuit()
const provingKey = await Setup.getProvingKey() const provingKey = Setup.getProvingKey()
for (let i = 0, len = inputs.length; i < len; i++) { for (let i = 0, len = inputs.length; i < len; i++) {
const input = inputs[i] const input = inputs[i]
@ -273,7 +278,7 @@ export namespace Primitives {
} }
// Done. 🤷‍♀️ // Done. 🤷‍♀️
groth16.terminate() Setup.terminateGroth16()
return proofs return proofs
} }

@ -1,13 +0,0 @@
# RPC URLs
ETH_MAINNET_TEST_RPC=
# debug (debug events are logged to console)
DEBUG=
# use tor (torify tests)
TORIFY=
# tor port (regular = 9050, browser = 9150)
TOR_PORT=
# relayer DOMAIN (the example.xyz in https://example.xyz) for testing
TEST_RELAYER_DOMAIN=

@ -0,0 +1 @@
TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.

@ -42,12 +42,13 @@
"fs-extra": "^11.1.0", "fs-extra": "^11.1.0",
"mocha": "^10.2.0", "mocha": "^10.2.0",
"prettier": "^2.3.0", "prettier": "^2.3.0",
"rimraf": "^4.4.0", "rimraf": "^5.0.0",
"source-map-support": "^0.5.19", "source-map-support": "^0.5.19",
"ts-essentials": "^9.3.1", "ts-essentials": "^9.3.1",
"ts-node": "^10.9.1", "ts-node": "^10.9.1",
"tsc-alias": "^1.2.11", "tsc-alias": "^1.2.11",
"tsconfig-paths": "^4.1.2", "tsconfig-paths": "^4.1.2",
"typedoc": "^0.24.6",
"typescript": "^5.0.4" "typescript": "^5.0.4"
}, },
"scripts": { "scripts": {
@ -59,7 +60,7 @@
"postversion": "git push --follow-tags && npm publish", "postversion": "git push --follow-tags && npm publish",
"lint": "eslint --ext ts,js --fix src", "lint": "eslint --ext ts,js --fix src",
"build-live": "tsc -w && tsc-alias -w", "build-live": "tsc -w && tsc-alias -w",
"clean": "rm -rf --interactive=never cache/*", "clean": "rm -rf --interactive=never docs/*",
"format": "prettier src/*.ts -w" "format": "prettier src/*.ts -w"
}, },
"files": [ "files": [

@ -1,6 +1,6 @@
// Big modules // Big modules
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
import { existsSync, mkdirSync } from 'fs' import { existsSync, mkdirSync, readFileSync } from 'fs'
import { opendir, readFile, rm } from 'fs/promises' import { opendir, readFile, rm } from 'fs/promises'
import { createInterface } from 'readline' import { createInterface } from 'readline'
@ -80,6 +80,7 @@ export namespace Files {
export const makeCacheDir = (prefix?: string): void => mkdirSync(getCachePath('', prefix)) export const makeCacheDir = (prefix?: string): void => mkdirSync(getCachePath('', prefix))
export const loadRaw = (relative: string): Promise<Buffer> => readFile(getResourcePath(relative)) export const loadRaw = (relative: string): Promise<Buffer> => readFile(getResourcePath(relative))
export const loadRawSync = (relative: string): Buffer => readFileSync(getResourcePath(relative))
export async function wipeCache(prompt: boolean = true): Promise<void> { export async function wipeCache(prompt: boolean = true): Promise<void> {
const dir = await opendir(getCachePath('')) const dir = await opendir(getCachePath(''))
@ -126,7 +127,6 @@ export namespace Files {
export namespace Json { export namespace Json {
const cachedJsonData = new Map<string, any>() const cachedJsonData = new Map<string, any>()
// reading
export async function load( export async function load(
relativePath: string, relativePath: string,
encoding: BufferEncoding = 'utf8', encoding: BufferEncoding = 'utf8',
@ -140,6 +140,19 @@ export namespace Json {
} }
} }
export function loadSync(
relativePath: string,
encoding: BufferEncoding = 'utf8',
pathGetter: Files.PathGetter = Files.getResourcePath
): any {
if (cachedJsonData.has(relativePath)) return cachedJsonData.get(relativePath)
else {
const obj = JSON.parse(readFileSync(pathGetter(relativePath), encoding))
cachedJsonData.set(relativePath, obj)
return obj
}
}
export function toMap<V>(jsonData: any): Map<string, V> { export function toMap<V>(jsonData: any): Map<string, V> {
return new Map<string, V>(Object.entries(jsonData)) return new Map<string, V>(Object.entries(jsonData))
} }
@ -187,6 +200,23 @@ export namespace Onchain {
} }
} }
export function getClassicInstanceDataSync(
network: string,
token: string,
denomination: string
): ClassicInstance {
const instanceData = Json.getValue(Json.loadSync('onchain/instances.json'), [network, token])
return {
network: +network,
symbol: token.toUpperCase(),
decimals: Json.getValue(instanceData, ['decimals']),
denomination: +denomination,
deployBlock: Json.getValue(instanceData, ['deployedBlockNumber', denomination]),
address: Json.getValue(instanceData, ['instanceAddress', denomination]),
anonymityMiningEnabled: Json.getValue(instanceData, ['miningEnabled'])
}
}
export async function getInstanceLookupKeys(instanceAddress: string): Promise<Keys.InstanceLookup> { export async function getInstanceLookupKeys(instanceAddress: string): Promise<Keys.InstanceLookup> {
// lookup some stuff first // lookup some stuff first
const lookupObj: { [key: string]: string } = await Json.load('onchain/instanceAddresses.json') const lookupObj: { [key: string]: string } = await Json.load('onchain/instanceAddresses.json')
@ -204,6 +234,23 @@ export namespace Onchain {
} }
} }
export function getInstanceLookupKeysSync(instanceAddress: string): Keys.InstanceLookup {
// lookup some stuff first
const lookupObj: { [key: string]: string } = Json.loadSync('onchain/instanceAddresses.json')
const pathstring: string = Object.entries(lookupObj).find((el) => el[1] === instanceAddress)![0]
const network = pathstring.match('[0-9]+')![0],
token = pathstring.substring(network.length).match('[a-z]+')![0],
denomination = pathstring.substring(network.length + token.length)
return {
network: network,
token: token,
denomination: denomination
}
}
export async function getPathstringBasedContent<T>( export async function getPathstringBasedContent<T>(
filepath: string, filepath: string,
paths: Array<{ paths: Array<{
@ -220,12 +267,31 @@ export namespace Onchain {
) )
} }
export function getPathstringBasedContentSync<T>(
filepath: string,
paths: Array<{
network?: string
token?: string
denomination?: string
}>
): Array<T> {
return paths.map((path) =>
Json.getValue(Json.loadSync(filepath), [
`${path.network ?? ''}${path.token ?? ''}${path.denomination ?? ''}`
])
)
}
export async function getNetworkSymbol(networkId: string): Promise<string> { export async function getNetworkSymbol(networkId: string): Promise<string> {
return ( return (
await getPathstringBasedContent<string>('onchain/networkSymbols.json', [{ network: networkId }]) await getPathstringBasedContent<string>('onchain/networkSymbols.json', [{ network: networkId }])
)[0] )[0]
} }
export function getNetworkSymbolSync(networkId: string): string {
return getPathstringBasedContentSync<string>('onchain/networkSymbols.json', [{ network: networkId }])[0]
}
export function getInstanceAddresses( export function getInstanceAddresses(
paths: Array<{ paths: Array<{
network: string network: string
@ -236,6 +302,16 @@ export namespace Onchain {
return getPathstringBasedContent<string>('onchain/instanceAddresses.json', paths) return getPathstringBasedContent<string>('onchain/instanceAddresses.json', paths)
} }
export function getInstanceAddressesSync(
paths: Array<{
network: string
token: string
denomination: string
}>
): Array<string> {
return getPathstringBasedContentSync<string>('onchain/instanceAddresses.json', paths)
}
export async function getInstanceAddress( export async function getInstanceAddress(
network: string, network: string,
token: string, token: string,
@ -244,6 +320,10 @@ export namespace Onchain {
return (await getInstanceAddresses([{ network: network, token: token, denomination: denomination }]))[0] return (await getInstanceAddresses([{ network: network, token: token, denomination: denomination }]))[0]
} }
export function getInstanceAddressSync(network: string, token: string, denomination: string): string {
return getInstanceAddressesSync([{ network: network, token: token, denomination: denomination }])[0]
}
export function getInstanceDeployBlockNums( export function getInstanceDeployBlockNums(
paths: Array<{ paths: Array<{
network: string network: string
@ -254,6 +334,16 @@ export namespace Onchain {
return getPathstringBasedContent<number>('onchain/deployedBlockNumbers.json', paths) return getPathstringBasedContent<number>('onchain/deployedBlockNumbers.json', paths)
} }
export function getInstanceDeployBlockNumsSync(
paths: Array<{
network: string
token: string
denomination: string
}>
): Array<number> {
return getPathstringBasedContentSync<number>('onchain/deployedBlockNumbers.json', paths)
}
export async function getInstanceDeployBlockNum( export async function getInstanceDeployBlockNum(
network: string, network: string,
token: string, token: string,
@ -264,6 +354,14 @@ export namespace Onchain {
)[0] )[0]
} }
export function getInstanceDeployBlockNumSync(
network: string,
token: string,
denomination: string
): number {
return getInstanceDeployBlockNumsSync([{ network: network, token: token, denomination: denomination }])[0]
}
export async function getProxyAddress(network: string): Promise<string> { export async function getProxyAddress(network: string): Promise<string> {
return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'proxy']) return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'proxy'])
} }
@ -276,6 +374,18 @@ export namespace Onchain {
return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'multicall3']) return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'multicall3'])
} }
export function getProxyAddressSync(network: string): string {
return Json.getValue(Json.loadSync('onchain/infrastructure.json'), [network, 'proxy'])
}
export function getMulticallAddressSync(network: string): string {
return Json.getValue(Json.loadSync('onchain/infrastructure.json'), [network, 'multicall'])
}
export function getMulticall3AddressSync(network: string): string {
return Json.getValue(Json.loadSync('onchain/infrastructure.json'), [network, 'multicall3'])
}
export async function getTokenData(network: string, token: string): Promise<TokenData> { export async function getTokenData(network: string, token: string): Promise<TokenData> {
const data = Json.getValue(await Json.load('onchain/tokens.json'), [network, token]) const data = Json.getValue(await Json.load('onchain/tokens.json'), [network, token])
return { return {
@ -285,6 +395,15 @@ export namespace Onchain {
} }
} }
export function getTokenDataSync(network: string, token: string): TokenData {
const data = Json.getValue(Json.loadSync('onchain/tokens.json'), [network, token])
return {
network: +network,
decimals: +data['decimals'],
address: data['address']
}
}
export async function getTokenAddress(network: string, token: string): Promise<string> { export async function getTokenAddress(network: string, token: string): Promise<string> {
return ( return (
await getPathstringBasedContent<string>('onchain/tokenAddresses.json', [ await getPathstringBasedContent<string>('onchain/tokenAddresses.json', [
@ -293,11 +412,23 @@ export namespace Onchain {
)[0] )[0]
} }
export function getTokenAddressSync(network: string, token: string): string {
return getPathstringBasedContentSync<string>('onchain/tokenAddresses.json', [
{ network: network, token: token }
])[0]
}
export async function getTokenDecimals(network: string, token: string): Promise<number> { export async function getTokenDecimals(network: string, token: string): Promise<number> {
return ( return (
await getPathstringBasedContent<number>('onchain/decimals.json', [{ network: network, token: token }]) await getPathstringBasedContent<number>('onchain/decimals.json', [{ network: network, token: token }])
)[0] )[0]
} }
export function getTokenDecimalsSync(network: string, token: string): number {
return getPathstringBasedContentSync<number>('onchain/decimals.json', [
{ network: network, token: token }
])[0]
}
} }
export namespace Offchain { export namespace Offchain {
@ -317,9 +448,29 @@ export namespace Offchain {
return rpcs.get(keys.next().value)! return rpcs.get(keys.next().value)!
} }
export function getUncensoredRpcURLSync(network: string, name: string = ''): string {
const rpcs = Json.toMap<string>(
Json.getValue(Json.loadSync('offchain/infrastructure.json'), ['jrpc-uncensored', network])
)
if (name.length !== 0) {
return rpcs.get(name)!
}
let keys = rpcs.keys()
let randCount = NumberUtils.getRandomFromRange(0, rpcs.size - 1)
for (let i = 0; i < randCount; i++) keys.next()
return rpcs.get(keys.next().value)!
}
export async function getClassicSubgraphURL(network: string): Promise<string> { export async function getClassicSubgraphURL(network: string): Promise<string> {
return Json.getValue(await Json.load('offchain/infrastructure.json'), ['subgraph', network]) return Json.getValue(await Json.load('offchain/infrastructure.json'), ['subgraph', network])
} }
export function getClassicSubgraphURLSync(network: string): string {
return Json.getValue(Json.loadSync('offchain/infrastructure.json'), ['subgraph', network])
}
} }
export namespace Constants { export namespace Constants {

@ -1,13 +0,0 @@
# RPC URLs
ETH_MAINNET_TEST_RPC=
# debug (debug events are logged to console)
DEBUG=
# use tor (torify tests)
TORIFY=
# tor port (regular = 9050, browser = 9150)
TOR_PORT=
# relayer DOMAIN (the example.xyz in https://example.xyz) for testing
TEST_RELAYER_DOMAIN=

@ -0,0 +1 @@
TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.

@ -39,12 +39,13 @@
"fs-extra": "^11.1.0", "fs-extra": "^11.1.0",
"mocha": "^10.2.0", "mocha": "^10.2.0",
"prettier": "^2.3.0", "prettier": "^2.3.0",
"rimraf": "^4.4.0", "rimraf": "^5.0.0",
"source-map-support": "^0.5.19", "source-map-support": "^0.5.19",
"ts-essentials": "^9.3.1", "ts-essentials": "^9.3.1",
"ts-node": "^10.9.1", "ts-node": "^10.9.1",
"tsc-alias": "^1.2.11", "tsc-alias": "^1.2.11",
"tsconfig-paths": "^4.1.2", "tsconfig-paths": "^4.1.2",
"typedoc": "^0.24.6",
"typescript": "^5.0.4" "typescript": "^5.0.4"
}, },
"scripts": { "scripts": {
@ -55,7 +56,7 @@
"postversion": "git push --follow-tags && npm publish", "postversion": "git push --follow-tags && npm publish",
"lint": "eslint --ext ts,js --fix src", "lint": "eslint --ext ts,js --fix src",
"build-live": "tsc -w && tsc-alias -w", "build-live": "tsc -w && tsc-alias -w",
"clean": "rm -rf --interactive=never cache/*", "clean": "rm -rf --interactive=never docs/*",
"format": "prettier src/*.ts -w" "format": "prettier src/*.ts -w"
}, },
"files": [ "files": [

@ -176,6 +176,31 @@ export namespace AsyncUtils {
export function timeout(msTimeout: number): Promise<any> { export function timeout(msTimeout: number): Promise<any> {
return new Promise((resolve) => setTimeout(resolve, msTimeout)) return new Promise((resolve) => setTimeout(resolve, msTimeout))
} }
export class SimpleMutex {
private _mutexes: Map<string, Promise<any>>
constructor() {
this._mutexes = new Map<string, Promise<any>>()
}
async acquire(parentCallName: string): Promise<Function> {
let release: Function = () => null
const prevMutex = this._mutexes.get(parentCallName)
this._mutexes.set(
parentCallName,
new Promise((resolve) => {
release = resolve
})
)
if (prevMutex) await prevMutex
return release
}
}
} }
export namespace NumberUtils { export namespace NumberUtils {

@ -1,13 +0,0 @@
# RPC URLs
ETH_MAINNET_TEST_RPC=
# debug (debug events are logged to console)
DEBUG=
# use tor (torify tests)
TORIFY=
# tor port (regular = 9050, browser = 9150)
TOR_PORT=
# relayer DOMAIN (the example.xyz in https://example.xyz) for testing
TEST_RELAYER_DOMAIN=

@ -0,0 +1 @@
TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.

@ -43,12 +43,13 @@
"fs-extra": "^11.1.0", "fs-extra": "^11.1.0",
"mocha": "^10.2.0", "mocha": "^10.2.0",
"prettier": "^2.3.0", "prettier": "^2.3.0",
"rimraf": "^4.4.0", "rimraf": "^5.0.0",
"source-map-support": "^0.5.19", "source-map-support": "^0.5.19",
"ts-essentials": "^9.3.1", "ts-essentials": "^9.3.1",
"ts-node": "^10.9.1", "ts-node": "^10.9.1",
"tsc-alias": "^1.2.11", "tsc-alias": "^1.2.11",
"tsconfig-paths": "^4.1.2", "tsconfig-paths": "^4.1.2",
"typedoc": "^0.24.6",
"typescript": "^5.0.4" "typescript": "^5.0.4"
}, },
"scripts": { "scripts": {
@ -60,7 +61,7 @@
"postversion": "git push --follow-tags && npm publish", "postversion": "git push --follow-tags && npm publish",
"lint": "eslint --ext ts,js --fix src", "lint": "eslint --ext ts,js --fix src",
"build-live": "tsc -w && tsc-alias -w", "build-live": "tsc -w && tsc-alias -w",
"clean": "rm -rf --interactive=never cache/*", "clean": "rm -rf --interactive=never docs/*",
"format": "prettier src/*.ts -w" "format": "prettier src/*.ts -w"
}, },
"files": [ "files": [

@ -228,7 +228,7 @@ export class Relayer {
.catch((err) => { .catch((err) => {
throw ErrorUtils.ensureError(err) throw ErrorUtils.ensureError(err)
}) })
.then((res) => res.data.prices[token]) .then((res) => BigNumber.from(res.data['ethPrices'][token]))
) )
} }
@ -259,7 +259,7 @@ export class Relayer {
finished = true finished = true
} }
if (status == 'CONFIRMED') { if (status === 'CONFIRMED') {
result.success = true result.success = true
result.txHash = txHash result.txHash = txHash
finished = true finished = true
@ -298,6 +298,10 @@ export class Relayer {
throw ErrorUtils.getError(`Relayer.fromCache: relayer ${options.url} isn't stored in cache.`) throw ErrorUtils.getError(`Relayer.fromCache: relayer ${options.url} isn't stored in cache.`)
}) })
await cache.close().catch((err) => {
throw ErrorUtils.ensureError(err)
})
return new Relayer(options, properties) return new Relayer(options, properties)
} }
@ -305,7 +309,7 @@ export class Relayer {
* Cache relayer data into a PouchDB database in your cache folder. This will automatically fetch properties if they are not fetched. * Cache relayer data into a PouchDB database in your cache folder. This will automatically fetch properties if they are not fetched.
*/ */
async remember(): Promise<void> { async remember(): Promise<void> {
if (!this._fetched) await this.fetchProperties() this._propertiesFetched('remember')
const cache = new Cache.Base<Docs.Relayer>('Relayers') const cache = new Cache.Base<Docs.Relayer>('Relayers')

@ -1,13 +0,0 @@
# RPC URLs
ETH_MAINNET_TEST_RPC=
# debug (debug events are logged to console)
DEBUG=
# use tor (torify tests)
TORIFY=
# tor port (regular = 9050, browser = 9150)
TOR_PORT=
# relayer DOMAIN (the example.xyz in https://example.xyz) for testing
TEST_RELAYER_DOMAIN=

@ -0,0 +1 @@
TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.

@ -31,8 +31,10 @@
"eslint-plugin-prettier": "^4.2.1", "eslint-plugin-prettier": "^4.2.1",
"ethers": "^5", "ethers": "^5",
"prettier": "^2.3.0", "prettier": "^2.3.0",
"rimraf": "^5.0.0",
"ts-node": "^10.9.1", "ts-node": "^10.9.1",
"tsconfig-paths": "^4.1.2", "tsconfig-paths": "^4.1.2",
"typedoc": "^0.24.6",
"typescript": "^5.0.4" "typescript": "^5.0.4"
}, },
"main": "./build/index.js", "main": "./build/index.js",
@ -43,7 +45,7 @@
"postversion": "git push --follow-tags && npm publish", "postversion": "git push --follow-tags && npm publish",
"lint": "eslint --ext ts,js --fix src", "lint": "eslint --ext ts,js --fix src",
"build-live": "tsc -w && tsc-alias -w", "build-live": "tsc -w && tsc-alias -w",
"clean": "rm -rf --interactive=never cache/*", "clean": "rm -rf --interactive=never docs/*",
"format": "prettier src/*.ts -w" "format": "prettier src/*.ts -w"
}, },
"files": [ "files": [

@ -1,7 +1,9 @@
# tornado-sdk # Tornado Cash SDK
SDK to integrate your protocol with Tornado Cash. SDK to integrate your protocol with Tornado Cash.
[Check out the docs here.](./docs/ABOUT.md)
Contributions are welcome, we are here for freedom after all! Contributions are welcome, we are here for freedom after all!
See [HISTORY.md](./HISTORY.md) for a development log. See [HISTORY.md](./HISTORY.md) for a development log.

12
docs/ABOUT.md Normal file

@ -0,0 +1,12 @@
# Tornado Cash SDK
A collection of TypeScript packages which enable you to easily use the Tornado Cash protocol.
* [Installation](./INSTALLATION.md)
* [Usage](./USAGE.md)
### Package list and short descriptions
* **@tornado/sdk** - this package bundles all of the below packages into one, which can then be imported into a project. It does not provide a default import, instead you may alias it as wished.
* **@tornado/sdk-core** - this package provides `Core` which bundles the main Tornado Cash Classic logic into one class. In short, you can build deposit & withdrawal transactions, synchronize deposit and event caches more easily, listen to new deposit or withdrawal events

19
docs/INSTALLATION.md Normal file

@ -0,0 +1,19 @@
# Installation
## Configuring Gitea
In order to use any of the packages, you have to configure `npm` / `yarn` / `pnpm` to work with Gitea.
The [official guide is here](https://development.tornadocash.community/T-Hax/gitea-guide), you can also download it by doing:
```bash
git clone https://development.tornadocash.community/T-Hax/gitea-guide
```
If you want to skip the full configuration, you can instead add the following to your `.npmrc` (if using `npm`). This shortcut is limited and untested, but should work for `npm`:
```yaml
@tornado:registry=https://development.tornadocash.community/api/packages/T-Hax/npm/
```
## Installing the packages

1
docs/USAGE.md Normal file

@ -0,0 +1 @@
# Usage

@ -69,6 +69,7 @@
"format:crypto": "yarn workspace @tornado/sdk-crypto run format", "format:crypto": "yarn workspace @tornado/sdk-crypto run format",
"format:chain": "yarn workspace @tornado/sdk-chain run format", "format:chain": "yarn workspace @tornado/sdk-chain run format",
"format:utils": "yarn workspace @tornado/sdk-utils run format", "format:utils": "yarn workspace @tornado/sdk-utils run format",
"docs": "yarn workspaces foreach run typedocs --out docs src/index.ts",
"compile": "yarn workspaces foreach run tsc", "compile": "yarn workspaces foreach run tsc",
"compile:sdk": "yarn workspace @tornado/sdk tsc", "compile:sdk": "yarn workspace @tornado/sdk tsc",
"compile:core": "yarn workspace @tornado/sdk-core tsc", "compile:core": "yarn workspace @tornado/sdk-core tsc",
@ -77,6 +78,14 @@
"compile:crypto": "yarn workspace @tornado/sdk-crypto tsc", "compile:crypto": "yarn workspace @tornado/sdk-crypto tsc",
"compile:chain": "yarn workspace @tornado/sdk-chain tsc", "compile:chain": "yarn workspace @tornado/sdk-chain tsc",
"compile:utils": "yarn workspace @tornado/sdk-utils tsc", "compile:utils": "yarn workspace @tornado/sdk-utils tsc",
"prepare": "yarn workspaces foreach run format && yarn workspaces foreach run tsc",
"prepare:sdk": "yarn format:sdk && yarn compile:sdk",
"prepare:core": "yarn format:core && yarn compile:core",
"prepare:web": "yarn format:web && yarn compile:web",
"prepare:data": "yarn format:data && yarn compile:data",
"prepare:crypto": "yarn format:crypto && yarn compile:crypto",
"prepare:chain": "yarn format:chain && yarn compile:chain",
"prepare:utils": "yarn format:utils && yarn compile:utils",
"typechain": "yarn typechain:test && yarn typechain:core && yarn typechain:chain", "typechain": "yarn typechain:test && yarn typechain:core && yarn typechain:chain",
"typechain:test": "typechain --target \"ethers-v5\" --discriminate-types --glob \"./abis/*.json\" --out-dir=\"./test/deth\"", "typechain:test": "typechain --target \"ethers-v5\" --discriminate-types --glob \"./abis/*.json\" --out-dir=\"./test/deth\"",
"typechain:core": "typechain --target \"ethers-v5\" --discriminate-types --glob \"./abis/*.json\" --out-dir=\"./@tornado/sdk-core/src/deth\"", "typechain:core": "typechain --target \"ethers-v5\" --discriminate-types --glob \"./abis/*.json\" --out-dir=\"./@tornado/sdk-core/src/deth\"",

@ -2,6 +2,7 @@ import chai from 'chai'
import * as ganache from 'ganache' import * as ganache from 'ganache'
// External // External
import { once } from 'events'
import { solidity } from 'ethereum-waffle' import { solidity } from 'ethereum-waffle'
import { providers, BigNumber } from 'ethers' import { providers, BigNumber } from 'ethers'
import { parseUnits } from 'ethers/lib/utils' import { parseUnits } from 'ethers/lib/utils'
@ -21,6 +22,10 @@ import eth01DepositsReference from './resources/deposits_eth_0.1.json'
import eth1DepositsReference from './resources/deposits_eth_1.json' import eth1DepositsReference from './resources/deposits_eth_1.json'
import eth10DepositsReference from './resources/deposits_eth_10.json' import eth10DepositsReference from './resources/deposits_eth_10.json'
import eth100DepositsReference from './resources/deposits_eth_100.json' import eth100DepositsReference from './resources/deposits_eth_100.json'
import dai100DepositsReference from './resources/deposits_dai_100.json'
import dai1000DepositsReference from './resources/deposits_dai_1000.json'
import dai10000DepositsReference from './resources/deposits_dai_10000.json'
import dai100KDepositsReference from './resources/deposits_dai_100000.json' import dai100KDepositsReference from './resources/deposits_dai_100000.json'
chai.use(solidity) chai.use(solidity)
@ -61,10 +66,6 @@ describe('Core', () => {
const chain = new Chain(ganacheProvider) const chain = new Chain(ganacheProvider)
it('Should print cache path to console', async () => {
console.log(await Files.getCachePath('anything'))
})
after(async function () { after(async function () {
this.timeout(0) this.timeout(0)
await Files.wipeCache() await Files.wipeCache()
@ -72,9 +73,8 @@ describe('Core', () => {
describe('namespace Contracts', () => { describe('namespace Contracts', () => {
it('getClassicInstance: should be able to get a tornado instance', async () => { it('getClassicInstance: should be able to get a tornado instance', async () => {
let instance = await Contracts.getInstance(String(1), 'eth', String(1), mainnetProvider) let instance = Contracts.getInstance(String(1), 'eth', String(1), mainnetProvider)
expect(instance.address).to.equal('0x47CE0C6eD5B0Ce3d3A51fdb1C52DC66a7c3c2936') expect(instance.address).to.equal('0x47CE0C6eD5B0Ce3d3A51fdb1C52DC66a7c3c2936')
await expect(instance.getLastRoot()).to.not.be.reverted
}).timeout(0) }).timeout(0)
}) })
@ -87,15 +87,19 @@ describe('Core', () => {
if (!denominations.length) throw ErrorUtils.getError('Instances entered were INVALID') if (!denominations.length) throw ErrorUtils.getError('Instances entered were INVALID')
const depositReferences: { [key: string]: typeof eth01DepositsReference } = { let depositReferences: { [key: string]: typeof eth01DepositsReference } = {}
'1ETH0.1': eth01DepositsReference,
'1ETH1': eth1DepositsReference,
'1ETH10': eth10DepositsReference,
'1ETH100': eth100DepositsReference,
'1DAI100000': dai100KDepositsReference
}
const core = new Core(mainnetProvider) depositReferences['1ETH0.1'] = eth01DepositsReference
depositReferences['1ETH1'] = eth1DepositsReference
depositReferences['1ETH10'] = eth10DepositsReference
depositReferences['1ETH100'] = eth100DepositsReference
depositReferences['1DAI100'] = dai100DepositsReference
depositReferences['1DAI1000'] = dai1000DepositsReference
depositReferences['1DAI10000'] = dai10000DepositsReference
depositReferences['1DAI100000'] = dai100KDepositsReference
const core = new Core()
let instances: TornadoInstance[] = [] let instances: TornadoInstance[] = []
@ -110,7 +114,9 @@ describe('Core', () => {
before(async function () { before(async function () {
this.timeout(0) this.timeout(0)
const regexp = /([0-9]+)([A-Z]+)([0-9]+)/ await core.connect(mainnetProvider)
const regexp = /([0-9]+)([A-Za-z]+)([0-9.]+)/
const promises = denominations.map((denom) => { const promises = denominations.map((denom) => {
const matches = denom.match(regexp)!.slice(2) const matches = denom.match(regexp)!.slice(2)
@ -133,7 +139,7 @@ describe('Core', () => {
// This is going to try syncing the entire range // This is going to try syncing the entire range
await core.syncDeposits(instances[i], { await core.syncDeposits(instances[i], {
blockDivisor: 50, blockDivisor: 40,
concurrencyLimit: 20, concurrencyLimit: 20,
msTimeout: 300 msTimeout: 300
}) })
@ -144,6 +150,8 @@ describe('Core', () => {
expect(rows.length).to.be.gte(valid.length) expect(rows.length).to.be.gte(valid.length)
console.log('\n📄 Validating inputs for ' + denominations[i] + '\n')
for (let i = 0, len = valid.length; i < len; i++) { for (let i = 0, len = valid.length; i < len; i++) {
const id = rows[i].id const id = rows[i].id
const [bn, leafIndex, commitment] = parseIndexableString(id) const [bn, leafIndex, commitment] = parseIndexableString(id)
@ -160,31 +168,40 @@ describe('Core', () => {
describe('Forked (Ganache)', async () => { describe('Forked (Ganache)', async () => {
describe('class Classic', async () => { describe('class Classic', async () => {
// Init sync objects // Init sync objects
const core = new Core(ganacheProvider) const core = new Core()
const needsMoney = ganacheProvider.getSigner() const needsMoney = ganacheProvider.getSigner()
const withdrawer = ganacheProvider.getSigner(2)
const daiWhaleSigner = ganacheProvider.getSigner(daiWhale) const daiWhaleSigner = ganacheProvider.getSigner(daiWhale)
const debugListener = (message: string) => console.debug(message) const debugListener = (message: string) => console.debug(message)
let snapshotId: any let snapshotId: any
let needsMoneyAddress: string let needsMoneyAddress: string
let withdrawerAddress: string
let dai: ERC20 let dai: ERC20
let smallestEth: TornadoInstance let smallestEth: TornadoInstance
let dai100K: TornadoInstance let dai100K: TornadoInstance
before(async function () { before(async function () {
this.timeout(0) this.timeout(0)
// We need to connect core first
await core.connect(ganacheProvider)
// Get snapshot just in case // Get snapshot just in case
snapshotId = await ganacheProvider.send('evm_snapshot', []) snapshotId = await ganacheProvider.send('evm_snapshot', [])
// Prep whale eth balance // Prep whale eth balance
await ganacheProvider.send('evm_setAccountBalance', [daiWhale, parseUnits('10').toHexString()]) await ganacheProvider.send('evm_setAccountBalance', [daiWhale, parseUnits('10').toHexString()])
// Init async objects // Addresses
needsMoneyAddress = await needsMoney.getAddress() needsMoneyAddress = await needsMoney.getAddress()
withdrawerAddress = await withdrawer.getAddress()
daiAddress = await Onchain.getTokenAddress('1', 'dai') daiAddress = await Onchain.getTokenAddress('1', 'dai')
dai = chain.getTokenContract(daiAddress).connect(daiWhaleSigner)
smallestEth = await core.getInstance('eth', 0.1) // Contracts
dai100K = await core.getInstance('dai', 100000) dai = chain.getTokenContract(daiAddress)
smallestEth = core.getInstance('eth', 0.1)
dai100K = core.getInstance('dai', 100000)
// Set debug // Set debug
if (debug) core.on('debug', debugListener) if (debug) core.on('debug', debugListener)
@ -194,7 +211,7 @@ describe('Core', () => {
await ganacheProvider.send('evm_revert', [snapshotId]) await ganacheProvider.send('evm_revert', [snapshotId])
core.off('debug', debugListener) core.off('debug', debugListener)
}) })
afterEach(() => { beforeEach(() => {
dai = dai.connect(daiWhaleSigner) dai = dai.connect(daiWhaleSigner)
}) })
@ -202,31 +219,27 @@ describe('Core', () => {
const initBal = await needsMoney.getBalance() const initBal = await needsMoney.getBalance()
// Build tx and load cache for this test // Build tx and load cache for this test
const tx = await core.buildDepositTransaction(smallestEth) const tx = core.buildDepositTransaction(smallestEth)
const cache = core.loadDepositCache('Deposits1ETH0.1')
// Prep promise to only try withdrawing after cache has been updated // Listen to deposit events
const putPromise = new Promise((resolve) => { core.listenForDeposits(smallestEth)
smallestEth.on(
smallestEth.filters.Deposit(null, null, null),
function (commitment, leafIndex, timestamp, event) {
resolve(cache.db.put(cache.buildDoc(event)))
}
)
})
const listener = smallestEth.listeners(smallestEth.filters.Deposit(null, null, null))[0] // Get the promise we need
const promise = once(core, 'deposit')
// Deposit and await cache updated // Deposit and await cache updated
const response = await needsMoney.sendTransaction(tx.request) const response = await needsMoney.sendTransaction(tx.request)
await response.wait() await response.wait()
const endBal = await needsMoney.getBalance() const endBal = await needsMoney.getBalance()
// Passing resolve as callback into put didn't work // Await deposit addition to cache
await await putPromise await promise
// Turn off listener (NEEDED OR WE'RE NOT RESOLVING) // Remove listeners
smallestEth.off(smallestEth.filters.Deposit(null, null, null), listener) core.clearListeners(smallestEth)
// Backup
await core.backupNote(smallestEth, tx)
// Check deposit predicates // Check deposit predicates
expect(initBal).to.equal(parseUnits('1000')) expect(initBal).to.equal(parseUnits('1000'))
@ -234,34 +247,25 @@ describe('Core', () => {
}).timeout(0) }).timeout(0)
it('buildDepositProof: it should be able to build an eth proof', async () => { it('buildDepositProof: it should be able to build an eth proof', async () => {
// Get withdrawer, load cache, prep note for this test // Get all of the notes
const withdrawer = ganacheProvider.getSigner(2)
const cache = core.loadDepositCache('Deposits1ETH0.1')
// We need this to clean the cache, we want to have clean state
const doc = (await cache.db.allDocs({ include_docs: true, descending: true, limit: 1 })).rows[0].doc
// We are not transforming because we want to test this out
const notes = await core.loadNotes() const notes = await core.loadNotes()
// Build proof // Build proof
let proof: any let proof: any
try { proof = await core.buildDepositProof(
proof = await core.buildDepositProof( smallestEth,
smallestEth, {
{ address: withdrawerAddress
address: await withdrawer.getAddress() },
}, needsMoneyAddress,
await needsMoney.getAddress(), notes[0],
notes[0], {
{ // On by default but stating for visibility
// On by default but stating for visibility checkNotesSpent: true,
checkNotesSpent: true, checkKnownRoot: true
checkKnownRoot: true }
} )
)
} finally {
await cache.db.remove(doc?._id!, doc?._rev!)
}
// Substract the calculated fee from the received amount // Substract the calculated fee from the received amount
const ethDelta = parseUnits('0.1').sub(proof[5]) const ethDelta = parseUnits('0.1').sub(proof[5])
@ -278,26 +282,22 @@ describe('Core', () => {
it('buildDepositTransaction: build a single token deposit tx and succeed', async () => { it('buildDepositTransaction: build a single token deposit tx and succeed', async () => {
// Prep deposit amount, proxy for approval, cache, bal for comp // Prep deposit amount, proxy for approval, cache, bal for comp
const depositAmount = parseUnits('100000') const depositAmount = parseUnits('100000')
const proxy = await core.getProxy() const proxy = core.getProxy()
const cache = core.loadDepositCache('Deposits1DAI100000')
const daiBalBef = await dai.balanceOf(dai100K.address) const daiBalBef = await dai.balanceOf(dai100K.address)
// Prep promise to only try withdrawing after cache has been updated // We listen for deposits
const putPromise = new Promise((resolve) => { core.listenForDeposits(dai100K)
dai100K.on(
dai100K.filters.Deposit(null, null, null),
function (commitment, leafIndex, timestamp, event) {
resolve(cache.db.put(cache.buildDoc(event)))
}
)
})
const listener = dai100K.listeners()[0] // We will wait for the event
const promise = once(core, 'deposit')
// Prep for deposit // Prep for deposit
await dai.transfer(needsMoneyAddress, depositAmount) await dai.transfer(needsMoneyAddress, depositAmount)
dai = dai.connect(needsMoney) dai = dai.connect(needsMoney)
const tx = await core.buildDepositTransaction(dai100K)
const tx = core.buildDepositTransaction(dai100K)
// Approve dai for the proxy first (transferFrom) // Approve dai for the proxy first (transferFrom)
await dai.approve(proxy.address, depositAmount) await dai.approve(proxy.address, depositAmount)
@ -308,11 +308,14 @@ describe('Core', () => {
// Prep for check // Prep for check
const daiBalPost = await dai.balanceOf(dai100K.address) const daiBalPost = await dai.balanceOf(dai100K.address)
// Passing resolve as callback into put didn't work // Passing resolve as callback into put didn't work.
await await putPromise await promise
// Off (otherwise no resolve) // Have to clear the listeners
dai100K.off(dai100K.filters.Deposit(null, null, null), listener) core.clearListeners(dai100K)
// Backup since we need it for later
await core.backupNote(dai100K, tx)
// Checks // Checks
expect(daiBalBef).to.equal(daiBalPost.sub(depositAmount)) expect(daiBalBef).to.equal(daiBalPost.sub(depositAmount))
@ -322,19 +325,14 @@ describe('Core', () => {
it('buildDepositProof: it should be able to build a token proof', async () => { it('buildDepositProof: it should be able to build a token proof', async () => {
if (!process.env.TEST_RELAYER_DOMAIN) throw ErrorUtils.getError('core.test.ts: Need a relayer name') if (!process.env.TEST_RELAYER_DOMAIN) throw ErrorUtils.getError('core.test.ts: Need a relayer name')
// Get withdrawer, load cache, prep note for this test // Get all of the notes
const withdrawer = ganacheProvider.getSigner(2)
const cache = core.loadDepositCache('Deposits1DAI100000')
// We need this to clean the cache, we want to have clean state
const doc = (await cache.db.allDocs({ include_docs: true, descending: true, limit: 1 })).rows[0].doc
// We are not transforming because we want to test this out
const notes = await core.loadNotes() const notes = await core.loadNotes()
// We need to select last // We need to select last
const note = notes[notes.length - 1] const note = notes[notes.length - 1]
let properties: RelayerProperties = { let properties: RelayerProperties = {
address: await withdrawer.getAddress(), address: withdrawerAddress,
version: '2', version: '2',
serviceFeePercent: 0.04, serviceFeePercent: 0.04,
miningFeePercent: 0.15, miningFeePercent: 0.15,
@ -345,21 +343,12 @@ describe('Core', () => {
properties.prices.set('dai', BigNumber.from(10).pow(18).div(1800)) properties.prices.set('dai', BigNumber.from(10).pow(18).div(1800))
// Just set another address
properties.address = await withdrawer.getAddress()
// Build proof with relayer properties this time // Build proof with relayer properties this time
let proof const proof = await core.buildDepositProof(dai100K, properties, needsMoneyAddress, note, {
// On by default but stating for visibility
try { checkNotesSpent: true,
proof = await core.buildDepositProof(dai100K, properties, await needsMoney.getAddress(), note, { checkKnownRoot: true
// On by default but stating for visibility })
checkNotesSpent: true,
checkKnownRoot: true
})
} finally {
await cache.db.remove(doc?._id!, doc?._rev!)
}
// Calc balance diff again... it will be expressed in dai // Calc balance diff again... it will be expressed in dai
const daiDelta = parseUnits('100000').sub(proof[5]) const daiDelta = parseUnits('100000').sub(proof[5])
@ -371,58 +360,220 @@ describe('Core', () => {
).to.changeTokenBalance(dai, needsMoney, daiDelta) ).to.changeTokenBalance(dai, needsMoney, daiDelta)
}).timeout(0) }).timeout(0)
it.only('buildDepositTransactions: multiple eth deposits', async () => { it('buildDepositTransactions: multiple eth deposits', async () => {
const instances = await core.getInstances( const instances = core.getInstances(
[0.1, 1, 10, 100].map((el) => { [0.1, 1, 10, 100].map((el) => {
return { token: 'eth', denomination: el } return { token: 'eth', denomination: el }
}) })
) )
const txs = await core.buildDepositTransactions(instances, { // That easy
depositsPerInstance: [1, 1, 2, 1] instances.forEach((instance) => core.listenForDeposits(instance))
const depositsPer = [1, 1, 2, 1]
const txs = core.buildDepositTransactions(instances, {
depositsPerInstance: depositsPer
}) })
for (let i = 0, len = txs.length; i < len; i++) { for (let i = 0, len = txs.length; i < len; i++) {
console.log('SENDING => ', i) const promise = once(core, 'deposit')
const response = await needsMoney.sendTransaction(txs[i].request) const response = await needsMoney.sendTransaction(txs[i].request)
console.log('TX SENT => ', i)
await response.wait() await response.wait()
console.log('WAITING => ', i) await promise
} }
// That easy
instances.forEach((instance) => core.clearListeners(instance))
// And backup the notes
await Promise.all(
instances.map((instance, index) => core.backupNotes(instance, txs.splice(0, depositsPer[index])))
)
//for (let i = 0, len = instances.length; i < len; i++) {
// await core.backupNotes(instances[i], txs.splice(0, depositsPer[i]))
//}
expect(await needsMoney.getBalance()).to.be.lte(parseUnits('888.8')) expect(await needsMoney.getBalance()).to.be.lte(parseUnits('888.8'))
}).timeout(0) }).timeout(0)
it('buildDepositProofs: should be able to withdraw', async () => {
// ETH instances
const instances = core.getInstances(
[0.1, 1, 10, 100].map((el) => {
return { token: 'eth', denomination: el }
})
)
// Number deposits per instance
const depositsPer = [1, 1, 2, 1]
// Get all of the notes
let notes = await core.loadNotes()
// Handle all withdrawals
for (let i = 0, len = instances.length; i < len; i++) {
const proofs = await core.buildDepositProofs(
instances[i],
{
address: withdrawerAddress
},
new Array(depositsPer[i]).fill(needsMoneyAddress),
notes.splice(0, depositsPer[i]),
{
// On by default but stating for visibility
checkNotesSpent: true,
checkKnownRoot: true
}
)
for (let p = 0, plen = proofs.length; p < plen; p++) {
// Get proof
const proof = proofs[p]
// Substract the calculated fee from the received amount
const ethDelta = parseUnits('0.1')
.mul(10 ** i)
.sub(proof[5])
// Withdrawal time, let's see if it works
// The balance diff will be exact because withdrawer is paying for gas as relayer
await expect(() =>
instances[i]
.connect(withdrawer)
.withdraw(proof[0], proof[1], proof[2], proof[3], proof[4], proof[5], proof[6])
).to.changeEtherBalance(needsMoney, ethDelta)
}
}
}).timeout(0)
it('buildDepositTransactions: multiple token deposits', async () => { it('buildDepositTransactions: multiple token deposits', async () => {
const instances = await core.getInstances( // Prepare contracts
[100, 1000, 10000, 100000].map((el) => { const denoms = [100, 1000, 10000, 100000]
const proxy = core.getProxy()
const instances = core.getInstances(
denoms.map((el) => {
return { token: 'dai', denomination: el } return { token: 'dai', denomination: el }
}) })
) )
const proxy = await core.getProxy() // Prep the money
const depositAmount = parseUnits('432100') const depositsPer = [1, 2, 1, 2]
await dai.transfer(needsMoneyAddress, parseUnits('212100'))
await dai.transfer(needsMoneyAddress, parseUnits('432100'))
dai = dai.connect(needsMoney) dai = dai.connect(needsMoney)
const txs = await core.buildDepositTransactions(instances, { await dai.approve(proxy.address, parseUnits('212100'))
depositsPerInstance: [1, 2, 3, 4]
// Record the money
const daiBalancesBef = await Promise.all(instances.map((instance) => dai.balanceOf(instance.address)))
// Begin to listen
instances.forEach((instance) => core.listenForDeposits(instance))
// Build txs
const txs = core.buildDepositTransactions(instances, {
depositsPerInstance: depositsPer
}) })
await dai.approve(proxy.address, depositAmount) // Send transactions
for (let i = 0, len = txs.length; i < len; i++) { for (let i = 0, len = txs.length; i < len; i++) {
await expect(() => needsMoney.sendTransaction(txs[i].request)).to.not.be.reverted const promise = once(core, 'deposit')
const resp = await needsMoney.sendTransaction(txs[i].request)
await resp.wait()
await promise
} }
// Clear listeners
instances.forEach((instance) => core.clearListeners(instance))
// Backup notes
await Promise.all(
instances.map((instance, index) => core.backupNotes(instance, txs.splice(0, depositsPer[index])))
)
// Get new balances
const daiBalancesPost = await Promise.all(
instances.map((instance) => dai.balanceOf(instance.address))
)
// Check and done
for (let i = 0; i < 4; i++) {
expect(daiBalancesBef[i]).to.equal(
daiBalancesPost[i].sub(parseUnits('' + denoms[i] * depositsPer[i]))
)
}
expect(await dai.balanceOf(needsMoneyAddress)).to.equal(0) expect(await dai.balanceOf(needsMoneyAddress)).to.equal(0)
}).timeout(0) }).timeout(0)
it('createInvoice: should be able to create an invoice', async () => { it('buildDepositProofs: multiple dai withdrawals', async () => {
const instance = await core.getInstance('dai', '1000') // ETH instances
const invoice = await core.createInvoice(instance) const denoms = [100, 1000, 10000, 100000]
console.log(invoice) const instances = core.getInstances(
denoms.map((el) => {
return { token: 'dai', denomination: el }
})
)
// Number deposits per instance
const depositsPer = [1, 2, 1, 2]
// Get all of the notes
let notes = await core.loadNotes()
// Fake relayer properties
let properties: RelayerProperties = {
address: withdrawerAddress,
version: '2',
serviceFeePercent: 0.04,
miningFeePercent: 0.15,
status: 'whatever',
chainId: 1,
prices: new Map<string, BigNumber>()
}
properties.prices.set('dai', BigNumber.from(10).pow(18).div(1800))
// Handle all withdrawals
for (let i = 0, len = instances.length; i < len; i++) {
const proofs = await core.buildDepositProofs(
instances[i],
properties,
new Array(depositsPer[i]).fill(needsMoneyAddress),
notes.splice(0, depositsPer[i]),
{
// On by default but stating for visibility
checkNotesSpent: true,
checkKnownRoot: true
}
)
for (let p = 0, plen = proofs.length; p < plen; p++) {
// Get proof
const proof = proofs[p]
// Substract the calculated fee from the received amount
const daiDelta = parseUnits("100")
.mul(10 ** i)
.sub(proof[5])
// Withdrawal time, let's see if it works
// The balance diff will be exact because withdrawer is paying for gas as relayer
await expect(() =>
instances[i]
.connect(withdrawer)
.withdraw(proof[0], proof[1], proof[2], proof[3], proof[4], proof[5], proof[6])
).to.changeTokenBalance(dai, needsMoney, daiDelta)
}
}
}).timeout(0) }).timeout(0)
}) })
}) })

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

@ -24526,9 +24526,5 @@
"commitment": "0x05f2917b82f40a9b03c9cdaf1f2530817aa7c969e3c9dbdd45469ceb026aa513", "commitment": "0x05f2917b82f40a9b03c9cdaf1f2530817aa7c969e3c9dbdd45469ceb026aa513",
"leafIndex": 3503, "leafIndex": 3503,
"timestamp": "1681468727" "timestamp": "1681468727"
},
{
"blockNumber": 17048787,
"transactionHash": null
} }
] ]

@ -378334,9 +378334,5 @@
"commitment": "0x237c8901459592f4312e8fe108b86f72b946625dcf6ff9f5b5becb16c2f9638f", "commitment": "0x237c8901459592f4312e8fe108b86f72b946625dcf6ff9f5b5becb16c2f9638f",
"leafIndex": 54047, "leafIndex": 54047,
"timestamp": "1681514471" "timestamp": "1681514471"
},
{
"blockNumber": 17048734,
"transactionHash": null
} }
] ]

@ -331917,9 +331917,5 @@
"commitment": "0x1ceee9f2f5fc599df4d9b0ddf330e2e0faa3688cb8f38bc0bbf5ed4bd7872879", "commitment": "0x1ceee9f2f5fc599df4d9b0ddf330e2e0faa3688cb8f38bc0bbf5ed4bd7872879",
"leafIndex": 47416, "leafIndex": 47416,
"timestamp": "1681514255" "timestamp": "1681514255"
},
{
"blockNumber": 17048737,
"transactionHash": null
} }
] ]

@ -221891,9 +221891,5 @@
"commitment": "0x2eb0ced50584cfa5fcf07106ada0d3361220615344e499ef11ea12c34a9614d8", "commitment": "0x2eb0ced50584cfa5fcf07106ada0d3361220615344e499ef11ea12c34a9614d8",
"leafIndex": 31698, "leafIndex": 31698,
"timestamp": "1681441067" "timestamp": "1681441067"
},
{
"blockNumber": 17048741,
"transactionHash": null
} }
] ]

128
yarn.lock

@ -1021,13 +1021,14 @@ __metadata:
mocha: ^10.2.0 mocha: ^10.2.0
pouchdb-collate: ^8.0.1 pouchdb-collate: ^8.0.1
prettier: ^2.3.0 prettier: ^2.3.0
rimraf: ^4.4.0 rimraf: ^5.0.0
source-map-support: ^0.5.19 source-map-support: ^0.5.19
ts-essentials: ^9.3.1 ts-essentials: ^9.3.1
ts-node: ^10.9.1 ts-node: ^10.9.1
tsc-alias: ^1.2.11 tsc-alias: ^1.2.11
tsconfig-paths: ^4.1.2 tsconfig-paths: ^4.1.2
typechain: ^8.1.1 typechain: ^8.1.1
typedoc: ^0.24.6
typescript: ^5.0.4 typescript: ^5.0.4
dependenciesMeta: dependenciesMeta:
tsconfig-paths@4.2.0: tsconfig-paths@4.2.0:
@ -1069,7 +1070,7 @@ __metadata:
mocha: ^10.2.0 mocha: ^10.2.0
pouchdb-collate: ^8.0.1 pouchdb-collate: ^8.0.1
prettier: ^2.3.0 prettier: ^2.3.0
rimraf: ^4.4.0 rimraf: ^5.0.0
snarkjs: "npm:@tornado/snarkjs@^0.1.20-p2" snarkjs: "npm:@tornado/snarkjs@^0.1.20-p2"
source-map-support: ^0.5.19 source-map-support: ^0.5.19
ts-essentials: ^9.3.1 ts-essentials: ^9.3.1
@ -1077,6 +1078,7 @@ __metadata:
tsc-alias: ^1.2.11 tsc-alias: ^1.2.11
tsconfig-paths: ^4.1.2 tsconfig-paths: ^4.1.2
typechain: ^8.1.1 typechain: ^8.1.1
typedoc: ^0.24.6
typescript: ^5.0.4 typescript: ^5.0.4
dependenciesMeta: dependenciesMeta:
tsconfig-paths@4.2.0: tsconfig-paths@4.2.0:
@ -1110,13 +1112,14 @@ __metadata:
fs-extra: ^11.1.0 fs-extra: ^11.1.0
mocha: ^10.2.0 mocha: ^10.2.0
prettier: ^2.3.0 prettier: ^2.3.0
rimraf: ^4.4.0 rimraf: ^5.0.0
snarkjs: "npm:@tornado/snarkjs@^0.1.20-p2" snarkjs: "npm:@tornado/snarkjs@^0.1.20-p2"
source-map-support: ^0.5.19 source-map-support: ^0.5.19
ts-essentials: ^9.3.1 ts-essentials: ^9.3.1
ts-node: ^10.9.1 ts-node: ^10.9.1
tsc-alias: ^1.2.11 tsc-alias: ^1.2.11
tsconfig-paths: ^4.1.2 tsconfig-paths: ^4.1.2
typedoc: ^0.24.6
typescript: ^5.0.4 typescript: ^5.0.4
websnark: "npm:@tornado/websnark@^0.0.4-p1" websnark: "npm:@tornado/websnark@^0.0.4-p1"
dependenciesMeta: dependenciesMeta:
@ -1153,12 +1156,13 @@ __metadata:
pouchdb-adapter-memory: ^8.0.1 pouchdb-adapter-memory: ^8.0.1
pouchdb-collate: ^8.0.1 pouchdb-collate: ^8.0.1
prettier: ^2.3.0 prettier: ^2.3.0
rimraf: ^4.4.0 rimraf: ^5.0.0
source-map-support: ^0.5.19 source-map-support: ^0.5.19
ts-essentials: ^9.3.1 ts-essentials: ^9.3.1
ts-node: ^10.9.1 ts-node: ^10.9.1
tsc-alias: ^1.2.11 tsc-alias: ^1.2.11
tsconfig-paths: ^4.1.2 tsconfig-paths: ^4.1.2
typedoc: ^0.24.6
typescript: ^5.0.4 typescript: ^5.0.4
dependenciesMeta: dependenciesMeta:
tsconfig-paths@4.2.0: tsconfig-paths@4.2.0:
@ -1229,13 +1233,14 @@ __metadata:
fs-extra: ^11.1.0 fs-extra: ^11.1.0
mocha: ^10.2.0 mocha: ^10.2.0
prettier: ^2.3.0 prettier: ^2.3.0
rimraf: ^4.4.0 rimraf: ^5.0.0
snarkjs: "npm:@tornado/snarkjs@^0.1.20-p2" snarkjs: "npm:@tornado/snarkjs@^0.1.20-p2"
source-map-support: ^0.5.19 source-map-support: ^0.5.19
ts-essentials: ^9.3.1 ts-essentials: ^9.3.1
ts-node: ^10.9.1 ts-node: ^10.9.1
tsc-alias: ^1.2.11 tsc-alias: ^1.2.11
tsconfig-paths: ^4.1.2 tsconfig-paths: ^4.1.2
typedoc: ^0.24.6
typescript: ^5.0.4 typescript: ^5.0.4
dependenciesMeta: dependenciesMeta:
tsconfig-paths@4.2.0: tsconfig-paths@4.2.0:
@ -1270,13 +1275,14 @@ __metadata:
fs-extra: ^11.1.0 fs-extra: ^11.1.0
mocha: ^10.2.0 mocha: ^10.2.0
prettier: ^2.3.0 prettier: ^2.3.0
rimraf: ^4.4.0 rimraf: ^5.0.0
socks-proxy-agent: ^7.0.0 socks-proxy-agent: ^7.0.0
source-map-support: ^0.5.19 source-map-support: ^0.5.19
ts-essentials: ^9.3.1 ts-essentials: ^9.3.1
ts-node: ^10.9.1 ts-node: ^10.9.1
tsc-alias: ^1.2.11 tsc-alias: ^1.2.11
tsconfig-paths: ^4.1.2 tsconfig-paths: ^4.1.2
typedoc: ^0.24.6
typescript: ^5.0.4 typescript: ^5.0.4
web3-providers-http: "npm:@tornado/web3-providers-http@^1.6.5-p1" web3-providers-http: "npm:@tornado/web3-providers-http@^1.6.5-p1"
dependenciesMeta: dependenciesMeta:
@ -1303,8 +1309,10 @@ __metadata:
eslint-plugin-prettier: ^4.2.1 eslint-plugin-prettier: ^4.2.1
ethers: ^5 ethers: ^5
prettier: ^2.3.0 prettier: ^2.3.0
rimraf: ^5.0.0
ts-node: ^10.9.1 ts-node: ^10.9.1
tsconfig-paths: ^4.1.2 tsconfig-paths: ^4.1.2
typedoc: ^0.24.6
typescript: ^5.0.4 typescript: ^5.0.4
dependenciesMeta: dependenciesMeta:
tsconfig-paths@4.2.0: tsconfig-paths@4.2.0:
@ -2235,6 +2243,13 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"ansi-sequence-parser@npm:^1.1.0":
version: 1.1.0
resolution: "ansi-sequence-parser@npm:1.1.0"
checksum: 75f4d3a4c555655a698aec05b5763cbddcd16ccccdbfd178fb0aa471ab74fdf98e031b875ef26e64be6a95cf970c89238744b26de6e34af97f316d5186b1df53
languageName: node
linkType: hard
"ansi-styles@npm:^3.2.0, ansi-styles@npm:^3.2.1": "ansi-styles@npm:^3.2.0, ansi-styles@npm:^3.2.1":
version: 3.2.1 version: 3.2.1
resolution: "ansi-styles@npm:3.2.1" resolution: "ansi-styles@npm:3.2.1"
@ -5457,18 +5472,6 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"glob@npm:^9.2.0":
version: 9.3.5
resolution: "glob@npm:9.3.5"
dependencies:
fs.realpath: ^1.0.0
minimatch: ^8.0.2
minipass: ^4.2.4
path-scurry: ^1.6.1
checksum: 94b093adbc591bc36b582f77927d1fb0dbf3ccc231828512b017601408be98d1fe798fc8c0b19c6f2d1a7660339c3502ce698de475e9d938ccbb69b47b647c84
languageName: node
linkType: hard
"global-modules@npm:^0.2.3": "global-modules@npm:^0.2.3":
version: 0.2.3 version: 0.2.3
resolution: "global-modules@npm:0.2.3" resolution: "global-modules@npm:0.2.3"
@ -6501,6 +6504,13 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"jsonc-parser@npm:^3.2.0":
version: 3.2.0
resolution: "jsonc-parser@npm:3.2.0"
checksum: 946dd9a5f326b745aa326d48a7257e3f4a4b62c5e98ec8e49fa2bdd8d96cef7e6febf1399f5c7016114fd1f68a1c62c6138826d5d90bc650448e3cf0951c53c7
languageName: node
linkType: hard
"jsonfile@npm:^4.0.0": "jsonfile@npm:^4.0.0":
version: 4.0.0 version: 4.0.0
resolution: "jsonfile@npm:4.0.0" resolution: "jsonfile@npm:4.0.0"
@ -7015,6 +7025,13 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"lunr@npm:^2.3.9":
version: 2.3.9
resolution: "lunr@npm:2.3.9"
checksum: 176719e24fcce7d3cf1baccce9dd5633cd8bdc1f41ebe6a180112e5ee99d80373fe2454f5d4624d437e5a8319698ca6837b9950566e15d2cae5f2a543a3db4b8
languageName: node
linkType: hard
"make-error@npm:^1.1.1": "make-error@npm:^1.1.1":
version: 1.3.6 version: 1.3.6
resolution: "make-error@npm:1.3.6" resolution: "make-error@npm:1.3.6"
@ -7069,6 +7086,15 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"marked@npm:^4.3.0":
version: 4.3.0
resolution: "marked@npm:4.3.0"
bin:
marked: bin/marked.js
checksum: 0db6817893952c3ec710eb9ceafb8468bf5ae38cb0f92b7b083baa13d70b19774674be04db5b817681fa7c5c6a088f61300815e4dd75a59696f4716ad69f6260
languageName: node
linkType: hard
"matched@npm:^0.4.1": "matched@npm:^0.4.1":
version: 0.4.4 version: 0.4.4
resolution: "matched@npm:0.4.4" resolution: "matched@npm:0.4.4"
@ -7343,15 +7369,6 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"minimatch@npm:^8.0.2":
version: 8.0.4
resolution: "minimatch@npm:8.0.4"
dependencies:
brace-expansion: ^2.0.1
checksum: 2e46cffb86bacbc524ad45a6426f338920c529dd13f3a732cc2cf7618988ee1aae88df4ca28983285aca9e0f45222019ac2d14ebd17c1edadd2ee12221ab801a
languageName: node
linkType: hard
"minimatch@npm:^9.0.0": "minimatch@npm:^9.0.0":
version: 9.0.0 version: 9.0.0
resolution: "minimatch@npm:9.0.0" resolution: "minimatch@npm:9.0.0"
@ -7449,7 +7466,7 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"minipass@npm:^4.0.0, minipass@npm:^4.2.4": "minipass@npm:^4.0.0":
version: 4.2.8 version: 4.2.8
resolution: "minipass@npm:4.2.8" resolution: "minipass@npm:4.2.8"
checksum: 7f4914d5295a9a30807cae5227a37a926e6d910c03f315930fde52332cf0575dfbc20295318f91f0baf0e6bb11a6f668e30cde8027dea7a11b9d159867a3c830 checksum: 7f4914d5295a9a30807cae5227a37a926e6d910c03f315930fde52332cf0575dfbc20295318f91f0baf0e6bb11a6f668e30cde8027dea7a11b9d159867a3c830
@ -8276,7 +8293,7 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"path-scurry@npm:^1.6.1, path-scurry@npm:^1.7.0": "path-scurry@npm:^1.7.0":
version: 1.7.0 version: 1.7.0
resolution: "path-scurry@npm:1.7.0" resolution: "path-scurry@npm:1.7.0"
dependencies: dependencies:
@ -9101,17 +9118,6 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"rimraf@npm:^4.4.0":
version: 4.4.1
resolution: "rimraf@npm:4.4.1"
dependencies:
glob: ^9.2.0
bin:
rimraf: dist/cjs/src/bin.js
checksum: b786adc02651e2e24bbedb04bbdea80652fc9612632931ff2d9f898c5e4708fe30956186597373c568bd5230a4dc2fadfc816ccacba8a1daded3a006a6b74f1a
languageName: node
linkType: hard
"rimraf@npm:^5.0.0": "rimraf@npm:^5.0.0":
version: 5.0.0 version: 5.0.0
resolution: "rimraf@npm:5.0.0" resolution: "rimraf@npm:5.0.0"
@ -9399,6 +9405,18 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"shiki@npm:^0.14.1":
version: 0.14.2
resolution: "shiki@npm:0.14.2"
dependencies:
ansi-sequence-parser: ^1.1.0
jsonc-parser: ^3.2.0
vscode-oniguruma: ^1.7.0
vscode-textmate: ^8.0.0
checksum: f2a14302b1803617e3ff1b751a5c87b4af4ad15214dc00e9215402e42940a84a0b956cf55d628f25dbf1296b18e277b8529571cd9359b971ac599a0ab11303e7
languageName: node
linkType: hard
"side-channel@npm:^1.0.4": "side-channel@npm:^1.0.4":
version: 1.0.4 version: 1.0.4
resolution: "side-channel@npm:1.0.4" resolution: "side-channel@npm:1.0.4"
@ -10397,6 +10415,22 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"typedoc@npm:^0.24.6":
version: 0.24.6
resolution: "typedoc@npm:0.24.6"
dependencies:
lunr: ^2.3.9
marked: ^4.3.0
minimatch: ^9.0.0
shiki: ^0.14.1
peerDependencies:
typescript: 4.6.x || 4.7.x || 4.8.x || 4.9.x || 5.0.x
bin:
typedoc: bin/typedoc
checksum: 3911ef6a7736ce6655a4b22fabc5be6df8812412e209d730fd168bfa7797847897f05fed797bc16558f11ce647b20746453748ff8afe85b1375c3efe2d7b57df
languageName: node
linkType: hard
"typescript@npm:^5.0.4": "typescript@npm:^5.0.4":
version: 5.0.4 version: 5.0.4
resolution: "typescript@npm:5.0.4" resolution: "typescript@npm:5.0.4"
@ -10686,6 +10720,20 @@ __metadata:
languageName: node languageName: node
linkType: hard linkType: hard
"vscode-oniguruma@npm:^1.7.0":
version: 1.7.0
resolution: "vscode-oniguruma@npm:1.7.0"
checksum: 53519d91d90593e6fb080260892e87d447e9b200c4964d766772b5053f5699066539d92100f77f1302c91e8fc5d9c772fbe40fe4c90f3d411a96d5a9b1e63f42
languageName: node
linkType: hard
"vscode-textmate@npm:^8.0.0":
version: 8.0.0
resolution: "vscode-textmate@npm:8.0.0"
checksum: 127780dfea89559d70b8326df6ec344cfd701312dd7f3f591a718693812b7852c30b6715e3cfc8b3200a4e2515b4c96f0843c0eacc0a3020969b5de262c2a4bb
languageName: node
linkType: hard
"vuvuzela@npm:1.0.3": "vuvuzela@npm:1.0.3":
version: 1.0.3 version: 1.0.3
resolution: "vuvuzela@npm:1.0.3" resolution: "vuvuzela@npm:1.0.3"