2023.04.12: Check HISTORY.md for more info

Signed-off-by: T-Hax <>
T-Hax 2023-04-11 19:36:32 +00:00
parent 08621e0f0d
commit 0a7c98abb9
16 changed files with 719 additions and 178 deletions

@ -1,5 +1,10 @@
# All of these are used for tests
# If someone is using the SDK, there is no reason to use .env
# Tor # Tor
# Torify tests (still need to make this possible for each test)
TORIFY= TORIFY=
# Tor port (regular = 9050, browser = 9150)
TOR_PORT= TOR_PORT=
# RPCs # RPCs

@ -1,5 +1,18 @@
# History # History
### 2023.04.12 (2023-04-12)
Did:
* `TorHttpClient`, `RegularHttpClient` and some tests.
* Working on `Relayer`, withdrawing.
* Crypto logic for withdrawals.
Next:
* Finish withdrawal logic.
* Censorship test on RPC.
### 2023.04.09 (2023-04-09) ### 2023.04.09 (2023-04-09)
Did: Did:

@ -10,9 +10,9 @@
"zk" "zk"
], ],
"private": false, "private": false,
"version": "2023.04.06", "version": "2023.04.12",
"engines": { "engines": {
"node": ">=18" "node": "^18"
}, },
"main": "./build/index.js", "main": "./build/index.js",
"files": [ "files": [

@ -2,9 +2,12 @@
import * as Types from 'types/sdk/crypto' import * as Types from 'types/sdk/crypto'
// External crypto // External crypto
import { Groth16 } from 'src/groth16'
import circomlib from 'circomlib' import circomlib from 'circomlib'
import { bigInt } from 'snarkjs'
import { Groth16 } from 'websnark/src/groth16'
import { buildGroth16 } from 'websnark' import { buildGroth16 } from 'websnark'
import { MerkleTree } from 'fixed-merkle-tree'
import { genWitnessAndProve, toSolidityInput } from 'websnark/src/utils'
// Some utils to work with hex numbers // Some utils to work with hex numbers
import { HexUtils, NumberUtils } from 'lib/utils' import { HexUtils, NumberUtils } from 'lib/utils'
@ -18,17 +21,20 @@ import { Files } from 'lib/data'
* (will be) contained within this namespace. * (will be) contained within this namespace.
*/ */
export namespace Setup { export namespace Setup {
export async function provingKey(): Promise<ArrayBufferLike> { export async function getProvingKey(): Promise<ArrayBufferLike> {
return (await Files.loadRaw('circuits/tornadoProvingKey.bin')).buffer return (await Files.loadRaw('circuits/tornadoProvingKey.bin')).buffer
} }
export async function tornadoCircuit(): Promise<Buffer> { export async function getTornadoCircuit(): Promise<Buffer> {
return await Files.loadRaw('circuits/tornado.json') return await Files.loadRaw('circuits/tornado.json')
} }
export function groth16(): Promise<Groth16> { /**
* @note tornado-cli claims that `groth16 initialises a lot of Promises that will never be resolved, that's why we need to use process.exit to terminate the CLI`. In reality those are just worker threads, and `groth16` exposes a `terminate()` function to shut them down. 🤦
*/
export async function getGroth16(): Promise<Groth16> {
const defaultParams = { wasmInitialMemory: 5000 } const defaultParams = { wasmInitialMemory: 5000 }
return buildGroth16(defaultParams) return await buildGroth16(defaultParams)
} }
} }
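A minimal usage sketch of the prover lifecycle described in the note above, assuming `Setup` is exported from 'lib/crypto' alongside `Primitives` (the actual proving call is elided):

import { Setup } from 'lib/crypto'

async function proveAndCleanUp(): Promise<void> {
  const groth16 = await Setup.getGroth16()
  try {
    // ... genWitnessAndProve(groth16, inputs, circuit, provingKey) would run here ...
  } finally {
    // The worker threads spawned by buildGroth16 keep the event loop alive,
    // so terminate them instead of reaching for process.exit.
    groth16.terminate()
  }
}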
@ -43,29 +49,105 @@ export namespace Primitives {
return HexUtils.bufferToHex(msg, 62) return HexUtils.bufferToHex(msg, 62)
} }
export function createDeposit(depositData?: Types.InputFor.CreateDeposit): Types.TornadoDeposit { export function parseNote(hexNote: string): Types.ZKDepositData {
if (!depositData?.nullifier || !depositData?.secret) const buffer = Buffer.from(hexNote, 'hex')
depositData = { return createDeposit({
// @ts-expect-error
nullifier: bigInt.leBuff2int(buffer.subarray(0, 31)),
// @ts-expect-error
secret: bigInt.leBuff2int(buffer.subarray(31, 62))
})
}
export function createDeposit(input?: Types.InputFor.CreateDeposit): Types.ZKDepositData {
if (!input?.nullifier || !input?.secret)
input = {
nullifier: NumberUtils.randomBigInteger(31), nullifier: NumberUtils.randomBigInteger(31),
secret: NumberUtils.randomBigInteger(31) secret: NumberUtils.randomBigInteger(31)
} }
// @ts-ignore // @ts-expect-error
let preimage = Buffer.concat([depositData.nullifier.leInt2Buff(31), depositData.secret.leInt2Buff(31)]) let preimage = Buffer.concat([input.nullifier.leInt2Buff(31), input.secret.leInt2Buff(31)])
let commitment = calcPedersenHash({ msg: preimage }) let commitment = calcPedersenHash({ msg: preimage })
let commitmentHex = HexUtils.bigIntToHex(commitment) let commitmentHex = HexUtils.bigIntToHex(commitment)
// @ts-ignore // @ts-expect-error
let nullifierHash = calcPedersenHash({ msg: depositData.nullifier.leInt2Buff(31) }) let nullifierHash = calcPedersenHash({ msg: input.nullifier.leInt2Buff(31) })
let nullifierHex = HexUtils.bigIntToHex(nullifierHash) let nullifierHex = HexUtils.bigIntToHex(nullifierHash)
return { return {
nullifier: depositData.nullifier!, nullifier: input.nullifier!,
secret: depositData.secret!, secret: input.secret!,
preimage: preimage, preimage: preimage,
commitment: commitment, commitment: commitment,
commitmentHex: commitmentHex, hexCommitment: commitmentHex,
nullifierHash: nullifierHash, nullifierHash: nullifierHash,
nullifierHex: nullifierHex hexNullifierHash: nullifierHex
} }
} }
export function buildMerkleTree(inputs: Types.InputFor.BuildMerkleTree): MerkleTree {
return new MerkleTree(inputs.height, inputs.leaves)
}
export async function calcDepositProofs(inputs: Array<Types.InputFor.ZKProof>): Promise<Array<any>> {
const proofs: string[][] = []
const args: any[][] = []
const groth16 = await Setup.getGroth16()
const circuit = await Setup.getTornadoCircuit()
const provingKey = await Setup.getProvingKey()
for (let i = 0, len = inputs.length; i < len; i++) {
const input = inputs[i]
// Compute Merkle Proof
const { pathElements, pathIndex } = input.public.tree.path(input.public.leafIndex)
args.push([])
proofs.push([])
const proofData = await genWitnessAndProve(
groth16,
{
// Public inputs
root: input.public.root,
// @ts-ignore
nullifierHash: bigInt(input.public.hexNullifierHash),
// @ts-ignore
fee: bigInt(input.public.fee),
// @ts-ignore
refund: bigInt(input.public.refund),
// @ts-ignore
relayer: bigInt(input.public.relayerAddress),
// @ts-ignore
recipient: bigInt(input.public.recipientAddress),
// Private inputs
nullifier: input.private.nullifier,
secret: input.private.secret,
pathElements: pathElements,
pathIndices: pathIndex
},
circuit,
provingKey
)
proofs[i].push(toSolidityInput(proofData).proof)
args[i].push([
input.public.root,
input.public.hexNullifierHash,
HexUtils.prepareAddress(input.public.recipientAddress, 20),
// @ts-ignore
HexUtils.prepareAddress(input.public.relayerAddress, 20),
HexUtils.numberToHex(input.public.fee),
HexUtils.numberToHex(input.public.refund)
])
}
// Done. 🤷‍♀️
groth16.terminate()
return proofs.concat(args)
}
} }
// TODO: implement and decide whether to add in declarations an ambient namespace and merge it here // TODO: implement and decide whether to add in declarations an ambient namespace and merge it here
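A short sketch of how the primitives above fit together (assuming `Primitives` is exported from 'lib/crypto' as it is imported elsewhere in this commit); the 62-byte preimage, 31 bytes of nullifier followed by 31 bytes of secret, doubles as the hex note that `parseNote` consumes:

import { Primitives } from 'lib/crypto'

// Fresh random deposit data: nullifier, secret, preimage, commitment and hashes.
const deposit = Primitives.createDeposit()

// Serialize the preimage as the hex note, then reconstruct the deposit from it.
const hexNote = deposit.preimage.toString('hex')
const restored = Primitives.parseNote(hexNote)

// Both should describe the same commitment.
console.log(deposit.hexCommitment === restored.hexCommitment)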

@ -1,6 +1,7 @@
// Local types // Local types
import { TornadoInstance } from 'types/deth' import { TornadoInstance } from 'types/deth'
import * as Types from 'types/sdk/data' import * as Types from 'types/sdk/data'
import { RelayerProperties } from 'types/sdk/data'
import { Options } from 'types/sdk/main' import { Options } from 'types/sdk/main'
// Local logic // Local logic
@ -19,6 +20,7 @@ import * as PouchDBAdapterMemory from 'pouchdb-adapter-memory'
// @ts-ignore // @ts-ignore
import { toIndexableString } from 'pouchdb-collate' import { toIndexableString } from 'pouchdb-collate'
import { timeStamp } from 'console'
// Register plugins // Register plugins
PouchDB.plugin(PouchDBAdapterMemory) PouchDB.plugin(PouchDBAdapterMemory)
@ -244,6 +246,8 @@ export namespace Constants {
} }
export namespace Docs { export namespace Docs {
// TODO: Probably find some easier way for the end user to look up the docs below...
export class Base { export class Base {
_id: string _id: string
_rev?: string _rev?: string
@ -255,9 +259,9 @@ export namespace Docs {
export class Deposit extends Base { export class Deposit extends Base {
blockNumber: number blockNumber: number
transactionHash: string
commitment: string
leafIndex: number leafIndex: number
commitment: string
transactionHash: string
timestamp: string timestamp: string
constructor(obj: any) { constructor(obj: any) {
@ -267,21 +271,22 @@ export namespace Docs {
const leafIndex = obj['args']['leafIndex'] const leafIndex = obj['args']['leafIndex']
const timestamp = obj['args']['timestamp'] const timestamp = obj['args']['timestamp']
super(toIndexableString([blockNumber, transactionHash, commitment, leafIndex, timestamp])) // To preserve order because we will need it later
super(toIndexableString([blockNumber, leafIndex, commitment]))
this.blockNumber = blockNumber
this.transactionHash = transactionHash
this.commitment = commitment this.commitment = commitment
this.blockNumber = blockNumber
this.leafIndex = leafIndex this.leafIndex = leafIndex
this.transactionHash = transactionHash
this.timestamp = timestamp this.timestamp = timestamp
} }
} }
export class Withdrawal extends Base { export class Withdrawal extends Base {
blockNumber: number blockNumber: number
transactionHash: string
nullifierHash: string
to: string to: string
nullifierHash: string
transactionHash: string
fee: string fee: string
constructor(obj: any) { constructor(obj: any) {
@ -291,12 +296,12 @@ export namespace Docs {
const nullifierHash = obj['args']['nullifierHash'] const nullifierHash = obj['args']['nullifierHash']
const fee = (obj['args']['fee'] as BigNumber).toString() const fee = (obj['args']['fee'] as BigNumber).toString()
super(toIndexableString([blockNumber, transactionHash, nullifierHash, to, fee])) super(toIndexableString([blockNumber, to, nullifierHash]))
this.blockNumber = blockNumber this.blockNumber = blockNumber
this.transactionHash = transactionHash
this.nullifierHash = nullifierHash
this.to = to this.to = to
this.nullifierHash = nullifierHash
this.transactionHash = transactionHash
this.fee = fee this.fee = fee
} }
} }
@ -306,7 +311,7 @@ export namespace Docs {
note: string note: string
constructor(index: number, pathstring: string, note: string) { constructor(index: number, pathstring: string, note: string) {
super(toIndexableString([index, pathstring, note])) super(toIndexableString([index, pathstring]))
this.pathstring = pathstring this.pathstring = pathstring
this.note = note this.note = note
} }
@ -317,11 +322,30 @@ export namespace Docs {
invoice: string invoice: string
constructor(index: number, pathstring: string, invoice: string) { constructor(index: number, pathstring: string, invoice: string) {
super(toIndexableString([index, pathstring, invoice])) super(toIndexableString([index, pathstring]))
this.pathstring = pathstring this.pathstring = pathstring
this.invoice = invoice this.invoice = invoice
} }
} }
export class Relayer extends Base {
address: string
version: string
serviceFeePercent: number
miningFeePercent: number
status: string
chainId: number
constructor(url: string, properties: RelayerProperties) {
super(toIndexableString([url]))
this.address = properties.address
this.version = properties.version
this.serviceFeePercent = properties.serviceFeePercent
this.miningFeePercent = properties.miningFeePercent
this.status = properties.status
this.chainId = properties.chainId
}
}
} }
export namespace Cache { export namespace Cache {
@ -341,6 +365,12 @@ export namespace Cache {
this.db = new PouchDB<T>(Files.getCachePath(name), { adapter: dbAdapter }) this.db = new PouchDB<T>(Files.getCachePath(name), { adapter: dbAdapter })
} }
async get(keys: Array<any>): Promise<T> {
return await this.db.get(toIndexableString(keys)).catch((err) => {
throw ErrorUtils.ensureError(err)
})
}
async close(): Promise<void> { async close(): Promise<void> {
await this.db.close() await this.db.close()
} }
@ -392,6 +422,34 @@ export namespace Cache {
] ]
} }
} }
type DocsArray<T extends Docs.Base> = Array<{
doc?: T
id: string
key: string
value: {
rev: string
deleted?: boolean
}
}>
export async function loadContents<T extends Docs.Base>(
nameOfContent: string,
full: boolean = true,
emptyError: Error = ErrorUtils.getError(
`Cache.loadContents: there is no cache entry for ${nameOfContent}`
)
): Promise<DocsArray<T>> {
const cache = new Cache.Base<T>(Files.getCachePath(nameOfContent))
const docs = await cache.db.allDocs({ include_docs: full }).catch((err) => {
throw ErrorUtils.ensureError(err)
})
if (docs.total_rows === 0) throw emptyError
return docs.rows as DocsArray<T>
}
} }
// Namespace exports // Namespace exports
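A hedged sketch of how these documents are meant to be read back (the cache name below is hypothetical; real names are derived from the instance lookup keys): since `Docs.Deposit` builds its `_id` from `[blockNumber, leafIndex, commitment]`, `allDocs()` returns deposits in block/leaf order and the key data can be recovered from the id alone.

import { Cache, Docs } from 'lib/data'
// @ts-ignore
import { parseIndexableString } from 'pouchdb-collate'

async function listCachedDeposits(): Promise<void> {
  const deposits = new Cache.Base<Docs.Deposit>('Deposit1ETH0.1') // hypothetical cache name
  const { rows } = await deposits.db.allDocs()
  for (const row of rows) {
    // The indexable id already carries block number, leaf index and commitment, no include_docs needed
    const [blockNumber, leafIndex, commitment] = parseIndexableString(row.id)
    console.log(blockNumber, leafIndex, commitment)
  }
  await deposits.close()
}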

@ -2,16 +2,19 @@
import { DeepRequired } from 'ts-essentials' import { DeepRequired } from 'ts-essentials'
// Local types // Local types
import { Relayer, Options, Transactions } from 'types/sdk/main' import { RelayerProperties } from 'types/sdk/data'
import { Options, Transactions } from 'types/sdk/main'
import { ZKDepositData, InputFor } from 'types/sdk/crypto'
import { TornadoInstance, TornadoProxy } from 'types/deth' import { TornadoInstance, TornadoProxy } from 'types/deth'
// Ethers // External imports
import { Signer } from '@ethersproject/abstract-signer'
import { TransactionResponse } from '@ethersproject/abstract-provider'
import { BigNumber, EventFilter, providers } from 'ethers' import { BigNumber, EventFilter, providers } from 'ethers'
// @ts-ignore
import { parseIndexableString } from 'pouchdb-collate'
// Important local // Important local
import { Docs, Cache, Types as DataTypes, Json } from 'lib/data' import { Docs, Cache, Types as DataTypes, Json, Constants } from 'lib/data'
import { Primitives } from 'lib/crypto' import { Primitives } from 'lib/crypto'
import { Contracts } from 'lib/chain' import { Contracts } from 'lib/chain'
@ -21,6 +24,8 @@ import { ErrorUtils } from 'lib/utils'
import { Chain } from 'lib/chain' import { Chain } from 'lib/chain'
import { parseUnits } from 'ethers/lib/utils' import { parseUnits } from 'ethers/lib/utils'
type Provider = providers.Provider
type BackupDepositDoc = { type BackupDepositDoc = {
pathstring: string pathstring: string
invoice?: string invoice?: string
@ -32,129 +37,209 @@ export class Core {
caches: Map<string, Cache.Base<Docs.Base>> caches: Map<string, Cache.Base<Docs.Base>>
instances: Map<string, TornadoInstance> instances: Map<string, TornadoInstance>
constructor(provider: providers.Provider, signer?: Signer) { constructor(provider: providers.Provider) {
this.chain = new Chain(provider, signer) this.chain = new Chain(provider)
this.caches = new Map<string, Cache.Base<Docs.Base>>() this.caches = new Map<string, Cache.Base<Docs.Base>>()
this.instances = new Map<string, TornadoInstance>() this.instances = new Map<string, TornadoInstance>()
} }
connect(signer: Signer): void { connect(provider: Provider): void {
this.chain.signer = signer this.chain.provider = provider
} }
async getInstances( async getInstances(
keys: Array<{ token: string; denomination: number | string }> keys: Array<{ token: string; denomination: number | string }>
): Promise<Array<TornadoInstance>> { ): Promise<Array<TornadoInstance>> {
const chainId = await this.chain.getChainId() const chainId = await this.chain.getChainId()
return await Promise.all( return Promise.all(
keys.map((key) => keys.map((key) =>
Contracts.getInstance( Contracts.getInstance(String(chainId), key.token, String(key.denomination), this.chain.provider)
String(chainId),
key.token,
String(key.denomination),
this.chain.signer ?? this.chain.provider
)
) )
) )
} }
async getInstance(token: string, denomination: number | string): Promise<TornadoInstance> { async getInstance(token: string, denomination: number | string): Promise<TornadoInstance> {
const chainId = await this.chain.getChainId() const chainId = await this.chain.getChainId()
return await Contracts.getInstance( return Contracts.getInstance(String(chainId), token, String(denomination), this.chain.provider)
String(chainId),
token,
String(denomination),
this.chain.signer ?? this.chain.provider
)
} }
async getProxy(): Promise<TornadoProxy> { async getProxy(): Promise<TornadoProxy> {
const chainId = await this.chain.getChainId() const chainId = await this.chain.getChainId()
return await Contracts.getProxy(String(chainId), this.chain.signer ?? this.chain.provider) return Contracts.getProxy(String(chainId), this.chain.signer ?? this.chain.provider)
} }
async buildWithdrawalTx( async buildDepositProof(
instance: TornadoInstance, instance: TornadoInstance,
withdrawOptions?: Options.Core.Withdrawal relayerProperties: RelayerProperties,
): Promise<Transactions.Withdrawal> { recipientAddress: string,
return (await this.buildWithdrawalTxs([instance], withdrawOptions))[0] zkDepositsData: ZKDepositData,
options?: Options.Core.BuildDepositProof
): Promise<any> {
return (
await this.buildDepositProofs(
instance,
relayerProperties,
[recipientAddress],
[zkDepositsData],
options
)
)[0]
} }
// TODO: lots of stuff /**
async buildWithdrawalTxs( * @param instance This is the Tornado Instance which will be withdrawn from.
instances: Array<TornadoInstance>, * @param relayerProperties The properties of the relayer that is going to be used for the withdrawals. These properties are included in the ZK proof.
withdrawOptions?: Options.Core.Withdrawal * @param recipientAddresses The recipient addresses which should receive the withdrawals, in order.
): Promise<Array<Transactions.Withdrawal>> { * @param zkDepositsData The public and private values for each withdrawal, either reconstructed from the deposit note or produced while building the deposit transaction, and used to build the proof-of-knowledge statement for that withdrawal.
for (let i = 0, nInstances = instances.length; i < nInstances; i++) { * @param options Additional options which allow the user to skip checking whether the notes are spent or to change the target merkle tree height.
const lookupKeys = await this.getInstanceLookupKeys(instances[i].address) * @returns The proofs, which the user can then submit either through a relayer (recommended, but choose it carefully) or from their own wallet (if needed).
const pathstring = lookupKeys.network + lookupKeys.token + lookupKeys.denomination */
const db = new Cache.Base<Docs.Deposit>('Deposits' + pathstring.toUpperCase()) async buildDepositProofs(
instance: TornadoInstance,
relayerProperties: RelayerProperties,
recipientAddresses: Array<string>,
zkDepositsData: Array<ZKDepositData>,
options?: Options.Core.BuildDepositProof
): Promise<Array<any>> {
// Extract commitments and nullifier hashes
const hexCommitments: string[] = []
const hexNullifierHashes: string[] = []
const purchaseAmounts = options?.ethPurchaseAmounts ?? new Array(zkDepositsData.length)
if (zkDepositsData.length !== recipientAddresses.length)
throw ErrorUtils.getError(
'Core.buildDepositProofs: the number of recipients must equal the length of zkDepositsData.'
)
if (zkDepositsData.length !== purchaseAmounts.length)
throw ErrorUtils.getError(
'Core.buildDepositProofs: if purchase amounts are specified, their number must equal the length of zkDepositsData.'
)
zkDepositsData.forEach((deposit) => {
hexCommitments.push(deposit.hexCommitment)
hexNullifierHashes.push(deposit.hexNullifierHash)
})
// Determine cache name
const lookupKeys = await this.getInstanceLookupKeys(instance.address)
const name = 'Deposit' + (lookupKeys.network + lookupKeys.token + lookupKeys.denomination).toUpperCase()
// Find all leaf indices by reading from cache
const leafIndices = await this._findLeafIndices(name, hexCommitments)
const invalidCommitments: string[] = []
// Determine whether we will be checking whether notes are spent
const spentNotes: string[] = []
const checkSpent = options?.checkNotesSpent !== false
// If yes, immediately check it with the supplied Tornado Instance
const checkSpentArray = checkSpent ? await instance.isSpentArray(hexNullifierHashes) : null
// Check whether any commitment was not found among the cached deposits, meaning that it is invalid
// Also collect the invalid commitments. We can read leafIndices[i] because the matched ones are concatenated
// at the start
for (let i = 0, len = zkDepositsData.length; i < len; i++) {
if (!leafIndices[i]) invalidCommitments.push(hexCommitments[i])
if (checkSpent && !checkSpentArray![i]) spentNotes.push(hexNullifierHashes[i])
} }
// Placeholder // If something is wrong, throw
return [{ request: {} }] const commitmentsAreInvalid = invalidCommitments.length !== 0
} const notesAreSpent = spentNotes.length !== 0
async depositInMultiple( if (commitmentsAreInvalid || notesAreSpent)
instances: Array<TornadoInstance>, throw ErrorUtils.getError(
depositOptions?: Options.Core.Deposit `Core.buildDepositProofs: ` +
): Promise<Array<TransactionResponse>> { (commitmentsAreInvalid
if (!this.chain.signer) ? `following commitments are invalid:\n\n${invalidCommitments.join('\n')}\n\n`
throw ErrorUtils.getError('Core.depositInMultiple: need connected signer to deposit!') : '') +
const txs = await this.buildDepositTxs(instances, depositOptions) (notesAreSpent
return await Promise.all(txs.map((tx) => this.chain.signer!.sendTransaction(tx.request))) ? `${commitmentsAreInvalid ? 'and ' : ''}following notes are already spent:\n\n${spentNotes.join(
} '\n'
)}\n\n`
: '')
)
async depositInSingle( // Otherwise, build the merkle tree from the leaf indices
instance: TornadoInstance, // We have to slice to get the leaf indices in order
depositOptions?: Omit<Options.Core.Deposit, 'depositsPerInstance'> const merkleTree = Primitives.buildMerkleTree({
): Promise<TransactionResponse> { height: options?.merkleTreeHeight ?? Constants.MERKLE_TREE_HEIGHT,
if (!this.chain.signer) leaves: leafIndices.slice(zkDepositsData.length).map((leafIndex) => String(leafIndex))
throw ErrorUtils.getError('Core.depositInMultiple: need connected signer to deposit!') })
const tx = await this.buildDepositTx(instance, depositOptions)
return await this.chain.signer!.sendTransaction(tx.request) const root: string = merkleTree.root()
// Check whether the root is valid
if (!(await instance.isKnownRoot(root)))
throw ErrorUtils.getError(
'Core.buildDepositProofs: the merkle tree created is not valid, something went wrong with syncing.'
)
// Compute proofs
const inputsForProofs: InputFor.ZKProof[] = []
for (let i = 0, len = zkDepositsData.length; i < len; i++) {
inputsForProofs.push({
public: {
root: root,
tree: merkleTree,
leafIndex: leafIndices[i],
hexNullifierHash: zkDepositsData[i].hexNullifierHash,
recipientAddress: recipientAddresses[i],
relayerAddress: relayerProperties.address,
fee: 5, // TODO: placeholder
refund: purchaseAmounts[i] ?? 0
},
private: {
nullifier: zkDepositsData[i].nullifier,
secret: zkDepositsData[i].secret
}
})
}
return await Primitives.calcDepositProofs(inputsForProofs)
} }
async createInvoice( async createInvoice(
instance: TornadoInstance, instance: TornadoInstance,
invoiceOptions?: Omit<Options.Core.Invoice, 'depositsPerInstance'> options?: Omit<Options.Core.Invoice, 'depositsPerInstance'>
): Promise<Transactions.Invoice> { ): Promise<Transactions.Invoice> {
let opts: Options.Core.Invoice = invoiceOptions ?? {} let opts: Options.Core.Invoice = options ?? {}
opts.depositsPerInstance = [1] opts.depositsPerInstance = [1]
return (await this.createInvoices([instance], invoiceOptions))[0] return (await this.createInvoices([instance], options))[0]
} }
async createInvoices( async createInvoices(
instances: Array<TornadoInstance>, instances: Array<TornadoInstance>,
invoiceOptions?: Options.Core.Invoice options?: Options.Core.Invoice
): Promise<Array<Transactions.Invoice>> { ): Promise<Array<Transactions.Invoice>> {
if (!invoiceOptions) invoiceOptions = {} if (!options) options = {}
if (!invoiceOptions.backup) invoiceOptions.backup = {} if (!options.backup) options.backup = {}
invoiceOptions.backup.invoices = invoiceOptions.backup.invoices ?? true options.backup.invoices = options.backup.invoices ?? true
invoiceOptions.backup.notes = invoiceOptions.backup.notes ?? true options.backup.notes = options.backup.notes ?? true
invoiceOptions.doNotPopulate = invoiceOptions.doNotPopulate ?? true options.doNotPopulate = options.doNotPopulate ?? true
return await this.buildDepositTxs(instances, invoiceOptions) return this.buildDepositTxs(instances, options)
} }
async buildDepositTx( async buildDepositTx(
instance: TornadoInstance, instance: TornadoInstance,
depositOptions?: Options.Core.Deposit options?: Options.Core.Deposit
): Promise<Transactions.Deposit> { ): Promise<Transactions.Deposit> {
let opts: Options.Core.Deposit = depositOptions ?? {} let opts: Options.Core.Deposit = options ?? {}
opts.depositsPerInstance = [1] opts.depositsPerInstance = [1]
return (await this.buildDepositTxs([instance], opts))[0] return (await this.buildDepositTxs([instance], opts))[0]
} }
async buildDepositTxs( async buildDepositTxs(
instances: Array<TornadoInstance>, instances: Array<TornadoInstance>,
depositOptions?: Options.Core.Deposit options?: Options.Core.Deposit
): Promise<Array<Transactions.Deposit>> { ): Promise<Array<Transactions.Deposit>> {
const depositsPerInstance = const depositsPerInstance = options?.depositsPerInstance ?? new Array<number>(instances.length).fill(1)
depositOptions?.depositsPerInstance ?? new Array<number>(instances.length).fill(1)
const doNotPopulate = depositOptions?.doNotPopulate ?? false const doNotPopulate = options?.doNotPopulate ?? false
const backupNotes = depositOptions?.backup?.notes ?? true const backupNotes = options?.backup?.notes ?? true
const backupInvoices = depositOptions?.backup?.invoices ?? false const backupInvoices = options?.backup?.invoices ?? false
if (depositsPerInstance.length != instances.length) if (depositsPerInstance.length != instances.length)
throw ErrorUtils.getError( throw ErrorUtils.getError(
@ -178,7 +263,7 @@ export class Core {
if (backupNotes) notesToBackup.push({ pathstring: pathstring, note: note }) if (backupNotes) notesToBackup.push({ pathstring: pathstring, note: note })
if (backupInvoices) invoicesToBackup.push({ pathstring: pathstring, invoice: deposit.commitmentHex }) if (backupInvoices) invoicesToBackup.push({ pathstring: pathstring, invoice: deposit.hexCommitment })
if (!doNotPopulate) { if (!doNotPopulate) {
txs.push({ txs.push({
@ -186,19 +271,19 @@ export class Core {
to: proxy.address, to: proxy.address,
data: proxy.interface.encodeFunctionData('deposit', [ data: proxy.interface.encodeFunctionData('deposit', [
instances[i].address, instances[i].address,
deposit.commitmentHex, deposit.hexCommitment,
[] []
]), ]),
value: lookupKeys.token == 'eth' ? parseUnits(lookupKeys.denomination) : BigNumber.from(0) value: lookupKeys.token == 'eth' ? parseUnits(lookupKeys.denomination) : BigNumber.from(0)
}, },
note: pathstring + '_' + note, note: pathstring + '_' + note,
invoice: pathstring + '_' + deposit.commitmentHex invoice: pathstring + '_' + deposit.hexCommitment
}) })
} else } else
txs.push({ txs.push({
request: {}, request: {},
note: pathstring + '_' + note, note: pathstring + '_' + note,
invoice: pathstring + '_' + deposit.commitmentHex invoice: pathstring + '_' + deposit.hexCommitment
}) })
} }
} }
@ -382,6 +467,52 @@ export class Core {
return syncOptions as DeepRequired<Options.Core.Sync> return syncOptions as DeepRequired<Options.Core.Sync>
} }
/**
* @param instanceName The name of the instance as created in the `_sync` function.
* @param commitments The commitments whose leaf indices should be noted down separately.
* @returns The leaf indices matched to the provided commitments (in the same order, with `0` for commitments that could not be matched and are therefore probably invalid), concatenated with the array of all leaf indices found in the cache.
*/
private async _findLeafIndices(instanceName: string, commitments: Array<string>): Promise<Array<number>> {
const matchedLeafIndices = new Array<number>(commitments.length).fill(0)
const leafIndices: Array<number> = []
// Either load all deposit events from memory or from cache
let cache: Cache.Base<Docs.Deposit>
if (!this.caches.has(instanceName)) {
cache = new Cache.Base<Docs.Deposit>(instanceName)
} else cache = this.caches.get(instanceName) as Cache.Base<Docs.Deposit>
const docs = await cache.db.allDocs()
// If no docs in cache throw and stop
if (docs.total_rows === 0) {
await cache.clear()
throw ErrorUtils.getError(
`Core._findLeafIndices: events for instance ${instanceName} have not been synchronized.`
)
}
// Otherwise start looking for commitment leaf indices and also pick up
// all other leafs on the way
for (const row of docs.rows) {
const [, leafIndex, loadedCommitment] = parseIndexableString(row.id)
const index = commitments.indexOf(loadedCommitment)
// If this commitment was requested, note down its leaf index; avoid splicing so that
// later matches keep their original positions and the caller's array is not mutated
if (index !== -1) matchedLeafIndices[index] = leafIndex
// In any case push every leaf
leafIndices.push(leafIndex)
}
// Concat matched and all leaf indices
return matchedLeafIndices.concat(leafIndices)
}
async getInstanceLookupKeys(instanceAddress: string): Promise<DataTypes.Keys.InstanceLookup> { async getInstanceLookupKeys(instanceAddress: string): Promise<DataTypes.Keys.InstanceLookup> {
// lookup some stuff first // lookup some stuff first
const lookupObj: { [key: string]: string } = Json.getValue(await Json.load('onchain/quickLookup.json'), [ const lookupObj: { [key: string]: string } = Json.getValue(await Json.load('onchain/quickLookup.json'), [
@ -403,4 +534,4 @@ export class Core {
} }
} }
export { Relayer, Transactions, Options } export { Transactions, Options }
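Putting the new pieces together, a hedged end-to-end sketch of proving a withdrawal for a single note; the RPC URL, relayer URL, recipient and note are placeholders, Tor is assumed to be running locally, and the instance's deposit events must already be synced into the cache:

import { providers } from 'ethers'
import { Core } from 'lib/main'
import { Primitives } from 'lib/crypto'
import { Relayer, TorHttpClient } from 'lib/web'

async function proveWithdrawal(): Promise<void> {
  const core = new Core(new providers.JsonRpcProvider('http://localhost:8545'))
  const instance = await core.getInstance('eth', 0.1)

  // The relayer properties end up inside the proof, so fetch (or load) them first.
  const relayer = new Relayer({ url: 'https://relayer.example', httpClient: new TorHttpClient() })
  const properties = await relayer.fetchProperties()

  // Reconstruct the deposit data from the saved 62-byte hex note.
  const deposit = Primitives.parseNote('<62-byte hex note>')

  const proof = await core.buildDepositProof(instance, properties, '0x<recipient address>', deposit)
  console.log(proof)
}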

@ -200,6 +200,10 @@ export namespace HexUtils {
// @ts-ignore // @ts-ignore
return '0x' + number.toString(16).padStart(2 * byteLen, '0') return '0x' + number.toString(16).padStart(2 * byteLen, '0')
} }
export function prepareAddress(address: string, bytelen: number = 32): string {
return (address.slice(0, 2) == '0x' ? address.slice(2) : address).toLowerCase().padStart(bytelen * 2, '0')
}
} }
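A small illustration of the helper added above, using an arbitrary example address; the result is lowercased hex without the 0x prefix, left-padded to the requested byte length:

import { HexUtils } from 'lib/utils'

// 20-byte form: exactly the 40 lowercased hex characters.
HexUtils.prepareAddress('0xA0b86991c6218B36c1d19D4a2e9Eb0cE3606eB48', 20)
// -> 'a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48'

// Default 32-byte form pads with zeros, like an ABI-encoded word.
HexUtils.prepareAddress('0xA0b86991c6218B36c1d19D4a2e9Eb0cE3606eB48')
// -> '000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48'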
export namespace ObjectUtils { export namespace ObjectUtils {

@ -1,5 +1,13 @@
import axios from 'axios'
import { AxiosInstance } from 'axios'
import { SocksProxyAgent } from 'socks-proxy-agent' import { SocksProxyAgent } from 'socks-proxy-agent'
import { Web3Provider, Networkish } from '@ethersproject/providers' import { Web3Provider, Networkish } from '@ethersproject/providers'
import { RelayerOptions } from 'types/sdk/web'
import { BigNumber } from 'ethers'
import { ErrorUtils } from './utils'
import { Cache, Docs } from './data'
import { RelayerProperties } from 'types/sdk/data'
// It seems that the default HttpProvider offered by the normal web3 package // It seems that the default HttpProvider offered by the normal web3 package
// has some logic which either ignores the SocksProxyAgent or which falls back to // has some logic which either ignores the SocksProxyAgent or which falls back to
@ -13,20 +21,210 @@ export interface TorOptions {
headers?: { name: string; value: string }[] headers?: { name: string; value: string }[]
} }
/**
* You can also set up a SOCKS5 I2P tunnel on some port and use that instead, meaning this should be compatible with I2P as well.
*/
export class TorProvider extends Web3Provider { export class TorProvider extends Web3Provider {
constructor(url: string, torOpts: TorOptions, network?: Networkish) { constructor(url: string, torOpts: TorOptions, network?: Networkish) {
const torPort = torOpts.port ?? 9050, const torPort = torOpts.port ?? 9050,
headers = torOpts.headers ?? [ headers = torOpts.headers ?? [
{ name: 'User-Agent', value: 'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0' } { name: 'User-Agent', value: 'Mozilla/5.0 (Windows NT 10.0; rv:102.0) Gecko/20100101 Firefox/102.0' }
] ]
super( super(
new HttpProvider(url, { new HttpProvider(url, {
agent: { https: new SocksProxyAgent('socks5h://127.0.0.1:' + torPort) } // The h after socks5 means that DNS resolution is also done through Tor
// Don't want to set for some reason, need to override somehow agent: { https: new SocksProxyAgent('socks5h://127.0.0.1:' + torPort) },
// headers: headers // The XHR2 XMLHttpRequest assigns a Tor Browser header by itself.
// But if running in a browser, we assign it anyway just in case.
headers: typeof window !== 'undefined' ? headers : undefined
}), }),
network network
) )
} }
} }
// @ts-ignore
export const TorHttpClient: new (opts?: {
port?: number
headers?: { [key: string]: string }
rv?: string
}) => AxiosInstance = function (opts?: { port?: number; headers?: { [key: string]: string }; rv?: string }) {
const rv = opts?.rv ?? '102.0'
return axios.create({
headers: opts?.headers ?? {
'User-Agent': `Mozilla/5.0 (Windows NT 10.0; rv:${rv}) Gecko/20100101 Firefox/${rv}`
},
httpsAgent: new SocksProxyAgent('socks5h://127.0.0.1:' + (opts?.port ?? 9050)),
httpAgent: new SocksProxyAgent('socks5h://127.0.0.1:' + (opts?.port ?? 9050)),
// 2 minute timeout
timeout: 120000
})
}
// @ts-ignore
export const RegularHttpClient: new (opts?: any) => AxiosInstance = function (opts: any) {
return axios.create(opts)
}
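A quick usage sketch of the two clients above; both hand back plain Axios instances, so callers don't care about the transport (assumes Tor is listening on the given SOCKS port):

import { TorHttpClient, RegularHttpClient } from 'lib/web'

async function checkTor(): Promise<void> {
  // Routed through socks5h://127.0.0.1:9150, so DNS resolution also happens over Tor.
  const tor = new TorHttpClient({ port: 9150 })
  const check = (await tor.get('https://check.torproject.org/api/ip')).data
  console.log('Using Tor:', check.IsTor, 'exit IP:', check.IP)

  // Same Axios surface without the proxy, for clearnet use.
  const clearnet = new RegularHttpClient()
  console.log((await clearnet.get('https://check.torproject.org/api/ip')).data.IsTor)
}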
export class Relayer {
url: string
httpClient: AxiosInstance
private _fetched: boolean
private _address?: string
private _version?: string
private _serviceFee?: number
private _miningFee?: number
private _status?: string
private _chainId?: number
constructor(options: RelayerOptions, properties?: RelayerProperties) {
this.url = options.url
this.httpClient = options.httpClient
this._fetched = false
if (properties) {
this._address = properties.address
this._version = properties.version
this._chainId = properties.chainId
this._serviceFee = properties.serviceFeePercent
this._miningFee = properties.miningFeePercent
this._status = properties.status
this._fetched = true
}
}
// Setup
/**
* This function MUST be called to unlock the rest of the `Relayer` class functionality, as otherwise we don't have the property data necessary for all the logic we want.
* @returns Fetched `RelayerProperties`.
*/
async fetchProperties(): Promise<RelayerProperties> {
const properties = await this.httpClient
.get(this.url + '/status')
.catch((err) => {
throw ErrorUtils.ensureError(err)
})
.then((res) => res.data)
if (Object.entries(properties).length === 0)
throw ErrorUtils.getError(
'Relayer.fetchProperties: Something went wrong with fetching properties from relayer endpoint.'
)
this._address = properties['rewardAccount']
this._version = properties['version']
this._chainId = properties['netId']
this._serviceFee = properties['tornadoServiceFee']
this._miningFee = properties['miningFee']
this._status = properties['health']['status']
this._fetched = true
return {
address: this._address!,
version: this._version!,
chainId: this._chainId!,
serviceFeePercent: this._serviceFee!,
miningFeePercent: this._miningFee!,
status: this._status!
}
}
private _propertiesFetched(parentCallName: string): void {
if (!this._fetched)
throw ErrorUtils.getError(
`Relayer.${parentCallName}: properties must be fetched first with \`fetchProperties\`.`
)
}
// Getters
get address(): string {
this._propertiesFetched('address')
return this._address!
}
get version(): string {
this._propertiesFetched('version')
return this._version!
}
get serviceFeePercent(): number {
this._propertiesFetched('serviceFee')
return this._serviceFee!
}
get miningFeePercent(): number {
this._propertiesFetched('miningFee')
return this._miningFee!
}
get status(): string {
this._propertiesFetched('status')
return this._status!
}
get chainId(): number {
this._propertiesFetched('chainId')
return this._chainId!
}
async getETHPurchasePrice(token: string): Promise<BigNumber> {
return BigNumber.from(
await this.httpClient
.get(this.url + '/status')
.catch((err) => {
throw ErrorUtils.ensureError(err)
})
.then((res) => res.data.prices[token])
)
}
// TODO: Relaying stuff and related
async relay(): Promise<any> {}
async calcWithdrawalFee(token: string, denomination: number): Promise<BigNumber> {
//placeholder
return BigNumber.from(0)
}
// Cache
/**
* Construct a new Relayer by reading relayer data from cache.
*/
static async fromCache(options: RelayerOptions): Promise<Relayer> {
const cache = new Cache.Base<Docs.Relayer>('Relayers')
// Error is ensured already
const properties = await cache.get([options.url]).catch(() => {
throw ErrorUtils.getError(`Relayer.fromCache: relayer ${options.url} isn't stored in cache.`)
})
return new Relayer(options, properties)
}
/**
* Cache relayer data into a PouchDB database in your cache folder. This will automatically fetch properties if they are not fetched.
*/
async remember(): Promise<void> {
if (!this._fetched) await this.fetchProperties()
const cache = new Cache.Base<Docs.Relayer>('Relayers')
const doc = new Docs.Relayer(this.url, {
address: this._address!,
version: this._version!,
chainId: this._chainId!,
serviceFeePercent: this._serviceFee!,
miningFeePercent: this._miningFee!,
status: this._status!
})
await cache.db.put(doc).catch((err) => {
throw ErrorUtils.ensureError(err)
})
await cache.close().catch((err) => {
throw ErrorUtils.ensureError(err)
})
}
}
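A hedged sketch of the intended flow for the class above (the relayer URL is a placeholder): fetch the /status properties once, persist them with `remember`, and later rebuild the relayer from the 'Relayers' cache without touching the network:

import { Relayer, TorHttpClient } from 'lib/web'

async function rememberRelayer(): Promise<void> {
  const options = { url: 'https://relayer.example', httpClient: new TorHttpClient({ port: 9050 }) }

  // First run: fetch properties over Tor and cache them under the URL.
  const relayer = new Relayer(options)
  await relayer.fetchProperties()
  console.log(relayer.chainId, relayer.serviceFeePercent)
  await relayer.remember()

  // Later runs: restore the same properties from cache, no /status request needed.
  const cached = await Relayer.fromCache(options)
  console.log(cached.address)
}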

@ -20,8 +20,8 @@ describe('crypto', () => {
expect(deposit.secret).to.exist expect(deposit.secret).to.exist
expect(deposit.preimage).to.exist expect(deposit.preimage).to.exist
expect(deposit.commitment).to.exist expect(deposit.commitment).to.exist
expect(deposit.commitmentHex).to.exist expect(deposit.hexCommitment).to.exist
expect(deposit.nullifierHash).to.exist expect(deposit.hexNullifierHash).to.exist
// From the whitepaper, the nullifier k E B^248 // From the whitepaper, the nullifier k E B^248
expect(BigNumber.from(deposit.nullifier.toString())).to.be.lte(limit) expect(BigNumber.from(deposit.nullifier.toString())).to.be.lte(limit)

@ -11,12 +11,15 @@ import { Core } from 'lib/main'
import { Chain, Contracts } from 'lib/chain' import { Chain, Contracts } from 'lib/chain'
import { Files, OnchainData } from 'lib/data' import { Files, OnchainData } from 'lib/data'
import { ErrorUtils } from 'lib/utils' import { ErrorUtils } from 'lib/utils'
import { TorProvider } from 'lib/web'
chai.use(solidity) chai.use(solidity)
const expect = chai.expect const expect = chai.expect
describe('main', () => { describe('main', () => {
const torify = process.env.TORIFY === 'true'
if (!process.env.ETH_MAINNET_TEST_RPC) throw ErrorUtils.getError('need a mainnet rpc endpoint.') if (!process.env.ETH_MAINNET_TEST_RPC) throw ErrorUtils.getError('need a mainnet rpc endpoint.')
console.log('\nNote that these tests are time intensive. ⏳. ⏳.. ⏳...\n') console.log('\nNote that these tests are time intensive. ⏳. ⏳.. ⏳...\n')
@ -27,7 +30,10 @@ describe('main', () => {
let daiData: Json.TokenData let daiData: Json.TokenData
const daiWhale = '0x5777d92f208679db4b9778590fa3cab3ac9e2168' // Uniswap V3 Something/Dai Pool const daiWhale = '0x5777d92f208679db4b9778590fa3cab3ac9e2168' // Uniswap V3 Something/Dai Pool
const mainnetProvider = new providers.JsonRpcProvider(process.env.ETH_MAINNET_TEST_RPC) const mainnetProvider = torify
? new TorProvider(process.env.ETH_MAINNET_TEST_RPC, { port: +process.env.TOR_PORT! })
: new providers.JsonRpcProvider(process.env.ETH_MAINNET_TEST_RPC)
const ganacheProvider = new providers.Web3Provider( const ganacheProvider = new providers.Web3Provider(
// @ts-ignore // @ts-ignore
ganache.provider({ ganache.provider({
@ -60,7 +66,7 @@ describe('main', () => {
}) })
describe('class Classic', () => { describe('class Classic', () => {
it.only('sync: should be able to fetch a couple events', async () => { it('sync: should be able to fetch a couple events', async () => {
const core = new Core(mainnetProvider) const core = new Core(mainnetProvider)
const instance = await Contracts.getInstance(String(1), 'eth', String(0.1), mainnetProvider) const instance = await Contracts.getInstance(String(1), 'eth', String(0.1), mainnetProvider)
const targetBlock = 16928712 const targetBlock = 16928712

@ -1,25 +1,58 @@
import chai from 'chai' import chai from 'chai'
import { TorProvider } from 'lib/web' import { TorHttpClient, TorProvider } from 'lib/web'
// Waffle matchers // Waffle matchers
import { solidity } from 'ethereum-waffle' import { solidity } from 'ethereum-waffle'
import { ErrorUtils } from 'lib/utils' import { ErrorUtils } from 'lib/utils'
import { parseUnits } from 'ethers/lib/utils'
chai.use(solidity) chai.use(solidity)
const expect = chai.expect const expect = chai.expect
describe.skip('web', () => { describe('web', () => {
if (!process.env.ETH_MAINNET_TEST_RPC || !process.env.TOR_PORT) if (!process.env.ETH_MAINNET_TEST_RPC || !process.env.TOR_PORT)
throw ErrorUtils.getError('need a tor port and mainnet rpc endpoint.') throw ErrorUtils.getError('need a tor port and mainnet rpc endpoint.')
const torProvider = new TorProvider(process.env.ETH_MAINNET_TEST_RPC, { port: +process.env.TOR_PORT }) const torProvider = new TorProvider(process.env.ETH_MAINNET_TEST_RPC, { port: +process.env.TOR_PORT })
const httpClient = new TorHttpClient({ port: +process.env.TOR_PORT })
// TODO: Make these tests better and either auto-detect proxy or spin up tor console.log(
'\nSome Tor tips: Support non-profit exit node operators, host your own nodes, avoid spy nodes by configuring torrc.\n'
)
it.skip('CONNECTED: Should be able to request over Tor', async () => { function torErrorThrow(err: Error) {
console.log(await torProvider.getBlockNumber()) err.message =
"\n\nThis test most likely failed because you (Tor) didn't open a SOCKS5 tunnel at either 9050 or the Tor port you specified in .env. As such, the provider couldn't send a request. Please start Tor or Tor Browser. 🧅\n\n"
throw err
}
it('httpClient: Should be able to send requests over Tor', async function () {
try {
const check = (await httpClient.get('https://check.torproject.org/api/ip')).data
expect(check.IsTor).to.be.true
console.log(
`\n🧅 check.torproject.org/api/ip says...\n\nWe are using Tor: ${check.IsTor ? '✅' : '❌'}`
)
console.log(`Our IP is: ${check.IP}\n`)
} catch (err) {
torErrorThrow(ErrorUtils.ensureError(err))
}
}).timeout(0)
it.only('TorProvider: Should be able to fetch some basic blockchain data over Tor', async () => {
try {
console.log('\nBlock Number: ' + (await torProvider.getBlockNumber()))
console.log('Gas Price: ' + (await torProvider.getGasPrice()).div(1000000000) + ' gwei')
console.log(
'Zero address ETH burned: ' +
(await torProvider.getBalance('0x0000000000000000000000000000000000000000')).div(parseUnits('1')) +
'\n'
)
} catch (err) {
torErrorThrow(ErrorUtils.ensureError(err))
}
}).timeout(0) }).timeout(0)
it.skip('DISCONNECTED: Should not be able to request over Tor', async function () { it.skip('DISCONNECTED: Should not be able to request over Tor', async function () {

@ -1,5 +1,6 @@
// Interfaces for the cryptographic primitives. // Interfaces for the cryptographic primitives.
import { MerkleTree } from 'fixed-merkle-tree'
import { bigInt } from 'snarkjs' import { bigInt } from 'snarkjs'
export type bigInt = typeof bigInt export type bigInt = typeof bigInt
@ -15,20 +16,15 @@ export namespace OutputOf {
export type PedersenHash = bigInt export type PedersenHash = bigInt
export interface CreateDeposit { export interface CreateDeposit {
// This is really some type of number but since it was written in javascript,
// the entire thing translates absolutely horribly into Typescript. It pushed me over the type-border.
nullifier: bigInt nullifier: bigInt
secret: bigInt secret: bigInt
preimage: Buffer preimage: Buffer
commitment: PedersenHash commitment: PedersenHash
commitmentHex: string hexCommitment: string
nullifierHash: PedersenHash nullifierHash: PedersenHash
nullifierHex: string hexNullifierHash: string
} }
export interface MerkleTree {}
// TODO: Type these
export interface MerkleProof { export interface MerkleProof {
root: any root: any
path: { path: {
@ -37,7 +33,7 @@ export namespace OutputOf {
} }
} }
export interface DepositProof { export interface Groth16Proof {
pi_a: Array<string> pi_a: Array<string>
pi_b: Array<string> pi_b: Array<string>
pi_c: Array<string> pi_c: Array<string>
@ -52,10 +48,8 @@ export namespace OutputOf {
*/ */
type __OutputAliasDelimiter = null type __OutputAliasDelimiter = null
export type MerkleTree = OutputOf.MerkleTree
export type MerkleProof = OutputOf.MerkleProof export type MerkleProof = OutputOf.MerkleProof
export type Groth16Proof = OutputOf.DepositProof export type ZKProof = OutputOf.Groth16Proof
export type ZKProof = OutputOf.DepositProof
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INPUTS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INPUTS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/** /**
@ -75,11 +69,28 @@ export namespace InputFor {
secret?: bigInt secret?: bigInt
} }
export interface MerkleTree {} export interface BuildMerkleTree {
height: number
leaves: Array<string>
}
export type MerkleProof = MerkleTree export interface ZKProof {
public: {
root: string
tree: MerkleTree
leafIndex: number
hexNullifierHash: string
recipientAddress: string
relayerAddress: string
fee: number
refund: number
}
private: {
nullifier: bigInt
secret: bigInt
}
}
// TODO: Type these
interface PublicGroth16 { interface PublicGroth16 {
root: any root: any
nullifierHash: PedersenHash nullifierHash: PedersenHash
@ -92,18 +103,11 @@ export namespace InputFor {
interface PrivateGroth16 { interface PrivateGroth16 {
nullifier: bigInt nullifier: bigInt
secret: bigInt secret: bigInt
pathIndices: any pathIndices: number[]
pathElements: any[] pathElements: any[]
} }
export type Groth16 = PublicGroth16 & PrivateGroth16 export type Groth16 = PublicGroth16 & PrivateGroth16
export interface DepositProof {
merkleProof: OutputOf.MerkleProof
groth16: any
inputs: Groth16
provingKey: ArrayBufferLike
}
} }
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INPUT ALIASES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INPUT ALIASES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -114,4 +118,4 @@ export namespace InputFor {
*/ */
type __InputAliasDelimiter = null type __InputAliasDelimiter = null
export type TornadoDeposit = OutputOf.CreateDeposit export type ZKDepositData = OutputOf.CreateDeposit

@ -73,3 +73,12 @@ export namespace Keys {
denomination: string denomination: string
} }
} }
export interface RelayerProperties {
address: string
version: string
serviceFeePercent: number
miningFeePercent: number
status: string
chainId: number
}
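For reference, these fields line up with the relayer /status payload the way `Relayer.fetchProperties` maps them in this commit (rewardAccount → address, netId → chainId, tornadoServiceFee → serviceFeePercent, miningFee → miningFeePercent, health.status → status); the values below are made-up placeholders:

import { RelayerProperties } from 'types/sdk/data'

const example: RelayerProperties = {
  address: '0x0000000000000000000000000000000000000000', // placeholder reward account
  version: '4.x.x',
  serviceFeePercent: 0.05,
  miningFeePercent: 0.02,
  status: 'true',
  chainId: 1
}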

@ -6,15 +6,8 @@
// instead of having to specify exactly what type he is constructing. // instead of having to specify exactly what type he is constructing.
import { TransactionRequest } from '@ethersproject/abstract-provider' import { TransactionRequest } from '@ethersproject/abstract-provider'
import { BigNumber } from 'ethers' import { BigNumber } from 'ethers'
import { RelayerProperties as RelayerDataProperties } from 'types/sdk/data'
export interface Relayer {
url: string
handleWithdrawal(withdrawalData: any): Promise<any>
calcWithdrawalFee(token: string, denomination: number): Promise<BigNumber>
getServiceFee(): Promise<number>
}
export namespace Options { export namespace Options {
export namespace Cache { export namespace Cache {
@ -54,8 +47,10 @@ export namespace Options {
export type Invoice = Deposit export type Invoice = Deposit
export interface Withdrawal { export interface BuildDepositProof {
withdrawalsPerInstance?: Array<number> ethPurchaseAmounts?: Array<BigNumber>
merkleTreeHeight?: number
checkNotesSpent?: boolean
} }
} }
} }
@ -67,8 +62,4 @@ export namespace Transactions {
note?: string note?: string
} }
export type Invoice = Deposit export type Invoice = Deposit
export interface Withdrawal {
request: TransactionRequest
}
} }

@ -0,0 +1,7 @@
import { AxiosInstance } from 'axios'
export interface RelayerOptions {
url: string
address?: string
httpClient: AxiosInstance
}

@ -3,9 +3,9 @@
"@babel/code-frame@^7.0.0": "@babel/code-frame@^7.0.0":
version "7.18.6" version "7.21.4"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.21.4.tgz#d0fa9e4413aca81f2b23b9442797bda1826edb39"
integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== integrity sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g==
dependencies: dependencies:
"@babel/highlight" "^7.18.6" "@babel/highlight" "^7.18.6"
@ -896,9 +896,9 @@
form-data "^3.0.0" form-data "^3.0.0"
"@types/node@*": "@types/node@*":
version "18.15.6" version "18.15.11"
resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.6.tgz#af98ef4a36e7ac5f2d03040f3109fcce972bf6cb" resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.11.tgz#b3b790f09cb1696cffcec605de025b088fa4225f"
integrity sha512-YErOafCZpK4g+Rp3Q/PBgZNAsWKGunQTm9FA3/Pbcm0VCriTEzcrutQ/SxSc0rytAp0NoFWue669jmKhEtd0sA== integrity sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==
"@types/node@11.11.6": "@types/node@11.11.6":
version "11.11.6" version "11.11.6"
@ -1908,9 +1908,9 @@ cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3:
safe-buffer "^5.0.1" safe-buffer "^5.0.1"
"circomlib@npm:@urk1122/ks82ls0dn": "circomlib@npm:@urk1122/ks82ls0dn":
version "0.0.20-p5" version "0.0.20-p6"
resolved "https://registry.yarnpkg.com/@urk1122/ks82ls0dn/-/ks82ls0dn-0.0.20-p5.tgz#7e912513066b9d6d149c07ee00e4c19a54f49609" resolved "https://registry.yarnpkg.com/@urk1122/ks82ls0dn/-/ks82ls0dn-0.0.20-p6.tgz#d4e712694ef610ec41f3f89160a982176066122a"
integrity sha512-NdXMGf0yn2mjR0zY+maEPQwrdshjIpIQe9bNnLyld5qEgDyq38TbSmZ8MBgd3oPJ+yc1f1mjvD9RemqUt4+8GQ== integrity sha512-nAoJeTwsGrxCESnaXAU5bPn67FALVKHFiBUU7pXTYelxToDJjCoIuJcTPAAjL7OnywaTD9RRqSU6XakVa2X2yg==
dependencies: dependencies:
blake-hash "^1.1.0" blake-hash "^1.1.0"
blake2b "^2.1.3" blake2b "^2.1.3"
@ -3229,9 +3229,9 @@ find-up@^4.1.0:
path-exists "^4.0.0" path-exists "^4.0.0"
"fixed-merkle-tree@npm:@urk1122/s20lwm24m": "fixed-merkle-tree@npm:@urk1122/s20lwm24m":
version "0.6.1-p12" version "0.6.1-p13"
resolved "https://registry.yarnpkg.com/@urk1122/s20lwm24m/-/s20lwm24m-0.6.1-p12.tgz#79c03121b6edf4a9a6ccd07f38a325781b694732" resolved "https://registry.yarnpkg.com/@urk1122/s20lwm24m/-/s20lwm24m-0.6.1-p13.tgz#78d1a54c457bcd3e11c1cc276cac72d5a35db4c8"
integrity sha512-1/BYMszCP3hoMWSEEoIpEu71OhSbtAIaeCkvWzA6AAAqBytkHDRGlzICeRbcMEr0AypCdEcu2wOt9jfXyvQXZA== integrity sha512-VL+yvhjqNtcg5jdbKLxS+AwYifY0oav/EL6HZq0YWCq15jMrEbHbqAnJKERTsyjAZ6lDSf6nYICQygeOzRmr1Q==
dependencies: dependencies:
circomlib "npm:@urk1122/ks82ls0dn" circomlib "npm:@urk1122/ks82ls0dn"
snarkjs "npm:@urk1122/ske92jfn2jr" snarkjs "npm:@urk1122/ske92jfn2jr"
@ -4789,9 +4789,9 @@ mkdirp-promise@^5.0.1:
mkdirp "*" mkdirp "*"
mkdirp@*: mkdirp@*:
version "2.1.6" version "3.0.0"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-2.1.6.tgz#964fbcb12b2d8c5d6fbc62a963ac95a273e2cc19" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.0.tgz#758101231418bda24435c0888a91d9bd91f1372d"
integrity sha512-+hEnITedc8LAtIP9u3HJDFIdcLV2vXP33sqLLIzkv1Db1zO/1OxbvYf0Y1OC/S/Qo5dxHXepofhmxL02PsKe+A== integrity sha512-7+JDnNsyCvZXoUJdkMR0oUE2AmAdsNXGTmRbiOjYIwQ6q+bL6NwrozGQdPcmYaNcrhH37F50HHBUzoaBV6FITQ==
mkdirp@^0.5.1, mkdirp@^0.5.5: mkdirp@^0.5.1, mkdirp@^0.5.5:
version "0.5.6" version "0.5.6"
@ -6143,9 +6143,9 @@ smart-buffer@^4.2.0:
integrity sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg== integrity sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==
"snarkjs@npm:@urk1122/ske92jfn2jr": "snarkjs@npm:@urk1122/ske92jfn2jr":
version "0.1.20-p6" version "0.1.20-p7"
resolved "https://registry.yarnpkg.com/@urk1122/ske92jfn2jr/-/ske92jfn2jr-0.1.20-p6.tgz#8204547bdc6b8bf065ff9e91f6009abf6acf4569" resolved "https://registry.yarnpkg.com/@urk1122/ske92jfn2jr/-/ske92jfn2jr-0.1.20-p7.tgz#6623c8d50923b38d41d70d652527647dfec9a252"
integrity sha512-gshi4hosxXUJGCHeNnjhD/Q2KZpeanYiv0pwbtTl4YdsPiWq3J443b8S2x7Umiv/NxHBhclW+Y8Y3ecXOFk92Q== integrity sha512-KPAiX9Tmh9Y/M1AYPSsVosrEJdM/hcVk/yII9wjsbRtqg4zKz4DMBl02nN3xYr0JS50CMA34G+7GOH4iMBWH2Q==
dependencies: dependencies:
big-integer "^1.6.43" big-integer "^1.6.43"
chai "^4.2.0" chai "^4.2.0"
@ -7195,9 +7195,9 @@ webidl-conversions@^3.0.0:
integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==
"websnark@npm:@urk1122/ls02kr83j": "websnark@npm:@urk1122/ls02kr83j":
version "0.0.4-p9" version "0.0.4-p10"
resolved "https://registry.yarnpkg.com/@urk1122/ls02kr83j/-/ls02kr83j-0.0.4-p9.tgz#8c05e9c94765272dcb66bd4af67bb72db99627cb" resolved "https://registry.yarnpkg.com/@urk1122/ls02kr83j/-/ls02kr83j-0.0.4-p10.tgz#f87088910548606666aa2706bb12ce4efe07b194"
integrity sha512-bwd/OWYw0mC3SxpLAym3xpXrUv1hcDVACoNMnH+V5BUtuz2CMDm6XPJXtQRTNm/GMGPB2Y6KGF7NKmTFVv1OFw== integrity sha512-G7t0KAV387LAHrj6/Ugg/88Dt1P1JFTf+18s2cmlFjQ7ducZQFcCoXiVzyVdd663VMK0Po2jLQsMgOhYotxcsA==
dependencies: dependencies:
big-integer "1.6.42" big-integer "1.6.42"
snarkjs "npm:@urk1122/ske92jfn2jr" snarkjs "npm:@urk1122/ske92jfn2jr"