2023.04.12: Check HISTORY.md for more info

Signed-off-by: T-Hax <>
T-Hax 2023-04-11 19:36:32 +00:00
parent 08621e0f0d
commit 0a7c98abb9
16 changed files with 719 additions and 178 deletions

@ -1,5 +1,10 @@
# All of these are used for tests
# If someone is using the SDK, there is no reason to use .env
# Tor
# Torify tests (still need to make this possible for every test)
TORIFY=
# Tor port (Tor daemon = 9050, Tor Browser = 9150)
TOR_PORT=
# RPCs

@ -1,5 +1,18 @@
# History
### 2023.04.12 (2023-04-12)
Did:
* `TorHttpClient`, `RegularHttpClient` and some tests.
* Working on `Relayer` and withdrawing.
* Crypto logic for withdrawals.
Next:
* Finish withdrawal logic.
* Censorship test on RPC.
### 2023.04.09 (2023-04-09)
Did:

@ -10,9 +10,9 @@
"zk"
],
"private": false,
"version": "2023.04.06",
"version": "2023.04.12",
"engines": {
"node": ">=18"
"node": "^18"
},
"main": "./build/index.js",
"files": [

@ -2,9 +2,12 @@
import * as Types from 'types/sdk/crypto'
// External crypto
import { Groth16 } from 'src/groth16'
import circomlib from 'circomlib'
import { bigInt } from 'snarkjs'
import { Groth16 } from 'websnark/src/groth16'
import { buildGroth16 } from 'websnark'
import { MerkleTree } from 'fixed-merkle-tree'
import { genWitnessAndProve, toSolidityInput } from 'websnark/src/utils'
// Some utils to work with hex numbers
import { HexUtils, NumberUtils } from 'lib/utils'
@ -18,17 +21,20 @@ import { Files } from 'lib/data'
* (will be) contained within this namespace.
*/
export namespace Setup {
export async function provingKey(): Promise<ArrayBufferLike> {
export async function getProvingKey(): Promise<ArrayBufferLike> {
return (await Files.loadRaw('circuits/tornadoProvingKey.bin')).buffer
}
export async function tornadoCircuit(): Promise<Buffer> {
export async function getTornadoCircuit(): Promise<Buffer> {
return await Files.loadRaw('circuits/tornado.json')
}
export function groth16(): Promise<Groth16> {
/**
* @note The following is a comment from tornado-cli: `groth16 initialises a lot of Promises that will never be resolved, that's why we need to use process.exit to terminate the CLI`. They literally didn't check the code to see that these are just worker threads and that `groth16` has a `terminate()` function to remove them. 🤦
*/
export async function getGroth16(): Promise<Groth16> {
const defaultParams = { wasmInitialMemory: 5000 }
return buildGroth16(defaultParams)
return await buildGroth16(defaultParams)
}
}
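// A minimal lifecycle sketch for the note above (a hypothetical helper, using the
// websnark imports at the top of this file): the prover is built once, reused for any
// number of proofs, and must be terminated so its worker threads do not keep the
// process alive (which is what pushed tornado-cli towards process.exit).
async function proveOnce(input: object): Promise<string> {
  const groth16 = await Setup.getGroth16()
  const circuit = await Setup.getTornadoCircuit()
  const provingKey = await Setup.getProvingKey()
  try {
    const proofData = await genWitnessAndProve(groth16, input, circuit, provingKey)
    return toSolidityInput(proofData).proof
  } finally {
    // Without this the worker threads linger and the process never exits.
    groth16.terminate()
  }
}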
@ -43,29 +49,105 @@ export namespace Primitives {
return HexUtils.bufferToHex(msg, 62)
}
export function createDeposit(depositData?: Types.InputFor.CreateDeposit): Types.TornadoDeposit {
if (!depositData?.nullifier || !depositData?.secret)
depositData = {
export function parseNote(hexNote: string): Types.ZKDepositData {
const buffer = Buffer.from(hexNote, 'hex')
return createDeposit({
// @ts-expect-error
nullifier: bigInt.leBuff2int(buffer.subarray(0, 31)),
// @ts-expect-error
secret: bigInt.leBuff2int(buffer.subarray(31, 62))
})
}
export function createDeposit(input?: Types.InputFor.CreateDeposit): Types.ZKDepositData {
if (!input?.nullifier || !input?.secret)
input = {
nullifier: NumberUtils.randomBigInteger(31),
secret: NumberUtils.randomBigInteger(31)
}
// @ts-ignore
// @ts-expect-error
let preimage = Buffer.concat([input.nullifier.leInt2Buff(31), input.secret.leInt2Buff(31)])
let commitment = calcPedersenHash({ msg: preimage })
let commitmentHex = HexUtils.bigIntToHex(commitment)
// @ts-ignore
// @ts-expect-error
let nullifierHash = calcPedersenHash({ msg: input.nullifier.leInt2Buff(31) })
let nullifierHex = HexUtils.bigIntToHex(nullifierHash)
return {
nullifier: depositData.nullifier!,
secret: depositData.secret!,
nullifier: input.nullifier!,
secret: input.secret!,
preimage: preimage,
commitment: commitment,
commitmentHex: commitmentHex,
hexCommitment: commitmentHex,
nullifierHash: nullifierHash,
nullifierHex: nullifierHex
hexNullifierHash: nullifierHex
}
}
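// How `parseNote` and `createDeposit` fit together (a sketch with a hypothetical helper):
// the 62-byte preimage is nullifier (31 bytes LE) || secret (31 bytes LE), so a
// hex-encoded preimage round-trips back into the same deposit data.
function restoreDeposit(deposit: Types.ZKDepositData): Types.ZKDepositData {
  const hexNote = deposit.preimage.toString('hex') // 124 hex characters
  const restored = parseNote(hexNote)
  // restored.hexCommitment should equal deposit.hexCommitment if the note is intact
  return restored
}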
export function buildMerkleTree(inputs: Types.InputFor.BuildMerkleTree): MerkleTree {
return new MerkleTree(inputs.height, inputs.leaves)
}
export async function calcDepositProofs(inputs: Array<Types.InputFor.ZKProof>): Promise<Array<any>> {
const proofs: string[][] = []
const args: any[][] = []
const groth16 = await Setup.getGroth16()
const circuit = await Setup.getTornadoCircuit()
const provingKey = await Setup.getProvingKey()
for (let i = 0, len = inputs.length; i < len; i++) {
const input = inputs[i]
// Compute Merkle Proof
const { pathElements, pathIndex } = input.public.tree.path(input.public.leafIndex)
args.push([])
proofs.push([])
const proofData = await genWitnessAndProve(
groth16,
{
// Public inputs
root: input.public.root,
// @ts-ignore
nullifierHash: bigInt(input.public.hexNullifierHash),
// @ts-ignore
fee: bigInt(input.public.fee),
// @ts-ignore
refund: bigInt(input.public.refund),
// @ts-ignore
relayer: bigInt(input.public.relayerAddress),
// @ts-ignore
recipient: bigInt(input.public.recipientAddress),
// Private inputs
nullifier: input.private.nullifier,
secret: input.private.secret,
pathElements: pathElements,
pathIndices: pathIndex
},
circuit,
provingKey
)
proofs[i].push(toSolidityInput(proofData).proof)
args[i].push([
input.public.root,
input.public.hexNullifierHash,
HexUtils.prepareAddress(input.public.recipientAddress, 20),
// @ts-ignore
HexUtils.prepareAddress(input.public.relayerAddress, 20),
HexUtils.numberToHex(input.public.fee),
HexUtils.numberToHex(input.public.refund)
])
}
// Done. 🤷‍♀️
groth16.terminate()
return proofs.concat(args)
}
}
// TODO: implement, and decide whether to add an ambient namespace in the declarations and merge it here

@ -1,6 +1,7 @@
// Local types
import { TornadoInstance } from 'types/deth'
import * as Types from 'types/sdk/data'
import { RelayerProperties } from 'types/sdk/data'
import { Options } from 'types/sdk/main'
// Local logic
@ -19,6 +20,7 @@ import * as PouchDBAdapterMemory from 'pouchdb-adapter-memory'
// @ts-ignore
import { toIndexableString } from 'pouchdb-collate'
// Register plugins
PouchDB.plugin(PouchDBAdapterMemory)
@ -244,6 +246,8 @@ export namespace Constants {
}
export namespace Docs {
// TODO: Probably find some easier way for the end user to look up the docs below...
export class Base {
_id: string
_rev?: string
@ -255,9 +259,9 @@ export namespace Docs {
export class Deposit extends Base {
blockNumber: number
transactionHash: string
commitment: string
leafIndex: number
commitment: string
transactionHash: string
timestamp: string
constructor(obj: any) {
@ -267,21 +271,22 @@ export namespace Docs {
const leafIndex = obj['args']['leafIndex']
const timestamp = obj['args']['timestamp']
super(toIndexableString([blockNumber, transactionHash, commitment, leafIndex, timestamp]))
// Key by [blockNumber, leafIndex, commitment] to preserve ordering, because we will need it later
super(toIndexableString([blockNumber, leafIndex, commitment]))
this.blockNumber = blockNumber
this.transactionHash = transactionHash
this.commitment = commitment
this.blockNumber = blockNumber
this.leafIndex = leafIndex
this.transactionHash = transactionHash
this.timestamp = timestamp
}
}
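// Why the key order above matters (a note on the design): PouchDB's allDocs() returns rows
// sorted by _id, so keying by [blockNumber, leafIndex, commitment] keeps deposits in
// on-chain order, and `parseIndexableString(doc._id)` (used later in Core._findLeafIndices)
// recovers the leaf index and commitment without touching the document body.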
export class Withdrawal extends Base {
blockNumber: number
transactionHash: string
nullifierHash: string
to: string
nullifierHash: string
transactionHash: string
fee: string
constructor(obj: any) {
@ -291,12 +296,12 @@ export namespace Docs {
const nullifierHash = obj['args']['nullifierHash']
const fee = (obj['args']['fee'] as BigNumber).toString()
super(toIndexableString([blockNumber, transactionHash, nullifierHash, to, fee]))
super(toIndexableString([blockNumber, to, nullifierHash]))
this.blockNumber = blockNumber
this.transactionHash = transactionHash
this.nullifierHash = nullifierHash
this.to = to
this.nullifierHash = nullifierHash
this.transactionHash = transactionHash
this.fee = fee
}
}
@ -306,7 +311,7 @@ export namespace Docs {
note: string
constructor(index: number, pathstring: string, note: string) {
super(toIndexableString([index, pathstring, note]))
super(toIndexableString([index, pathstring]))
this.pathstring = pathstring
this.note = note
}
@ -317,11 +322,30 @@ export namespace Docs {
invoice: string
constructor(index: number, pathstring: string, invoice: string) {
super(toIndexableString([index, pathstring, invoice]))
super(toIndexableString([index, pathstring]))
this.pathstring = pathstring
this.invoice = invoice
}
}
export class Relayer extends Base {
address: string
version: string
serviceFeePercent: number
miningFeePercent: number
status: string
chainId: number
constructor(url: string, properties: RelayerProperties) {
super(toIndexableString([url]))
this.address = properties.address
this.version = properties.version
this.serviceFeePercent = properties.serviceFeePercent
this.miningFeePercent = properties.miningFeePercent
this.status = properties.status
this.chainId = properties.chainId
}
}
}
export namespace Cache {
@ -341,6 +365,12 @@ export namespace Cache {
this.db = new PouchDB<T>(Files.getCachePath(name), { adapter: dbAdapter })
}
async get(keys: Array<any>): Promise<T> {
return await this.db.get(toIndexableString(keys)).catch((err) => {
throw ErrorUtils.ensureError(err)
})
}
async close(): Promise<void> {
await this.db.close()
}
@ -392,6 +422,34 @@ export namespace Cache {
]
}
}
type DocsArray<T extends Docs.Base> = Array<{
doc?: T
id: string
key: string
value: {
rev: string
deleted?: boolean
}
}>
export async function loadContents<T extends Docs.Base>(
nameOfContent: string,
full: boolean = true,
emptyError: Error = ErrorUtils.getError(
`Cache.loadContents: there is no cache entry for ${nameOfContent}`
)
): Promise<DocsArray<T>> {
const cache = new Cache.Base<T>(Files.getCachePath(nameOfContent))
const docs = await cache.db.allDocs({ include_docs: full }).catch((err) => {
throw ErrorUtils.ensureError(err)
})
if (docs.total_rows === 0) throw emptyError
return docs.rows as DocsArray<T>
}
}
// Namespace exports

@ -2,16 +2,19 @@
import { DeepRequired } from 'ts-essentials'
// Local types
import { Relayer, Options, Transactions } from 'types/sdk/main'
import { RelayerProperties } from 'types/sdk/data'
import { Options, Transactions } from 'types/sdk/main'
import { ZKDepositData, InputFor } from 'types/sdk/crypto'
import { TornadoInstance, TornadoProxy } from 'types/deth'
// Ethers
import { Signer } from '@ethersproject/abstract-signer'
import { TransactionResponse } from '@ethersproject/abstract-provider'
// External imports
import { BigNumber, EventFilter, providers } from 'ethers'
// @ts-ignore
import { parseIndexableString } from 'pouchdb-collate'
// Important local
import { Docs, Cache, Types as DataTypes, Json } from 'lib/data'
import { Docs, Cache, Types as DataTypes, Json, Constants } from 'lib/data'
import { Primitives } from 'lib/crypto'
import { Contracts } from 'lib/chain'
@ -21,6 +24,8 @@ import { ErrorUtils } from 'lib/utils'
import { Chain } from 'lib/chain'
import { parseUnits } from 'ethers/lib/utils'
type Provider = providers.Provider
type BackupDepositDoc = {
pathstring: string
invoice?: string
@ -32,129 +37,209 @@ export class Core {
caches: Map<string, Cache.Base<Docs.Base>>
instances: Map<string, TornadoInstance>
constructor(provider: providers.Provider, signer?: Signer) {
this.chain = new Chain(provider, signer)
constructor(provider: providers.Provider) {
this.chain = new Chain(provider)
this.caches = new Map<string, Cache.Base<Docs.Base>>()
this.instances = new Map<string, TornadoInstance>()
}
connect(signer: Signer): void {
this.chain.signer = signer
connect(provider: Provider): void {
this.chain.provider = provider
}
async getInstances(
keys: Array<{ token: string; denomination: number | string }>
): Promise<Array<TornadoInstance>> {
const chainId = await this.chain.getChainId()
return await Promise.all(
return Promise.all(
keys.map((key) =>
Contracts.getInstance(
String(chainId),
key.token,
String(key.denomination),
this.chain.signer ?? this.chain.provider
)
Contracts.getInstance(String(chainId), key.token, String(key.denomination), this.chain.provider)
)
)
}
async getInstance(token: string, denomination: number | string): Promise<TornadoInstance> {
const chainId = await this.chain.getChainId()
return await Contracts.getInstance(
String(chainId),
token,
String(denomination),
this.chain.signer ?? this.chain.provider
)
return Contracts.getInstance(String(chainId), token, String(denomination), this.chain.provider)
}
async getProxy(): Promise<TornadoProxy> {
const chainId = await this.chain.getChainId()
return await Contracts.getProxy(String(chainId), this.chain.signer ?? this.chain.provider)
return Contracts.getProxy(String(chainId), this.chain.signer ?? this.chain.provider)
}
async buildWithdrawalTx(
async buildDepositProof(
instance: TornadoInstance,
withdrawOptions?: Options.Core.Withdrawal
): Promise<Transactions.Withdrawal> {
return (await this.buildWithdrawalTxs([instance], withdrawOptions))[0]
relayerProperties: RelayerProperties,
recipientAddress: string,
zkDepositsData: ZKDepositData,
options?: Options.Core.BuildDepositProof
): Promise<any> {
return (
await this.buildDepositProofs(
instance,
relayerProperties,
[recipientAddress],
[zkDepositsData],
options
)
)[0]
}
// TODO: lots of stuff
async buildWithdrawalTxs(
instances: Array<TornadoInstance>,
withdrawOptions?: Options.Core.Withdrawal
): Promise<Array<Transactions.Withdrawal>> {
for (let i = 0, nInstances = instances.length; i < nInstances; i++) {
const lookupKeys = await this.getInstanceLookupKeys(instances[i].address)
const pathstring = lookupKeys.network + lookupKeys.token + lookupKeys.denomination
const db = new Cache.Base<Docs.Deposit>('Deposits' + pathstring.toUpperCase())
/**
* @param instance This is the Tornado Instance which will be withdrawn from.
* @param relayerProperties The properties of the relayer that is going to be used for the withdrawals. These properties are included in the ZK proof.
* @param recipientAddresses The recipient addresses which should receive the withdrawals, in order.
* @param zkDepositsData The public and private values, generated while building the deposit transactions (or reconstructed from the deposit note), which are used to build the proof-of-knowledge statement for each withdrawal (in this context).
* @param options Additional options which allow the user to skip checking whether the notes are spent, or to change the target merkle tree height.
* @returns The proofs, with which the user should then decide whether to withdraw through a relayer (recommended, but choose it carefully) or through their own wallet (if needed).
*/
async buildDepositProofs(
instance: TornadoInstance,
relayerProperties: RelayerProperties,
recipientAddresses: Array<string>,
zkDepositsData: Array<ZKDepositData>,
options?: Options.Core.BuildDepositProof
): Promise<Array<any>> {
// Extract commitments and nullifier hashes
const hexCommitments: string[] = []
const hexNullifierHashes: string[] = []
const purchaseAmounts = options?.ethPurchaseAmounts ?? new Array(zkDepositsData.length)
if (zkDepositsData.length !== recipientAddresses.length)
throw ErrorUtils.getError(
'Core.buildDepositProofs: the number of recipients must equal the length of zkDepositsData.'
)
if (zkDepositsData.length !== purchaseAmounts.length)
throw ErrorUtils.getError(
'Core.buildDepositProofs: if purchase amounts are specified, their number must equal the length of zkDepositsData.'
)
zkDepositsData.forEach((deposit) => {
hexCommitments.push(deposit.hexCommitment)
hexNullifierHashes.push(deposit.hexNullifierHash)
})
// Determine cache name
const lookupKeys = await this.getInstanceLookupKeys(instance.address)
const name = 'Deposit' + (lookupKeys.network + lookupKeys.token + lookupKeys.denomination).toUpperCase()
// Find all leaf indices by reading from cache
const leafIndices = await this._findLeafIndices(name, hexCommitments)
const invalidCommitments: string[] = []
// Determine whether we will be checking whether notes are spent
const spentNotes: string[] = []
const checkSpent = options?.checkNotesSpent !== false
// If yes, immediately check it with the supplied Tornado Instance
const checkSpentArray = checkSpent ? await instance.isSpentArray(hexNullifierHashes) : null
// Check whether any commitment was not found among all deposits, meaning that it is invalid,
// and collect those invalid commitments. We can index leafIndices[i] because the matched
// leaf indices are concatenated at the start
for (let i = 0, len = zkDepositsData.length; i < len; i++) {
if (!leafIndices[i]) invalidCommitments.push(hexCommitments[i])
if (checkSpent && !checkSpentArray![i]) spentNotes.push(hexNullifierHashes[i])
}
// Placeholder
return [{ request: {} }]
}
// If something is wrong, throw
const commitmentsAreInvalid = invalidCommitments.length !== 0
const notesAreSpent = spentNotes.length !== 0
async depositInMultiple(
instances: Array<TornadoInstance>,
depositOptions?: Options.Core.Deposit
): Promise<Array<TransactionResponse>> {
if (!this.chain.signer)
throw ErrorUtils.getError('Core.depositInMultiple: need connected signer to deposit!')
const txs = await this.buildDepositTxs(instances, depositOptions)
return await Promise.all(txs.map((tx) => this.chain.signer!.sendTransaction(tx.request)))
}
if (commitmentsAreInvalid || notesAreSpent)
throw ErrorUtils.getError(
`Core.buildDepositProofs: ` +
(commitmentsAreInvalid
? `following commitments are invalid:\n\n${invalidCommitments.join('\n')}\n\n`
: '') +
(notesAreSpent
? `${commitmentsAreInvalid ? 'and ' : ''}following notes are already spent:\n\n${spentNotes.join(
'\n'
)}\n\n`
: '')
)
async depositInSingle(
instance: TornadoInstance,
depositOptions?: Omit<Options.Core.Deposit, 'depositsPerInstance'>
): Promise<TransactionResponse> {
if (!this.chain.signer)
throw ErrorUtils.getError('Core.depositInMultiple: need connected signer to deposit!')
const tx = await this.buildDepositTx(instance, depositOptions)
return await this.chain.signer!.sendTransaction(tx.request)
// Otherwise, build the merkle tree from the leaf indices
// We have to slice to get the leaf indices in order
const merkleTree = Primitives.buildMerkleTree({
height: options?.merkleTreeHeight ?? Constants.MERKLE_TREE_HEIGHT,
leaves: leafIndices.slice(zkDepositsData.length).map((leafIndex) => String(leafIndex))
})
const root: string = merkleTree.root()
// Check whether the root is valid
if (!(await instance.isKnownRoot(root)))
throw ErrorUtils.getError(
'Core.buildDepositProofs: the merkle tree created is not valid, something went wrong with syncing.'
)
// Compute proofs
const inputsForProofs: InputFor.ZKProof[] = []
for (let i = 0, len = zkDepositsData.length; i < len; i++) {
inputsForProofs.push({
public: {
root: root,
tree: merkleTree,
leafIndex: leafIndices[i],
hexNullifierHash: zkDepositsData[i].hexNullifierHash,
recipientAddress: recipientAddresses[i],
relayerAddress: relayerProperties.address,
fee: 5, // TODO: placeholder
refund: purchaseAmounts[i] ?? 0
},
private: {
nullifier: zkDepositsData[i].nullifier,
secret: zkDepositsData[i].secret
}
})
}
return await Primitives.calcDepositProofs(inputsForProofs)
}
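// A usage sketch tying the above together (names as used in this commit; the fee is still a
// placeholder, so treat this as illustrative only):
//
//   const core = new Core(provider)
//   const instance = await core.getInstance('eth', 0.1)
//   const relayerProps = await new Relayer({ url, httpClient }).fetchProperties()
//   const deposit = Primitives.parseNote(noteHex)
//   const result = await core.buildDepositProofs(instance, relayerProps, [recipient], [deposit])
//   // result is proofs.concat(args): result[0] holds the proof string, result[1] the
//   // withdrawal arguments for the single deposit above.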
async createInvoice(
instance: TornadoInstance,
invoiceOptions?: Omit<Options.Core.Invoice, 'depositsPerInstance'>
options?: Omit<Options.Core.Invoice, 'depositsPerInstance'>
): Promise<Transactions.Invoice> {
let opts: Options.Core.Invoice = invoiceOptions ?? {}
let opts: Options.Core.Invoice = options ?? {}
opts.depositsPerInstance = [1]
return (await this.createInvoices([instance], invoiceOptions))[0]
return (await this.createInvoices([instance], options))[0]
}
async createInvoices(
instances: Array<TornadoInstance>,
invoiceOptions?: Options.Core.Invoice
options?: Options.Core.Invoice
): Promise<Array<Transactions.Invoice>> {
if (!invoiceOptions) invoiceOptions = {}
if (!invoiceOptions.backup) invoiceOptions.backup = {}
invoiceOptions.backup.invoices = invoiceOptions.backup.invoices ?? true
invoiceOptions.backup.notes = invoiceOptions.backup.notes ?? true
invoiceOptions.doNotPopulate = invoiceOptions.doNotPopulate ?? true
return await this.buildDepositTxs(instances, invoiceOptions)
if (!options) options = {}
if (!options.backup) options.backup = {}
options.backup.invoices = options.backup.invoices ?? true
options.backup.notes = options.backup.notes ?? true
options.doNotPopulate = options.doNotPopulate ?? true
return this.buildDepositTxs(instances, options)
}
async buildDepositTx(
instance: TornadoInstance,
depositOptions?: Options.Core.Deposit
options?: Options.Core.Deposit
): Promise<Transactions.Deposit> {
let opts: Options.Core.Deposit = depositOptions ?? {}
let opts: Options.Core.Deposit = options ?? {}
opts.depositsPerInstance = [1]
return (await this.buildDepositTxs([instance], opts))[0]
}
async buildDepositTxs(
instances: Array<TornadoInstance>,
depositOptions?: Options.Core.Deposit
options?: Options.Core.Deposit
): Promise<Array<Transactions.Deposit>> {
const depositsPerInstance =
depositOptions?.depositsPerInstance ?? new Array<number>(instances.length).fill(1)
const depositsPerInstance = options?.depositsPerInstance ?? new Array<number>(instances.length).fill(1)
const doNotPopulate = depositOptions?.doNotPopulate ?? false
const backupNotes = depositOptions?.backup?.notes ?? true
const backupInvoices = depositOptions?.backup?.invoices ?? false
const doNotPopulate = options?.doNotPopulate ?? false
const backupNotes = options?.backup?.notes ?? true
const backupInvoices = options?.backup?.invoices ?? false
if (depositsPerInstance.length != instances.length)
throw ErrorUtils.getError(
@ -178,7 +263,7 @@ export class Core {
if (backupNotes) notesToBackup.push({ pathstring: pathstring, note: note })
if (backupInvoices) invoicesToBackup.push({ pathstring: pathstring, invoice: deposit.commitmentHex })
if (backupInvoices) invoicesToBackup.push({ pathstring: pathstring, invoice: deposit.hexCommitment })
if (!doNotPopulate) {
txs.push({
@ -186,19 +271,19 @@ export class Core {
to: proxy.address,
data: proxy.interface.encodeFunctionData('deposit', [
instances[i].address,
deposit.commitmentHex,
deposit.hexCommitment,
[]
]),
value: lookupKeys.token == 'eth' ? parseUnits(lookupKeys.denomination) : BigNumber.from(0)
},
note: pathstring + '_' + note,
invoice: pathstring + '_' + deposit.commitmentHex
invoice: pathstring + '_' + deposit.hexCommitment
})
} else
txs.push({
request: {},
note: pathstring + '_' + note,
invoice: pathstring + '_' + deposit.commitmentHex
invoice: pathstring + '_' + deposit.hexCommitment
})
}
}
@ -382,6 +467,52 @@ export class Core {
return syncOptions as DeepRequired<Options.Core.Sync>
}
/**
* @param instanceName The name of the instance as created in `_sync` function.
* @param commitments The commitments whose leaf indices should additionally be noted down separately.
* @returns The array of leaf indices matched to the provided commitments, concatenated with the array of all leaf indices (which also contains the matched values, given that they are valid). Commitments which could not be matched, and are therefore probably invalid, yield `0`.
*/
private async _findLeafIndices(instanceName: string, commitments: Array<string>): Promise<Array<number>> {
const matchedLeafIndices = new Array<number>(commitments.length).fill(0)
const leafIndices: Array<number> = []
// Either load all deposit events from memory or from cache
let cache: Cache.Base<Docs.Deposit>
if (!this.caches.has(instanceName)) {
cache = new Cache.Base<Docs.Deposit>(instanceName)
} else cache = this.caches.get(instanceName) as Cache.Base<Docs.Deposit>
const docs = await cache.db.allDocs()
// If no docs in cache throw and stop
if (docs.total_rows === 0) {
await cache.clear()
throw ErrorUtils.getError(
`Core._findLeafIndices: events for instance ${instanceName} have not been synchronized.`
)
}
// Otherwise start looking for commitment leaf indices and also pick up
// all other leaves on the way
for (const row of docs.rows) {
const [, leafIndex, loadedCommitment] = parseIndexableString(row.id)
const index = commitments.findIndex((commitment) => commitment === loadedCommitment)
// If some commitment is found then add the leaf index and remove that commitment
if (index !== -1) {
matchedLeafIndices[index] = leafIndex
commitments.splice(index, 1)
}
// In any case push every leaf
leafIndices.push(leafIndex)
}
// Concat matched and all leaf indices
return matchedLeafIndices.concat(leafIndices)
}
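// Worked example of the return shape documented above: with commitments = [C2] and cached
// leaves [(0, C0), (1, C1), (2, C2)], the result is [2].concat([0, 1, 2]) = [2, 0, 1, 2];
// the first `commitments.length` entries are the matched leaf indices (0 when unmatched),
// the rest are every leaf index in cache order.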
async getInstanceLookupKeys(instanceAddress: string): Promise<DataTypes.Keys.InstanceLookup> {
// lookup some stuff first
const lookupObj: { [key: string]: string } = Json.getValue(await Json.load('onchain/quickLookup.json'), [
@ -403,4 +534,4 @@ export class Core {
}
}
export { Relayer, Transactions, Options }
export { Transactions, Options }

@ -200,6 +200,10 @@ export namespace HexUtils {
// @ts-ignore
return '0x' + number.toString(16).padStart(2 * byteLen, '0')
}
export function prepareAddress(address: string, bytelen: number = 32): string {
return (address.slice(0, 2) == '0x' ? address.slice(2) : address).toLowerCase().padStart(bytelen * 2, '0')
}
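// Worked examples: prepareAddress('0xABCDEF', 3) -> 'abcdef' (0x stripped, lowercased,
// already 3 bytes wide), while prepareAddress('0xABCDEF', 4) -> '00abcdef' (left-padded
// to 4 bytes); with the default bytelen of 32 the value is padded to 64 hex characters.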
}
export namespace ObjectUtils {

@ -1,5 +1,13 @@
import axios from 'axios'
import { AxiosInstance } from 'axios'
import { SocksProxyAgent } from 'socks-proxy-agent'
import { Web3Provider, Networkish } from '@ethersproject/providers'
import { RelayerOptions } from 'types/sdk/web'
import { BigNumber } from 'ethers'
import { ErrorUtils } from './utils'
import { Cache, Docs } from './data'
import { RelayerProperties } from 'types/sdk/data'
// It seems that the default HttpProvider offered by the normal web3 package
// has some logic which either ignores the SocksProxyAgent or which falls back to
@ -13,20 +21,210 @@ export interface TorOptions {
headers?: { name: string; value: string }[]
}
/**
* You can also set up a SOCKS5 I2P tunnel on some port and then use that instead, meaning that this should be compatible with I2P as well.
*/
export class TorProvider extends Web3Provider {
constructor(url: string, torOpts: TorOptions, network?: Networkish) {
const torPort = torOpts.port ?? 9050,
headers = torOpts.headers ?? [
{ name: 'User-Agent', value: 'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0' }
{ name: 'User-Agent', value: 'Mozilla/5.0 (Windows NT 10.0; rv:102.0) Gecko/20100101 Firefox/102.0' }
]
super(
new HttpProvider(url, {
agent: { https: new SocksProxyAgent('socks5h://127.0.0.1:' + torPort) }
// Don't want to set for some reason, need to override somehow
// headers: headers
// The h after socks5 means that DNS resolution is also done through Tor
agent: { https: new SocksProxyAgent('socks5h://127.0.0.1:' + torPort) },
// The XHR2 XMLHttpRequest assigns a Tor Browser header by itself.
// But if we are in a browser, we assign it ourselves just in case.
headers: typeof window !== 'undefined' ? headers : undefined
}),
network
)
}
}
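// A connection sketch (hypothetical helper; TOR_PORT as in the .env template of this
// commit, 9050 for the Tor daemon and 9150 for Tor Browser):
async function torBlockNumber(rpcUrl: string, torPort = 9050): Promise<number> {
  const provider = new TorProvider(rpcUrl, { port: torPort })
  // DNS resolution also happens over Tor thanks to the socks5h scheme used above.
  return provider.getBlockNumber()
}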
// @ts-ignore
export const TorHttpClient: new (opts?: {
port?: number
headers?: { [key: string]: string }
rv?: string
}) => AxiosInstance = function (opts?: { port?: number; headers?: { [key: string]: string }; rv?: string }) {
const rv = opts?.rv ?? '102.0'
return axios.create({
headers: opts?.headers ?? {
'User-Agent': `Mozilla/5.0 (Windows NT 10.0; rv:${rv}) Gecko/20100101 Firefox/${rv}`
},
httpsAgent: new SocksProxyAgent('socks5h://127.0.0.1:' + (opts?.port ?? 9050)),
httpAgent: new SocksProxyAgent('socks5h://127.0.0.1:' + (opts?.port ?? 9050)),
// 2 minute timeout
timeout: 120000
})
}
// @ts-ignore
export const RegularHttpClient: new (opts?: any) => AxiosInstance = function (opts: any) {
return axios.create(opts)
}
export class Relayer {
url: string
httpClient: AxiosInstance
private _fetched: boolean
private _address?: string
private _version?: string
private _serviceFee?: number
private _miningFee?: number
private _status?: string
private _chainId?: number
constructor(options: RelayerOptions, properties?: RelayerProperties) {
this.url = options.url
this.httpClient = options.httpClient
this._fetched = false
if (properties) {
this._address = properties.address
this._version = properties.version
this._chainId = properties.chainId
this._serviceFee = properties.serviceFeePercent
this._miningFee = properties.miningFeePercent
this._status = properties.status
this._fetched = true
}
}
// Setup
/**
* This function MUST be called to unlock the rest of the `Relayer` class functionality, as otherwise we don't have the property data necessary for all the logic we want.
* @returns Fetched `RelayerProperties`.
*/
async fetchProperties(): Promise<RelayerProperties> {
const properties = await this.httpClient
.get(this.url + '/status')
.catch((err) => {
throw ErrorUtils.ensureError(err)
})
.then((res) => res.data)
if (Object.entries(properties).length === 0)
throw ErrorUtils.getError(
'Relayer.fetchProperties: Something went wrong with fetching properties from relayer endpoint.'
)
this._address = properties['rewardAccount']
this._version = properties['version']
this._chainId = properties['netId']
this._serviceFee = properties['tornadoServiceFee']
this._miningFee = properties['miningFee']
this._status = properties['health']['status']
this._fetched = true
return {
address: this._address!,
version: this._version!,
chainId: this._chainId!,
serviceFeePercent: this._serviceFee!,
miningFeePercent: this._miningFee!,
status: this._status!
}
}
private _propertiesFetched(parentCallName: string): void {
if (!this._fetched)
throw ErrorUtils.getError(
`Relayer.${parentCallName}: properties must be fetched first with \`fetchProperties\`.`
)
}
// Getters
get address(): string {
this._propertiesFetched('address')
return this._address!
}
get version(): string {
this._propertiesFetched('version')
return this._version!
}
get serviceFeePercent(): number {
this._propertiesFetched('serviceFeePercent')
return this._serviceFee!
}
get miningFeePercent(): number {
this._propertiesFetched('miningFeePercent')
return this._miningFee!
}
get status(): string {
this._propertiesFetched('status')
return this._status!
}
get chainId(): number {
this._propertiesFetched('chainId')
return this._chainId!
}
async getETHPurchasePrice(token: string): Promise<BigNumber> {
return BigNumber.from(
await this.httpClient
.get(this.url + '/status')
.catch((err) => {
throw ErrorUtils.ensureError(err)
})
.then((res) => res.data.prices[token])
)
}
// TODO: Relaying stuff and related
async relay(): Promise<any> {}
async calcWithdrawalFee(token: string, denomination: number): Promise<BigNumber> {
//placeholder
return BigNumber.from(0)
}
// Cache
/**
* Construct a new Relayer by reading relayer data from cache.
*/
static async fromCache(options: RelayerOptions): Promise<Relayer> {
const cache = new Cache.Base<Docs.Relayer>('Relayers')
// Error is ensured already
const properties = await cache.get([options.url]).catch(() => {
throw ErrorUtils.getError(`Relayer.fromCache: relayer ${options.url} isn't stored in cache.`)
})
return new Relayer(options, properties)
}
/**
* Cache relayer data into a PouchDB database in your cache folder. This will automatically fetch properties if they are not fetched.
*/
async remember(): Promise<void> {
if (!this._fetched) await this.fetchProperties()
const cache = new Cache.Base<Docs.Relayer>('Relayers')
const doc = new Docs.Relayer(this.url, {
address: this._address!,
version: this._version!,
chainId: this._chainId!,
serviceFeePercent: this._serviceFee!,
miningFeePercent: this._miningFee!,
status: this._status!
})
await cache.db.put(doc).catch((err) => {
throw ErrorUtils.ensureError(err)
})
await cache.close().catch((err) => {
throw ErrorUtils.ensureError(err)
})
}
}
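// A usage sketch for the class above (hypothetical helper; relay() and fee calculation are
// still TODO, so this only exercises the parts implemented in this commit):
async function rememberRelayer(url: string, torPort = 9050): Promise<Relayer> {
  const httpClient = new TorHttpClient({ port: torPort })
  const relayer = new Relayer({ url, httpClient })
  await relayer.fetchProperties() // unlocks the getters below
  console.log(relayer.chainId, relayer.serviceFeePercent, relayer.status)
  await relayer.remember() // persist into the 'Relayers' PouchDB cache
  return Relayer.fromCache({ url, httpClient }) // later: rebuild without refetching
}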

@ -20,8 +20,8 @@ describe('crypto', () => {
expect(deposit.secret).to.exist
expect(deposit.preimage).to.exist
expect(deposit.commitment).to.exist
expect(deposit.commitmentHex).to.exist
expect(deposit.nullifierHash).to.exist
expect(deposit.hexCommitment).to.exist
expect(deposit.hexNullifierHash).to.exist
// From the whitepaper, the nullifier k ∈ B^248
expect(BigNumber.from(deposit.nullifier.toString())).to.be.lte(limit)

@ -11,12 +11,15 @@ import { Core } from 'lib/main'
import { Chain, Contracts } from 'lib/chain'
import { Files, OnchainData } from 'lib/data'
import { ErrorUtils } from 'lib/utils'
import { TorProvider } from 'lib/web'
chai.use(solidity)
const expect = chai.expect
describe('main', () => {
const torify = process.env.TORIFY === 'true'
if (!process.env.ETH_MAINNET_TEST_RPC) throw ErrorUtils.getError('need a mainnet rpc endpoint.')
console.log('\nNote that these tests are time intensive. ⏳. ⏳.. ⏳...\n')
@ -27,7 +30,10 @@ describe('main', () => {
let daiData: Json.TokenData
const daiWhale = '0x5777d92f208679db4b9778590fa3cab3ac9e2168' // Uniswap V3 Something/Dai Pool
const mainnetProvider = new providers.JsonRpcProvider(process.env.ETH_MAINNET_TEST_RPC)
const mainnetProvider = torify
? new TorProvider(process.env.ETH_MAINNET_TEST_RPC, { port: +process.env.TOR_PORT! })
: new providers.JsonRpcProvider(process.env.ETH_MAINNET_TEST_RPC)
const ganacheProvider = new providers.Web3Provider(
// @ts-ignore
ganache.provider({
@ -60,7 +66,7 @@ describe('main', () => {
})
describe('class Classic', () => {
it.only('sync: should be able to fetch a couple events', async () => {
it('sync: should be able to fetch a couple events', async () => {
const core = new Core(mainnetProvider)
const instance = await Contracts.getInstance(String(1), 'eth', String(0.1), mainnetProvider)
const targetBlock = 16928712

@ -1,25 +1,58 @@
import chai from 'chai'
import { TorProvider } from 'lib/web'
import { TorHttpClient, TorProvider } from 'lib/web'
// Waffle matchers
import { solidity } from 'ethereum-waffle'
import { ErrorUtils } from 'lib/utils'
import { parseUnits } from 'ethers/lib/utils'
chai.use(solidity)
const expect = chai.expect
describe.skip('web', () => {
describe('web', () => {
if (!process.env.ETH_MAINNET_TEST_RPC || !process.env.TOR_PORT)
throw ErrorUtils.getError('need a tor port and mainnet rpc endpoint.')
const torProvider = new TorProvider(process.env.ETH_MAINNET_TEST_RPC, { port: +process.env.TOR_PORT })
const httpClient = new TorHttpClient({ port: +process.env.TOR_PORT })
// TODO: Make these tests better and either auto-detect proxy or spin up tor
console.log(
'\nSome Tor tips: Support non-profit exit node operators, host your own nodes, avoid spy nodes by configuring torrc.\n'
)
it.skip('CONNECTED: Should be able to request over Tor', async () => {
console.log(await torProvider.getBlockNumber())
function torErrorThrow(err: Error) {
err.message =
"\n\nThis test most likely failed because Tor isn't exposing a SOCKS5 tunnel at either 9050 or the Tor port you specified in .env, so the provider couldn't send a request. Please start Tor or Tor Browser. 🧅\n\n"
throw err
}
it('httpClient: Should be able to send requests over Tor', async function () {
try {
const check = (await httpClient.get('https://check.torproject.org/api/ip')).data
expect(check.IsTor).to.be.true
console.log(
`\n🧅 check.torproject.org/api/ip says...\n\nWe are using Tor: ${check.IsTor ? '✅' : '❌'}`
)
console.log(`Our IP is: ${check.IP}\n`)
} catch (err) {
torErrorThrow(ErrorUtils.ensureError(err))
}
}).timeout(0)
it.only('TorProvider: Should be able to fetch some basic blockchain data over Tor', async () => {
try {
console.log('\nBlock Number: ' + (await torProvider.getBlockNumber()))
console.log('Gas Price: ' + (await torProvider.getGasPrice()).div(1000000000) + ' gwei')
console.log(
'Zero address ETH burned: ' +
(await torProvider.getBalance('0x0000000000000000000000000000000000000000')).div(parseUnits('1')) +
'\n'
)
} catch (err) {
torErrorThrow(ErrorUtils.ensureError(err))
}
}).timeout(0)
it.skip('DISCONNECTED: Should not be able to request over Tor', async function () {

@ -1,5 +1,6 @@
// Interfaces for the cryptographic primitives.
import { MerkleTree } from 'fixed-merkle-tree'
import { bigInt } from 'snarkjs'
export type bigInt = typeof bigInt
@ -15,20 +16,15 @@ export namespace OutputOf {
export type PedersenHash = bigInt
export interface CreateDeposit {
// This is really some type of number but since it was written in javascript,
// the entire thing translates absolutely horribly into Typescript. It pushed me over the type-border.
nullifier: bigInt
secret: bigInt
preimage: Buffer
commitment: PedersenHash
commitmentHex: string
hexCommitment: string
nullifierHash: PedersenHash
nullifierHex: string
hexNullifierHash: string
}
export interface MerkleTree {}
// TODO: Type these
export interface MerkleProof {
root: any
path: {
@ -37,7 +33,7 @@ export namespace OutputOf {
}
}
export interface DepositProof {
export interface Groth16Proof {
pi_a: Array<string>
pi_b: Array<string>
pi_c: Array<string>
@ -52,10 +48,8 @@ export namespace OutputOf {
*/
type __OutputAliasDelimiter = null
export type MerkleTree = OutputOf.MerkleTree
export type MerkleProof = OutputOf.MerkleProof
export type Groth16Proof = OutputOf.DepositProof
export type ZKProof = OutputOf.DepositProof
export type ZKProof = OutputOf.Groth16Proof
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INPUTS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
@ -75,11 +69,28 @@ export namespace InputFor {
secret?: bigInt
}
export interface MerkleTree {}
export interface BuildMerkleTree {
height: number
leaves: Array<string>
}
export type MerkleProof = MerkleTree
export interface ZKProof {
public: {
root: string
tree: MerkleTree
leafIndex: number
hexNullifierHash: string
recipientAddress: string
relayerAddress: string
fee: number
refund: number
}
private: {
nullifier: bigInt
secret: bigInt
}
}
// TODO: Type these
interface PublicGroth16 {
root: any
nullifierHash: PedersenHash
@ -92,18 +103,11 @@ export namespace InputFor {
interface PrivateGroth16 {
nullifier: bigInt
secret: bigInt
pathIndices: any
pathIndices: number[]
pathElements: any[]
}
export type Groth16 = PublicGroth16 & PrivateGroth16
export interface DepositProof {
merkleProof: OutputOf.MerkleProof
groth16: any
inputs: Groth16
provingKey: ArrayBufferLike
}
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INPUT ALIASES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@ -114,4 +118,4 @@ export namespace InputFor {
*/
type __InputAliasDelimiter = null
export type TornadoDeposit = OutputOf.CreateDeposit
export type ZKDepositData = OutputOf.CreateDeposit

@ -73,3 +73,12 @@ export namespace Keys {
denomination: string
}
}
export interface RelayerProperties {
address: string
version: string
serviceFeePercent: number
miningFeePercent: number
status: string
chainId: number
}

@ -6,15 +6,8 @@
// instead of having to specify exactly what type he is constructing.
import { TransactionRequest } from '@ethersproject/abstract-provider'
import { BigNumber } from 'ethers'
export interface Relayer {
url: string
handleWithdrawal(withdrawalData: any): Promise<any>
calcWithdrawalFee(token: string, denomination: number): Promise<BigNumber>
getServiceFee(): Promise<number>
}
import { RelayerProperties as RelayerDataProperties } from 'types/sdk/data'
export namespace Options {
export namespace Cache {
@ -54,8 +47,10 @@ export namespace Options {
export type Invoice = Deposit
export interface Withdrawal {
withdrawalsPerInstance?: Array<number>
export interface BuildDepositProof {
ethPurchaseAmounts?: Array<BigNumber>
merkleTreeHeight?: number
checkNotesSpent?: boolean
}
}
}
@ -67,8 +62,4 @@ export namespace Transactions {
note?: string
}
export type Invoice = Deposit
export interface Withdrawal {
request: TransactionRequest
}
}

@ -0,0 +1,7 @@
import { AxiosInstance } from 'axios'
export interface RelayerOptions {
url: string
address?: string
httpClient: AxiosInstance
}

@ -3,9 +3,9 @@
"@babel/code-frame@^7.0.0":
version "7.18.6"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a"
integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==
version "7.21.4"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.21.4.tgz#d0fa9e4413aca81f2b23b9442797bda1826edb39"
integrity sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g==
dependencies:
"@babel/highlight" "^7.18.6"
@ -896,9 +896,9 @@
form-data "^3.0.0"
"@types/node@*":
version "18.15.6"
resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.6.tgz#af98ef4a36e7ac5f2d03040f3109fcce972bf6cb"
integrity sha512-YErOafCZpK4g+Rp3Q/PBgZNAsWKGunQTm9FA3/Pbcm0VCriTEzcrutQ/SxSc0rytAp0NoFWue669jmKhEtd0sA==
version "18.15.11"
resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.11.tgz#b3b790f09cb1696cffcec605de025b088fa4225f"
integrity sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==
"@types/node@11.11.6":
version "11.11.6"
@ -1908,9 +1908,9 @@ cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3:
safe-buffer "^5.0.1"
"circomlib@npm:@urk1122/ks82ls0dn":
version "0.0.20-p5"
resolved "https://registry.yarnpkg.com/@urk1122/ks82ls0dn/-/ks82ls0dn-0.0.20-p5.tgz#7e912513066b9d6d149c07ee00e4c19a54f49609"
integrity sha512-NdXMGf0yn2mjR0zY+maEPQwrdshjIpIQe9bNnLyld5qEgDyq38TbSmZ8MBgd3oPJ+yc1f1mjvD9RemqUt4+8GQ==
version "0.0.20-p6"
resolved "https://registry.yarnpkg.com/@urk1122/ks82ls0dn/-/ks82ls0dn-0.0.20-p6.tgz#d4e712694ef610ec41f3f89160a982176066122a"
integrity sha512-nAoJeTwsGrxCESnaXAU5bPn67FALVKHFiBUU7pXTYelxToDJjCoIuJcTPAAjL7OnywaTD9RRqSU6XakVa2X2yg==
dependencies:
blake-hash "^1.1.0"
blake2b "^2.1.3"
@ -3229,9 +3229,9 @@ find-up@^4.1.0:
path-exists "^4.0.0"
"fixed-merkle-tree@npm:@urk1122/s20lwm24m":
version "0.6.1-p12"
resolved "https://registry.yarnpkg.com/@urk1122/s20lwm24m/-/s20lwm24m-0.6.1-p12.tgz#79c03121b6edf4a9a6ccd07f38a325781b694732"
integrity sha512-1/BYMszCP3hoMWSEEoIpEu71OhSbtAIaeCkvWzA6AAAqBytkHDRGlzICeRbcMEr0AypCdEcu2wOt9jfXyvQXZA==
version "0.6.1-p13"
resolved "https://registry.yarnpkg.com/@urk1122/s20lwm24m/-/s20lwm24m-0.6.1-p13.tgz#78d1a54c457bcd3e11c1cc276cac72d5a35db4c8"
integrity sha512-VL+yvhjqNtcg5jdbKLxS+AwYifY0oav/EL6HZq0YWCq15jMrEbHbqAnJKERTsyjAZ6lDSf6nYICQygeOzRmr1Q==
dependencies:
circomlib "npm:@urk1122/ks82ls0dn"
snarkjs "npm:@urk1122/ske92jfn2jr"
@ -4789,9 +4789,9 @@ mkdirp-promise@^5.0.1:
mkdirp "*"
mkdirp@*:
version "2.1.6"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-2.1.6.tgz#964fbcb12b2d8c5d6fbc62a963ac95a273e2cc19"
integrity sha512-+hEnITedc8LAtIP9u3HJDFIdcLV2vXP33sqLLIzkv1Db1zO/1OxbvYf0Y1OC/S/Qo5dxHXepofhmxL02PsKe+A==
version "3.0.0"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.0.tgz#758101231418bda24435c0888a91d9bd91f1372d"
integrity sha512-7+JDnNsyCvZXoUJdkMR0oUE2AmAdsNXGTmRbiOjYIwQ6q+bL6NwrozGQdPcmYaNcrhH37F50HHBUzoaBV6FITQ==
mkdirp@^0.5.1, mkdirp@^0.5.5:
version "0.5.6"
@ -6143,9 +6143,9 @@ smart-buffer@^4.2.0:
integrity sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==
"snarkjs@npm:@urk1122/ske92jfn2jr":
version "0.1.20-p6"
resolved "https://registry.yarnpkg.com/@urk1122/ske92jfn2jr/-/ske92jfn2jr-0.1.20-p6.tgz#8204547bdc6b8bf065ff9e91f6009abf6acf4569"
integrity sha512-gshi4hosxXUJGCHeNnjhD/Q2KZpeanYiv0pwbtTl4YdsPiWq3J443b8S2x7Umiv/NxHBhclW+Y8Y3ecXOFk92Q==
version "0.1.20-p7"
resolved "https://registry.yarnpkg.com/@urk1122/ske92jfn2jr/-/ske92jfn2jr-0.1.20-p7.tgz#6623c8d50923b38d41d70d652527647dfec9a252"
integrity sha512-KPAiX9Tmh9Y/M1AYPSsVosrEJdM/hcVk/yII9wjsbRtqg4zKz4DMBl02nN3xYr0JS50CMA34G+7GOH4iMBWH2Q==
dependencies:
big-integer "^1.6.43"
chai "^4.2.0"
@ -7195,9 +7195,9 @@ webidl-conversions@^3.0.0:
integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==
"websnark@npm:@urk1122/ls02kr83j":
version "0.0.4-p9"
resolved "https://registry.yarnpkg.com/@urk1122/ls02kr83j/-/ls02kr83j-0.0.4-p9.tgz#8c05e9c94765272dcb66bd4af67bb72db99627cb"
integrity sha512-bwd/OWYw0mC3SxpLAym3xpXrUv1hcDVACoNMnH+V5BUtuz2CMDm6XPJXtQRTNm/GMGPB2Y6KGF7NKmTFVv1OFw==
version "0.0.4-p10"
resolved "https://registry.yarnpkg.com/@urk1122/ls02kr83j/-/ls02kr83j-0.0.4-p10.tgz#f87088910548606666aa2706bb12ce4efe07b194"
integrity sha512-G7t0KAV387LAHrj6/Ugg/88Dt1P1JFTf+18s2cmlFjQ7ducZQFcCoXiVzyVdd663VMK0Po2jLQsMgOhYotxcsA==
dependencies:
big-integer "1.6.42"
snarkjs "npm:@urk1122/ske92jfn2jr"