import chai from 'chai'
import * as ganache from 'ganache'

// External
import { solidity } from 'ethereum-waffle'
import { providers, BigNumber } from 'ethers'
import { parseUnits } from 'ethers/lib/utils'
// @ts-expect-error
import { parseIndexableString } from 'pouchdb-collate'

// Local
import { ERC20, TornadoInstance } from './deth'
import { Files, Onchain, RelayerProperties } from '@tornado/sdk-data'
import { Chain, Contracts } from '@tornado/sdk-chain'
import { ErrorUtils } from '@tornado/sdk-utils'
import { TorProvider } from '@tornado/sdk-web'
import { Core } from '@tornado/sdk-core'

// Data
import eth01DepositsReference from './resources/deposits_eth_0.1.json'
import eth1DepositsReference from './resources/deposits_eth_1.json'
import eth10DepositsReference from './resources/deposits_eth_10.json'
import eth100DepositsReference from './resources/deposits_eth_100.json'
import dai100KDepositsReference from './resources/deposits_dai_100000.json'

chai.use(solidity)

const expect = chai.expect

describe('Core', () => {
  const torify = process.env.TORIFY === 'true'
  const debug = process.env.DEBUG === 'true'

  if (!process.env.ETH_MAINNET_TEST_RPC) throw ErrorUtils.getError('need a mainnet rpc endpoint.')

  console.log('\nNote that these tests are time intensive. ⏳. ⏳.. ⏳...\n')
  console.log(
    'Also, we are using ganache because we just need a forked blockchain and not an entire environment. 🐧'
  )

  let daiAddress: string
  const daiWhale = '0x5777d92f208679db4b9778590fa3cab3ac9e2168' // Uniswap V3 DAI/USDC pool

  const mainnetProvider: providers.Provider = torify
    ? new TorProvider(process.env.ETH_MAINNET_TEST_RPC!, { port: +process.env.TOR_PORT! })
    : new providers.JsonRpcProvider(process.env.ETH_MAINNET_TEST_RPC)
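
  // Forked mainnet environment: ganache serves any state it does not have locally from the
  // ETH_MAINNET_TEST_RPC fork, reports chainId 1 so mainnet contract addresses resolve, and
  // unlocks the DAI whale so its tokens can be moved without its private key.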
  const _ganacheProvider = ganache.provider({
    chain: { chainId: 1 },
    // @ts-ignore
    fork: { url: process.env.ETH_MAINNET_TEST_RPC },
    logging: { quiet: true },
    wallet: {
      totalAccounts: 20,
      unlockedAccounts: [daiWhale]
    }
  })

  // @ts-expect-error
  const ganacheProvider = new providers.Web3Provider(_ganacheProvider)

  const chain = new Chain(ganacheProvider)

  it('Should print cache path to console', async () => {
    console.log(await Files.getCachePath('anything'))
  })

  after(async function () {
    this.timeout(0)
    await Files.wipeCache()
  })

  describe('namespace Contracts', () => {
    it('getClassicInstance: should be able to get a tornado instance', async () => {
      let instance = await Contracts.getInstance(String(1), 'eth', String(1), mainnetProvider)
      expect(instance.address).to.equal('0x47CE0C6eD5B0Ce3d3A51fdb1C52DC66a7c3c2936')
      await expect(instance.getLastRoot()).to.not.be.reverted
    }).timeout(0)
  })

  context('Unforked', () => {
    describe('class Classic', () => {
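      // SYNC_TEST_INSTANCES is a comma-separated list of "<chainId><TOKEN><denomination>"
      // identifiers, e.g. SYNC_TEST_INSTANCES=1ETH0.1,1DAI100000.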
      if (!process.env.SYNC_TEST_INSTANCES)
        throw ErrorUtils.getError('SYNC_TEST_INSTANCES is required for sync tests.')

      const denominations = process.env.SYNC_TEST_INSTANCES.split(',')

      if (!denominations.length) throw ErrorUtils.getError('Instances entered were INVALID')
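
      // Known-good deposit events for each supported instance, keyed by the same
      // "<chainId><TOKEN><denomination>" identifiers; synced caches are compared against these.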
      const depositReferences: { [key: string]: typeof eth01DepositsReference } = {
        '1ETH0.1': eth01DepositsReference,
        '1ETH1': eth1DepositsReference,
        '1ETH10': eth10DepositsReference,
        '1ETH100': eth100DepositsReference,
        '1DAI100000': dai100KDepositsReference
      }

      const core = new Core(mainnetProvider)

      let instances: TornadoInstance[] = []

      let logListener = function (...args: any[]) {
        if (args.length === 3) {
          console.debug(`\nSync will be started with SB: ${args[0]}, TB: ${args[1]}, BD: ${args[2]}\n`)
        } else if (args.length == 2) {
          console.debug(`Syncing from block ${args[0]} to ${args[1]}`)
        }
      }

      before(async function () {
        this.timeout(0)
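
        // Each identifier is split into its parts, e.g. '1DAI100000'.match(regexp) yields
        // ['1DAI100000', '1', 'DAI', '100000']; slice(2) keeps [token, denomination].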
        const regexp = /([0-9]+)([A-Z]+)([0-9.]+)/

        const promises = denominations.map((denom) => {
          const matches = denom.match(regexp)!.slice(2)
          return core.getInstance(matches[0].toLowerCase(), +matches[1])
        })

        ;(await Promise.all(promises)).forEach((instance) => instances.push(instance))

        if (debug) core.on('debug', logListener)
      })

      after(async function () {
        this.timeout(0)
        if (debug) core.off('debug', logListener)
      })

      it('Should sync all instances.', async function () {
        for (let i = 0; i < instances.length; i++) {
          console.log('\n ♻️ Syncing ' + denominations[i] + '\n')

          // This is going to try syncing the entire range
          await core.syncDeposits(instances[i], {
            blockDivisor: 50,
            concurrencyLimit: 20,
            msTimeout: 300
          })

          const cache = core.caches.get('Deposits' + denominations[i])
          const rows = (await cache!.db.allDocs()).rows
          const valid = Object.values(depositReferences[denominations[i]])

          expect(rows.length).to.be.gte(valid.length)
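
          // Each doc id is a pouchdb-collate indexable string encoding
          // [blockNumber, leafIndex, commitment]; since allDocs sorts by id, rows should come
          // back in deposit order and can be compared index-by-index with the reference events.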
          for (let i = 0, len = valid.length; i < len; i++) {
            const id = rows[i].id
            const [bn, leafIndex, commitment] = parseIndexableString(id)
            const validDoc = valid[i]
            expect(bn).to.equal(validDoc['blockNumber'])
            expect(leafIndex).to.equal(validDoc['leafIndex'])
            expect(commitment).to.equal(validDoc['commitment'])
          }
        }
      }).timeout(0)
    })
  })

  describe('Forked (Ganache)', () => {
    describe('class Classic', () => {
      // Init sync objects
      const core = new Core(ganacheProvider)
      const needsMoney = ganacheProvider.getSigner()
      const daiWhaleSigner = ganacheProvider.getSigner(daiWhale)
      const debugListener = (message: string) => console.debug(message)

      let snapshotId: any
      let needsMoneyAddress: string
      let dai: ERC20
      let smallestEth: TornadoInstance
      let dai100K: TornadoInstance

      before(async function () {
        this.timeout(0)
        // Get snapshot just in case
        snapshotId = await ganacheProvider.send('evm_snapshot', [])

        // Prep whale eth balance
        await ganacheProvider.send('evm_setAccountBalance', [daiWhale, parseUnits('10').toHexString()])

        // Init async objects
        needsMoneyAddress = await needsMoney.getAddress()
        daiAddress = await Onchain.getTokenAddress('1', 'dai')
        dai = chain.getTokenContract(daiAddress).connect(daiWhaleSigner)
        smallestEth = await core.getInstance('eth', 0.1)
        dai100K = await core.getInstance('dai', 100000)

        // Set debug
        if (debug) core.on('debug', debugListener)
      })
      after(async function () {
        this.timeout(0)
        await ganacheProvider.send('evm_revert', [snapshotId])
        core.off('debug', debugListener)
      })
      afterEach(() => {
        dai = dai.connect(daiWhaleSigner)
      })

      it('buildDepositTransaction: build a single eth deposit tx and succeed', async () => {
        const initBal = await needsMoney.getBalance()

        // Build tx and load cache for this test
        const tx = await core.buildDepositTransaction(smallestEth)
        const cache = core.loadDepositCache('Deposits1ETH0.1')

        // Prep promise to only try withdrawing after cache has been updated
        const putPromise = new Promise((resolve) => {
          smallestEth.on(
            smallestEth.filters.Deposit(null, null, null),
            function (commitment, leafIndex, timestamp, event) {
              resolve(cache.db.put(cache.buildDoc(event)))
            }
          )
        })
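
        // putPromise resolves only after the Deposit event fires and the cache write completes,
        // so the test cannot finish before the deposit is cached. The handler is captured below
        // so it can be detached with .off() afterwards.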
        const listener = smallestEth.listeners(smallestEth.filters.Deposit(null, null, null))[0]

        // Deposit and await cache updated
        const response = await needsMoney.sendTransaction(tx.request)
        await response.wait()
        const endBal = await needsMoney.getBalance()

        // Passing resolve as callback into put didn't work
        await putPromise

        // Turn off listener (NEEDED OR WE'RE NOT RESOLVING)
        smallestEth.off(smallestEth.filters.Deposit(null, null, null), listener)

        // Check deposit predicates
        expect(initBal).to.equal(parseUnits('1000'))
        expect(endBal).to.be.lte(parseUnits('999.9'))
      }).timeout(0)

      it('buildDepositProof: it should be able to build an eth proof', async () => {
        // Get withdrawer, load cache, prep note for this test
        const withdrawer = ganacheProvider.getSigner(2)
        const cache = core.loadDepositCache('Deposits1ETH0.1')
        // We need this to clean the cache, we want to have clean state
        const doc = (await cache.db.allDocs({ include_docs: true, descending: true, limit: 1 })).rows[0].doc
        // We are not transforming because we want to test this out
        const notes = await core.loadNotes()

        // Build proof
        let proof: any

        try {
          proof = await core.buildDepositProof(
            smallestEth,
            {
              address: await withdrawer.getAddress()
            },
            await needsMoney.getAddress(),
            notes[0],
            {
              // On by default but stating for visibility
              checkNotesSpent: true,
              checkKnownRoot: true
            }
          )
        } finally {
          await cache.db.remove(doc?._id!, doc?._rev!)
        }

        // Subtract the calculated fee from the received amount
        const ethDelta = parseUnits('0.1').sub(proof[5])

        // Withdrawal time, let's see if it works
        // The balance diff will be exact because withdrawer is paying for gas as relayer
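        // The proof array appears to line up positionally with withdraw()'s arguments:
        // proof data, root, nullifier hash, recipient, relayer, fee, refund.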
        await expect(() =>
          smallestEth
            .connect(withdrawer)
            .withdraw(proof[0], proof[1], proof[2], proof[3], proof[4], proof[5], proof[6])
        ).to.changeEtherBalance(needsMoney, ethDelta)
      }).timeout(0)

      it('buildDepositTransaction: build a single token deposit tx and succeed', async () => {
        // Prep deposit amount, proxy for approval, cache, bal for comp
        const depositAmount = parseUnits('100000')
        const proxy = await core.getProxy()
        const cache = core.loadDepositCache('Deposits1DAI100000')
        const daiBalBef = await dai.balanceOf(dai100K.address)

        // Prep promise to only try withdrawing after cache has been updated
        const putPromise = new Promise((resolve) => {
          dai100K.on(
            dai100K.filters.Deposit(null, null, null),
            function (commitment, leafIndex, timestamp, event) {
              resolve(cache.db.put(cache.buildDoc(event)))
            }
          )
        })

        const listener = dai100K.listeners()[0]

        // Prep for deposit
        await dai.transfer(needsMoneyAddress, depositAmount)
        dai = dai.connect(needsMoney)
        const tx = await core.buildDepositTransaction(dai100K)
        // Approve dai for the proxy first (transferFrom)
        await dai.approve(proxy.address, depositAmount)

        // Deposit
        const response = await needsMoney.sendTransaction(tx.request)
        await response.wait()

        // Prep for check
        const daiBalPost = await dai.balanceOf(dai100K.address)

        // Passing resolve as callback into put didn't work
        await putPromise

        // Off (otherwise no resolve)
        dai100K.off(dai100K.filters.Deposit(null, null, null), listener)

        // Checks
        expect(daiBalBef).to.equal(daiBalPost.sub(depositAmount))
        expect(await dai.balanceOf(needsMoneyAddress)).to.equal(0)
      }).timeout(0)

      it('buildDepositProof: it should be able to build a token proof', async () => {
        if (!process.env.TEST_RELAYER_DOMAIN) throw ErrorUtils.getError('core.test.ts: Need a relayer name')

        // Get withdrawer, load cache, prep note for this test
        const withdrawer = ganacheProvider.getSigner(2)
        const cache = core.loadDepositCache('Deposits1DAI100000')

        // We need this to clean the cache, we want to have clean state
        const doc = (await cache.db.allDocs({ include_docs: true, descending: true, limit: 1 })).rows[0].doc
        // We are not transforming because we want to test this out
        const notes = await core.loadNotes()
        // We need to select last
        const note = notes[notes.length - 1]

        let properties: RelayerProperties = {
          address: await withdrawer.getAddress(),
          version: '2',
          serviceFeePercent: 0.04,
          miningFeePercent: 0.15,
          status: 'whatever',
          chainId: 1,
          prices: new Map<string, BigNumber>()
        }
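
        // Mock DAI price in wei per token: 1e18 / 1800, i.e. roughly 1/1800 ETH per DAI
        // (assuming an ETH price of about 1800 DAI).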
        properties.prices.set('dai', BigNumber.from(10).pow(18).div(1800))

        // Just set another address
        properties.address = await withdrawer.getAddress()

        // Build proof with relayer properties this time
        let proof

        try {
          proof = await core.buildDepositProof(dai100K, properties, await needsMoney.getAddress(), note, {
            // On by default but stating for visibility
            checkNotesSpent: true,
            checkKnownRoot: true
          })
        } finally {
          await cache.db.remove(doc?._id!, doc?._rev!)
        }

        // Calc balance diff again... it will be expressed in dai
        const daiDelta = parseUnits('100000').sub(proof[5])

        await expect(
          await dai100K
            .connect(withdrawer)
            .withdraw(proof[0], proof[1], proof[2], proof[3], proof[4], proof[5], proof[6])
        ).to.changeTokenBalance(dai, needsMoney, daiDelta)
      }).timeout(0)

      it('buildDepositTransactions: multiple eth deposits', async () => {
        const instances = await core.getInstances(
          [0.1, 1, 10, 100].map((el) => {
            return { token: 'eth', denomination: el }
          })
        )

        const txs = await core.buildDepositTransactions(instances, {
          depositsPerInstance: [1, 1, 2, 1]
        })

        for (let i = 0, len = txs.length; i < len; i++) {
          console.log('SENDING => ', i)
          const response = await needsMoney.sendTransaction(txs[i].request)
          console.log('TX SENT => ', i)
          await response.wait()
          console.log('WAITING => ', i)
        }

        expect(await needsMoney.getBalance()).to.be.lte(parseUnits('888.8'))
      }).timeout(0)

      it('buildDepositTransactions: multiple token deposits', async () => {
        const instances = await core.getInstances(
          [100, 1000, 10000, 100000].map((el) => {
            return { token: 'dai', denomination: el }
          })
        )

        const proxy = await core.getProxy()
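        // 432100 DAI covers every planned deposit: 1x100 + 2x1000 + 3x10000 + 4x100000,
        // so a single transfer and a single proxy approval serve the whole loop below.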
        const depositAmount = parseUnits('432100')

        await dai.transfer(needsMoneyAddress, parseUnits('432100'))
        dai = dai.connect(needsMoney)

        const txs = await core.buildDepositTransactions(instances, {
          depositsPerInstance: [1, 2, 3, 4]
        })

        await dai.approve(proxy.address, depositAmount)

        for (let i = 0, len = txs.length; i < len; i++) {
          await expect(() => needsMoney.sendTransaction(txs[i].request)).to.not.be.reverted
        }

        expect(await dai.balanceOf(needsMoneyAddress)).to.equal(0)
      }).timeout(0)

      it('createInvoice: should be able to create an invoice', async () => {
        const instance = await core.getInstance('dai', '1000')
        const invoice = await core.createInvoice(instance)
        console.log(invoice)
      }).timeout(0)
    })
  })
})