// sdk-monorepo/@tornado/sdk-data/src/index.ts
// Archiving and zipping
import tar from 'tar'
// Fs
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'
import { opendir, readFile, rm, writeFile } from 'fs/promises'
import { createInterface } from 'readline'
// Ethers
import { BigNumber } from 'ethers'
// Local logic
import { AsyncUtils, NumberUtils, ErrorUtils } from '@tornado/sdk-utils'
// PouchDB
import PouchDB from 'pouchdb'
import * as PouchDBAdapterMemory from 'pouchdb-adapter-memory'
// @ts-ignore
import { toIndexableString } from 'pouchdb-collate'
// Register plugins
PouchDB.plugin(PouchDBAdapterMemory)
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECLARATIONS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export interface TornadoInstance {
network: number
symbol: string
decimals: number
denomination: number
deployBlock: number
address: string
}
export interface ClassicInstance extends TornadoInstance {
anonymityMiningEnabled: boolean
}
export interface TokenData {
network: number
decimals: number
address: string
}
export namespace Keys {
export interface InstanceLookup {
network: string
token: string
denomination: string
}
}
export interface RelayerProperties {
address: string
version: string
serviceFeePercent: number
miningFeePercent: number
status: string
chainId: number
prices: Map<string, BigNumber>
}
export namespace Options {
export interface Cache {
adapter?: string
dirPath?: string
persistent?: boolean
}
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ REST ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
export namespace Files {
export type PathGetter = (relative: string) => string
export function parentPath(filepath: string): string {
let path = filepath.split('/').slice(0, -1).join('/')
path = path[path.length - 1] === '/' ? path : path + '/'
return path
}
export function stripExtensions(filepath: string): string {
const tokens = filepath.split('/')
const stripped = tokens[tokens.length - 1].split('.')[0]
const prefix = tokens.slice(0, -1).join('/')
return prefix + (prefix !== '' ? '/' : '') + stripped
}
export const getModulesPath = (relative?: string, prefix?: string): string =>
(prefix ?? __dirname + '/../../node_modules/') + (relative ?? '')
export const getResourcePath = (relative?: string, prefix?: string): string =>
(prefix ?? __dirname + '/../resources/') + (relative ?? '')
export const getCachePath = (relative?: string, prefix?: string): string =>
(prefix ?? process.cwd() + '/cache/') + (relative ?? '')
export const cacheDirExists = (prefix?: string): boolean => existsSync(getCachePath('', prefix))
export const makeCacheDir = (prefix?: string): void => mkdirSync(getCachePath('', prefix))
export const loadRaw = (relative: string): Promise<Buffer> => readFile(getResourcePath(relative))
export const loadRawSync = (relative: string): Buffer => readFileSync(getResourcePath(relative))
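// Note: unlike loadRaw/loadRawSync above, the write helpers below pass their first argument
// through getCachePath as the *prefix*, so the path is used exactly as given (callers supply a
// full or cwd-relative path rather than a cache-relative one).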
export const writeRaw = (
relative: string,
data: string | NodeJS.ArrayBufferView,
flag: string
): Promise<void> => writeFile(getCachePath('', relative), data, { flag: flag })
export const writeRawSync = (relative: string, data: string | NodeJS.ArrayBufferView, flag: string): void =>
writeFileSync(getCachePath('', relative), data, { flag: flag })
export function gzipSync(fileOrDirPath: string, archivePath: string): void {
try {
const tokens = fileOrDirPath.split('/')
tar.create(
{
cwd: parentPath(fileOrDirPath),
file: stripExtensions(archivePath) + '.tar.gz',
gzip: true,
sync: true
},
[tokens[tokens.length - 1]]
)
} catch (err) {
throw ErrorUtils.ensureError(err)
}
}
export function gunzipSync(archivePath: string, extractPath: string): void {
try {
tar.extract({
cwd: extractPath,
file: stripExtensions(archivePath) + '.tar.gz',
sync: true
})
} catch (err) {
throw ErrorUtils.ensureError(err)
}
}
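// Round-trip sketch (the 'Deposits1ETH1' cache name below is purely illustrative):
//
//   // pack <cache>/Deposits1ETH1 into <cache>/Deposits1ETH1.tar.gz
//   Files.gzipSync(Files.getCachePath('Deposits1ETH1'), Files.getCachePath('Deposits1ETH1'))
//   // and unpack the archive back into the cache directory
//   Files.gunzipSync(Files.getCachePath('Deposits1ETH1'), Files.getCachePath(''))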
export async function wipeCache(prompt: boolean = true, dirPath?: string): Promise<void> {
const dir = await opendir(getCachePath('', dirPath))
const toRemove: string[] = []
for await (const entry of dir) {
if (entry.name.match('(Deposit.*)|(Withdrawal.*)|(Note.*)|(Invoice.*)'))
toRemove.push(getCachePath(entry.name, dirPath))
}
if (toRemove.length === 0) return
let confirmed = true
if (prompt) {
// Only open stdin when a confirmation is actually needed, otherwise the open
// readline interface would keep the process alive after the call returns.
const userInput = createInterface({ input: process.stdin, output: process.stdout })
const promptString = `\nCache wipe requested, following would be wiped:\n\n${toRemove.join(
'\n'
)}\n\nContinue? (y/n): `
function wipeCachePrompt(question: string, resolve: (value: boolean) => void): void {
userInput.question(question, (answer) => {
if (answer === 'y') {
userInput.close()
resolve(true)
} else if (answer === 'n') {
userInput.close()
resolve(false)
} else wipeCachePrompt('', resolve)
})
}
confirmed = await new Promise<boolean>((resolve) => wipeCachePrompt(promptString, resolve))
}
if (confirmed)
await Promise.all(toRemove.map((entry) => rm(entry, { recursive: true, force: true }))).catch((err) => {
throw ErrorUtils.ensureError(err)
})
}
}
export namespace Json {
const cachedJsonData = new Map<string, any>()
export async function load(
relativePath: string,
encoding: BufferEncoding = 'utf8',
pathGetter: Files.PathGetter = Files.getResourcePath
): Promise<any> {
if (cachedJsonData.has(relativePath)) return cachedJsonData.get(relativePath)
else {
const obj = JSON.parse(await readFile(pathGetter(relativePath), encoding))
cachedJsonData.set(relativePath, obj)
return obj
}
}
export function loadSync(
relativePath: string,
encoding: BufferEncoding = 'utf8',
pathGetter: Files.PathGetter = Files.getResourcePath
): any {
if (cachedJsonData.has(relativePath)) return cachedJsonData.get(relativePath)
else {
const obj = JSON.parse(readFileSync(pathGetter(relativePath), encoding))
cachedJsonData.set(relativePath, obj)
return obj
}
}
export function toMap<V>(jsonData: any): Map<string, V> {
return new Map<string, V>(Object.entries(jsonData))
}
export async function loadMap<V>(
relativePath: string,
encoding: BufferEncoding = 'utf8'
): Promise<Map<string, V>> {
return toMap<V>(await load(relativePath, encoding))
}
export function getError(...values: any[]): Error {
return ErrorUtils.getError(`there is no such entry for the key-value path [${values.join('][')}]`)
}
export function throwError(...values: any[]): void {
throw getError(...values)
}
export function getValue(jsonObj: any, keys: any[]): any {
for (let i = 0; i < keys.length; i++) {
jsonObj = jsonObj[keys[i]] ?? throwError(...keys.slice(0, i + 1))
}
return jsonObj
}
}
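// Json usage sketch (the resource file and keys below are ones referenced elsewhere in this
// module; getValue throws via throwError when any key along the path is missing):
//
//   const infrastructure = await Json.load('onchain/infrastructure.json')
//   const registry = Json.getValue(infrastructure, ['1', 'registry'])
//   const symbols = await Json.loadMap<string>('onchain/networkSymbols.json')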
// TODO: Decide whether to also cache the data instead of just loading it for the function call
export namespace Onchain {
export async function getClassicInstanceData(
network: string,
token: string,
denomination: string
): Promise<ClassicInstance> {
const instanceData = Json.getValue(await Json.load('onchain/instances.json'), [network, token])
return {
network: +network,
symbol: token.toUpperCase(),
decimals: Json.getValue(instanceData, ['decimals']),
denomination: +denomination,
deployBlock: Json.getValue(instanceData, ['deployedBlockNumber', denomination]),
address: Json.getValue(instanceData, ['instanceAddress', denomination]),
anonymityMiningEnabled: Json.getValue(instanceData, ['miningEnabled'])
}
}
export function getClassicInstanceDataSync(
network: string,
token: string,
denomination: string
): ClassicInstance {
const instanceData = Json.getValue(Json.loadSync('onchain/instances.json'), [network, token])
return {
network: +network,
symbol: token.toUpperCase(),
decimals: Json.getValue(instanceData, ['decimals']),
denomination: +denomination,
deployBlock: Json.getValue(instanceData, ['deployedBlockNumber', denomination]),
address: Json.getValue(instanceData, ['instanceAddress', denomination]),
anonymityMiningEnabled: Json.getValue(instanceData, ['miningEnabled'])
}
}
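// Example (sketch; the keys '1', 'eth' and '1' are illustrative and must match entries
// in onchain/instances.json):
//
//   const pool = await getClassicInstanceData('1', 'eth', '1')
//   // pool.address, pool.deployBlock, pool.anonymityMiningEnabled, ...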
export async function getInstanceLookupKeys(instanceAddress: string): Promise<Keys.InstanceLookup> {
// Reverse lookup: find the pathstring whose value is the given instance address,
// then split it back into its network, token and denomination parts
const lookupObj: { [key: string]: string } = await Json.load('onchain/instanceAddresses.json')
const pathstring: string = Object.entries(lookupObj).find((el) => el[1] === instanceAddress)![0]
const network = pathstring.match('[0-9]+')![0],
token = pathstring.substring(network.length).match('[a-z]+')![0],
denomination = pathstring.substring(network.length + token.length)
return {
network: network,
token: token,
denomination: denomination
}
}
export function getInstanceLookupKeysSync(instanceAddress: string): Keys.InstanceLookup {
// Reverse lookup: find the pathstring whose value is the given instance address,
// then split it back into its network, token and denomination parts
const lookupObj: { [key: string]: string } = Json.loadSync('onchain/instanceAddresses.json')
const pathstring: string = Object.entries(lookupObj).find((el) => el[1] === instanceAddress)![0]
const network = pathstring.match('[0-9]+')![0],
token = pathstring.substring(network.length).match('[a-z]+')![0],
denomination = pathstring.substring(network.length + token.length)
return {
network: network,
token: token,
denomination: denomination
}
}
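// Reverse lookup example (the address is a placeholder):
//
//   const keys = await getInstanceLookupKeys('0x...')
//   const pool = await getClassicInstanceData(keys.network, keys.token, keys.denomination)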
export async function getPathstringBasedContent<T>(
filepath: string,
paths: Array<{
network?: string
token?: string
denomination?: string
}>
): Promise<Array<T>> {
const obj = await Json.load(filepath)
return await Promise.all(
paths.map((path) =>
Json.getValue(obj, [`${path.network ?? ''}${path.token ?? ''}${path.denomination ?? ''}`])
)
)
}
export function getPathstringBasedContentSync<T>(
filepath: string,
paths: Array<{
network?: string
token?: string
denomination?: string
}>
): Array<T> {
return paths.map((path) =>
Json.getValue(Json.loadSync(filepath), [
`${path.network ?? ''}${path.token ?? ''}${path.denomination ?? ''}`
])
)
}
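// The lookup key is the concatenation `${network}${token}${denomination}`, so a path of
// { network: '1', token: 'eth', denomination: '1' } reads the entry stored under '1eth1'
// (assuming such a key exists in the given resource file).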
export async function getNetworkSymbol(networkId: string): Promise<string> {
return (
await getPathstringBasedContent<string>('onchain/networkSymbols.json', [{ network: networkId }])
)[0]
}
export function getNetworkSymbolSync(networkId: string): string {
return getPathstringBasedContentSync<string>('onchain/networkSymbols.json', [{ network: networkId }])[0]
}
export function getInstanceAddresses(
paths: Array<{
network: string
token: string
denomination: string
}>
): Promise<Array<string>> {
return getPathstringBasedContent<string>('onchain/instanceAddresses.json', paths)
}
export function getInstanceAddressesSync(
paths: Array<{
network: string
token: string
denomination: string
}>
): Array<string> {
return getPathstringBasedContentSync<string>('onchain/instanceAddresses.json', paths)
}
export async function getInstanceAddress(
network: string,
token: string,
denomination: string
): Promise<string> {
return (await getInstanceAddresses([{ network: network, token: token, denomination: denomination }]))[0]
}
export function getInstanceAddressSync(network: string, token: string, denomination: string): string {
return getInstanceAddressesSync([{ network: network, token: token, denomination: denomination }])[0]
}
export async function getRegistryAddress(network?: string): Promise<string> {
network = network ?? '1'
return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'registry'])
}
export function getRegistryAddressSync(network?: string): string {
network = network ?? '1'
return Json.getValue(Json.loadSync('onchain/infrastructure.json'), [network, 'registry'])
}
export async function getRegistryDeployBlockNum(network?: string): Promise<number> {
network = network ?? '1'
return Json.getValue(await Json.load('onchain/deployedBlockNumbers.json'), [`${network}registry`])
}
export function getRegistryDeployBlockNumSync(network?: string): number {
network = network ?? '1'
return Json.getValue(Json.loadSync('onchain/deployedBlockNumbers.json'), [`${network}registry`])
}
export function getInstanceDeployBlockNums(
paths: Array<{
network: string
token: string
denomination: string
}>
): Promise<Array<number>> {
return getPathstringBasedContent<number>('onchain/deployedBlockNumbers.json', paths)
}
export function getInstanceDeployBlockNumsSync(
paths: Array<{
network: string
token: string
denomination: string
}>
): Array<number> {
return getPathstringBasedContentSync<number>('onchain/deployedBlockNumbers.json', paths)
}
export async function getInstanceDeployBlockNum(
network: string,
token: string,
denomination: string
): Promise<number> {
return (
await getInstanceDeployBlockNums([{ network: network, token: token, denomination: denomination }])
)[0]
}
export function getInstanceDeployBlockNumSync(
network: string,
token: string,
denomination: string
): number {
return getInstanceDeployBlockNumsSync([{ network: network, token: token, denomination: denomination }])[0]
}
export async function getProxyAddress(network: string): Promise<string> {
return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'proxy'])
}
export async function getMulticallAddress(network: string): Promise<string> {
return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'multicall'])
}
export async function getMulticall3Address(network: string): Promise<string> {
return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'multicall3'])
}
export function getProxyAddressSync(network: string): string {
return Json.getValue(Json.loadSync('onchain/infrastructure.json'), [network, 'proxy'])
}
export function getMulticallAddressSync(network: string): string {
return Json.getValue(Json.loadSync('onchain/infrastructure.json'), [network, 'multicall'])
}
export function getMulticall3AddressSync(network: string): string {
return Json.getValue(Json.loadSync('onchain/infrastructure.json'), [network, 'multicall3'])
}
export async function getTokenAddress(network: string, token: string): Promise<string> {
return (
await getPathstringBasedContent<string>('onchain/tokenAddresses.json', [
{ network: network, token: token }
])
)[0]
}
export function getTokenAddressSync(network: string, token: string): string {
return getPathstringBasedContentSync<string>('onchain/tokenAddresses.json', [
{ network: network, token: token }
])[0]
}
export async function getTokenDecimals(network: string, token: string): Promise<number> {
return (
await getPathstringBasedContent<number>('onchain/decimals.json', [{ network: network, token: token }])
)[0]
}
export function getTokenDecimalsSync(network: string, token: string): number {
return getPathstringBasedContentSync<number>('onchain/decimals.json', [
{ network: network, token: token }
])[0]
}
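// Token helper sketch (the token key 'dai' is illustrative):
//
//   const tokenAddress = await getTokenAddress('1', 'dai')
//   const decimals = await getTokenDecimals('1', 'dai')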
}
export namespace Offchain {
export async function getUncensoredRpcURL(network: string, name: string = ''): Promise<string> {
const rpcs = Json.toMap<string>(
Json.getValue(await Json.load('offchain/infrastructure.json'), ['jrpc-uncensored', network])
)
if (name.length !== 0) {
return rpcs.get(name)!
}
let keys = rpcs.keys()
let randCount = NumberUtils.getRandomFromRange(0, rpcs.size - 1)
for (let i = 0; i < randCount; i++) keys.next()
return rpcs.get(keys.next().value)!
}
export function getUncensoredRpcURLSync(network: string, name: string = ''): string {
const rpcs = Json.toMap<string>(
Json.getValue(Json.loadSync('offchain/infrastructure.json'), ['jrpc-uncensored', network])
)
if (name.length !== 0) {
return rpcs.get(name)!
}
let keys = rpcs.keys()
let randCount = NumberUtils.getRandomFromRange(0, rpcs.size - 1)
for (let i = 0; i < randCount; i++) keys.next()
return rpcs.get(keys.next().value)!
}
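// Example (sketch; a provider name must be a key under 'jrpc-uncensored' in
// offchain/infrastructure.json, so 'llamarpc' here is only illustrative):
//
//   const randomRpc = await Offchain.getUncensoredRpcURL('1')
//   const namedRpc = await Offchain.getUncensoredRpcURL('1', 'llamarpc')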
export async function getClassicSubgraphURL(network: string): Promise<string> {
return Json.getValue(await Json.load('offchain/infrastructure.json'), ['subgraph', network])
}
export function getClassicSubgraphURLSync(network: string): string {
return Json.getValue(Json.loadSync('offchain/infrastructure.json'), ['subgraph', network])
}
}
export namespace Constants {
export const MERKLE_TREE_HEIGHT = 20
}
export namespace Docs {
// TODO: Probably find some easier way to lookup below docs for the end user...
export class Base {
_id: string
_rev?: string
constructor(id: string) {
this._id = id
}
}
export class Deposit extends Base {
blockNumber: number
leafIndex: number
commitment: string
transactionHash: string
timestamp: string
constructor(obj: any) {
const blockNumber = obj['blockNumber']
const transactionHash = obj['transactionHash']
const commitment = obj['args']['commitment']
const leafIndex = obj['args']['leafIndex']
const timestamp = (obj['args']['timestamp'] as BigNumber).toString()
// Index by [blockNumber, leafIndex, commitment] so documents sort in on-chain deposit order,
// which later consumers rely on
super(toIndexableString([blockNumber, leafIndex, commitment]))
this.commitment = commitment
this.blockNumber = blockNumber
this.leafIndex = leafIndex
this.transactionHash = transactionHash
this.timestamp = timestamp
}
}
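// Deposit and Withdrawal docs are built straight from ethers event objects (sketch;
// `depositEvent` stands for one element of a queryFilter result on a Tornado instance):
//
//   const doc = new Docs.Deposit(depositEvent)
//   // doc._id sorts by [blockNumber, leafIndex, commitment] via toIndexableString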
export class Withdrawal extends Base {
blockNumber: number
to: string
nullifierHash: string
transactionHash: string
fee: string
constructor(obj: any) {
const blockNumber = obj['blockNumber']
const transactionHash = obj['transactionHash']
const to = obj['args']['to']
const nullifierHash = obj['args']['nullifierHash']
const fee = (obj['args']['fee'] as BigNumber).toString()
super(toIndexableString([blockNumber, to, nullifierHash]))
this.blockNumber = blockNumber
this.to = to
this.nullifierHash = nullifierHash
this.transactionHash = transactionHash
this.fee = fee
}
}
export class Note extends Base {
network: string
token: string
denomination: string
note: string
constructor(index: number, network: string, token: string, denomination: string, note: string) {
super(toIndexableString([index, network, denomination, token]))
this.network = network
this.token = token
this.denomination = denomination
this.note = note
}
}
export class Invoice extends Base {
network: string
token: string
denomination: string
invoice: string
constructor(index: number, network: string, token: string, denomination: string, invoice: string) {
super(toIndexableString([index, network, denomination, token]))
this.network = network
this.token = token
this.denomination = denomination
this.invoice = invoice
}
}
export class Relayer extends Base {
address: string
version: string
serviceFeePercent: number
miningFeePercent: number
status: string
chainId: number
prices: Map<string, BigNumber>
constructor(url: string, properties: RelayerProperties) {
super(toIndexableString([url]))
this.address = properties.address
this.version = properties.version
this.serviceFeePercent = properties.serviceFeePercent
this.miningFeePercent = properties.miningFeePercent
this.status = properties.status
this.chainId = properties.chainId
this.prices = properties.prices
}
}
}
export namespace Cache {
type Rows<T extends Docs.Base> = Array<{
doc?: T
id: PouchDB.Core.DocumentId
key: PouchDB.Core.DocumentKey
value: {
rev: PouchDB.Core.RevisionId
deleted?: boolean
}
}>
type RowsOptions =
| PouchDB.Core.AllDocsWithKeyOptions
| PouchDB.Core.AllDocsWithinRangeOptions
| PouchDB.Core.AllDocsOptions
export class Base<T extends Docs.Base> {
private _adapter: string
private _path: string
name: string
isOpen: boolean
db: PouchDB.Database<T>
private _unzip(dirPath?: string): void {
if (existsSync(this._path + '.tar.gz')) {
if (existsSync(this._path)) {
throw ErrorUtils.getError(`Can't load both ${this.name} and ${this.name + '.tar.gz'}, remove one!`)
} else Files.gunzipSync(this._path, Files.getCachePath('', dirPath))
}
}
constructor(name: string, options?: Options.Cache) {
this.name = name
// An explicitly specified adapter other than 'memory' contradicts persistent: false
if (options?.persistent === false && options?.adapter !== 'memory' && options?.adapter != null)
throw ErrorUtils.getError('Cache.new: if not persistent, cache must use memory adapter.')
if (options?.adapter === 'memory' && options?.persistent === true)
throw ErrorUtils.getError("Cache.new: can't specify memory adapter if persistent.")
const dbAdapter = options?.adapter ?? (options?.persistent === false ? 'memory' : 'leveldb')
if (options?.dirPath && options.dirPath.charAt(options.dirPath.length - 1) != '/') options.dirPath += '/'
if (!Files.cacheDirExists(options?.dirPath)) Files.makeCacheDir(options?.dirPath)
this._path = Files.getCachePath(name, options?.dirPath)
this._adapter = dbAdapter
this._unzip(options?.dirPath)
this.db = new PouchDB<T>(this._path, { adapter: dbAdapter })
this.isOpen = true
}
async zip(outDirPath?: string, close: boolean = false): Promise<void> {
await this.close()
if (outDirPath) outDirPath = outDirPath[outDirPath.length - 1] !== '/' ? outDirPath + '/' : outDirPath
Files.gzipSync(this._path, (outDirPath ?? Files.parentPath(this._path)) + this.name)
if (!close) this.db = new PouchDB<T>(this._path, { adapter: this._adapter })
this.isOpen = !close
}
async jsonify(outDirPath?: string): Promise<void> {
const docs = (await this.getRows()).map((row) => {
row.doc!._rev = undefined
return row.doc
})
if (outDirPath) outDirPath = outDirPath[outDirPath.length - 1] !== '/' ? outDirPath + '/' : outDirPath
Files.writeRawSync(
(outDirPath ?? Files.parentPath(this._path)) + this.name + '.json',
JSON.stringify(docs, null, 2),
'w'
)
}
async get(keys: Array<any>): Promise<T> {
return await this.db.get(toIndexableString(keys)).catch((err) => {
throw ErrorUtils.ensureError(err)
})
}
async getRows(
emptyError: Error = ErrorUtils.getError(`Base.getRows: there is no cache entry for ${this.name}`),
options: RowsOptions = { include_docs: true, attachments: false }
): Promise<Rows<T>> {
const docs = await this.db.allDocs(options).catch((err) => {
throw ErrorUtils.ensureError(err)
})
// If the result is empty and an emptyError was supplied (for example, when a caller
// expects the database to already be populated), destroy the empty database and throw.
if (docs.total_rows === 0 && emptyError) {
await this.clear()
throw emptyError
}
return docs.rows as Rows<T>
}
async close(): Promise<void> {
await this.db.close().catch((err) => {
throw ErrorUtils.ensureError(err)
})
}
async clear(): Promise<void> {
await this.db.destroy().catch((err) => {
throw ErrorUtils.ensureError(err)
})
}
}
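// Usage sketch (a throwaway in-memory cache; the keys and note string are illustrative):
//
//   const notes = new Cache.Base<Docs.Note>('Notes', { persistent: false })
//   await notes.db.put(new Docs.Note(0, '1', 'eth', '1', 'tornado-eth-1-1-0x...'))
//   const first = await notes.get([0, '1', '1', 'eth'])
//   await notes.clear()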
export abstract class Syncable<T extends Docs.Base> extends Base<T> {
pooler?: AsyncUtils.PromisePooler
constructor(name: string, options?: Options.Cache) {
super(name, options)
}
abstract buildDoc(response: any): Docs.Base
abstract getCallbacks(...args: Array<any>): Array<AsyncUtils.Callback>
abstract getErrorHandlers(...args: Array<any>): Array<AsyncUtils.ErrorHandler>
initializePooler(
callbacks: Array<AsyncUtils.Callback>,
errorHandlers: Array<AsyncUtils.ErrorHandler>,
concurrencyLimit: number
): void {
if (this.pooler) this.pooler.reset()
this.pooler = new AsyncUtils.PromisePooler(callbacks, errorHandlers, concurrencyLimit)
}
async close(): Promise<void> {
if (this.pooler && this.pooler.pending)
throw ErrorUtils.getError("Syncable.close: can't clear while pooler still has pending promises.")
await super.close()
}
async clear(): Promise<void> {
if (this.pooler && this.pooler.pending)
throw ErrorUtils.getError("Syncable.clear: can't clear while pooler still has pending promises.")
await super.clear()
}
}
}
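// A minimal Syncable subclass sketch (illustrative only; a real subclass would supply
// callbacks that fetch events and error handlers that retry or surface failures):
//
//   class DepositCache extends Cache.Syncable<Docs.Deposit> {
//     buildDoc(response: any): Docs.Deposit {
//       return new Docs.Deposit(response)
//     }
//     getCallbacks(): Array<AsyncUtils.Callback> {
//       return []
//     }
//     getErrorHandlers(): Array<AsyncUtils.ErrorHandler> {
//       return []
//     }
//   }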