Compare commits: main ... mainnet-v4 (12 commits)

SHA1:
89367a8b43
8ebcfe6b55
8a7bb849cf
a2f7adce52
776d1a832d
4a996ddb91
01897a60e3
d1582f1d35
49b4751c20
cc56bd77b5
1435457e67
e60b4a12e5
.dockerignore (new file)
@@ -0,0 +1,3 @@
node_modules
.env
.git
.editorconfig (new file)
@@ -0,0 +1,9 @@
root = true

[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
.env.example (new file)
@@ -0,0 +1,24 @@
NET_ID=1
HTTP_RPC_URL=https://api.securerpc.com/v1
# WS_RPC_URL=wss://mainnet.infura.io/ws/v3/
# ORACLE_RPC_URL should always point to the mainnet
ORACLE_RPC_URL=https://api.securerpc.com/v1
REDIS_URL=redis://127.0.0.1:6379

# DNS settings
VIRTUAL_HOST=example.duckdns.org
LETSENCRYPT_HOST=example.duckdns.org
APP_PORT=8000

# without 0x prefix
PRIVATE_KEY=
# 0.4 means 0.4%
REGULAR_TORNADO_WITHDRAW_FEE=0.4
MINING_SERVICE_FEE=0.05
REWARD_ACCOUNT=
CONFIRMATIONS=4

# in GWEI
MAX_GAS_PRICE=1000
BASE_FEE_RESERVE_PERCENTAGE=25
AGGREGATOR=0x8cb1436F64a3c33aD17bb42F94e255c4c0E871b2
.eslintrc.json (new file)
@@ -0,0 +1,45 @@
{
  "env": {
    "node": true,
    "browser": true,
    "es6": true,
    "mocha": true
  },
  "extends": "eslint:recommended",
  "globals": {
    "Atomics": "readonly",
    "SharedArrayBuffer": "readonly"
  },
  "parserOptions": {
    "ecmaVersion": 2018
  },
  "rules": {
    "indent": [
      "error",
      2,
      {
        "SwitchCase": 1
      }
    ],
    "linebreak-style": ["error", "unix"],
    "quotes": [
      "error",
      "single",
      {
        "avoidEscape": true
      }
    ],
    "semi": ["error", "never"],
    "object-curly-spacing": ["error", "always"],
    "require-await": "error",
    "comma-dangle": ["error", "only-multiline"],
    "space-before-function-paren": [
      "error",
      {
        "anonymous": "always",
        "named": "never",
        "asyncArrow": "always"
      }
    ]
  }
}
.github/workflows/build.yml (new file, vendored)
@@ -0,0 +1,99 @@
name: build

on:
  push:
    branches: ['*']
    tags: ['v[0-9]+.[0-9]+.[0-9]+']
  pull_request:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - uses: actions/setup-node@v1
        with:
          node-version: 12
      - run: yarn install
      - run: yarn test
      - run: yarn lint
      - name: Telegram Failure Notification
        uses: appleboy/telegram-action@master
        if: failure()
        with:
          message: ❗ Build failed for [${{ github.repository }}](https://github.com/${{ github.repository }}/actions) because of ${{ github.actor }}
          format: markdown
          to: ${{ secrets.TELEGRAM_CHAT_ID }}
          token: ${{ secrets.TELEGRAM_BOT_TOKEN }}

  publish:
    runs-on: ubuntu-latest
    needs: build
    if: startsWith(github.ref, 'refs/tags')
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Set vars
        id: vars
        run: |
          echo "::set-output name=version::$(echo ${GITHUB_REF#refs/tags/v})"
          echo "::set-output name=repo_name::$(echo ${GITHUB_REPOSITORY#*/})"

      - name: Check package.json version vs tag
        run: |
          [ ${{ steps.vars.outputs.version }} = $(grep '"version":' package.json | grep -o "[0-9.]*") ] || (echo "Git tag doesn't match version in package.json" && false)

      - name: Build and push Docker image
        uses: docker/build-push-action@v1.1.0
        with:
          dockerfile: Dockerfile
          repository: tornadocash/relayer
          tag_with_ref: true
          tags: mining,candidate
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Telegram Message Notify
        uses: appleboy/telegram-action@master
        with:
          to: ${{ secrets.TELEGRAM_CHAT_ID }}
          token: ${{ secrets.TELEGRAM_BOT_TOKEN }}
          message: 🚀 Published a [${{ steps.vars.outputs.repo_name }}](https://github.com/${{ github.repository }}) version ${{ steps.vars.outputs.version }} to docker hub
          debug: true
          format: markdown

      - name: Telegram Relayer Channel Notification
        uses: appleboy/telegram-action@master
        with:
          to: ${{ secrets.TELEGRAM_RELAYER_CHAT_ID }}
          token: ${{ secrets.TELEGRAM_BOT_TOKEN }}
          message: |
            🚀 Published a new version of the relayer node service for mainnet to docker hub: `tornadocash/relayer:v${{ steps.vars.outputs.version }}` and `tornadocash/relayer:mining`.

            ❗️Please update your mainnet nodes ❗️
            DO NOT TOUCH SIDECHAINS AND NOVA RELAYERS.

          debug: true
          format: markdown

      - name: Discord Relayer Channel Notification
        env:
          DISCORD_WEBHOOK: ${{ secrets.DISCORD_RELAYER_WEBHOOK }}
        uses: Ilshidur/action-discord@master
        with:
          args: |
            🚀 Published a new version of the relayer node service for mainnet to docker hub: `tornadocash/relayer:v${{ steps.vars.outputs.version }}` and `tornadocash/relayer:mining`.

            ❗️Please update your mainnet nodes ❗️
            DO NOT TOUCH SIDECHAINS AND NOVA RELAYERS.

      - name: Telegram Failure Notification
        uses: appleboy/telegram-action@master
        if: failure()
        with:
          message: ❗ Failed to publish [${{ steps.vars.outputs.repo_name }}](https://github.com/${{ github.repository }}/actions):v${{ steps.vars.outputs.version }} for mainnet because of ${{ github.actor }}
          format: markdown
          to: ${{ secrets.TELEGRAM_CHAT_ID }}
          token: ${{ secrets.TELEGRAM_BOT_TOKEN }}
.gitignore (vendored)
@@ -1,4 +1,9 @@
node_modules

.vscode
node_modules/
.env
.env*
.env.mainnet
.env.kovan
kovan.*
dump.rdb
.idea
yarn-error.log
.npmrc (new file)
@@ -0,0 +1 @@
@tornado:registry=https://git.tornado.ws/api/packages/tornado-packages/npm/
.prettierignore (new file)
@@ -0,0 +1 @@
keys/TreeUpdate.json
.prettierrc (new file)
@@ -0,0 +1,7 @@
{
  "semi": false,
  "arrowParens": "avoid",
  "singleQuote": true,
  "printWidth": 110,
  "trailingComma": "all"
}
Dockerfile (new file)
@@ -0,0 +1,9 @@
FROM node:16
WORKDIR /app

COPY package.json yarn.lock ./
RUN yarn && yarn cache clean --force
COPY . .

EXPOSE 8000
ENTRYPOINT ["yarn"]
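For a manual build outside of docker-compose, the image can be built and run directly. This is a sketch only: the `tornadocash/relayer:mainnet-v4` tag matches what the compose file below uses, and the container still expects a reachable Redis instance and a filled `.env` file.

```bash
# Build the relayer image from this Dockerfile
docker build -t tornadocash/relayer:mainnet-v4 .
# The ENTRYPOINT is "yarn", so passing "server" runs "yarn server" on port 8000
docker run --rm -p 8000:8000 --env-file .env tornadocash/relayer:mainnet-v4 server
```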
README.md
@@ -1,60 +1,100 @@
# Relayer for Tornado Cash [![Build Status](https://github.com/tornadocash/relayer/workflows/build/badge.svg)](https://github.com/tornadocash/relayer/actions)![Sidechains version](https://img.shields.io/badge/version-5.2.1-blue?logo=docker)![Mainnet version](https://img.shields.io/badge/version-4.1.5-blue?logo=docker)
# Relayer for Tornado Cash [![Build Status](https://github.com/tornadocash/relayer/workflows/build/badge.svg)](https://github.com/tornadocash/relayer/actions) [![Docker Image Version (latest semver)](https://img.shields.io/docker/v/tornadocash/relayer?logo=docker&logoColor=%23FFFFFF&sort=semver)](https://hub.docker.com/repository/docker/tornadocash/relayer)

**\*Tornado Cash was sanctioned by the US Treasury on 08/08/2022, this makes it illegal for US citizens to interact with Tornado Cash and all of it's associated deployed smart contracts. Please understand the laws where you live and take all necessary steps to protect and anonymize yourself.**
__*Tornado Cash was sanctioned by the US Treasury on 08/08/2022, this makes it illegal for US citizens to interact with Tornado Cash and all of it's associated deployed smart contracts. Please understand the laws where you live and take all necessary steps to protect and anonymize yourself.__

**\*It is recommended to run your Relayer on a VPS instnace (Virtual Private Server). Ensure SSH configuration is enabled for security, you can find information about SSH keygen and management [here](https://www.ssh.com/academy/ssh/keygen).**
__*It is recommended to run your Relayer on a VPS instance ([Virtual Private Server](https://njal.la/)). Ensure SSH configuration is enabled for security, you can find information about SSH keygen and management [here](https://www.ssh.com/academy/ssh/keygen).__

## Deploy with script and docker-compose
## Deploy with docker-compose (recommended)

_The following instructions are for Ubuntu 22.10, other operating systems may vary._
*The following instructions are for Ubuntu 22.10, other operating systems may vary. These instructions include automated SSL configuration with LetsEncrypt.*

#### Installation:
__PREREQUISITES__

1. Update core dependencies
   - `sudo apt-get update`
2. Install docker-compose
   - `curl -SL https://github.com/docker/compose/releases/download/v2.16.0/docker-compose-linux-x86_64 -o /usr/local/bin/docker-compose && sudo chmod +x /usr/local/bin/docker-compose`
3. Install Docker
   - `curl -fsSL https://get.docker.com -o get-docker.sh && chmod +x get-docker.sh && ./get-docker.sh`
4. Install git
   - `sudo apt-get install git-all`
5. Install nginx
   - `sudo apt install nginx`
6. Stop apache2 instance (enabled by default)
   - `sudo systemctl stop apache2`

Just run in terminal:
__FIREWALL CONFIGURATION__

_* Warning: Failure to configure SSH as the first UFW rule will lock you out of the instance_

1. Make sure UFW is installed by running `apt update` and `apt install ufw`
2. Allow SSH in the first position in UFW by running `ufw insert 1 allow ssh`*
3. Allow HTTP and HTTPS by running `ufw allow https/tcp/http`
4. Finalize changes and enable firewall `ufw enable`

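Taken together, the firewall steps above correspond to roughly the following shell session (a sketch mirroring the `configure_firewall` function of the removed `install.sh` shown later in this diff):

```bash
# Keep SSH reachable before the firewall comes up, then open web traffic
sudo ufw insert 1 allow ssh
sudo ufw allow http/tcp
sudo ufw allow https/tcp
sudo ufw enable
```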
__DEPLOYMENT__

1. Clone the repository and enter the directory
   - `git clone https://git.tornado.ws/tornadocash/classic-relayer -b mainnet-v4 && cd classic-relayer`
2. Copy the example environment file `.env.example` to configure for the preferred network - `cp .env.example .env`, then fill the `.env` file (an example is sketched after this list).
   - Set `PRIVATE_KEY` for your relayer address (remove the 0x from your private key)
   - Set `VIRTUAL_HOST` and `LETSENCRYPT_HOST` to your domain address
   - Add an A DNS record with the value assigned to your instance IP address to configure the domain
   - Set `RELAYER_FEE` to what you would like to charge as your fee (remember 0.3% is deducted from your staked relayer balance)
   - Set `RPC_URL` to a non-censoring RPC endpoint (you can [run your own](https://github.com/feshchenkod/rpc-nodes), or use a [free option](https://chainnodes.org/))
   - Set `ORACLE_RPC_URL` to an Ethereum native RPC endpoint

3. Uncomment the `env_file` lines (remove `# `) for the associated network services in `docker-compose.yml`
4. Build and deploy the docker source by specifying the network through:

   - `npm run build`
   - `docker-compose up -d`
5. Visit your domain address and check the `/status` endpoint to ensure there are no errors in the `status` field

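For step 2 above, a filled `.env` might look like the following sketch. All values are placeholders; the full list of variables is in `.env.example` earlier in this diff, which names the fee variable `REGULAR_TORNADO_WITHDRAW_FEE`.

```bash
# .env (example values only - replace with your own)
NET_ID=1
HTTP_RPC_URL=https://rpc.example.org            # non-censoring RPC endpoint
ORACLE_RPC_URL=https://mainnet-rpc.example.org  # must point to Ethereum mainnet
VIRTUAL_HOST=relayer.example.org
LETSENCRYPT_HOST=relayer.example.org
PRIVATE_KEY=abc123...                           # without the 0x prefix
REGULAR_TORNADO_WITHDRAW_FEE=0.4                # 0.4 means 0.4%
```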
__NGINX REVERSE PROXY__

1. Copy the pre-modified nginx policy as your default policy
   - `cp tornado.conf /etc/nginx/sites-available/default`
2. Append the default nginx configuration to include streams
   - `echo "stream { map_hash_bucket_size 128; map_hash_max_size 128; include /etc/nginx/conf.d/streams/*.conf; }" >> /etc/nginx/nginx.conf`
3. Create the stream configuration
   - `mkdir /etc/nginx/conf.d/streams && cp tornado-stream.conf /etc/nginx/conf.d/streams/tornado-stream.conf`
4. Start nginx to make sure the configuration is correct
   - `sudo systemctl restart nginx`
5. Stop nginx
   - `sudo systemctl stop nginx`

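As an optional sanity check (not part of the original steps), the merged configuration from steps 1-3 can be validated before restarting nginx:

```bash
# Parse the nginx configuration without starting the service
sudo nginx -t
```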
## Run locally

1. `npm i`
2. `cp .env.example .env`
3. Modify `.env` as needed
4. `npm run start`
5. Go to `http://127.0.0.1:8000`
6. In order to execute a withdraw request, you can run the following command

```bash
curl -s https://git.tornado.ws/tornadocash/tornado-relayer/raw/branch/main/install.sh | bash
curl -X POST -H 'content-type:application/json' --data '<input data>' http://127.0.0.1:8000/relay
```

#### Configuring environments:
Relayer should return a transaction hash.

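The relayer's health can also be checked from the command line (a sketch; `/status` is the same endpoint referenced in the deployment steps above, assumed to be served locally on port 8000):

```bash
# Query the locally running relayer's status endpoint
curl -s http://127.0.0.1:8000/status
```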
1. Go to `tornado-relayer` folder on the server home directory
2. Check environment files:
_Note._ If you want to change contracts' addresses go to [config.js](./config.js) file.

By default each network is preconfigured the naming of `.env.<NETWORK>`
## Input data example

- `.env.eth` for Ethereum Mainnet
- `.env.bsc` for Binance Smart Chain
- `.env.arb` for Arbitrum
- `.env.op` for Optimism
- `.env.gnosis` for Gnosis (xdai)
- `.env.polygon` for Polygon (matic)
- `.env.avax` for Avalanche C-Chain

```json
{
  "proof": "0x0f8cb4c2ca9cbb23a5f21475773e19e39d3470436d7296f25c8730d19d88fcef2986ec694ad094f4c5fff79a4e5043bd553df20b23108bc023ec3670718143c20cc49c6d9798e1ae831fd32a878b96ff8897728f9b7963f0d5a4b5574426ac6203b2456d360b8e825d8f5731970bf1fc1b95b9713e3b24203667ecdd5939c2e40dec48f9e51d9cc8dc2f7f3916f0e9e31519c7df2bea8c51a195eb0f57beea4924cb846deaa78cdcbe361a6c310638af6f6157317bc27d74746bfaa2e1f8d2e9088fd10fa62100740874cdffdd6feb15c95c5a303f6bc226d5e51619c5b825471a17ddfeb05b250c0802261f7d05cf29a39a72c13e200e5bc721b0e4c50d55e6",
  "args": [
    "0x1579d41e5290ab5bcec9a7df16705e49b5c0b869095299196c19c5e14462c9e3",
    "0x0cf7f49c5b35c48b9e1d43713e0b46a75977e3d10521e9ac1e4c3cd5e3da1c5d",
    "0x03ebd0748aa4d1457cf479cce56309641e0a98f5",
    "0xbd4369dc854c5d5b79fe25492e3a3cfcb5d02da5",
    "0x000000000000000000000000000000000000000000000000058d15e176280000",
    "0x0000000000000000000000000000000000000000000000000000000000000000"
  ],
  "contract": "0xA27E34Ad97F171846bAf21399c370c9CE6129e0D"
}
```
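For instance, saving the example above as `input.json`, the request from the "Run locally" section becomes (a sketch; substitute your own proof and arguments):

```bash
# POST the withdrawal input data to a locally running relayer
curl -X POST -H 'content-type:application/json' --data @input.json http://127.0.0.1:8000/relay
```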

3. Configure (fill) environment files for those networks on which the relayer will be deployed:

   - Set `PRIVATE_KEY` to your relayer address (remove the 0x from your private key) to each environment file
     - _It is recommended not to reuse the same private keys for each network as a security measure_
   - Set `VIRTUAL_HOST` and `LETSENCRYPT_HOST` a unique subndomain for every network to each environment file
     - eg: `mainnet.example.com` for Ethereum, `binance.example.com` for Binance etc
   - add a A wildcard record DNS record with the value assigned to your instance IP address to configure subdomains
   - Set `RELAYER_FEE` to what you would like to charge as your fee (remember 0.3% is deducted from your staked relayer balance)
   - Set `RPC_URL` to a non-censoring RPC (You can [run your own](https://github.com/feshchenkod/rpc-nodes), or use a [free option](https://chainnodes.org/))
   - Set `ORACLE_RPC_URL` to an Ethereum native RPC endpoint

4(Optional). If you want to run relayer for [Nova](https://nova.tornado.ws), fill `.env.nova` file by instructions in [Nova branch](https://git.tornado.ws/tornadocash/tornado-relayer/src/branch/nova), because config is very specific

#### Deployment:

1. Build and deploy the docker source for the configured networks specified via `--profile <NETWORK_SYMBOL>`, for example (if you run relayer only for Ethereum Mainnet, Binance Smart Chain and Arbitrum):

   - `docker-compose --profile eth --profile bsc --profile arb up -d`

2. Visit your domain addresses and check each `/status` endpoint to ensure there is no errors in the `status` fields
2. Optional: if you want to run Nova relayer, just add `--profile nova` to docker-compose command

If you want to change some relayer parameters, for example, RPC url or fee percent, stop the relayer software with command `docker-compose down --remove-orphans`, change in corresponding `.env.{name}` file what you need and rerun relayer as described above.

#### Disclaimer:
Disclaimer:

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
319
abis/Aggregator.abi.json
Normal file
319
abis/Aggregator.abi.json
Normal file
@ -0,0 +1,319 @@
|
||||
[
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32[]",
|
||||
"name": "domains",
|
||||
"type": "bytes32[]"
|
||||
}
|
||||
],
|
||||
"name": "bulkResolve",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "address[]",
|
||||
"name": "result",
|
||||
"type": "address[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract Governance",
|
||||
"name": "governance",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "getAllProposals",
|
||||
"outputs": [
|
||||
{
|
||||
"components": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "proposer",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "target",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "startTime",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "endTime",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "forVotes",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "againstVotes",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "bool",
|
||||
"name": "executed",
|
||||
"type": "bool"
|
||||
},
|
||||
{
|
||||
"internalType": "bool",
|
||||
"name": "extended",
|
||||
"type": "bool"
|
||||
},
|
||||
{
|
||||
"internalType": "enum Governance.ProposalState",
|
||||
"name": "state",
|
||||
"type": "uint8"
|
||||
}
|
||||
],
|
||||
"internalType": "struct GovernanceAggregator.Proposal[]",
|
||||
"name": "proposals",
|
||||
"type": "tuple[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract Governance",
|
||||
"name": "governance",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "address[]",
|
||||
"name": "accs",
|
||||
"type": "address[]"
|
||||
}
|
||||
],
|
||||
"name": "getGovernanceBalances",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256[]",
|
||||
"name": "amounts",
|
||||
"type": "uint256[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "address[]",
|
||||
"name": "fromTokens",
|
||||
"type": "address[]"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256[]",
|
||||
"name": "oneUnitAmounts",
|
||||
"type": "uint256[]"
|
||||
}
|
||||
],
|
||||
"name": "getPricesInETH",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256[]",
|
||||
"name": "prices",
|
||||
"type": "uint256[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract Governance",
|
||||
"name": "governance",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "account",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "getUserData",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "balance",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "latestProposalId",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "latestProposalIdState",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "timelock",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "delegatee",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract Miner",
|
||||
"name": "miner",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "address[]",
|
||||
"name": "instances",
|
||||
"type": "address[]"
|
||||
}
|
||||
],
|
||||
"name": "minerRates",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256[]",
|
||||
"name": "_rates",
|
||||
"type": "uint256[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "node",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "resolve",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract RewardSwap",
|
||||
"name": "swap",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "swapState",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "balance",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "poolWeight",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract Miner",
|
||||
"name": "miner",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "address[]",
|
||||
"name": "instances",
|
||||
"type": "address[]"
|
||||
},
|
||||
{
|
||||
"internalType": "contract RewardSwap",
|
||||
"name": "swap",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "miningData",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256[]",
|
||||
"name": "_rates",
|
||||
"type": "uint256[]"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "balance",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "poolWeight",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "address[]",
|
||||
"name": "fromTokens",
|
||||
"type": "address[]"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256[]",
|
||||
"name": "oneUnitAmounts",
|
||||
"type": "uint256[]"
|
||||
},
|
||||
{
|
||||
"internalType": "contract RewardSwap",
|
||||
"name": "swap",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "marketData",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256[]",
|
||||
"name": "prices",
|
||||
"type": "uint256[]"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "balance",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
}
|
||||
]
|
1077
abis/mining.abi.json
Normal file
1077
abis/mining.abi.json
Normal file
File diff suppressed because it is too large
Load Diff
252
abis/swap.abi.json
Normal file
252
abis/swap.abi.json
Normal file
@ -0,0 +1,252 @@
|
||||
[
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_torn",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_miner",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "_miningCap",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "_initialLiquidity",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "constructor"
|
||||
},
|
||||
{
|
||||
"anonymous": false,
|
||||
"inputs": [
|
||||
{
|
||||
"indexed": false,
|
||||
"internalType": "uint256",
|
||||
"name": "newWeight",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "PoolWeightUpdated",
|
||||
"type": "event"
|
||||
},
|
||||
{
|
||||
"anonymous": false,
|
||||
"inputs": [
|
||||
{
|
||||
"indexed": true,
|
||||
"internalType": "address",
|
||||
"name": "recipient",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"indexed": false,
|
||||
"internalType": "uint256",
|
||||
"name": "pTORN",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"indexed": false,
|
||||
"internalType": "uint256",
|
||||
"name": "TORN",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "Swap",
|
||||
"type": "event"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "DURATION",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "initialLiquidity",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "liquidity",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "miner",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "poolWeight",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "node",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "resolve",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "startTimestamp",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "tokensSold",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "torn",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "contract IERC20",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "recipient",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "amount",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "swap",
|
||||
"outputs": [],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "amount",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "getExpectedReturn",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "tornVirtualBalance",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "newWeight",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "setPoolWeight",
|
||||
"outputs": [],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
}
|
||||
]
|
498
abis/tornadoABI.json
Normal file
498
abis/tornadoABI.json
Normal file
@ -0,0 +1,498 @@
|
||||
[
|
||||
{
|
||||
"constant": false,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "_newOperator",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "changeOperator",
|
||||
"outputs": [],
|
||||
"payable": false,
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "nullifierHashes",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bool",
|
||||
"name": "",
|
||||
"type": "bool"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": false,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes",
|
||||
"name": "_proof",
|
||||
"type": "bytes"
|
||||
},
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_root",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_nullifierHash",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "address payable",
|
||||
"name": "_recipient",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "address payable",
|
||||
"name": "_relayer",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "_fee",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "_refund",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "withdraw",
|
||||
"outputs": [],
|
||||
"payable": true,
|
||||
"stateMutability": "payable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [],
|
||||
"name": "verifier",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "contract IVerifier",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_left",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_right",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "hashLeftRight",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "pure",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [],
|
||||
"name": "FIELD_SIZE",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [],
|
||||
"name": "levels",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint32",
|
||||
"name": "",
|
||||
"type": "uint32"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [],
|
||||
"name": "operator",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_root",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "isKnownRoot",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bool",
|
||||
"name": "",
|
||||
"type": "bool"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "commitments",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bool",
|
||||
"name": "",
|
||||
"type": "bool"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [],
|
||||
"name": "denomination",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [],
|
||||
"name": "currentRootIndex",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint32",
|
||||
"name": "",
|
||||
"type": "uint32"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": false,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "_newVerifier",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "updateVerifier",
|
||||
"outputs": [],
|
||||
"payable": false,
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": false,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_commitment",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "deposit",
|
||||
"outputs": [],
|
||||
"payable": true,
|
||||
"stateMutability": "payable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [],
|
||||
"name": "getLastRoot",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "roots",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [],
|
||||
"name": "ROOT_HISTORY_SIZE",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint32",
|
||||
"name": "",
|
||||
"type": "uint32"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_nullifierHash",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "isSpent",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bool",
|
||||
"name": "",
|
||||
"type": "bool"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "zeros",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [],
|
||||
"name": "ZERO_VALUE",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "filledSubtrees",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"constant": true,
|
||||
"inputs": [],
|
||||
"name": "nextIndex",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "uint32",
|
||||
"name": "",
|
||||
"type": "uint32"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract IVerifier",
|
||||
"name": "_verifier",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "_denomination",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint32",
|
||||
"name": "_merkleTreeHeight",
|
||||
"type": "uint32"
|
||||
},
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "_operator",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"payable": false,
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "constructor"
|
||||
},
|
||||
{
|
||||
"anonymous": false,
|
||||
"inputs": [
|
||||
{
|
||||
"indexed": true,
|
||||
"internalType": "bytes32",
|
||||
"name": "commitment",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"indexed": false,
|
||||
"internalType": "uint32",
|
||||
"name": "leafIndex",
|
||||
"type": "uint32"
|
||||
},
|
||||
{
|
||||
"indexed": false,
|
||||
"internalType": "uint256",
|
||||
"name": "timestamp",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "Deposit",
|
||||
"type": "event"
|
||||
},
|
||||
{
|
||||
"anonymous": false,
|
||||
"inputs": [
|
||||
{
|
||||
"indexed": false,
|
||||
"internalType": "address",
|
||||
"name": "to",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"indexed": false,
|
||||
"internalType": "bytes32",
|
||||
"name": "nullifierHash",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"indexed": true,
|
||||
"internalType": "address",
|
||||
"name": "relayer",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"indexed": false,
|
||||
"internalType": "uint256",
|
||||
"name": "fee",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "Withdrawal",
|
||||
"type": "event"
|
||||
}
|
||||
]
|
171
abis/tornadoProxyABI.json
Normal file
171
abis/tornadoProxyABI.json
Normal file
@ -0,0 +1,171 @@
|
||||
[
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_tornadoTrees",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "_governance",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "contract ITornado[]",
|
||||
"name": "_instances",
|
||||
"type": "address[]"
|
||||
}
|
||||
],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "constructor"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "governance",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract ITornado",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "instances",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bool",
|
||||
"name": "",
|
||||
"type": "bool"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "node",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "resolve",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "tornadoTrees",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "contract ITornadoTrees",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract ITornado",
|
||||
"name": "tornado",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "commitment",
|
||||
"type": "bytes32"
|
||||
}
|
||||
],
|
||||
"name": "deposit",
|
||||
"outputs": [],
|
||||
"stateMutability": "payable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract ITornado",
|
||||
"name": "instance",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "bool",
|
||||
"name": "update",
|
||||
"type": "bool"
|
||||
}
|
||||
],
|
||||
"name": "updateInstances",
|
||||
"outputs": [],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "contract ITornado",
|
||||
"name": "tornado",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "bytes",
|
||||
"name": "proof",
|
||||
"type": "bytes"
|
||||
},
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "root",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "bytes32",
|
||||
"name": "nullifierHash",
|
||||
"type": "bytes32"
|
||||
},
|
||||
{
|
||||
"internalType": "address payable",
|
||||
"name": "recipient",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "address payable",
|
||||
"name": "relayer",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "fee",
|
||||
"type": "uint256"
|
||||
},
|
||||
{
|
||||
"internalType": "uint256",
|
||||
"name": "refund",
|
||||
"type": "uint256"
|
||||
}
|
||||
],
|
||||
"name": "withdraw",
|
||||
"outputs": [],
|
||||
"stateMutability": "payable",
|
||||
"type": "function"
|
||||
}
|
||||
]
|
app.js (new file)
@@ -0,0 +1 @@
module.exports = require('./src/server')

cache/accounts_farmer_1.json (new file, vendored, 85902 lines)
File diff suppressed because it is too large.

cache/accounts_farmer_5.json (new file, vendored, 5878 lines)
File diff suppressed because it is too large.
docker-compose.yml
@@ -1,12 +1,14 @@
version: "2"
version: '2'

services:
  redis:
    image: redis
    restart: always
    command: [redis-server, --appendonly, "yes"]
    command: [redis-server, --appendonly, 'yes']
    volumes:
      - redis:/data
    ports:
      - '127.0.0.1:6379:6379'

  nginx:
    image: nginx:alpine

@@ -43,15 +45,14 @@ services:
      - nginx
      - dockergen

  # ---------------------- ETH Mainnet ----------------------- #
  # ---------------------- ETH ----------------------- #

  eth-server:
    build: .
    image: tornadocash/relayer:mainnet-v4
    profiles: ["eth"]
    restart: always
    command: server
    env_file: .env.eth
    env_file: .env
    environment:
      NET_ID: 1
      REDIS_URL: redis://redis/0

@@ -60,10 +61,9 @@ services:

  eth-treeWatcher:
    image: tornadocash/relayer:mainnet-v4
    profiles: ["eth"]
    restart: always
    command: treeWatcher
    env_file: .env.eth
    env_file: .env
    environment:
      NET_ID: 1
      REDIS_URL: redis://redis/0

@@ -71,10 +71,9 @@ services:

  eth-priceWatcher:
    image: tornadocash/relayer:mainnet-v4
    profiles: ["eth"]
    restart: always
    command: priceWatcher
    env_file: .env.eth
    env_file: .env
    environment:
      NET_ID: 1
      REDIS_URL: redis://redis/0

@@ -82,10 +81,9 @@ services:

  eth-healthWatcher:
    image: tornadocash/relayer:mainnet-v4
    profiles: ["eth"]
    restart: always
    command: healthWatcher
    env_file: .env.eth
    env_file: .env
    environment:
      NET_ID: 1
      REDIS_URL: redis://redis/0

@@ -93,24 +91,21 @@ services:

  eth-worker1:
    image: tornadocash/relayer:mainnet-v4
    profiles: ["eth"]
    restart: always
    command: worker
    env_file: .env.eth
    env_file: .env
    environment:
      NET_ID: 1
      REDIS_URL: redis://redis/0
    depends_on: [redis, eth-server]

  # # This is additional worker for ethereum mainnet
  # # So you can process transactions from multiple addresses, but before it you need to set up those addresses as workers
  # eth-worker2:
  # worker2:
  #   image: tornadocash/relayer:mainnet-v4
  #   profiles: [ 'eth' ]
  #   restart: always
  #   command: worker
  #   env_file: .env2.eth
  #   env_file: .env
  #   environment:
  #     PRIVATE_KEY: qwe
  #     REDIS_URL: redis://redis/0

  # # this container will proxy *.onion domain to the server container
@@ -129,340 +124,6 @@ services:
  # ...
  # -----END RSA PRIVATE KEY-----

  # # auto update docker containers when new image is pushed to docker hub (be careful with that)
  # watchtower:
  #   image: v2tec/watchtower
  #   restart: always
  #   volumes:
  #     - /var/run/docker.sock:/var/run/docker.sock

  # # this container will send Telegram notifications when other containers are stopped/restarted
  # # it's best to run this container on some other instance, otherwise it can't notify if the whole instance goes down
  # notifier:
  #   image: poma/docker-telegram-notifier
  #   restart: always
  #   volumes:
  #     - /var/run/docker.sock:/var/run/docker.sock:ro
  #   environment:
  #     # How to create bot: https://core.telegram.org/bots#3-how-do-i-create-a-bot
  #     # How to get chat id: https://stackoverflow.com/questions/32423837/telegram-bot-how-to-get-a-group-chat-id/32572159#32572159
  #     TELEGRAM_NOTIFIER_BOT_TOKEN: ...
  #     TELEGRAM_NOTIFIER_CHAT_ID: ...

  # # this container will send Telegram notifications if specified address doesn't have enough funds
  # monitor_mainnet:
  #   image: peppersec/monitor_eth
  #   restart: always
  #   environment:
  #     TELEGRAM_NOTIFIER_BOT_TOKEN: ...
  #     TELEGRAM_NOTIFIER_CHAT_ID: ...
  #     ADDRESS: '0x0000000000000000000000000000000000000000'
  #     THRESHOLD: 0.5 # ETH
  #     RPC_URL: https://mainnet.infura.io
  #     BLOCK_EXPLORER: etherscan.io

  # -------------------------------------------------- #

  # ---------------------- BSC (Binance Smart Chain) ----------------------- #

  bsc-server:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["bsc"]
    restart: always
    command: server
    env_file: .env.bsc
    environment:
      NET_ID: 56
      REDIS_URL: redis://redis/1
      nginx_proxy_read_timeout: 600
    depends_on: [redis]

  bsc-healthWatcher:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["bsc"]
    restart: always
    command: healthWatcher
    env_file: .env.bsc
    environment:
      NET_ID: 56
      REDIS_URL: redis://redis/1
    depends_on: [redis, bsc-server]

  bsc-worker1:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["bsc"]
    restart: always
    command: worker
    env_file: .env.bsc
    environment:
      NET_ID: 56
      REDIS_URL: redis://redis/1
    depends_on: [redis, bsc-server]

  # -------------------------------------------------- #

  # ---------------------- Polygon (MATIC) --------------------- #

  polygon-server:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["polygon"]
    restart: always
    command: server
    env_file: .env.polygon
    environment:
      NET_ID: 137
      REDIS_URL: redis://redis/2
      nginx_proxy_read_timeout: 600
    depends_on: [redis]

  polygon-healthWatcher:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["polygon"]
    restart: always
    command: healthWatcher
    env_file: .env.polygon
    environment:
      NET_ID: 137
      REDIS_URL: redis://redis/2
    depends_on: [redis, polygon-server]

  polygon-worker1:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["polygon"]
    restart: always
    command: worker
    env_file: .env.polygon
    environment:
      NET_ID: 137
      REDIS_URL: redis://redis/2
    depends_on: [redis, polygon-server]

  # -------------------------------------------------- #

  # ---------------------- Gnosis (XDAI) ---------------------- #

  gnosis-server:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["gnosis"]
    restart: always
    command: server
    env_file: .env.gnosis
    environment:
      NET_ID: 100
      REDIS_URL: redis://redis/3
      nginx_proxy_read_timeout: 600
    depends_on: [redis]

  gnosis-healthWatcher:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["gnosis"]
    restart: always
    command: healthWatcher
    env_file: .env.gnosis
    environment:
      NET_ID: 100
      REDIS_URL: redis://redis/3
    depends_on: [redis, gnosis-server]

  gnosis-worker1:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["gnosis"]
    restart: always
    command: worker
    env_file: .env.gnosis
    environment:
      NET_ID: 100
      REDIS_URL: redis://redis/3
    depends_on: [redis, gnosis-server]

  # -------------------------------------------------- #

  # ---------------------- AVAX ---------------------- #

  avax-server:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["avax"]
    restart: always
    command: server
    env_file: .env.avax
    environment:
      NET_ID: 43114
      REDIS_URL: redis://redis/4
      nginx_proxy_read_timeout: 600
    depends_on: [redis]

  avax-healthWatcher:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["avax"]
    restart: always
    command: healthWatcher
    env_file: .env.avax
    environment:
      NET_ID: 43114
      REDIS_URL: redis://redis/4
    depends_on: [redis, avax-server]

  avax-worker1:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["avax"]
    restart: always
    command: worker
    env_file: .env.avax
    environment:
      NET_ID: 43114
      REDIS_URL: redis://redis/4
    depends_on: [redis, avax-server]

  # -------------------------------------------------- #

  # ---------------------- OP ------------------------ #

  op-server:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["op"]
    restart: always
    command: server
    env_file: .env.op
    environment:
      NET_ID: 10
      REDIS_URL: redis://redis/5
      nginx_proxy_read_timeout: 600
    depends_on: [redis]

  op-healthWatcher:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["op"]
    restart: always
    command: healthWatcher
    env_file: .env.op
    environment:
      NET_ID: 10
      REDIS_URL: redis://redis/5
    depends_on: [redis, op-server]

  op-worker1:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["op"]
    restart: always
    command: worker
    env_file: .env.op
    environment:
      NET_ID: 10
      REDIS_URL: redis://redis/5
    depends_on: [redis, op-server]

  # -------------------------------------------------- #

  # ---------------------- Arbitrum ----------------------- #

  arb-server:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["arb"]
    restart: always
    command: server
    env_file: .env.arb
    environment:
      NET_ID: 42161
      REDIS_URL: redis://redis/6
      nginx_proxy_read_timeout: 600
    depends_on: [redis]

  arb-healthWatcher:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["arb"]
    restart: always
    command: healthWatcher
    env_file: .env.arb
    environment:
      NET_ID: 42161
      REDIS_URL: redis://redis/6
    depends_on: [redis, arb-server]

  arb-worker1:
    image: tornadocash/relayer:sidechain-v5
    profiles: ["arb"]
    restart: always
    command: worker
    env_file: .env.arb
    environment:
      NET_ID: 42161
      REDIS_URL: redis://redis/6
    depends_on: [redis, arb-server]

  # -------------------------------------------------- #

  # ---------------------- Goerli (Ethereum Testnet) ---------------------- #

  goerli-server:
    image: tornadocash/relayer:mainnet-v4
    profiles: ["geth"]
    restart: always
    command: server
    env_file: .env.goerli
    environment:
      NET_ID: 5
      REDIS_URL: redis://redis/7
      nginx_proxy_read_timeout: 600
    depends_on: [redis]

  goerli-treeWatcher:
    image: tornadocash/relayer:mainnet-v4
    profiles: ["goerli"]
    restart: always
    command: treeWatcher
    env_file: .env.goerli
    environment:
      NET_ID: 5
      REDIS_URL: redis://redis/7
    depends_on: [redis, goerli-server]

  goerli-priceWatcher:
    image: tornadocash/relayer:mainnet-v4
    profiles: ["goerli"]
    restart: always
    command: priceWatcher
    env_file: .env.goerli
    environment:
      NET_ID: 5
      REDIS_URL: redis://redis/7
    depends_on: [redis, goerli-server]

  goerli-healthWatcher:
    image: tornadocash/relayer:mainnet-v4
    profiles: ["goerli"]
    restart: always
    command: healthWatcher
    env_file: .env.goerli
    environment:
      NET_ID: 5
      REDIS_URL: redis://redis/7
    depends_on: [redis, goerli-server]

  goerli-worker1:
    image: tornadocash/relayer:mainnet-v4
    profiles: ["goerli"]
    restart: always
    command: worker
    env_file: .env.goerli
    environment:
      NET_ID: 5
      REDIS_URL: redis://redis/7
    depends_on: [redis, goerli-server]
  # -------------------------------------------------- #

  # ---------------------- Tornado Nova (Gnosis Chain) ----------------------- #

  server:
    image: tornadocash/relayer:nova
    profiles: ["nova"]
    restart: always
    command: start:prod
    env_file: .env.nova
    environment:
      REDIS_URL: redis://redis/8
      nginx_proxy_read_timeout: 600
    depends_on: [redis]

volumes:
  conf:
  vhost:

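To bring the mainnet stack defined above up on a fresh host, the README steps earlier in this diff boil down to roughly the following (a sketch; adjust paths and the `.env` contents to your setup):

```bash
cp .env.example .env                # then fill PRIVATE_KEY, VIRTUAL_HOST, LETSENCRYPT_HOST and the RPC URLs
npm run build                       # build the relayer image
docker-compose up -d                # start redis, nginx, the server, the watchers and the worker
docker-compose logs -f eth-server   # follow the relayer logs
```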
120
install.sh
120
install.sh
@ -1,120 +0,0 @@
#!/bin/bash
# Script must be run as root
if [ "$EUID" -ne 0 ];
  then echo "Please run as root";
  exit 1;
fi;

relayer_soft_git_repo="https://git.tornado.ws/tornadocash/tornado-relayer";

user_home_dir=$(eval echo ~$USER);
relayer_folder="$user_home_dir/tornado-relayer";
relayer_mainnet_soft_source_folder="$relayer_folder/mainnet-soft-source";
relayer_sidechains_soft_source_folder="$relayer_folder/sidechains-soft-source";
nova_relayer_soft_source_folder="$relayer_folder/nova-soft-source";
script_log_file="/tmp/tornado-relayer-installation.log"
if [ -f $script_log_file ]; then rm $script_log_file; fi;

function echo_log_err(){
  echo $1 1>&2;
  echo -e "$1\n" &>> $script_log_file;
}

function echo_log_err_and_exit(){
  echo_log_err "$1";
  exit 1;
}

function is_package_installed(){
  if [ $(dpkg-query -W -f='${Status}' $1 2>/dev/null | grep -c "ok installed") -eq 0 ]; then return 1; else return 0; fi;
}

function install_requred_packages(){
  apt update &>> $script_log_file;

  requred_packages=("curl" "git-all" "ufw" "nginx");
  local package;
  for package in ${requred_packages[@]}; do
    if ! is_package_installed $package; then
      # Stop the apache process, because Debian configures the nginx package right during installation
      if [ $package = "nginx" ]; then systemctl stop apache2; fi;
      apt install --yes --force-yes -o DPkg::Options::="--force-confold" $package &>> $script_log_file;
      if ! is_package_installed $package; then
        echo_log_err_and_exit "Error: cannot install \"$package\" package";
      fi;
    fi;
  done;

  echo -e "\nAll required packages installed successfully";
}

function install_node(){
  curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash;
  . ~/.nvm/nvm.sh;
  . ~/.profile;
  . ~/.bashrc;
  nvm install 14.21.3;
}

function install_repositories(){
  git clone $relayer_soft_git_repo -b main $relayer_folder
  git clone $relayer_soft_git_repo -b mainnet-v4 $relayer_mainnet_soft_source_folder;
  git clone $relayer_soft_git_repo -b sidechain-v5 $relayer_sidechains_soft_source_folder;
  git clone $relayer_soft_git_repo -b nova $nova_relayer_soft_source_folder;
}

function install_docker_utilities(){
  local kernel_name=$(uname -s);
  local processor_type=$(uname -m);

  curl -SL https://github.com/docker/compose/releases/download/v2.16.0/docker-compose-$kernel_name-$processor_type -o /usr/local/bin/docker-compose;
  chmod +x /usr/local/bin/docker-compose;

  curl -s https://get.docker.com | bash;
}

function configure_firewall(){
  ufw allow https/tcp;
  ufw allow http/tcp;
  ufw insert 1 allow OpenSSH;
  echo "y" | ufw enable;
}

function configure_nginx_reverse_proxy(){
  systemctl stop apache2;

  cp $relayer_folder/tornado.conf /etc/nginx/sites-available/default;
  echo "stream { map_hash_bucket_size 128; map_hash_max_size 128; include /etc/nginx/conf.d/streams/*.conf; }" >> /etc/nginx/nginx.conf;
  mkdir /etc/nginx/conf.d/streams;
  cp $relayer_folder/tornado-stream.conf /etc/nginx/conf.d/streams/tornado-stream.conf;

  systemctl restart nginx;
  systemctl stop nginx;
}

function build_relayer_docker_containers(){
  cd $relayer_mainnet_soft_source_folder && npm run build;
  cd $relayer_sidechains_soft_source_folder && npm run build;
  cd $nova_relayer_soft_source_folder && npm run build:docker;
}

function prepare_environments(){
  cp $relayer_mainnet_soft_source_folder/.env.example $relayer_folder/.env.eth;
  cp $nova_relayer_soft_source_folder/.env.example $relayer_folder/.env.nova;
  tee $relayer_folder/.env.bsc $relayer_folder/.env.arb $relayer_folder/.env.goerli $relayer_folder/.env.polygon $relayer_folder/.env.op \
    $relayer_folder/.env.avax $relayer_folder/.env.gnosis < $relayer_sidechains_soft_source_folder/.env.example > /dev/null;
}

function main(){
  install_requred_packages;
  install_node;
  install_repositories;
  configure_firewall;
  configure_nginx_reverse_proxy;
  install_docker_utilities;
  build_relayer_docker_containers;
  prepare_environments;
  cd $relayer_folder;
}

main;
647134
keys/TreeUpdate.json
Normal file
647134
keys/TreeUpdate.json
Normal file
File diff suppressed because one or more lines are too long
BIN
keys/TreeUpdate_proving_key.bin
Normal file
BIN
keys/TreeUpdate_proving_key.bin
Normal file
Binary file not shown.
1
monitoring/.POSTGRES_PASSWORD
Normal file
1
monitoring/.POSTGRES_PASSWORD
Normal file
@ -0,0 +1 @@
zabbix
1
monitoring/.POSTGRES_USER
Normal file
1
monitoring/.POSTGRES_USER
Normal file
@ -0,0 +1 @@
zabbix
37
monitoring/.env_agent
Normal file
37
monitoring/.env_agent
Normal file
@ -0,0 +1,37 @@
|
||||
ZBX_HOSTNAME=Zabbix
|
||||
# ZBX_SOURCEIP=
|
||||
# ZBX_DEBUGLEVEL=3
|
||||
# ZBX_ENABLEREMOTECOMMANDS=0 # Deprecated since 5.0.0
|
||||
# ZBX_LOGREMOTECOMMANDS=0
|
||||
# ZBX_HOSTINTERFACE= # Available since 4.4.0
|
||||
# ZBX_HOSTINTERFACEITEM= # Available since 4.4.0
|
||||
# ZBX_SERVER_HOST=10.110.0.5
|
||||
# ZBX_PASSIVE_ALLOW=true
|
||||
# ZBX_PASSIVESERVERS=
|
||||
# ZBX_ACTIVE_ALLOW=true
|
||||
# ZBX_ACTIVESERVERS=
|
||||
# ZBX_LISTENIP=
|
||||
# ZBX_STARTAGENTS=3
|
||||
# ZBX_HOSTNAMEITEM=system.hostname
|
||||
# ZBX_METADATA=
|
||||
# ZBX_METADATAITEM=
|
||||
# ZBX_REFRESHACTIVECHECKS=120
|
||||
# ZBX_BUFFERSEND=5
|
||||
# ZBX_BUFFERSIZE=100
|
||||
# ZBX_MAXLINESPERSECOND=20
|
||||
# ZBX_ALIAS=""
|
||||
# ZBX_TIMEOUT=3
|
||||
# ZBX_UNSAFEUSERPARAMETERS=0
|
||||
# ZBX_LOADMODULE="dummy1.so,dummy2.so,dummy10.so"
|
||||
# ZBX_TLSCONNECT=unencrypted
|
||||
# ZBX_TLSACCEPT=unencrypted
|
||||
# ZBX_TLSCAFILE=
|
||||
# ZBX_TLSCRLFILE=
|
||||
# ZBX_TLSSERVERCERTISSUER=
|
||||
# ZBX_TLSSERVERCERTSUBJECT=
|
||||
# ZBX_TLSCERTFILE=
|
||||
# ZBX_TLSKEYFILE=
|
||||
# ZBX_TLSPSKIDENTITY=
|
||||
# ZBX_TLSPSKFILE=
|
||||
# ZBX_DENYKEY=system.run[*]
|
||||
# ZBX_ALLOWKEY=
|
9
monitoring/.env_db_pgsql
Normal file
9
monitoring/.env_db_pgsql
Normal file
@ -0,0 +1,9 @@
# DB_SERVER_HOST=postgres-server
# DB_SERVER_PORT=5432
# POSTGRES_USER=zabbix
POSTGRES_USER_FILE=/run/secrets/POSTGRES_USER
# POSTGRES_PASSWORD=zabbix
POSTGRES_PASSWORD_FILE=/run/secrets/POSTGRES_PASSWORD
POSTGRES_DB=zabbix
# DB_SERVER_SCHEMA=public
# ENABLE_TIMESCALEDB=true
60
monitoring/.env_srv
Normal file
60
monitoring/.env_srv
Normal file
@ -0,0 +1,60 @@
|
||||
# ZBX_LISTENIP=
|
||||
# ZBX_HISTORYSTORAGEURL=http://elasticsearch:9200/ # Available since 3.4.5
|
||||
# ZBX_HISTORYSTORAGETYPES=uint,dbl,str,log,text # Available since 3.4.5
|
||||
# ZBX_DBTLSCONNECT=required # Available since 5.0.0
|
||||
# ZBX_DBTLSCAFILE=/run/secrets/root-ca.pem # Available since 5.0.0
|
||||
# ZBX_DBTLSCERTFILE=/run/secrets/client-cert.pem # Available since 5.0.0
|
||||
# ZBX_DBTLSKEYFILE=/run/secrets/client-key.pem # Available since 5.0.0
|
||||
# ZBX_DBTLSCIPHER= # Available since 5.0.0
|
||||
# ZBX_DBTLSCIPHER13= # Available since 5.0.0
|
||||
# ZBX_DEBUGLEVEL=3
|
||||
# ZBX_STARTPOLLERS=5
|
||||
# ZBX_IPMIPOLLERS=0
|
||||
# ZBX_STARTPREPROCESSORS=3 # Available since 3.4.0
|
||||
# ZBX_STARTPOLLERSUNREACHABLE=1
|
||||
# ZBX_STARTTRAPPERS=5
|
||||
# ZBX_STARTPINGERS=1
|
||||
# ZBX_STARTDISCOVERERS=1
|
||||
# ZBX_STARTHTTPPOLLERS=1
|
||||
# ZBX_STARTTIMERS=1
|
||||
# ZBX_STARTESCALATORS=1
|
||||
# ZBX_STARTALERTERS=3 # Available since 3.4.0
|
||||
# ZBX_JAVAGATEWAY_ENABLE=true
|
||||
# ZBX_JAVAGATEWAY=zabbix-java-gateway
|
||||
# ZBX_JAVAGATEWAYPORT=10052
|
||||
# ZBX_STARTJAVAPOLLERS=5
|
||||
# ZBX_STARTVMWARECOLLECTORS=0
|
||||
# ZBX_VMWAREFREQUENCY=60
|
||||
# ZBX_VMWAREPERFFREQUENCY=60
|
||||
# ZBX_VMWARECACHESIZE=8M
|
||||
# ZBX_VMWARETIMEOUT=10
|
||||
# ZBX_ENABLE_SNMP_TRAPS=true
|
||||
# ZBX_SOURCEIP=
|
||||
# ZBX_HOUSEKEEPINGFREQUENCY=1
|
||||
# ZBX_MAXHOUSEKEEPERDELETE=5000
|
||||
# ZBX_SENDERFREQUENCY=30
|
||||
# ZBX_CACHESIZE=8M
|
||||
# ZBX_CACHEUPDATEFREQUENCY=60
|
||||
# ZBX_STARTDBSYNCERS=4
|
||||
# ZBX_HISTORYCACHESIZE=16M
|
||||
# ZBX_HISTORYINDEXCACHESIZE=4M
|
||||
# ZBX_TRENDCACHESIZE=4M
|
||||
# ZBX_VALUECACHESIZE=8M
|
||||
# ZBX_TIMEOUT=4
|
||||
# ZBX_TRAPPERTIMEOUT=300
|
||||
# ZBX_UNREACHABLEPERIOD=45
|
||||
# ZBX_UNAVAILABLEDELAY=60
|
||||
# ZBX_UNREACHABLEDELAY=15
|
||||
# ZBX_LOGSLOWQUERIES=3000
|
||||
# ZBX_EXPORTFILESIZE=
|
||||
# ZBX_STARTPROXYPOLLERS=1
|
||||
# ZBX_PROXYCONFIGFREQUENCY=3600
|
||||
# ZBX_PROXYDATAFREQUENCY=1
|
||||
# ZBX_LOADMODULE="dummy1.so,dummy2.so,dummy10.so"
|
||||
# ZBX_TLSCAFILE=
|
||||
# ZBX_TLSCRLFILE=
|
||||
# ZBX_TLSCERTFILE=
|
||||
# ZBX_TLSKEYFILE=
|
||||
# ZBX_VAULTDBPATH=
|
||||
# ZBX_VAULTURL=https://127.0.0.1:8200
|
||||
# VAULT_TOKEN=
|
26
monitoring/.env_web
Normal file
26
monitoring/.env_web
Normal file
@ -0,0 +1,26 @@
|
||||
# ZBX_SERVER_HOST=zabbix-server
|
||||
# ZBX_SERVER_PORT=10051
|
||||
# ZBX_SERVER_NAME=Monitoring
|
||||
# ZBX_DB_ENCRYPTION=true # Available since 5.0.0
|
||||
# ZBX_DB_KEY_FILE=/run/secrets/client-key.pem # Available since 5.0.0
|
||||
# ZBX_DB_CERT_FILE=/run/secrets/client-cert.pem # Available since 5.0.0
|
||||
# ZBX_DB_CA_FILE=/run/secrets/root-ca.pem # Available since 5.0.0
|
||||
# ZBX_DB_VERIFY_HOST=false # Available since 5.0.0
|
||||
# ZBX_DB_CIPHER_LIST= # Available since 5.0.0
|
||||
# ZBX_VAULTDBPATH=
|
||||
# ZBX_VAULTURL=https://127.0.0.1:8200
|
||||
# VAULT_TOKEN=
|
||||
# ZBX_HISTORYSTORAGEURL=http://elasticsearch:9200/ # Available since 3.4.5
|
||||
# ZBX_HISTORYSTORAGETYPES=['uint', 'dbl', 'str', 'text', 'log'] # Available since 3.4.5
|
||||
# ENABLE_WEB_ACCESS_LOG=true
|
||||
# ZBX_MAXEXECUTIONTIME=600
|
||||
# ZBX_MEMORYLIMIT=128M
|
||||
# ZBX_POSTMAXSIZE=16M
|
||||
# ZBX_UPLOADMAXFILESIZE=2M
|
||||
# ZBX_MAXINPUTTIME=300
|
||||
# ZBX_SESSION_NAME=zbx_sessionid
|
||||
# Timezone one of: http://php.net/manual/en/timezones.php
|
||||
# PHP_TZ=Europe/Riga
|
||||
# ZBX_DENY_GUI_ACCESS=false
|
||||
# ZBX_GUI_ACCESS_IP_RANGE=['127.0.0.1']
|
||||
# ZBX_GUI_WARNING_MSG=Zabbix is under maintenance.
|
67
monitoring/README.md
Normal file
67
monitoring/README.md
Normal file
@ -0,0 +1,67 @@
# Installing the Zabbix server

Change the default passwords and ports and set the listen IP (ports `8080/tcp` and `10051/tcp` will be open on all interfaces; use a firewall or specify the address of the required interface), then run:

```bash
wget https://github.com/tornadocash/tornado-relayer/raw/master/monitoring/zabbix.tar.gz
mkdir $HOME/monitoring/
tar -xzf zabbix.tar.gz -C $HOME/monitoring/
cd $HOME/monitoring/
docker-compose up -d
```

# Installing the Zabbix agent

Download the package from the repository [https://repo.zabbix.com/zabbix/5.2/ubuntu/pool/main/z/zabbix/](https://repo.zabbix.com/zabbix/5.2/ubuntu/pool/main/z/zabbix/) and run:

```bash
sudo dpkg -i zabbix-agent_5.2.*.deb
sudo usermod -aG docker zabbix
```

Change the default values in `/etc/zabbix/zabbix_agent2.conf`:

- `Hostname`: the same as in the zabbix-server web interface;
- `Server` and `ServerActive`: set the zabbix server IP or DNS name;
- `ListenIP`: a local network IP reachable from the zabbix server, or set firewall rules to restrict access to port `10050`;
- uncomment `Plugins.Docker.Endpoint=unix:///var/run/docker.sock`.

Then run:

```bash
sudo systemctl enable zabbix-agent2.service
sudo systemctl restart zabbix-agent2.service
```

# Adding the host

Log into your Zabbix server (default login and password: `Admin` / `zabbix`), click on the Configuration tab and then the Hosts tab. Click the Create host button near the top right corner. In the resulting page, change the Host name and IP ADDRESS sections to match the information for your remote server. Set the `{$URL}` macro to the relayer host, for example `http://localhost/v1/status` or `https://domain.name/v1/status`.

# Import templates

Import templates using the WebUI:

- [Docker-template.yaml](/monitoring/templates/Docker-template.yaml);
- [Tornado-relayer-template.yaml](/monitoring/templates/Tornado-relayer-template.yaml).

Link the templates with the added host. It is also recommended to link the `Linux CPU by Zabbix agent`, `Linux filesystems by Zabbix agent` and `Linux memory by Zabbix agent` templates to the host.

# Alerts

In WebUI - Administration -> Media types -> Telegram:

```
https://git.zabbix.com/projects/ZBX/repos/zabbix/browse/templates/media/telegram

1. Register bot: send "/newbot" to @BotFather and follow instructions
2. Copy and paste the obtained token into the "Token" field above
3. If you want to send personal notifications, you need to get chat id of the user you want to send messages to:
 3.1. Send "/getid" to "@myidbot" in Telegram messenger
 3.2. Copy returned chat id and save it in the "Telegram Webhook" media for the user
 3.3. Ask the user to send "/start" to your bot (Telegram bot won't send anything to the user without it)
4. If you want to send group notifications, you need to get group id of the group you want to send messages to:
 4.1. Add "@myidbot" to your group
 4.2. Send "/getgroupid@myidbot" in your group
 4.3. Copy the returned group id and save it in the "Telegram Webhook" media for the user you created for group notifications
 4.4. Send "/start@your_bot_name_here" in your group (Telegram bot won't send anything to the group without it)
```
186
monitoring/docker-compose.yml
Normal file
186
monitoring/docker-compose.yml
Normal file
@ -0,0 +1,186 @@
|
||||
# Restrict access to 10051/tcp on public ip
|
||||
|
||||
version: '3.5'
|
||||
services:
|
||||
zabbix-server:
|
||||
image: zabbix/zabbix-server-pgsql:alpine-5.2-latest
|
||||
restart: always
|
||||
ports:
|
||||
- '10051:10051'
|
||||
volumes:
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
- ./zbx_env/usr/lib/zabbix/alertscripts:/usr/lib/zabbix/alertscripts:ro
|
||||
- ./zbx_env/usr/lib/zabbix/externalscripts:/usr/lib/zabbix/externalscripts:ro
|
||||
- ./zbx_env/var/lib/zabbix/export:/var/lib/zabbix/export:rw
|
||||
- ./zbx_env/var/lib/zabbix/modules:/var/lib/zabbix/modules:ro
|
||||
- ./zbx_env/var/lib/zabbix/enc:/var/lib/zabbix/enc:ro
|
||||
- ./zbx_env/var/lib/zabbix/ssh_keys:/var/lib/zabbix/ssh_keys:ro
|
||||
- ./zbx_env/var/lib/zabbix/mibs:/var/lib/zabbix/mibs:ro
|
||||
- ./zbx_env/var/lib/zabbix/snmptraps:/var/lib/zabbix/snmptraps:ro
|
||||
ulimits:
|
||||
nproc: 65535
|
||||
nofile:
|
||||
soft: 20000
|
||||
hard: 40000
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: '0.70'
|
||||
memory: 1G
|
||||
reservations:
|
||||
cpus: '0.5'
|
||||
memory: 512M
|
||||
env_file:
|
||||
- .env_db_pgsql
|
||||
- .env_srv
|
||||
secrets:
|
||||
- POSTGRES_USER
|
||||
- POSTGRES_PASSWORD
|
||||
depends_on:
|
||||
- postgres-server
|
||||
networks:
|
||||
zbx_net_backend:
|
||||
aliases:
|
||||
- zabbix-server
|
||||
- zabbix-server-pgsql
|
||||
- zabbix-server-alpine-pgsql
|
||||
- zabbix-server-pgsql-alpine
|
||||
zbx_net_frontend:
|
||||
stop_grace_period: 30s
|
||||
sysctls:
|
||||
- net.ipv4.ip_local_port_range=1024 65000
|
||||
- net.ipv4.conf.all.accept_redirects=0
|
||||
- net.ipv4.conf.all.secure_redirects=0
|
||||
- net.ipv4.conf.all.send_redirects=0
|
||||
labels:
|
||||
com.zabbix.description: 'Zabbix server with PostgreSQL database support'
|
||||
com.zabbix.company: 'Zabbix LLC'
|
||||
com.zabbix.component: 'zabbix-server'
|
||||
com.zabbix.dbtype: 'pgsql'
|
||||
com.zabbix.os: 'alpine'
|
||||
|
||||
zabbix-web:
|
||||
image: zabbix/zabbix-web-nginx-pgsql:alpine-5.2-latest
|
||||
restart: always
|
||||
ports:
|
||||
- '8080:8080'
|
||||
volumes:
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
- ./zbx_env/etc/ssl/nginx:/etc/ssl/nginx:ro
|
||||
- ./zbx_env/usr/share/zabbix/modules/:/usr/share/zabbix/modules/:ro
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: '0.70'
|
||||
memory: 512M
|
||||
reservations:
|
||||
cpus: '0.5'
|
||||
memory: 256M
|
||||
env_file:
|
||||
- .env_db_pgsql
|
||||
- .env_web
|
||||
secrets:
|
||||
- POSTGRES_USER
|
||||
- POSTGRES_PASSWORD
|
||||
depends_on:
|
||||
- postgres-server
|
||||
- zabbix-server
|
||||
healthcheck:
|
||||
test: ['CMD', 'curl', '-f', 'http://localhost:8080/']
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
networks:
|
||||
zbx_net_backend:
|
||||
aliases:
|
||||
- zabbix-web-nginx-pgsql
|
||||
- zabbix-web-nginx-alpine-pgsql
|
||||
- zabbix-web-nginx-pgsql-alpine
|
||||
zbx_net_frontend:
|
||||
stop_grace_period: 10s
|
||||
sysctls:
|
||||
- net.core.somaxconn=65535
|
||||
labels:
|
||||
com.zabbix.description: 'Zabbix frontend on Nginx web-server with PostgreSQL database support'
|
||||
com.zabbix.company: 'Zabbix LLC'
|
||||
com.zabbix.component: 'zabbix-frontend'
|
||||
com.zabbix.webserver: 'nginx'
|
||||
com.zabbix.dbtype: 'pgsql'
|
||||
com.zabbix.os: 'alpine'
|
||||
|
||||
zabbix-agent:
|
||||
image: zabbix/zabbix-agent2:alpine-5.2-latest
|
||||
restart: always
|
||||
volumes:
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
- /etc/timezone:/etc/timezone:ro
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
env_file:
|
||||
- .env_agent
|
||||
privileged: true
|
||||
user: root
|
||||
pid: 'host'
|
||||
networks:
|
||||
zbx_net_backend:
|
||||
aliases:
|
||||
- zabbix-agent
|
||||
- zabbix-agent-passive
|
||||
- zabbix-agent-alpine
|
||||
stop_grace_period: 5s
|
||||
|
||||
postgres-server:
|
||||
image: postgres:alpine
|
||||
restart: always
|
||||
volumes:
|
||||
- ./zbx_env/var/lib/postgresql/data:/var/lib/postgresql/data:rw
|
||||
env_file:
|
||||
- .env_db_pgsql
|
||||
secrets:
|
||||
- POSTGRES_USER
|
||||
- POSTGRES_PASSWORD
|
||||
stop_grace_period: 1m
|
||||
networks:
|
||||
zbx_net_backend:
|
||||
aliases:
|
||||
- postgres-server
|
||||
- pgsql-server
|
||||
- pgsql-database
|
||||
|
||||
portainer:
|
||||
image: portainer/portainer:latest
|
||||
restart: always
|
||||
ports:
|
||||
- '9000:9000'
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
- portainer-data:/data
|
||||
|
||||
networks:
|
||||
zbx_net_frontend:
|
||||
driver: bridge
|
||||
driver_opts:
|
||||
com.docker.network.enable_ipv6: 'false'
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: 172.16.238.0/24
|
||||
zbx_net_backend:
|
||||
driver: bridge
|
||||
driver_opts:
|
||||
com.docker.network.enable_ipv6: 'false'
|
||||
internal: true
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: 172.16.239.0/24
|
||||
|
||||
secrets:
|
||||
POSTGRES_USER:
|
||||
file: ./.POSTGRES_USER
|
||||
POSTGRES_PASSWORD:
|
||||
file: ./.POSTGRES_PASSWORD
|
||||
|
||||
volumes:
|
||||
portainer-data:
|
393
monitoring/templates/Docker-template.yaml
Normal file
393
monitoring/templates/Docker-template.yaml
Normal file
@ -0,0 +1,393 @@
|
||||
zabbix_export:
|
||||
version: '5.2'
|
||||
date: '2021-11-29T12:29:17Z'
|
||||
groups:
|
||||
- name: Docker
|
||||
templates:
|
||||
- template: Docker
|
||||
name: Docker
|
||||
description: |
|
||||
Get Docker engine metrics from plugin for the New Zabbix Agent (zabbix-agent2).
|
||||
|
||||
You can discuss this template or leave feedback on our forum
|
||||
|
||||
Template tooling version used: 0.38
|
||||
groups:
|
||||
- name: Docker
|
||||
applications:
|
||||
- name: Docker
|
||||
- name: 'Zabbix raw items'
|
||||
items:
|
||||
- name: 'Docker: Get containers'
|
||||
key: docker.containers
|
||||
history: '0'
|
||||
trends: '0'
|
||||
value_type: TEXT
|
||||
applications:
|
||||
- name: 'Zabbix raw items'
|
||||
- name: 'Docker: Containers paused'
|
||||
type: DEPENDENT
|
||||
key: docker.containers.paused
|
||||
delay: '0'
|
||||
history: 7d
|
||||
description: 'Total number of containers paused on this host'
|
||||
applications:
|
||||
- name: Docker
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.ContainersPaused
|
||||
master_item:
|
||||
key: docker.info
|
||||
- name: 'Docker: Containers running'
|
||||
type: DEPENDENT
|
||||
key: docker.containers.running
|
||||
delay: '0'
|
||||
history: 7d
|
||||
description: 'Total number of containers running on this host'
|
||||
applications:
|
||||
- name: Docker
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.ContainersRunning
|
||||
master_item:
|
||||
key: docker.info
|
||||
- name: 'Docker: Containers stopped'
|
||||
type: DEPENDENT
|
||||
key: docker.containers.stopped
|
||||
delay: '0'
|
||||
history: 7d
|
||||
description: 'Total number of containers stopped on this host'
|
||||
applications:
|
||||
- name: Docker
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.ContainersStopped
|
||||
master_item:
|
||||
key: docker.info
|
||||
triggers:
|
||||
- expression: '{avg(5m)}>=1'
|
||||
name: 'Docker: containers is stopped'
|
||||
priority: HIGH
|
||||
- name: 'Docker: Containers total'
|
||||
type: DEPENDENT
|
||||
key: docker.containers.total
|
||||
delay: '0'
|
||||
history: 7d
|
||||
description: 'Total number of containers on this host'
|
||||
applications:
|
||||
- name: Docker
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.Containers
|
||||
master_item:
|
||||
key: docker.info
|
||||
- name: 'Docker: Get images'
|
||||
key: docker.images
|
||||
history: '0'
|
||||
trends: '0'
|
||||
status: DISABLED
|
||||
value_type: TEXT
|
||||
applications:
|
||||
- name: 'Zabbix raw items'
|
||||
- name: 'Docker: Get info'
|
||||
key: docker.info
|
||||
history: '0'
|
||||
trends: '0'
|
||||
value_type: TEXT
|
||||
applications:
|
||||
- name: 'Zabbix raw items'
|
||||
- name: 'Docker: Memory total'
|
||||
type: DEPENDENT
|
||||
key: docker.mem.total
|
||||
delay: '0'
|
||||
history: 7d
|
||||
status: DISABLED
|
||||
units: B
|
||||
applications:
|
||||
- name: Docker
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.MemTotal
|
||||
master_item:
|
||||
key: docker.info
|
||||
- name: 'Docker: Ping'
|
||||
key: docker.ping
|
||||
history: 7h
|
||||
applications:
|
||||
- name: Docker
|
||||
valuemap:
|
||||
name: 'Service state'
|
||||
preprocessing:
|
||||
- type: DISCARD_UNCHANGED_HEARTBEAT
|
||||
parameters:
|
||||
- 10m
|
||||
triggers:
|
||||
- expression: '{last()}=0'
|
||||
name: 'Docker: Service is down'
|
||||
priority: AVERAGE
|
||||
manual_close: 'YES'
|
||||
discovery_rules:
|
||||
- name: 'Containers discovery'
|
||||
key: 'docker.containers.discovery[true]'
|
||||
delay: 15m
|
||||
filter:
|
||||
evaltype: AND
|
||||
conditions:
|
||||
- macro: '{#NAME}'
|
||||
value: '{$DOCKER.LLD.FILTER.CONTAINER.MATCHES}'
|
||||
formulaid: A
|
||||
- macro: '{#NAME}'
|
||||
value: '{$DOCKER.LLD.FILTER.CONTAINER.NOT_MATCHES}'
|
||||
operator: NOT_MATCHES_REGEX
|
||||
formulaid: B
|
||||
description: |
|
||||
Discovery for containers metrics
|
||||
|
||||
Parameter:
|
||||
true - Returns all containers
|
||||
false - Returns only running containers
|
||||
item_prototypes:
|
||||
- name: 'Container {#NAME}: Finished at'
|
||||
type: DEPENDENT
|
||||
key: 'docker.container_info.finished["{#NAME}"]'
|
||||
delay: '0'
|
||||
history: 7d
|
||||
value_type: FLOAT
|
||||
units: unixtime
|
||||
application_prototypes:
|
||||
- name: 'Docker: Container {#NAME}'
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.State.FinishedAt
|
||||
- type: DISCARD_UNCHANGED_HEARTBEAT
|
||||
parameters:
|
||||
- 1d
|
||||
master_item:
|
||||
key: 'docker.container_info["{#NAME}"]'
|
||||
- name: 'Container {#NAME}: Restart count'
|
||||
type: DEPENDENT
|
||||
key: 'docker.container_info.restart_count["{#NAME}"]'
|
||||
delay: '0'
|
||||
history: 7d
|
||||
application_prototypes:
|
||||
- name: 'Docker: Container {#NAME}'
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.RestartCount
|
||||
master_item:
|
||||
key: 'docker.container_info["{#NAME}"]'
|
||||
trigger_prototypes:
|
||||
- expression: '{last()}>5'
|
||||
name: 'Container {#NAME}: restarting constantly'
|
||||
opdata: '{ITEM.VALUE}'
|
||||
priority: HIGH
|
||||
- name: 'Container {#NAME}: Started at'
|
||||
type: DEPENDENT
|
||||
key: 'docker.container_info.started["{#NAME}"]'
|
||||
delay: '0'
|
||||
history: 7d
|
||||
value_type: FLOAT
|
||||
units: unixtime
|
||||
application_prototypes:
|
||||
- name: 'Docker: Container {#NAME}'
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.State.StartedAt
|
||||
- type: DISCARD_UNCHANGED_HEARTBEAT
|
||||
parameters:
|
||||
- 1d
|
||||
master_item:
|
||||
key: 'docker.container_info["{#NAME}"]'
|
||||
- name: 'Container {#NAME}: Error'
|
||||
type: DEPENDENT
|
||||
key: 'docker.container_info.state.error["{#NAME}"]'
|
||||
delay: '0'
|
||||
history: 7d
|
||||
trends: '0'
|
||||
value_type: CHAR
|
||||
application_prototypes:
|
||||
- name: 'Docker: Container {#NAME}'
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.State.Error
|
||||
- type: DISCARD_UNCHANGED_HEARTBEAT
|
||||
parameters:
|
||||
- 1d
|
||||
master_item:
|
||||
key: 'docker.container_info["{#NAME}"]'
|
||||
trigger_prototypes:
|
||||
- expression: '{diff()}=1 and {strlen()}>0'
|
||||
name: 'Container {#NAME}: An error has occurred in the container'
|
||||
priority: WARNING
|
||||
description: 'Container {#NAME} has an error. Ack to close.'
|
||||
manual_close: 'YES'
|
||||
- name: 'Container {#NAME}: Exit code'
|
||||
type: DEPENDENT
|
||||
key: 'docker.container_info.state.exitcode["{#NAME}"]'
|
||||
delay: '0'
|
||||
history: 7d
|
||||
application_prototypes:
|
||||
- name: 'Docker: Container {#NAME}'
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.State.ExitCode
|
||||
- type: DISCARD_UNCHANGED_HEARTBEAT
|
||||
parameters:
|
||||
- 1d
|
||||
master_item:
|
||||
key: 'docker.container_info["{#NAME}"]'
|
||||
- name: 'Container {#NAME}: Paused'
|
||||
type: DEPENDENT
|
||||
key: 'docker.container_info.state.paused["{#NAME}"]'
|
||||
delay: '0'
|
||||
history: 7d
|
||||
application_prototypes:
|
||||
- name: 'Docker: Container {#NAME}'
|
||||
valuemap:
|
||||
name: 'Docker flag'
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.State.Paused
|
||||
- type: BOOL_TO_DECIMAL
|
||||
parameters:
|
||||
- ''
|
||||
master_item:
|
||||
key: 'docker.container_info["{#NAME}"]'
|
||||
- name: 'Container {#NAME}: Restarting'
|
||||
type: DEPENDENT
|
||||
key: 'docker.container_info.state.restarting["{#NAME}"]'
|
||||
delay: '0'
|
||||
history: 7d
|
||||
application_prototypes:
|
||||
- name: 'Docker: Container {#NAME}'
|
||||
valuemap:
|
||||
name: 'Docker flag'
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.State.Restarting
|
||||
- type: BOOL_TO_DECIMAL
|
||||
parameters:
|
||||
- ''
|
||||
master_item:
|
||||
key: 'docker.container_info["{#NAME}"]'
|
||||
- name: 'Container {#NAME}: Running'
|
||||
type: DEPENDENT
|
||||
key: 'docker.container_info.state.running["{#NAME}"]'
|
||||
delay: '0'
|
||||
history: 7d
|
||||
application_prototypes:
|
||||
- name: 'Docker: Container {#NAME}'
|
||||
valuemap:
|
||||
name: 'Docker flag'
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.State.Running
|
||||
- type: BOOL_TO_DECIMAL
|
||||
parameters:
|
||||
- ''
|
||||
master_item:
|
||||
key: 'docker.container_info["{#NAME}"]'
|
||||
- name: 'Container {#NAME}: Status'
|
||||
type: DEPENDENT
|
||||
key: 'docker.container_info.state.status["{#NAME}"]'
|
||||
delay: '0'
|
||||
history: 7d
|
||||
trends: '0'
|
||||
value_type: CHAR
|
||||
application_prototypes:
|
||||
- name: 'Docker: Container {#NAME}'
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.State.Status
|
||||
- type: DISCARD_UNCHANGED_HEARTBEAT
|
||||
parameters:
|
||||
- 1h
|
||||
master_item:
|
||||
key: 'docker.container_info["{#NAME}"]'
|
||||
- name: 'Container {#NAME}: Get info'
|
||||
key: 'docker.container_info["{#NAME}"]'
|
||||
history: '0'
|
||||
trends: '0'
|
||||
value_type: CHAR
|
||||
description: 'Return low-level information about a container'
|
||||
application_prototypes:
|
||||
- name: 'Docker: Container {#NAME}'
|
||||
trigger_prototypes:
|
||||
- expression: '{Docker:docker.container_info.state.exitcode["{#NAME}"].last()}>0 and {Docker:docker.container_info.state.running["{#NAME}"].last()}=0'
|
||||
name: 'Container {#NAME}: Container has been stopped with error code'
|
||||
opdata: 'Exit code: {ITEM.LASTVALUE1}'
|
||||
priority: AVERAGE
|
||||
manual_close: 'YES'
|
||||
macros:
|
||||
- macro: '{$DOCKER.LLD.FILTER.CONTAINER.MATCHES}'
|
||||
value: '.*'
|
||||
description: 'Filter of discoverable containers'
|
||||
- macro: '{$DOCKER.LLD.FILTER.CONTAINER.NOT_MATCHES}'
|
||||
value: CHANGE_IF_NEEDED
|
||||
description: 'Filter to exclude discovered containers'
|
||||
- macro: '{$DOCKER.LLD.FILTER.IMAGE.MATCHES}'
|
||||
value: '.*'
|
||||
description: 'Filter of discoverable images'
|
||||
- macro: '{$DOCKER.LLD.FILTER.IMAGE.NOT_MATCHES}'
|
||||
value: CHANGE_IF_NEEDED
|
||||
description: 'Filter to exclude discovered images'
|
||||
graphs:
|
||||
- name: 'Docker: Containers'
|
||||
graph_items:
|
||||
- drawtype: GRADIENT_LINE
|
||||
color: 1A7C11
|
||||
item:
|
||||
host: Docker
|
||||
key: docker.containers.running
|
||||
- sortorder: '1'
|
||||
drawtype: BOLD_LINE
|
||||
color: 2774A4
|
||||
item:
|
||||
host: Docker
|
||||
key: docker.containers.paused
|
||||
- sortorder: '2'
|
||||
drawtype: BOLD_LINE
|
||||
color: F63100
|
||||
item:
|
||||
host: Docker
|
||||
key: docker.containers.stopped
|
||||
- sortorder: '3'
|
||||
drawtype: BOLD_LINE
|
||||
color: A54F10
|
||||
item:
|
||||
host: Docker
|
||||
key: docker.containers.total
|
||||
- name: 'Docker: Memory total'
|
||||
graph_items:
|
||||
- drawtype: BOLD_LINE
|
||||
color: 1A7C11
|
||||
item:
|
||||
host: Docker
|
||||
key: docker.mem.total
|
||||
value_maps:
|
||||
- name: 'Docker flag'
|
||||
mappings:
|
||||
- value: '0'
|
||||
newvalue: 'False'
|
||||
- value: '1'
|
||||
newvalue: 'True'
|
||||
- name: 'Service state'
|
||||
mappings:
|
||||
- value: '0'
|
||||
newvalue: Down
|
||||
- value: '1'
|
||||
newvalue: Up
|
70
monitoring/templates/Tornado-relayer-template.yaml
Normal file
70
monitoring/templates/Tornado-relayer-template.yaml
Normal file
@ -0,0 +1,70 @@
|
||||
zabbix_export:
|
||||
version: '5.2'
|
||||
date: '2021-12-01T13:26:59Z'
|
||||
groups:
|
||||
- name: Templates/Applications
|
||||
templates:
|
||||
- template: Tornado-relayer
|
||||
name: Tornado-relayer
|
||||
groups:
|
||||
- name: Templates/Applications
|
||||
items:
|
||||
- name: 'tornado-relayer: health.error'
|
||||
type: DEPENDENT
|
||||
key: tornado-relayer.health.error
|
||||
delay: '0'
|
||||
trends: '0'
|
||||
value_type: TEXT
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.health.error
|
||||
master_item:
|
||||
key: 'web.page.get[{$URL}]'
|
||||
triggers:
|
||||
- expression: '{last()}<>""'
|
||||
name: 'tornado-relayer: health error'
|
||||
priority: AVERAGE
|
||||
- name: 'tornado-relayer: health.status'
|
||||
type: DEPENDENT
|
||||
key: tornado-relayer.health.status
|
||||
delay: '0'
|
||||
trends: '0'
|
||||
value_type: TEXT
|
||||
preprocessing:
|
||||
- type: JSONPATH
|
||||
parameters:
|
||||
- $.health.status
|
||||
master_item:
|
||||
key: 'web.page.get[{$URL}]'
|
||||
triggers:
|
||||
- expression: '{last(#3)}<>"true"'
|
||||
name: 'tornado-relayer: health status <> true'
|
||||
priority: HIGH
|
||||
- name: 'tornado-relayer: data'
|
||||
type: ZABBIX_ACTIVE
|
||||
key: 'web.page.get[{$URL}]'
|
||||
history: '0'
|
||||
trends: '0'
|
||||
value_type: TEXT
|
||||
preprocessing:
|
||||
- type: REGEX
|
||||
parameters:
|
||||
- '\n\s?\n([\s\S]*)'
|
||||
- \1
|
||||
httptests:
|
||||
- name: 'tornado-relayer: status page'
|
||||
agent: 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/80.0.3987.87 Chrome/80.0.3987.87 Safari/537.36'
|
||||
steps:
|
||||
- name: 'status page'
|
||||
url: '{$URL}'
|
||||
follow_redirects: 'NO'
|
||||
required: status
|
||||
status_codes: '200'
|
||||
triggers:
|
||||
- expression: '{Tornado-relayer:web.test.fail[tornado-relayer: status page].last()}>0'
|
||||
name: 'tornado-relayer: status page failed'
|
||||
priority: AVERAGE
|
||||
- expression: '{Tornado-relayer:web.test.rspcode[tornado-relayer: status page,status page].last(#3)}<>200'
|
||||
name: 'tornado-relayer: status page rspcode <>200'
|
||||
priority: HIGH
|
BIN
monitoring/zabbix.tar.gz
Normal file
BIN
monitoring/zabbix.tar.gz
Normal file
Binary file not shown.
50
package.json
Normal file
50
package.json
Normal file
@ -0,0 +1,50 @@
|
||||
{
|
||||
"name": "relay",
|
||||
"version": "4.1.6",
|
||||
"description": "Relayer for Tornado.cash privacy solution. https://tornado.cash",
|
||||
"scripts": {
|
||||
"server": "node src/server.js",
|
||||
"worker": "node src/worker",
|
||||
"treeWatcher": "node src/treeWatcher",
|
||||
"priceWatcher": "node src/priceWatcher",
|
||||
"healthWatcher": "node src/healthWatcher",
|
||||
"eslint": "eslint --ext .js --ignore-path .gitignore .",
|
||||
"prettier:check": "npx prettier --check . --config .prettierrc",
|
||||
"prettier:fix": "npx prettier --write . --config .prettierrc",
|
||||
"lint": "yarn eslint && yarn prettier:check",
|
||||
"test": "mocha",
|
||||
"build": "docker build -t tornadocash/relayer:mainnet-v4 .",
|
||||
"start": "docker-compose up -d redis && concurrently \"yarn server\" \"yarn priceWatcher\" \"yarn treeWatcher\" \"yarn worker\" \"yarn healthWatcher\""
|
||||
},
|
||||
"author": "tornado.cash",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@tornado/anonymity-mining": "^2.1.5",
|
||||
"@tornado/circomlib": "^0.0.21",
|
||||
"@tornado/fixed-merkle-tree": "^0.4",
|
||||
"@tornado/tornado-config": "^1",
|
||||
"@tornado/tornado-oracles": "1.2.2",
|
||||
"@tornado/tx-manager": "^0.4.9",
|
||||
"ajv": "^6.12.5",
|
||||
"async-mutex": "^0.2.4",
|
||||
"bull": "^3.12.1",
|
||||
"concurrently": "^8.2.0",
|
||||
"dotenv": "^8.2.0",
|
||||
"eth-ens-namehash": "^2.0.8",
|
||||
"express": "^4.17.1",
|
||||
"ioredis": "^4.14.1",
|
||||
"node-fetch": "^2.6.7",
|
||||
"uuid": "^8.3.0",
|
||||
"web3": "^1.3.0",
|
||||
"web3-core-promievent": "^1.3.0",
|
||||
"web3-utils": "^1.2.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"chai": "^4.2.0",
|
||||
"eslint": "^6.6.0",
|
||||
"eslint-config-prettier": "^6.12.0",
|
||||
"eslint-plugin-prettier": "^3.1.4",
|
||||
"mocha": "^8.1.3",
|
||||
"prettier": "^2.1.2"
|
||||
}
|
||||
}
|
28
src/config.js
Normal file
28
src/config.js
Normal file
@ -0,0 +1,28 @@
require('dotenv').config()

const { jobType } = require('./constants')
const tornConfig = require('@tornado/tornado-config')
module.exports = {
  netId: Number(process.env.NET_ID) || 1,
  redisUrl: process.env.REDIS_URL || 'redis://127.0.0.1:6379',
  httpRpcUrl: process.env.HTTP_RPC_URL,
  wsRpcUrl: process.env.WS_RPC_URL,
  oracleRpcUrl: process.env.ORACLE_RPC_URL || 'https://api.securerpc.com/v1',
  aggregatorAddress: process.env.AGGREGATOR,
  minerMerkleTreeHeight: 20,
  privateKey: process.env.PRIVATE_KEY,
  instances: tornConfig.instances,
  torn: tornConfig,
  port: process.env.APP_PORT || 8000,
  tornadoServiceFee: Number(process.env.REGULAR_TORNADO_WITHDRAW_FEE),
  miningServiceFee: Number(process.env.MINING_SERVICE_FEE),
  rewardAccount: process.env.REWARD_ACCOUNT,
  governanceAddress: '0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce',
  tornadoGoerliProxy: '0x454d870a72e29d5E5697f635128D18077BD04C60',
  gasLimits: {
    [jobType.MINING_REWARD]: 455000,
    [jobType.MINING_WITHDRAW]: 400000,
  },
  minimumBalance: '500000000000000000',
  baseFeeReserve: Number(process.env.BASE_FEE_RESERVE_PERCENTAGE),
}
20
src/constants.js
Normal file
20
src/constants.js
Normal file
@ -0,0 +1,20 @@
const jobType = Object.freeze({
  TORNADO_WITHDRAW: 'TORNADO_WITHDRAW',
  MINING_REWARD: 'MINING_REWARD',
  MINING_WITHDRAW: 'MINING_WITHDRAW',
})

const status = Object.freeze({
  QUEUED: 'QUEUED',
  ACCEPTED: 'ACCEPTED',
  SENT: 'SENT',
  MINED: 'MINED',
  RESUBMITTED: 'RESUBMITTED',
  CONFIRMED: 'CONFIRMED',
  FAILED: 'FAILED',
})

module.exports = {
  jobType,
  status,
}
55
src/contollers/controller.js
Normal file
55
src/contollers/controller.js
Normal file
@ -0,0 +1,55 @@
|
||||
const {
|
||||
getTornadoWithdrawInputError,
|
||||
getMiningRewardInputError,
|
||||
getMiningWithdrawInputError,
|
||||
} = require('../modules/validator')
|
||||
const { postJob } = require('../queue')
|
||||
const { jobType } = require('../constants')
|
||||
|
||||
async function tornadoWithdraw(req, res) {
|
||||
const inputError = getTornadoWithdrawInputError(req.body)
|
||||
if (inputError) {
|
||||
console.log('Invalid input:', inputError)
|
||||
return res.status(400).json({ error: inputError })
|
||||
}
|
||||
|
||||
const id = await postJob({
|
||||
type: jobType.TORNADO_WITHDRAW,
|
||||
request: req.body,
|
||||
})
|
||||
return res.json({ id })
|
||||
}
|
||||
|
||||
async function miningReward(req, res) {
|
||||
const inputError = getMiningRewardInputError(req.body)
|
||||
if (inputError) {
|
||||
console.log('Invalid input:', inputError)
|
||||
return res.status(400).json({ error: inputError })
|
||||
}
|
||||
|
||||
const id = await postJob({
|
||||
type: jobType.MINING_REWARD,
|
||||
request: req.body,
|
||||
})
|
||||
return res.json({ id })
|
||||
}
|
||||
|
||||
async function miningWithdraw(req, res) {
|
||||
const inputError = getMiningWithdrawInputError(req.body)
|
||||
if (inputError) {
|
||||
console.log('Invalid input:', inputError)
|
||||
return res.status(400).json({ error: inputError })
|
||||
}
|
||||
|
||||
const id = await postJob({
|
||||
type: jobType.MINING_WITHDRAW,
|
||||
request: req.body,
|
||||
})
|
||||
return res.json({ id })
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
tornadoWithdraw,
|
||||
miningReward,
|
||||
miningWithdraw,
|
||||
}
|
4
src/contollers/index.js
Normal file
4
src/contollers/index.js
Normal file
@ -0,0 +1,4 @@
module.exports = {
  controller: require('./controller'),
  status: require('./status'),
}
41
src/contollers/status.js
Normal file
41
src/contollers/status.js
Normal file
@ -0,0 +1,41 @@
|
||||
const queue = require('../queue')
|
||||
const { netId, tornadoServiceFee, miningServiceFee, instances, rewardAccount } = require('../config')
|
||||
const { version } = require('../../package.json')
|
||||
const { redis } = require('../modules/redis')
|
||||
const { readRelayerErrors } = require('../utils')
|
||||
|
||||
async function status(req, res) {
|
||||
const ethPrices = await redis.hgetall('prices')
|
||||
const health = await redis.hgetall('health')
|
||||
health.errorsLog = await readRelayerErrors(redis)
|
||||
const { waiting: currentQueue } = await queue.queue.getJobCounts()
|
||||
|
||||
res.json({
|
||||
rewardAccount,
|
||||
instances: instances[netId],
|
||||
netId,
|
||||
ethPrices,
|
||||
tornadoServiceFee,
|
||||
miningServiceFee,
|
||||
version,
|
||||
health,
|
||||
currentQueue,
|
||||
})
|
||||
}
|
||||
|
||||
function index(req, res) {
|
||||
res.send(
|
||||
'This is <a href=https://tornado.cash>tornado.cash</a> Relayer service. Check the <a href=/v1/status>/status</a> for settings',
|
||||
)
|
||||
}
|
||||
|
||||
async function getJob(req, res) {
|
||||
const status = await queue.getJobStatus(req.params.id)
|
||||
return status ? res.json(status) : res.status(400).json({ error: "The job doesn't exist" })
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
status,
|
||||
index,
|
||||
getJob,
|
||||
}
|
21
src/healthWatcher.js
Normal file
21
src/healthWatcher.js
Normal file
@ -0,0 +1,21 @@
const { setSafeInterval, toBN, fromWei, RelayerError } = require('./utils')
const { privateKey, minimumBalance } = require('./config')
const { redis } = require('./modules/redis')
const web3 = require('./modules/web3')()

async function main() {
  try {
    const { address } = web3.eth.accounts.privateKeyToAccount(privateKey)
    const balance = await web3.eth.getBalance(address)
    if (toBN(balance).lt(toBN(minimumBalance))) {
      throw new RelayerError(`Not enough balance, less than ${fromWei(minimumBalance)} ETH`, 1)
    }

    await redis.hset('health', { status: true, error: '' })
  } catch (e) {
    console.error('healthWatcher', e.message)
    await redis.hset('health', { status: false, error: e.message })
  }
}

setSafeInterval(main, 30 * 1000)
11
src/modules/redis.js
Normal file
11
src/modules/redis.js
Normal file
@ -0,0 +1,11 @@
const { createClient } = require('ioredis')
const { redisUrl } = require('../config')

const redis = createClient(redisUrl)
const redisSubscribe = createClient(redisUrl)

module.exports = {
  redis,
  redisSubscribe,
  redisUrl,
}
29
src/modules/resolver.js
Normal file
29
src/modules/resolver.js
Normal file
@ -0,0 +1,29 @@
const { aggregatorAddress } = require('../config')
const web3 = require('./web3')()

const aggregator = new web3.eth.Contract(require('../../abis/Aggregator.abi.json'), aggregatorAddress)
const ens = require('eth-ens-namehash')

class ENSResolver {
  constructor() {
    this.addresses = {}
  }

  async resolve(domains) {
    if (!Array.isArray(domains)) {
      domains = [domains]
    }

    const unresolved = domains.filter(d => !this.addresses[d])
    if (unresolved.length) {
      const resolved = await aggregator.methods.bulkResolve(unresolved.map(ens.hash)).call()
      for (let i = 0; i < resolved.length; i++) {
        this.addresses[domains[i]] = resolved[i]
      }
    }

    const addresses = domains.map(domain => this.addresses[domain])
    return addresses.length === 1 ? addresses[0] : addresses
  }
}
module.exports = new ENSResolver()
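A minimal usage sketch (not part of the committed code) of the resolver above: `resolve()` accepts a single ENS name or an array, caches earlier lookups, and resolves the rest in one `bulkResolve()` call. The ENS names below are placeholders; the real caller (`treeWatcher.js`) passes `torn.miningV2.address` from the config.

```js
// Sketch, assuming it is run from src/ with AGGREGATOR configured.
const resolver = require('./modules/resolver')

async function example() {
  // A single name resolves to a single address (placeholder name).
  const miner = await resolver.resolve('miner.example.eth')
  // An array of names resolves to an array of addresses.
  const [a, b] = await resolver.resolve(['a.example.eth', 'b.example.eth'])
  console.log({ miner, a, b })
}

example().catch(console.error)
```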
200
src/modules/validator.js
Normal file
200
src/modules/validator.js
Normal file
@ -0,0 +1,200 @@
|
||||
const { isAddress, toChecksumAddress } = require('web3-utils')
|
||||
const { getInstance } = require('../utils')
|
||||
const { rewardAccount } = require('../config')
|
||||
|
||||
const Ajv = require('ajv')
|
||||
const ajv = new Ajv({ format: 'fast' })
|
||||
|
||||
ajv.addKeyword('isAddress', {
|
||||
validate: (schema, data) => {
|
||||
try {
|
||||
return isAddress(data)
|
||||
} catch (e) {
|
||||
return false
|
||||
}
|
||||
},
|
||||
errors: true,
|
||||
})
|
||||
|
||||
ajv.addKeyword('isKnownContract', {
|
||||
validate: (schema, data) => {
|
||||
try {
|
||||
return !!getInstance(data)
|
||||
} catch (e) {
|
||||
return false
|
||||
}
|
||||
},
|
||||
errors: true,
|
||||
})
|
||||
|
||||
ajv.addKeyword('isFeeRecipient', {
|
||||
validate: (schema, data) => {
|
||||
try {
|
||||
return toChecksumAddress(rewardAccount) === toChecksumAddress(data)
|
||||
} catch (e) {
|
||||
return false
|
||||
}
|
||||
},
|
||||
errors: true,
|
||||
})
|
||||
|
||||
const addressType = { type: 'string', pattern: '^0x[a-fA-F0-9]{40}$', isAddress: true }
|
||||
const proofType = { type: 'string', pattern: '^0x[a-fA-F0-9]{512}$' }
|
||||
const encryptedAccountType = { type: 'string', pattern: '^0x[a-fA-F0-9]{392}$' }
|
||||
const bytes32Type = { type: 'string', pattern: '^0x[a-fA-F0-9]{64}$' }
|
||||
const instanceType = { ...addressType, isKnownContract: true }
|
||||
const relayerType = { ...addressType, isFeeRecipient: true }
|
||||
|
||||
const tornadoWithdrawSchema = {
|
||||
type: 'object',
|
||||
properties: {
|
||||
proof: proofType,
|
||||
contract: instanceType,
|
||||
args: {
|
||||
type: 'array',
|
||||
maxItems: 6,
|
||||
minItems: 6,
|
||||
items: [bytes32Type, bytes32Type, addressType, relayerType, bytes32Type, bytes32Type],
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ['proof', 'contract', 'args'],
|
||||
}
|
||||
|
||||
const miningRewardSchema = {
|
||||
type: 'object',
|
||||
properties: {
|
||||
proof: proofType,
|
||||
args: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
rate: bytes32Type,
|
||||
fee: bytes32Type,
|
||||
instance: instanceType,
|
||||
rewardNullifier: bytes32Type,
|
||||
extDataHash: bytes32Type,
|
||||
depositRoot: bytes32Type,
|
||||
withdrawalRoot: bytes32Type,
|
||||
extData: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
relayer: relayerType,
|
||||
encryptedAccount: encryptedAccountType,
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ['relayer', 'encryptedAccount'],
|
||||
},
|
||||
account: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
inputRoot: bytes32Type,
|
||||
inputNullifierHash: bytes32Type,
|
||||
outputRoot: bytes32Type,
|
||||
outputPathIndices: bytes32Type,
|
||||
outputCommitment: bytes32Type,
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: [
|
||||
'inputRoot',
|
||||
'inputNullifierHash',
|
||||
'outputRoot',
|
||||
'outputPathIndices',
|
||||
'outputCommitment',
|
||||
],
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: [
|
||||
'rate',
|
||||
'fee',
|
||||
'instance',
|
||||
'rewardNullifier',
|
||||
'extDataHash',
|
||||
'depositRoot',
|
||||
'withdrawalRoot',
|
||||
'extData',
|
||||
'account',
|
||||
],
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ['proof', 'args'],
|
||||
}
|
||||
|
||||
const miningWithdrawSchema = {
|
||||
type: 'object',
|
||||
properties: {
|
||||
proof: proofType,
|
||||
args: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
amount: bytes32Type,
|
||||
extDataHash: bytes32Type,
|
||||
extData: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
fee: bytes32Type,
|
||||
recipient: addressType,
|
||||
relayer: relayerType,
|
||||
encryptedAccount: encryptedAccountType,
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ['fee', 'relayer', 'encryptedAccount', 'recipient'],
|
||||
},
|
||||
account: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
inputRoot: bytes32Type,
|
||||
inputNullifierHash: bytes32Type,
|
||||
outputRoot: bytes32Type,
|
||||
outputPathIndices: bytes32Type,
|
||||
outputCommitment: bytes32Type,
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: [
|
||||
'inputRoot',
|
||||
'inputNullifierHash',
|
||||
'outputRoot',
|
||||
'outputPathIndices',
|
||||
'outputCommitment',
|
||||
],
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ['amount', 'extDataHash', 'extData', 'account'],
|
||||
},
|
||||
},
|
||||
additionalProperties: false,
|
||||
required: ['proof', 'args'],
|
||||
}
|
||||
|
||||
const validateTornadoWithdraw = ajv.compile(tornadoWithdrawSchema)
|
||||
const validateMiningReward = ajv.compile(miningRewardSchema)
|
||||
const validateMiningWithdraw = ajv.compile(miningWithdrawSchema)
|
||||
|
||||
function getInputError(validator, data) {
|
||||
validator(data)
|
||||
if (validator.errors) {
|
||||
const error = validator.errors[0]
|
||||
return `${error.dataPath} ${error.message}`
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
function getTornadoWithdrawInputError(data) {
|
||||
return getInputError(validateTornadoWithdraw, data)
|
||||
}
|
||||
|
||||
function getMiningRewardInputError(data) {
|
||||
return getInputError(validateMiningReward, data)
|
||||
}
|
||||
|
||||
function getMiningWithdrawInputError(data) {
|
||||
return getInputError(validateMiningWithdraw, data)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getTornadoWithdrawInputError,
|
||||
getMiningRewardInputError,
|
||||
getMiningWithdrawInputError,
|
||||
}
|
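A sketch (not part of the committed code) of how the exported validators are exercised; `controller.js` calls them on `req.body`. All values below are placeholders, the instance address is only illustrative, and the call returns `null` only when `contract` is a known instance for the configured `NET_ID` and the relayer argument equals `REWARD_ACCOUNT`.

```js
// Placeholder payload shaped like tornadoWithdrawSchema above.
const { getTornadoWithdrawInputError } = require('./modules/validator')

const body = {
  proof: '0x' + 'ab'.repeat(256), // 512 hex characters, as required by proofType
  contract: '0x12D66f87A04A9E220743712cE6d9bB1B5616B8Fc', // must pass isKnownContract
  args: [
    '0x' + '11'.repeat(32), // bytes32
    '0x' + '22'.repeat(32), // bytes32
    '0x' + '33'.repeat(20), // address
    '0x' + '44'.repeat(20), // must equal the relayer's REWARD_ACCOUNT
    '0x' + '00'.repeat(32), // bytes32
    '0x' + '00'.repeat(32), // bytes32
  ],
}

// Returns null on success, otherwise a "<dataPath> <message>" string.
console.log(getTornadoWithdrawInputError(body))
```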
30
src/modules/web3.js
Normal file
30
src/modules/web3.js
Normal file
@ -0,0 +1,30 @@
const Web3 = require('web3')
const { oracleRpcUrl, httpRpcUrl, wsRpcUrl } = require('../config')
const getWeb3 = (type = 'http') => {
  let url
  switch (type) {
    case 'oracle':
      url = oracleRpcUrl
      break
    case 'ws':
      url = wsRpcUrl
      return new Web3(
        new Web3.providers.WebsocketProvider(wsRpcUrl, {
          clientConfig: {
            maxReceivedFrameSize: 100000000,
            maxReceivedMessageSize: 100000000,
          },
        }),
      )
    case 'http':
    default:
      url = httpRpcUrl
      break
  }
  return new Web3(
    new Web3.providers.HttpProvider(url, {
      timeout: 200000, // ms
    }),
  )
}
module.exports = getWeb3
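A usage note (not part of the committed code): the factory above defaults to the HTTP provider, while the other transports are selected explicitly by the callers shown elsewhere in this branch (for example, `treeWatcher.js` uses the websocket flavour).

```js
const getWeb3 = require('./modules/web3')

const web3 = getWeb3() // HTTP provider on HTTP_RPC_URL (default)
const oracleWeb3 = getWeb3('oracle') // HTTP provider on ORACLE_RPC_URL
const wsWeb3 = getWeb3('ws') // WebsocketProvider on WS_RPC_URL, for subscriptions
```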
19
src/priceWatcher.js
Normal file
19
src/priceWatcher.js
Normal file
@ -0,0 +1,19 @@
const { setSafeInterval, RelayerError, logRelayerError } = require('./utils')
const { redis } = require('./modules/redis')
const { TokenPriceOracle } = require('@tornado/tornado-oracles')
const { oracleRpcUrl } = require('./config')

const priceOracle = new TokenPriceOracle(oracleRpcUrl)

async function main() {
  try {
    const ethPrices = await priceOracle.fetchPrices()
    await redis.hmset('prices', ethPrices)
    console.log('Wrote following prices to redis', ethPrices)
  } catch (e) {
    await logRelayerError(redis, e)
    console.error('priceWatcher error', e)
  }
}

setSafeInterval(main, 30 * 1000)
57
src/queue.js
Normal file
57
src/queue.js
Normal file
@ -0,0 +1,57 @@
|
||||
const { v4: uuid } = require('uuid')
|
||||
const Queue = require('bull')
|
||||
|
||||
const { netId } = require('./config')
|
||||
const { status } = require('./constants')
|
||||
const { redis, redisUrl } = require('./modules/redis')
|
||||
|
||||
const queue = new Queue(`proofs_${netId}`, redisUrl, {
|
||||
lockDuration: 300000, // Key expiration time for job locks.
|
||||
lockRenewTime: 30000, // Interval on which to acquire the job lock
|
||||
stalledInterval: 30000, // How often check for stalled jobs (use 0 for never checking).
|
||||
maxStalledCount: 3, // Max amount of times a stalled job will be re-processed.
|
||||
guardInterval: 5000, // Poll interval for delayed jobs and added jobs.
|
||||
retryProcessDelay: 5000, // delay before processing next job in case of internal error.
|
||||
drainDelay: 5, // A timeout for when the queue is in drained state (empty waiting for jobs).
|
||||
})
|
||||
|
||||
async function postJob({ type, request }) {
|
||||
const id = uuid()
|
||||
|
||||
const job = await queue.add(
|
||||
{
|
||||
id,
|
||||
type,
|
||||
status: status.QUEUED,
|
||||
...request, // proof, args, ?contract
|
||||
},
|
||||
{
|
||||
//removeOnComplete: true
|
||||
},
|
||||
)
|
||||
await redis.set(`job:${id}`, job.id)
|
||||
return id
|
||||
}
|
||||
|
||||
async function getJob(uuid) {
|
||||
const id = await redis.get(`job:${uuid}`)
|
||||
return queue.getJobFromId(id)
|
||||
}
|
||||
|
||||
async function getJobStatus(uuid) {
|
||||
const job = await getJob(uuid)
|
||||
if (!job) {
|
||||
return null
|
||||
}
|
||||
return {
|
||||
...job.data,
|
||||
failedReason: job.failedReason,
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
postJob,
|
||||
getJob,
|
||||
getJobStatus,
|
||||
queue,
|
||||
}
|
30
src/router.js
Normal file
30
src/router.js
Normal file
@ -0,0 +1,30 @@
const { controller, status } = require('./contollers')
const router = require('express').Router()

// Add CORS headers
router.use((req, res, next) => {
  res.header('X-Frame-Options', 'DENY')
  res.header('Access-Control-Allow-Origin', '*')
  res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept')
  next()
})

// Log error to console but don't send it to the client to avoid leaking data
router.use((err, req, res, next) => {
  if (err) {
    console.error(err)
    return res.sendStatus(500)
  }
  next()
})

router.get('/', status.index)
router.get('/v1/status', status.status)
router.get('/v1/jobs/:id', status.getJob)
router.post('/v1/tornadoWithdraw', controller.tornadoWithdraw)
router.get('/status', status.status)
router.post('/relay', controller.tornadoWithdraw)
router.post('/v1/miningReward', controller.miningReward)
router.post('/v1/miningWithdraw', controller.miningWithdraw)

module.exports = router
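A sketch (not part of the committed code) of a client round-trip against the routes above. It assumes the relayer listens on `localhost:8000` (the `APP_PORT` default in `src/config.js`) and uses `node-fetch`, which is already a dependency; the payload shape is what `src/modules/validator.js` expects.

```js
const fetch = require('node-fetch')

async function relayWithdraw(payload /* { proof, contract, args } */) {
  const res = await fetch('http://localhost:8000/v1/tornadoWithdraw', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
  })
  const { id, error } = await res.json()
  if (error) throw new Error(error)

  // Poll /v1/jobs/:id until the worker reports a terminal status.
  for (;;) {
    const job = await (await fetch(`http://localhost:8000/v1/jobs/${id}`)).json()
    if (job.status === 'CONFIRMED' || job.status === 'FAILED') return job
    await new Promise(resolve => setTimeout(resolve, 3000))
  }
}
```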
14
src/server.js
Normal file
14
src/server.js
Normal file
@ -0,0 +1,14 @@
const express = require('express')
const { port, rewardAccount } = require('./config')
const { version } = require('../package.json')
const { isAddress } = require('./utils')
const router = require('./router')

if (!isAddress(rewardAccount)) {
  throw new Error('No REWARD_ACCOUNT specified')
}
const app = express()
app.use(express.json())
app.use(router)
app.listen(port)
console.log(`Relayer ${version} started on port ${port}`)
136
src/treeWatcher.js
Normal file
136
src/treeWatcher.js
Normal file
@ -0,0 +1,136 @@
const MerkleTree = require('@tornado/fixed-merkle-tree')
const { minerMerkleTreeHeight, torn, netId } = require('./config')
const { poseidonHash2, toBN, logRelayerError } = require('./utils')
const resolver = require('./modules/resolver')
const web3 = require('./modules/web3')('ws')
const MinerABI = require('../abis/mining.abi.json')
const { redis } = require('./modules/redis')
let contract

// eslint-disable-next-line no-unused-vars
let tree, eventSubscription, blockSubscription

async function fetchEvents(fromBlock, toBlock) {
  if (fromBlock <= toBlock) {
    try {
      return await contract.getPastEvents('NewAccount', {
        fromBlock,
        toBlock,
      })
    } catch (error) {
      const midBlock = (fromBlock + toBlock) >> 1

      if (midBlock - fromBlock < 2) {
        throw new Error(`error fetching events: ${error.message}`)
      }

      const arr1 = await fetchEvents(fromBlock, midBlock)
      const arr2 = await fetchEvents(midBlock + 1, toBlock)
      return [...arr1, ...arr2]
    }
  }
  return []
}

async function processNewEvent(err, event) {
  if (err) {
    throw new Error(`Event handler error: ${err}`)
    // console.error(err)
    // return
  }

  console.log(
    `New account event
     Index: ${event.returnValues.index}
     Commitment: ${event.returnValues.commitment}
     Nullifier: ${event.returnValues.nullifier}
     EncAcc: ${event.returnValues.encryptedAccount}`,
  )
  const { commitment, index } = event.returnValues
  if (tree.elements().length === Number(index)) {
    tree.insert(toBN(commitment))
    await updateRedis()
  } else if (tree.elements().length === Number(index) + 1) {
    console.log('Replacing element', index)
    tree.update(index, toBN(commitment))
    await updateRedis()
  } else {
    console.log(`Invalid element index ${index}, rebuilding tree`)
    rebuild()
  }
}

async function processNewBlock(err) {
  if (err) {
    throw new Error(`Event handler error: ${err}`)
    // console.error(err)
    // return
  }
  // what if updateRedis takes more than 15 sec?
  await updateRedis()
}

async function updateRedis() {
  const rootOnContract = await contract.methods.getLastAccountRoot().call()
  if (!tree.root().eq(toBN(rootOnContract))) {
    console.log(`Invalid tree root: ${tree.root()} != ${toBN(rootOnContract)}, rebuilding tree`)
    rebuild()
    return
  }
  const rootInRedis = await redis.get('tree:root')
  if (!rootInRedis || !tree.root().eq(toBN(rootInRedis))) {
    const serializedTree = JSON.stringify(tree.serialize())
    await redis.set('tree:elements', serializedTree)
    await redis.set('tree:root', tree.root().toString())
    await redis.publish('treeUpdate', tree.root().toString())
    console.log('Updated tree in redis, new root:', tree.root().toString())
  } else {
    console.log('Tree in redis is up to date, skipping update')
  }
}

function rebuild() {
  process.exit(1)
  // await eventSubscription.unsubscribe()
  // await blockSubscription.unsubscribe()
  // setTimeout(init, 3000)
}

async function init() {
  try {
    console.log('Initializing')
    const miner = await resolver.resolve(torn.miningV2.address)
    contract = new web3.eth.Contract(MinerABI, miner)

    const cachedEvents = require(`../cache/accounts_farmer_${netId}.json`)
    const cachedCommitments = cachedEvents.map(e => toBN(e.commitment))

    const toBlock = await web3.eth.getBlockNumber()
    const [{ blockNumber: fromBlock }] = cachedEvents.slice(-1)

    const newEvents = await fetchEvents(fromBlock + 1, toBlock)
    const newCommitments = newEvents
      .sort((a, b) => a.returnValues.index - b.returnValues.index)
      .map(e => toBN(e.returnValues.commitment))
      .filter((item, index, arr) => !index || item !== arr[index - 1])

    const commitments = cachedCommitments.concat(newCommitments)

    tree = new MerkleTree(minerMerkleTreeHeight, commitments, { hashFunction: poseidonHash2 })
    await updateRedis()
    console.log(`Rebuilt tree with ${commitments.length} elements, root: ${tree.root()}`)

    eventSubscription = contract.events.NewAccount({ fromBlock: toBlock + 1 }, processNewEvent)
    blockSubscription = web3.eth.subscribe('newBlockHeaders', processNewBlock)
  } catch (e) {
    await logRelayerError(redis, e)
    console.error('error on init treeWatcher', e.message)
  }
}

init()

process.on('unhandledRejection', error => {
  console.error('Unhandled promise rejection', error)
  process.exit(1)
})
142
src/utils.js
Normal file
142
src/utils.js
Normal file
@ -0,0 +1,142 @@
const { instances, netId } = require('./config')
const { poseidon } = require('@tornado/circomlib')
const { toBN, toChecksumAddress, BN, fromWei, isAddress, toWei } = require('web3-utils')

const addressMap = new Map()
const instance = instances[netId]

for (const [currency, { instanceAddress, symbol, decimals }] of Object.entries(instance)) {
  Object.entries(instanceAddress).forEach(([amount, address]) =>
    addressMap.set(`${netId}_${address}`, {
      currency,
      amount,
      symbol,
      decimals,
    }),
  )
}

const sleep = ms => new Promise(res => setTimeout(res, ms))

function getInstance(address) {
  const key = `${netId}_${toChecksumAddress(address)}`
  if (addressMap.has(key)) {
    return addressMap.get(key)
  } else {
    throw new Error('Unknown contract address')
  }
}

const poseidonHash = items => toBN(poseidon(items).toString())
const poseidonHash2 = (a, b) => poseidonHash([a, b])

function setSafeInterval(func, interval) {
  func()
    .catch(console.error)
    .finally(() => {
      setTimeout(() => setSafeInterval(func, interval), interval)
    })
}

/**
 * A promise that resolves when the source emits specified event
 */
function when(source, event) {
  return new Promise((resolve, reject) => {
    source
      .once(event, payload => {
        resolve(payload)
      })
      .on('error', error => {
        reject(error)
      })
  })
}

function fromDecimals(value, decimals) {
  value = value.toString()
  let ether = value.toString()
  const base = new BN('10').pow(new BN(decimals))
  const baseLength = base.toString(10).length - 1 || 1

  const negative = ether.substring(0, 1) === '-'
  if (negative) {
    ether = ether.substring(1)
  }

  if (ether === '.') {
    throw new Error('[ethjs-unit] while converting number ' + value + ' to wei, invalid value')
  }

  // Split it into a whole and fractional part
  const comps = ether.split('.')
  if (comps.length > 2) {
    throw new Error('[ethjs-unit] while converting number ' + value + ' to wei, too many decimal points')
  }

  let whole = comps[0]
  let fraction = comps[1]

  if (!whole) {
    whole = '0'
  }
  if (!fraction) {
    fraction = '0'
  }
  if (fraction.length > baseLength) {
    throw new Error('[ethjs-unit] while converting number ' + value + ' to wei, too many decimal places')
  }

  while (fraction.length < baseLength) {
    fraction += '0'
  }

  whole = new BN(whole)
  fraction = new BN(fraction)
  let wei = whole.mul(base).add(fraction)

  if (negative) {
    wei = wei.neg()
  }

  return new BN(wei.toString(10), 10)
}

class RelayerError extends Error {
  constructor(message, score = 0) {
    super(message)
    this.score = score
  }
}

const logRelayerError = async (redis, e) => {
  await redis.zadd('errors', 'INCR', e.score || 1, e.message)
}

const readRelayerErrors = async redis => {
  const set = await redis.zrevrange('errors', 0, -1, 'WITHSCORES')
  const errors = []
  while (set.length) {
    const [message, score] = set.splice(0, 2)
    errors.push({ message, score })
  }
  return errors
}

module.exports = {
  getInstance,
  setSafeInterval,
  poseidonHash2,
  sleep,
  when,
  fromDecimals,
  toBN,
  toChecksumAddress,
  fromWei,
  toWei,
  BN,
  isAddress,
  RelayerError,
  logRelayerError,
  readRelayerErrors,
}
359
src/worker.js
Normal file
359
src/worker.js
Normal file
@ -0,0 +1,359 @@
const fs = require('fs')
const MerkleTree = require('@tornado/fixed-merkle-tree')
const { TornadoFeeOracleV4, bump } = require('@tornado/tornado-oracles')
const { Utils, Controller } = require('@tornado/anonymity-mining')

const swapABI = require('../abis/swap.abi.json')
const miningABI = require('../abis/mining.abi.json')
const tornadoABI = require('../abis/tornadoABI.json')
const tornadoProxyABI = require('../abis/tornadoProxyABI.json')
const { queue } = require('./queue')
const {
  poseidonHash2,
  getInstance,
  isAddress,
  sleep,
  toBN,
  toChecksumAddress,
  RelayerError,
  logRelayerError,
} = require('./utils')
const { jobType, status } = require('./constants')
const {
  torn,
  netId,
  gasLimits,
  privateKey,
  httpRpcUrl,
  oracleRpcUrl,
  baseFeeReserve,
  miningServiceFee,
  tornadoServiceFee,
  tornadoGoerliProxy,
  rewardAccount,
} = require('./config')
const resolver = require('./modules/resolver')
const { TxManager } = require('@tornado/tx-manager')
const { redis, redisSubscribe } = require('./modules/redis')
const getWeb3 = require('./modules/web3')

let web3
let currentTx
let currentJob
let tree
let txManager
let controller
let swap
let minerContract
const feeOracle = new TornadoFeeOracleV4(netId, oracleRpcUrl)

async function fetchTree() {
  const elements = await redis.get('tree:elements')
  const convert = (_, val) => (typeof val === 'string' ? toBN(val) : val)
  tree = MerkleTree.deserialize(JSON.parse(elements, convert), poseidonHash2)

  if (currentTx && currentJob && ['MINING_REWARD', 'MINING_WITHDRAW'].includes(currentJob.data.type)) {
    const { proof, args } = currentJob.data
    if (toBN(args.account.inputRoot).eq(toBN(tree.root()))) {
      console.log('Account root is up to date. Skipping Root Update operation...')
      return
    } else {
      console.log('Account root is outdated. Starting Root Update operation...')
    }

    const update = await controller.treeUpdate(args.account.outputCommitment, tree)

    const minerAddress = await resolver.resolve(torn.miningV2.address)
    const instance = new web3.eth.Contract(miningABI, minerAddress)
    const data =
      currentJob.data.type === 'MINING_REWARD'
        ? instance.methods.reward(proof, args, update.proof, update.args).encodeABI()
        : instance.methods.withdraw(proof, args, update.proof, update.args).encodeABI()
    await currentTx.replace({
      to: minerAddress,
      data,
    })
    console.log('replaced pending tx')
  }
}

async function start() {
  try {
    await clearErrors()
    web3 = getWeb3()
    const { CONFIRMATIONS, MAX_GAS_PRICE } = process.env
    txManager = new TxManager({
      privateKey,
      rpcUrl: httpRpcUrl,
      config: {
        CONFIRMATIONS,
        MAX_GAS_PRICE,
        THROW_ON_REVERT: false,
        BASE_FEE_RESERVE_PERCENTAGE: baseFeeReserve,
      },
    })
    swap = new web3.eth.Contract(swapABI, await resolver.resolve(torn.rewardSwap.address))
    minerContract = new web3.eth.Contract(miningABI, await resolver.resolve(torn.miningV2.address))
    redisSubscribe.subscribe('treeUpdate', fetchTree)
    await fetchTree()
    const provingKeys = {
      treeUpdateCircuit: require('../keys/TreeUpdate.json'),
      treeUpdateProvingKey: fs.readFileSync('./keys/TreeUpdate_proving_key.bin').buffer,
    }
    controller = new Controller({ provingKeys })
    await controller.init()
    queue.process(processJob)
    console.log('Worker started')
  } catch (e) {
    await logRelayerError(redis, e)
    console.error('error on start worker', e.message)
  }
}

function checkFee({ data }) {
  if (data.type === jobType.TORNADO_WITHDRAW) {
    return checkTornadoFee(data)
  }
  return checkMiningFee(data)
}

async function checkTornadoFee({ args, contract }) {
  const { currency, amount, decimals } = getInstance(contract)
  const [userProvidedFee, refund] = [args[4], args[5]]

  const ethPrice = await redis.hget('prices', currency)
  const relayerEstimatedFee = await feeOracle.calculateWithdrawalFeeViaRelayer(
    'relayer_withdrawal_check_v4',
    {},
    tornadoServiceFee,
    currency,
    amount,
    decimals,
    refund,
    ethPrice,
  )
  if (toBN(relayerEstimatedFee).gt(toBN(userProvidedFee))) {
    throw new RelayerError(
      'Provided fee is not enough. Probably it is a Gas Price spike, try to resubmit.',
      0,
    )
  }
}

async function checkMiningFee({ args }) {
  const gasPrice = await feeOracle.getGasPriceInHex()
  const ethPrice = await redis.hget('prices', 'torn')
  const isMiningReward = currentJob.data.type === jobType.MINING_REWARD
  const providedFee = isMiningReward ? toBN(args.fee) : toBN(args.extData.fee)

  const expense = toBN(gasPrice).mul(toBN(gasLimits[currentJob.data.type]))
  const expenseInTorn = expense.mul(toBN(1e18)).div(toBN(ethPrice))
  // todo make aggregator for ethPrices and rewardSwap data
  const balance = await swap.methods.tornVirtualBalance().call()
  const poolWeight = await swap.methods.poolWeight().call()
  const expenseInPoints = Utils.reverseTornadoFormula({ balance, tokens: expenseInTorn, poolWeight })
  /* eslint-disable */
  const serviceFeePercent = isMiningReward
    ? toBN(0)
    : toBN(args.amount)
        .sub(providedFee) // args.amount includes fee
        .mul(toBN(parseInt(miningServiceFee * 1e10)))
        .div(toBN(1e10 * 100))
  /* eslint-enable */
  const desiredFee = expenseInPoints.add(serviceFeePercent) // in points
  console.log(
    'user provided fee, desired fee, serviceFeePercent',
    providedFee.toString(),
    desiredFee.toString(),
    serviceFeePercent.toString(),
  )
  if (toBN(providedFee).lt(desiredFee)) {
    throw new RelayerError('Provided fee is not enough. Probably it is a Gas Price spike, try to resubmit.')
  }
}

async function getProxyContract() {
  let proxyAddress
  if (netId === 5) {
    proxyAddress = tornadoGoerliProxy
  } else {
    proxyAddress = await resolver.resolve(torn.tornadoRouter.address)
  }
  const contract = new web3.eth.Contract(tornadoProxyABI, proxyAddress)

  return {
    contract,
    isOldProxy: checkOldProxy(proxyAddress),
  }
}

function checkOldProxy(address) {
  const OLD_PROXY = '0x905b63Fff465B9fFBF41DeA908CEb12478ec7601'
  return toChecksumAddress(address) === toChecksumAddress(OLD_PROXY)
}

async function checkRecipient({ data }) {
  // Checks only for default withdrawals
  if (data.type !== jobType.TORNADO_WITHDRAW) return

  const recipient = data.args[2]
  if (!isAddress(recipient)) throw new Error('Recipient address is invalid')

  const addressCode = await web3.eth.getCode(toChecksumAddress(recipient))
  if (addressCode !== '0x') throw new Error('Recipient cannot be a smart-contract, only EOA')
}

async function getTxObject({ data }) {
  if (data.type === jobType.TORNADO_WITHDRAW) {
    let { contract, isOldProxy } = await getProxyContract()

    let calldata = contract.methods.withdraw(data.contract, data.proof, ...data.args).encodeABI()

    if (isOldProxy && getInstance(data.contract).currency !== 'eth') {
      contract = new web3.eth.Contract(tornadoABI, data.contract)
      calldata = contract.methods.withdraw(data.proof, ...data.args).encodeABI()
    }

    const incompleteTx = {
      value: data.args[5],
      to: contract._address,
      data: calldata,
    }
    const [gasPrice, gasLimit] = await Promise.all([
      feeOracle.getGasPrice('relayer_withdrawal'),
      feeOracle.getGasLimit(incompleteTx, 'relayer_withdrawal'),
    ])

    return Object.assign({ gasLimit, gasPrice }, incompleteTx)
  } else {
    const method = data.type === jobType.MINING_REWARD ? 'reward' : 'withdraw'
    const calldata = minerContract.methods[method](data.proof, data.args).encodeABI()
    return {
      to: minerContract._address,
      data: calldata,
      gasLimit: gasLimits[data.type],
    }
  }
}

async function isOutdatedTreeRevert(receipt, currentTx) {
  try {
    await web3.eth.call(currentTx.tx, receipt.blockNumber)
    console.log('Simulated call successful')
    return false
  } catch (e) {
    console.log('Decoded revert reason:', e.message)
    return (
      e.message.indexOf('Outdated account merkle root') !== -1 ||
      e.message.indexOf('Outdated tree update merkle root') !== -1
    )
  }
}

async function processJob(job) {
  try {
    if (!jobType[job.data.type]) {
      throw new RelayerError(`Unknown job type: ${job.data.type}`)
    }
    currentJob = job
    await updateStatus(status.ACCEPTED)
    console.log(`Start processing a new ${job.data.type} job #${job.id}`)
    await submitTx(job)
  } catch (e) {
    console.error('processJob', e.message)
    await updateStatus(status.FAILED)
    throw new RelayerError(e.message)
  }
}

async function checkRevert(tx) {
  try {
    await web3.eth.estimateGas(Object.assign({ from: rewardAccount }, tx))
  } catch (e) {
    throw new Error('Estimation error: transaction will possibly be reverted')
  }
}

async function submitTx(job, retry = 0) {
  await checkRecipient(job)
  await checkFee(job)
  const tx = await getTxObject(job)
  await checkRevert(tx)
  currentTx = await txManager.createTx(tx)

  if (job.data.type !== jobType.TORNADO_WITHDRAW) {
    await fetchTree()
  }

  try {
    const receipt = await currentTx
      .send()
      .on('transactionHash', txHash => {
        updateTxHash(txHash)
        updateStatus(status.SENT)
      })
      .on('mined', receipt => {
        console.log('Mined in block', receipt.blockNumber)
        updateStatus(status.MINED)
      })
      .on('confirmations', updateConfirmations)

    if (receipt.status === 1) {
      await updateStatus(status.CONFIRMED)
    } else {
      if (job.data.type !== jobType.TORNADO_WITHDRAW && (await isOutdatedTreeRevert(receipt, currentTx))) {
        if (retry < 3) {
          await updateStatus(status.RESUBMITTED)
          await submitTx(job, retry + 1)
        } else {
          throw new RelayerError('Tree update retry limit exceeded')
        }
      } else {
        throw new RelayerError('Submitted transaction failed')
      }
    }
  } catch (e) {
    // todo this could result in duplicated error logs
    // todo handle a case where account tree is still not up to date (wait and retry)?
    if (
      job.data.type !== jobType.TORNADO_WITHDRAW &&
      (e.message.indexOf('Outdated account merkle root') !== -1 ||
        e.message.indexOf('Outdated tree update merkle root') !== -1)
    ) {
      if (retry < 5) {
        await sleep(3000)
        console.log('Tree is still not up to date, resubmitting')
        await submitTx(job, retry + 1)
      } else {
        throw new RelayerError('Tree update retry limit exceeded')
      }
    } else {
      throw new RelayerError(`Revert by smart contract ${e.message}`)
    }
  }
}

async function updateTxHash(txHash) {
  console.log(`A new successfully sent tx ${txHash}`)
  currentJob.data.txHash = txHash
  await currentJob.update(currentJob.data)
}

async function updateConfirmations(confirmations) {
  console.log(`Confirmations count ${confirmations}`)
  currentJob.data.confirmations = confirmations
  await currentJob.update(currentJob.data)
}

async function updateStatus(status) {
  console.log(`Job status updated ${status}`)
  currentJob.data.status = status
  await currentJob.update(currentJob.data)
}

async function clearErrors() {
  console.log('Errors list cleared')
  await redis.del('errors')
}

start()
157
test/validator.js
Normal file
157
test/validator.js
Normal file
@ -0,0 +1,157 @@
require('chai').should()

const {
  getTornadoWithdrawInputError,
  getMiningRewardInputError,
  getMiningWithdrawInputError,
} = require('../src/modules/validator')

describe('Validator', () => {
  describe('#getTornadoWithdrawInputError', () => {
    it('should work', () => {
      getTornadoWithdrawInputError(withdrawData)
    })

    it('should throw for incorrect proof', () => {
      const malformedData = { ...withdrawData }
      malformedData.proof = '0xbeef'
      getTornadoWithdrawInputError(malformedData).should.be.equal(
        '.proof should match pattern "^0x[a-fA-F0-9]{512}$"',
      )
    })
    it('should throw if unknown contract', () => {
      const malformedData = { ...withdrawData }
      malformedData.contract = '0xf17f52151ebef6c7334fad080c5704d77216b732'
      getTornadoWithdrawInputError(malformedData).should.be.equal(
        '.contract should pass "isKnownContract" keyword validation',
      )
    })
    it('should throw if something is missing', () => {
      const malformedData = { ...withdrawData }
      delete malformedData.proof
      getTornadoWithdrawInputError(malformedData).should.be.equal(" should have required property 'proof'")
      malformedData.proof = withdrawData.proof

      delete malformedData.args
      getTornadoWithdrawInputError(malformedData).should.be.equal(" should have required property 'args'")
      malformedData.args = withdrawData.args

      delete malformedData.contract
      getTornadoWithdrawInputError(malformedData).should.be.equal(" should have required property 'contract'")
      malformedData.contract = withdrawData.contract
    })
  })

  describe('#getMiningRewardInputError', () => {
    it('should work', () => {
      getMiningRewardInputError(rewardData)
    })

    it('should throw for incorrect proof', () => {
      const malformedData = { ...rewardData }
      malformedData.proof = '0xbeef'
      getMiningRewardInputError(malformedData).should.be.equal(
        '.proof should match pattern "^0x[a-fA-F0-9]{512}$"',
      )
    })

    it('should throw if something is missing', () => {
      const malformedData = { ...rewardData }
      delete malformedData.proof
      getMiningRewardInputError(malformedData).should.be.equal(" should have required property 'proof'")
      malformedData.proof = rewardData.proof

      delete malformedData.args
      getMiningRewardInputError(malformedData).should.be.equal(" should have required property 'args'")
      malformedData.args = rewardData.args
    })
  })

  describe('#getMiningWithdrawInputError', () => {
    it('should work', () => {
      getMiningWithdrawInputError(miningWithdrawData)
    })

    it('should throw for incorrect proof', () => {
      const malformedData = { ...miningWithdrawData }
      malformedData.proof = '0xbeef'
      getMiningWithdrawInputError(malformedData).should.be.equal(
        '.proof should match pattern "^0x[a-fA-F0-9]{512}$"',
      )
    })

    it('should throw if something is missing', () => {
      const malformedData = { ...miningWithdrawData }
      delete malformedData.proof
      getMiningWithdrawInputError(malformedData).should.be.equal(" should have required property 'proof'")
      malformedData.proof = miningWithdrawData.proof

      delete malformedData.args
      getMiningWithdrawInputError(malformedData).should.be.equal(" should have required property 'args'")
      malformedData.args = miningWithdrawData.args
    })
  })
})

const withdrawData = {
  proof:
    '0x0f8cb4c2ca9cbb23a5f21475773e19e39d3470436d7296f25c8730d19d88fcef2986ec694ad094f4c5fff79a4e5043bd553df20b23108bc023ec3670718143c20cc49c6d9798e1ae831fd32a878b96ff8897728f9b7963f0d5a4b5574426ac6203b2456d360b8e825d8f5731970bf1fc1b95b9713e3b24203667ecdd5939c2e40dec48f9e51d9cc8dc2f7f3916f0e9e31519c7df2bea8c51a195eb0f57beea4924cb846deaa78cdcbe361a6c310638af6f6157317bc27d74746bfaa2e1f8d2e9088fd10fa62100740874cdffdd6feb15c95c5a303f6bc226d5e51619c5b825471a17ddfeb05b250c0802261f7d05cf29a39a72c13e200e5bc721b0e4c50d55e6',
  args: [
    '0x1579d41e5290ab5bcec9a7df16705e49b5c0b869095299196c19c5e14462c9e3',
    '0x0cf7f49c5b35c48b9e1d43713e0b46a75977e3d10521e9ac1e4c3cd5e3da1c5d',
    '0xbd4369dc854c5d5b79fe25492e3a3cfcb5d02da5',
    '0x0000000000000000000000000000000000000000',
    '0x000000000000000000000000000000000000000000000000058d15e176280000',
    '0x0000000000000000000000000000000000000000000000000000000000000000',
  ],
  contract: '0x47CE0C6eD5B0Ce3d3A51fdb1C52DC66a7c3c2936',
}

const rewardData = {
  proof:
    '0x2e0f4c76b35ce3275bf57492cbe12ddc76fae4eabdbeaacdcc7cd5255d0abb2325bd80b2a867f9c1bab854de5d7c443a18eb9ad796943dd53c30c04e8f0a37ae164916c932776b3c28dd49808a5d5e1648d8bc9006b2386096b88757644ce8f102f7e2f1505bb66385a1d53a101922a17d8ab653694dedd7d150ec71d543202e0f0a67e5d59904d75af1c52bef4dfac0a302c2beb2ca3bb29b6bbbe1038368702e5ba8d6d829d74968a94e321cc91cccbc0654f5df6460a0a6ad73b06c42b7d1289ff36655fc7106b5538bd2c6617dd0c313919331e63bcb4de9c9b45dc2207b098a5729efbecf79a4cab39ade3c99e5772bfbe5ae75d932facbf9e0910a34ae',
  args: {
    rate: '0x000000000000000000000000000000000000000000000000000000000000000a',
    fee: '0x0000000000000000000000000000000000000000000000000000000000000000',
    instance: '0x8b3f5393bA08c24cc7ff5A66a832562aAB7bC95f',
    rewardNullifier: '0x08fdc416b85c76d246925994ae0c0df539789fd1669c45b57104907c7ef8b0b5',
    extDataHash: '0x006c5f12c20933beab10cfffab31ea0c9d736cf9aa868ee29eed3047d4ea4c2e',
    depositRoot: '0x0405962838a47fb25ffd75d80d53b268654a06bc1bdde7e5ad94c675c2f2f0ff',
    withdrawalRoot: '0x1cd83f5df5dbc826fecbf6be87f05db9c9dc617a3f1b1f3a421b1335c1ff7dbf',
    extData: {
      relayer: '0x0000000000000000000000000000000000000000',
      encryptedAccount:
        '0x6a8494fca4c433ef323d03f0db3fede90c3d2c6f216d73345ffc77ceec79622f327a83c4254063a3027620c262835e335fa32c33600a70547a53b2aa311d3ff35cf943e8f9e8f321f60d4266f680e0606a5837d78deb4d74c8b4fa3e9b67414513c71b73e38995cd8d57fd08aa9e135b342cecaf4128d4cfbb26148022e7a87da8b2423440b62034be202a6a48b45baa9736def6455771b442baaf2358fc52aa6c1d14a9a452b064d280fafd69f2a3ba416c10c1d8276f1c3810c664b24e0f1eefc75d63',
    },
    account: {
      inputRoot: '0x22e875e5e54d8569fb40d0c568984e87b4c97da6383d8d8a334a79e22b48fd54',
      inputNullifierHash: '0x24be972a00e3938a58f44ea6f8ead271ecdd6ab2cab42d1910fb7190b5816188',
      outputRoot: '0x04a3cd1e37487dcee5da51cbce4245742903262a5824aef77fb7aff84a3cb053',
      outputPathIndices: '0x0000000000000000000000000000000000000000000000000000000000000000',
      outputCommitment: '0x0ae58c1605312bd42fffdfc41d5e0f9a364ad458717c522bf9338068ab258601',
    },
  },
}

const miningWithdrawData = {
  proof:
    '0x087c02cdc5946b44f295e1adb8b65341708fe43854e44f05f205da6e46e2e4c4248b2dd5ee30236e7be2ea657265765b4e43dae263d67ff43190bb806faaafc10dd0a771f9d589b5061ddf0a713f27fc0b496d1b136dc4e98838b88f60efb072087c3018fa5c25b1f78b4bb968291b9afa3966d976e961d0a86719a8e07d771209dad29620f3bc2fc21c00510749a19e7ff369ade6b9fd1a7f05b74e70faee771fd839c710bd983927c9d3d5f39bb5e839a2ece19e899c4d50a91b29d5ac3f1a0e8faf7eeb2f6f672561bfba39bcb1d851f6c97d5c14b7fce6661cf315af3468119855a426fc4df511e848011bcdb704369deba20541a7651ab4d5813a60c056',
  args: {
    amount: '0x000000000000000000000000000000000000000000000000000000000000000f',
    fee: '0x0000000000000000000000000000000000000000000000000000000000000000',
    extDataHash: '0x00d95a201b89061613b5bc539bcf8fdee63a400ea80f1f5e813d6aacfee3ec67',
    extData: {
      recipient: '0xf17f52151ebef6c7334fad080c5704d77216b732',
      relayer: '0x0000000000000000000000000000000000000000',
      encryptedAccount:
        '0x4bd7f84edab796b390181d8b1dd850c418c8b3fe41d63b9677b7b99a2fadc505dcc70df336a42847dc00fa39175d16ddfec0d80dc166282e024b5371f561467651ed94e71524fa2e365a8330b053d5cff7c3bcc3564b335fb9e74fb805a3a6e760b811db60e5d6b4e154376196c3cb61457bac6d5ea804f63208a389555cde72f40ab1b94705e728f692e699fc441504b9df34390b3992a1a1eac160dcf0df0b5c5a9ec9cd6c0c8f5f8aa11627fdf2b3bedece5836e9ca38b09d70ff7ba06702971d245d',
    },
    account: {
      inputRoot: '0x1a756aeee7f7d05f276b20c8ca83150e110e1a436c2d959e501ab306420ab536',
      inputNullifierHash: '0x0dc8ea0330171a1f868ef5f3f9f92e919d7be754846f6145c5e7819e87738e65',
      outputRoot: '0x0d9d85371bd8c941400ae54815491799e98d1f335a9d263e41f0b81f22b55aa8',
      outputPathIndices: '0x0000000000000000000000000000000000000000000000000000000000000001',
      outputCommitment: '0x1ebd38a8bc53f47386687386397c8b5cefd33d55341b62a2a576b39d9bcec57c',
    },
  },
}