init
commit d464eef3f6
5 .dockerignore Normal file
@@ -0,0 +1,5 @@
node_modules
.git
Dockerfile
cache
artifacts
9 .editorconfig Normal file
@@ -0,0 +1,9 @@
root = true

[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
27 .eslintrc Normal file
@@ -0,0 +1,27 @@
{
  "env": {
    "node": true,
    "browser": true,
    "es6": true,
    "mocha": true
  },
  "extends": ["eslint:recommended", "plugin:prettier/recommended", "prettier"],
  "globals": {
    "Atomics": "readonly",
    "SharedArrayBuffer": "readonly"
  },
  "parser": "babel-eslint",
  "parserOptions": {
    "ecmaVersion": 2018
  },
  "rules": {
    "indent": ["error", 2],
    "linebreak-style": ["error", "unix"],
    "quotes": ["error", "single"],
    "semi": ["error", "never"],
    "object-curly-spacing": ["error", "always"],
    "comma-dangle": ["error", "always-multiline"],
    "require-await": "error",
    "prettier/prettier": ["error", { "printWidth": 110 }]
  }
}
1 .gitattributes vendored Normal file
@@ -0,0 +1 @@
*.sol linguist-language=Solidity
5 .gitignore vendored Normal file
@@ -0,0 +1,5 @@
node_modules

#Hardhat files
cache
artifacts
1 .nvmrc Normal file
@@ -0,0 +1 @@
12
8 .prettierignore Normal file
@@ -0,0 +1,8 @@
.vscode
build
circuits
scripts
contracts/verifiers/RewardVerifier.sol
contracts/verifiers/WithdrawVerifier.sol
contracts/verifiers/TreeUpdateVerifier.sol
contracts/utils/FloatMath.sol
16 .prettierrc Normal file
@@ -0,0 +1,16 @@
{
  "singleQuote": true,
  "trailingComma": "all",
  "bracketSpacing": true,
  "semi": false,
  "printWidth": 110,
  "overrides": [
    {
      "files": "*.sol",
      "options": {
        "singleQuote": false,
        "printWidth": 130
      }
    }
  ]
}
13 .solhint.json Normal file
@@ -0,0 +1,13 @@
{
  "extends": "solhint:recommended",
  "rules": {
    "prettier/prettier": [
      "error",
      {
        "printWidth": 110
      }
    ],
    "quotes": ["error", "double"]
  },
  "plugins": ["prettier"]
}
22 Dockerfile Normal file
@@ -0,0 +1,22 @@
FROM node:14-buster

ENTRYPOINT bash
RUN apt-get update && \
    apt-get install -y libgmp-dev nlohmann-json3-dev nasm g++ git curl && \
    rm -rf /var/lib/apt/lists/*

RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
RUN cargo install zkutil

WORKDIR /app
COPY package.json yarn.lock ./
RUN yarn
COPY circuits circuits
COPY scripts scripts
# ENV NODE_OPTIONS='--trace-gc --trace-gc-ignore-scavenger --max-old-space-size=2048000 --initial-old-space-size=2048000 --no-global-gc-scheduling --no-incremental-marking --max-semi-space-size=1024 --initial-heap-size=2048000'
ENV NODE_OPTIONS='--max-old-space-size=2048000'
RUN mkdir -p build/circuits
RUN yarn circuit:batchTreeUpdateLarge
RUN yarn circuit:batchTreeUpdateWitness
COPY . .
22 LICENSE Normal file
@@ -0,0 +1,22 @@
The MIT License (MIT)

Copyright (c) 2018 Truffle

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
18 README.md Normal file
@@ -0,0 +1,18 @@
# Tornado.cash trees [![Build Status](https://github.com/tornadocash/tornado-anonymity-mining/workflows/build/badge.svg)](https://github.com/tornadocash/tornado-anonymity-mining/actions) [![npm](https://img.shields.io/npm/v/tornado-anonymity-mining)](https://www.npmjs.com/package/tornado-anonymity-mining)

This repo implements a more optimized version of the [TornadoTrees](https://github.com/tornadocash/tornado-anonymity-mining/blob/080d0f83665fa686d7fe42dd57fb5975d0f1ca58/contracts/TornadoTrees.sol) mechanism.

## Dependencies

1. node 12
2. yarn
3. zkutil (`brew install rust && cargo install zkutil`)

## Start

```bash
$ yarn
$ cp .env.example .env
$ yarn circuit
$ yarn test
```
106 circuits/BatchTreeUpdate.circom Normal file
@@ -0,0 +1,106 @@
include "../node_modules/circomlib/circuits/poseidon.circom";
include "../node_modules/circomlib/circuits/bitify.circom";
include "./MerkleTreeUpdater.circom";
include "./Utils.circom";

template TreeLayer(height) {
  signal input ins[1 << (height + 1)];
  signal output outs[1 << height];

  component hash[1 << height];
  for(var i = 0; i < (1 << height); i++) {
    hash[i] = HashLeftRight();
    hash[i].left <== ins[i * 2];
    hash[i].right <== ins[i * 2 + 1];
    hash[i].hash ==> outs[i];
  }
}

// Inserts a leaf batch into a tree
// Checks that tree previously contained zero leaves in the same position
template BatchTreeUpdate(levels, batchLevels, zeroBatchLeaf) {
  var height = levels - batchLevels;
  var nLeaves = 1 << batchLevels;
  signal input argsHash;
  signal private input oldRoot;
  signal private input newRoot;
  signal private input pathIndices;
  signal private input pathElements[height];
  signal private input hashes[nLeaves];
  signal private input instances[nLeaves];
  signal private input blocks[nLeaves];

  // Check that hash of arguments is correct
  // We compress arguments into a single hash to considerably reduce gas usage on chain
  component argsHasher = TreeUpdateArgsHasher(nLeaves);
  argsHasher.oldRoot <== oldRoot;
  argsHasher.newRoot <== newRoot;
  argsHasher.pathIndices <== pathIndices;
  for(var i = 0; i < nLeaves; i++) {
    argsHasher.hashes[i] <== hashes[i];
    argsHasher.instances[i] <== instances[i];
    argsHasher.blocks[i] <== blocks[i];
  }
  argsHash === argsHasher.out;

  // Compute hashes of all leaves
  component leaves[nLeaves];
  for(var i = 0; i < nLeaves; i++) {
    leaves[i] = Poseidon(3);
    leaves[i].inputs[0] <== instances[i];
    leaves[i].inputs[1] <== hashes[i];
    leaves[i].inputs[2] <== blocks[i];
  }

  // Compute batch subtree merkle root
  component layers[batchLevels];
  for(var level = batchLevels - 1; level >= 0; level--) {
    layers[level] = TreeLayer(level);
    for(var i = 0; i < (1 << (level + 1)); i++) {
      layers[level].ins[i] <== level == batchLevels - 1 ? leaves[i].out : layers[level + 1].outs[i];
    }
  }

  // Verify that batch subtree was inserted correctly
  component treeUpdater = MerkleTreeUpdater(height, zeroBatchLeaf);
  treeUpdater.oldRoot <== oldRoot;
  treeUpdater.newRoot <== newRoot;
  treeUpdater.leaf <== layers[0].outs[0];
  treeUpdater.pathIndices <== pathIndices;
  for(var i = 0; i < height; i++) {
    treeUpdater.pathElements[i] <== pathElements[i];
  }
}

// zeroLeaf = keccak256("tornado") % FIELD_SIZE
// zeroBatchLeaf is poseidon(zeroLeaf, zeroLeaf) applied batchLevels times
// (the constants below are the batchLevels-th entries of the zeros lists that follow)
component main = BatchTreeUpdate(20, 2, 18183130938628345667957803100405002905363080101794697711581833408293369315484);

// for mainnet use 20, 7, 17278668323652664881420209773995988768195998574629614593395162463145689805534

/*
circom has new incompatible poseidon. Temp zeroes list for it: //todo change before going to prod
21663839004416932945382355908790599225266501822907911457504978515578255421292
6558759280185035534262768457043627242000481389071721366452285175835842504378
18183130938628345667957803100405002905363080101794697711581833408293369315484
1649420513912702105317807420227791840680436863908773772957989215330544908558
1519470092809891639075459401377052367516123764590098800258495876383484284269
11483247544333149638457900649034027061705091890934198553446561731884583952449
1676192069326199968794155063658499005818740739752078691901135837045539426569
3691667188955435348598133121052392834370851623525570836101013075316308823822
1023697156307946028208019788980135577833715295171390138750488664899512032833
*/

/*
zeros of n-th order:
21663839004416932945382355908790599225266501822907911457504978515578255421292
11850551329423159860688778991827824730037759162201783566284850822760196767874
21572503925325825116380792768937986743990254033176521064707045559165336555197
11224495635916644180335675565949106569141882748352237685396337327907709534945
2399242030534463392142674970266584742013168677609861039634639961298697064915
13182067204896548373877843501261957052850428877096289097123906067079378150834
7106632500398372645836762576259242192202230138343760620842346283595225511823
17278668323652664881420209773995988768195998574629614593395162463145689805534
209436188287252095316293336871467217491997565239632454977424802439169726471
6509061943359659796226067852175931816441223836265895622135845733346450111408
*/
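Note: the zero-leaf constants above can be reproduced off-chain. A minimal sketch, assuming `circomlib` and `web3-utils` are available (src/controller.js and src/utils.js below already require both) and that circomlib's JS `poseidon` matches the circuit's Poseidon — per the todo comment above, circom's Poseidon has diverged across versions, so check the result against the zeros lists:

```js
// Sketch: derive zeroLeaf and zeroBatchLeaf as described in the comments above.
const { poseidon } = require('circomlib')
const { toBN, soliditySha3 } = require('web3-utils')

const FIELD_SIZE = toBN('21888242871839275222246405745257275088548364400416034343698204186575808495617')

// zeroLeaf = keccak256("tornado") % FIELD_SIZE
const zeroLeaf = toBN(soliditySha3('tornado')).mod(FIELD_SIZE)

// zeroBatchLeaf = poseidon(zeroLeaf, zeroLeaf) applied batchLevels times
function zeroBatchLeaf(batchLevels) {
  let leaf = zeroLeaf
  for (let i = 0; i < batchLevels; i++) {
    leaf = toBN(poseidon([leaf.toString(), leaf.toString()]).toString())
  }
  return leaf
}

// For batchLevels = 2 this should print the third entry of the zeros list
console.log(zeroBatchLeaf(2).toString())
```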
71 circuits/MerkleTree.circom Normal file
@@ -0,0 +1,71 @@
include "../node_modules/circomlib/circuits/poseidon.circom";
include "../node_modules/circomlib/circuits/bitify.circom";

// Computes Poseidon([left, right])
template HashLeftRight() {
  signal input left;
  signal input right;
  signal output hash;

  component hasher = Poseidon(2);
  hasher.inputs[0] <== left;
  hasher.inputs[1] <== right;
  hash <== hasher.out;
}

// if s == 0 returns [in[0], in[1]]
// if s == 1 returns [in[1], in[0]]
template DualMux() {
  signal input in[2];
  signal input s;
  signal output out[2];

  s * (1 - s) === 0;
  out[0] <== (in[1] - in[0])*s + in[0];
  out[1] <== (in[0] - in[1])*s + in[1];
}

// Verifies that merkle proof is correct for given merkle root and a leaf
// pathIndices input is an array of 0/1 selectors telling whether given pathElement is on the left or right side of merkle path
template RawMerkleTree(levels) {
  signal input leaf;
  signal input pathElements[levels];
  signal input pathIndices[levels];

  signal output root;

  component selectors[levels];
  component hashers[levels];

  for (var i = 0; i < levels; i++) {
    selectors[i] = DualMux();
    selectors[i].in[0] <== i == 0 ? leaf : hashers[i - 1].hash;
    selectors[i].in[1] <== pathElements[i];
    selectors[i].s <== pathIndices[i];

    hashers[i] = HashLeftRight();
    hashers[i].left <== selectors[i].out[0];
    hashers[i].right <== selectors[i].out[1];
  }

  root <== hashers[levels - 1].hash;
}

template MerkleTree(levels) {
  signal input leaf;
  signal input pathElements[levels];
  signal input pathIndices;
  signal output root;

  component indexBits = Num2Bits(levels);
  indexBits.in <== pathIndices;

  component tree = RawMerkleTree(levels);
  tree.leaf <== leaf;
  for (var i = 0; i < levels; i++) {
    tree.pathIndices[i] <== indexBits.out[i];
    tree.pathElements[i] <== pathElements[i];
  }

  root <== tree.root;
}
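Note: the DualMux selector logic has a direct JS analogue, which may help when preparing `pathElements`/`pathIndices` inputs. A minimal sketch, assuming `poseidonHash2` from src/utils.js matches the circuit's HashLeftRight (the function name `merkleRoot` is illustrative):

```js
// Sketch: recompute a Merkle root from a leaf and its path, mirroring
// RawMerkleTree: pathIndices[i] == 0 keeps the running hash on the left.
const { poseidonHash2 } = require('./src/utils')

function merkleRoot(leaf, pathElements, pathIndices) {
  let hash = leaf
  for (let i = 0; i < pathElements.length; i++) {
    hash =
      pathIndices[i] === 0
        ? poseidonHash2(hash, pathElements[i])
        : poseidonHash2(pathElements[i], hash)
  }
  return hash
}
```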
33 circuits/MerkleTreeUpdater.circom Normal file
@@ -0,0 +1,33 @@
include "./MerkleTree.circom";

// inserts a leaf into a tree
// checks that tree previously contained zero in the same position
template MerkleTreeUpdater(levels, zeroLeaf) {
  signal input oldRoot;
  signal input newRoot;
  signal input leaf;
  signal input pathIndices;
  signal private input pathElements[levels];

  // Compute indexBits once for both trees
  // Since Num2Bits is non deterministic, 2 duplicate calls to it cannot be
  // optimized by circom compiler
  component indexBits = Num2Bits(levels);
  indexBits.in <== pathIndices;

  component treeBefore = RawMerkleTree(levels);
  for(var i = 0; i < levels; i++) {
    treeBefore.pathIndices[i] <== indexBits.out[i];
    treeBefore.pathElements[i] <== pathElements[i];
  }
  treeBefore.leaf <== zeroLeaf;
  treeBefore.root === oldRoot;

  component treeAfter = RawMerkleTree(levels);
  for(var i = 0; i < levels; i++) {
    treeAfter.pathIndices[i] <== indexBits.out[i];
    treeAfter.pathElements[i] <== pathElements[i];
  }
  treeAfter.leaf <== leaf;
  treeAfter.root === newRoot;
}
58 circuits/Utils.circom Normal file
@@ -0,0 +1,58 @@
include "../node_modules/circomlib/circuits/bitify.circom";
include "../node_modules/circomlib/circuits/sha256/sha256.circom";

template TreeUpdateArgsHasher(nLeaves) {
  signal private input oldRoot;
  signal private input newRoot;
  signal private input pathIndices;
  signal private input instances[nLeaves];
  signal private input hashes[nLeaves];
  signal private input blocks[nLeaves];
  signal output out;

  var header = 256 + 256 + 32;
  var bitsPerLeaf = 160 + 256 + 32;
  component hasher = Sha256(header + nLeaves * bitsPerLeaf);

  component bitsOldRoot = Num2Bits(256);
  component bitsNewRoot = Num2Bits(256);
  component bitsPathIndices = Num2Bits(32);
  component bitsInstance[nLeaves];
  component bitsHash[nLeaves];
  component bitsBlock[nLeaves];

  bitsOldRoot.in <== oldRoot;
  bitsNewRoot.in <== newRoot;
  bitsPathIndices.in <== pathIndices;
  for(var i = 0; i < 256; i++) {
    hasher.in[i] <== bitsOldRoot.out[255 - i];
  }
  for(var i = 0; i < 256; i++) {
    hasher.in[i + 256] <== bitsNewRoot.out[255 - i];
  }
  for(var i = 0; i < 32; i++) {
    hasher.in[i + 512] <== bitsPathIndices.out[31 - i];
  }
  for(var leaf = 0; leaf < nLeaves; leaf++) {
    bitsHash[leaf] = Num2Bits(256);
    bitsInstance[leaf] = Num2Bits(160);
    bitsBlock[leaf] = Num2Bits(32);
    bitsHash[leaf].in <== hashes[leaf];
    bitsInstance[leaf].in <== instances[leaf];
    bitsBlock[leaf].in <== blocks[leaf];
    for(var i = 0; i < 256; i++) {
      hasher.in[header + leaf * bitsPerLeaf + i] <== bitsHash[leaf].out[255 - i];
    }
    for(var i = 0; i < 160; i++) {
      hasher.in[header + leaf * bitsPerLeaf + i + 256] <== bitsInstance[leaf].out[159 - i];
    }
    for(var i = 0; i < 32; i++) {
      hasher.in[header + leaf * bitsPerLeaf + i + 416] <== bitsBlock[leaf].out[31 - i];
    }
  }
  component b2n = Bits2Num(256);
  for (var i = 0; i < 256; i++) {
    b2n.in[i] <== hasher.out[255 - i];
  }
  out <== b2n.out;
}
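Note: the preimage hashed here is a 544-bit big-endian header (256-bit oldRoot, 256-bit newRoot, 32-bit pathIndices) followed by 448 bits per leaf (256-bit hash, 160-bit instance, 32-bit block). A minimal sketch of the matching off-chain computation, assuming `jssha` and `web3-utils` as used by src/controller.js below, which implements the same packing in `hashInputs`:

```js
// Sketch: compute the argsHash that TreeUpdateArgsHasher outputs, i.e. the
// SHA-256 of the packed arguments reduced into the snark field.
const jsSHA = require('jssha')
const { toBN } = require('web3-utils')

const SNARK_FIELD = toBN('21888242871839275222246405745257275088548364400416034343698204186575808495617')

function argsHash({ oldRoot, newRoot, pathIndices, instances, hashes, blocks }) {
  const sha = new jsSHA('SHA-256', 'ARRAYBUFFER')
  sha.update(toBN(oldRoot).toBuffer('be', 32)) // 256-bit header field
  sha.update(toBN(newRoot).toBuffer('be', 32)) // 256-bit header field
  sha.update(toBN(pathIndices).toBuffer('be', 4)) // 32-bit header field
  for (let i = 0; i < hashes.length; i++) {
    sha.update(toBN(hashes[i]).toBuffer('be', 32)) // 256 bits per leaf
    sha.update(toBN(instances[i]).toBuffer('be', 20)) // 160 bits per leaf
    sha.update(toBN(blocks[i]).toBuffer('be', 4)) // 32 bits per leaf
  }
  // Bits2Num(256) in the circuit implicitly reduces the digest mod the field
  return toBN('0x' + sha.getHash('HEX')).mod(SNARK_FIELD).toString()
}
```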
23 contracts/Greeter.sol Normal file
@@ -0,0 +1,23 @@
//SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;

import "hardhat/console.sol";

contract Greeter {
  string greeting;

  constructor(string memory _greeting) public {
    console.log("Deploying a Greeter with greeting:", _greeting);
    greeting = _greeting;
  }

  function greet() public view returns (string memory) {
    return greeting;
  }

  function setGreeting(string memory _greeting) public {
    console.log("Changing greeting from '%s' to '%s'", greeting, _greeting);
    greeting = _greeting;
  }
}
195 contracts/TornadoTrees.sol Normal file
@@ -0,0 +1,195 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.6.0;
pragma experimental ABIEncoderV2;

import "torn-token/contracts/ENS.sol";
import "./interfaces/ITornadoTrees.sol";
import "./interfaces/IVerifier.sol";

contract TornadoTrees is ITornadoTrees, EnsResolve {
  address public immutable governance;
  bytes32 public depositRoot;
  bytes32 public previousDepositRoot;
  bytes32 public withdrawalRoot;
  bytes32 public previousWithdrawalRoot;
  address public tornadoProxy;
  IVerifier public immutable treeUpdateVerifier;

  // make sure CHUNK_TREE_HEIGHT has the same value in BatchTreeUpdate.circom and IVerifier.sol
  uint256 public constant CHUNK_TREE_HEIGHT = 7;
  uint256 public constant CHUNK_SIZE = 2**CHUNK_TREE_HEIGHT;
  uint256 public constant ITEM_SIZE = 32 + 20 + 4;
  uint256 public constant BYTES_SIZE = 32 + 32 + 4 + CHUNK_SIZE * ITEM_SIZE;
  uint256 public constant SNARK_FIELD = 21888242871839275222246405745257275088548364400416034343698204186575808495617;

  bytes32[] public deposits;
  uint256 public lastProcessedDepositLeaf;

  bytes32[] public withdrawals;
  uint256 public lastProcessedWithdrawalLeaf;

  event DepositData(address instance, bytes32 indexed hash, uint256 block, uint256 index);
  event WithdrawalData(address instance, bytes32 indexed hash, uint256 block, uint256 index);

  struct TreeLeaf {
    bytes32 hash;
    address instance;
    uint32 block;
  }

  struct Batch {
    bytes32 oldRoot;
    bytes32 newRoot;
    uint8 pathIndices;
    TreeLeaf[CHUNK_SIZE] events;
  }

  modifier onlyTornadoProxy {
    require(msg.sender == tornadoProxy, "Not authorized");
    _;
  }

  modifier onlyGovernance() {
    require(msg.sender == governance, "Only governance can perform this action");
    _;
  }

  constructor(
    bytes32 _governance,
    bytes32 _tornadoProxy,
    bytes32 _treeUpdateVerifier,
    bytes32 _depositRoot,
    bytes32 _withdrawalRoot
  ) public {
    governance = resolve(_governance);
    tornadoProxy = resolve(_tornadoProxy);
    treeUpdateVerifier = IVerifier(resolve(_treeUpdateVerifier));
    depositRoot = _depositRoot;
    withdrawalRoot = _withdrawalRoot;
  }

  function registerDeposit(address _instance, bytes32 _commitment) external override onlyTornadoProxy {
    deposits.push(keccak256(abi.encode(_instance, _commitment, blockNumber())));
    emit DepositData(_instance, _commitment, blockNumber(), deposits.length - 1);
  }

  function registerWithdrawal(address _instance, bytes32 _nullifier) external override onlyTornadoProxy {
    withdrawals.push(keccak256(abi.encode(_instance, _nullifier, blockNumber())));
    emit WithdrawalData(_instance, _nullifier, blockNumber(), withdrawals.length - 1);
  }

  // todo !!! ensure that during migration the tree is filled evenly
  function updateDepositTree(
    bytes calldata _proof,
    bytes32 _argsHash,
    bytes32 _currentRoot,
    bytes32 _newRoot,
    uint32 _pathIndices,
    TreeLeaf[CHUNK_SIZE] calldata _events
  ) public {
    uint256 offset = lastProcessedDepositLeaf;
    require(_newRoot != previousDepositRoot, "Outdated deposit root");
    require(_currentRoot == depositRoot, "Proposed deposit root is invalid");
    require(_pathIndices == offset >> CHUNK_TREE_HEIGHT, "Incorrect insert index");
    require(uint256(_newRoot) < SNARK_FIELD, "Proposed root is out of range"); // optional

    bytes memory data = new bytes(BYTES_SIZE);
    assembly {
      mstore(add(data, 0x44), _pathIndices)
      mstore(add(data, 0x40), _newRoot)
      mstore(add(data, 0x20), _currentRoot)
    }
    for (uint256 i = 0; i < CHUNK_SIZE; i++) {
      (bytes32 hash, address instance, uint32 depositBlock) = (_events[i].hash, _events[i].instance, _events[i].block);
      bytes32 leafHash = keccak256(abi.encode(instance, hash, depositBlock));
      require(leafHash == deposits[offset + i], "Incorrect deposit");
      require(uint256(hash) < SNARK_FIELD, "Hash out of range"); // optional
      assembly {
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), depositBlock)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x78), instance)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x64), hash)
      }
      delete deposits[offset + i];
    }

    uint256 argsHash = uint256(sha256(data)) % SNARK_FIELD;
    require(argsHash == uint256(_argsHash), "Invalid args hash");
    require(treeUpdateVerifier.verifyProof(_proof, [argsHash]), "Invalid deposit tree update proof");

    previousDepositRoot = _currentRoot;
    depositRoot = _newRoot;
    lastProcessedDepositLeaf = offset + CHUNK_SIZE;
  }

  function updateWithdrawalTree(
    bytes calldata _proof,
    bytes32 _argsHash,
    bytes32 _currentRoot,
    bytes32 _newRoot,
    uint256 _pathIndices,
    TreeLeaf[CHUNK_SIZE] calldata _events
  ) public {
    uint256 offset = lastProcessedWithdrawalLeaf;
    require(_newRoot != previousWithdrawalRoot, "Outdated withdrawal root");
    require(_currentRoot == withdrawalRoot, "Proposed withdrawal root is invalid");
    require(_pathIndices == offset >> CHUNK_TREE_HEIGHT, "Incorrect insert index");
    require(uint256(_newRoot) < SNARK_FIELD, "Proposed root is out of range");

    bytes memory data = new bytes(BYTES_SIZE);
    assembly {
      mstore(add(data, 0x44), _pathIndices)
      mstore(add(data, 0x40), _newRoot)
      mstore(add(data, 0x20), _currentRoot)
    }
    for (uint256 i = 0; i < CHUNK_SIZE; i++) {
      (bytes32 hash, address instance, uint32 withdrawalBlock) = (_events[i].hash, _events[i].instance, _events[i].block);
      bytes32 leafHash = keccak256(abi.encode(instance, hash, withdrawalBlock));
      require(leafHash == withdrawals[offset + i], "Incorrect withdrawal");
      require(uint256(hash) < SNARK_FIELD, "Hash out of range");
      assembly {
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), withdrawalBlock)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x78), instance)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x64), hash)
      }
      delete withdrawals[offset + i];
    }

    uint256 argsHash = uint256(sha256(data)) % SNARK_FIELD;
    require(argsHash == uint256(_argsHash), "Invalid args hash");
    require(treeUpdateVerifier.verifyProof(_proof, [argsHash]), "Invalid withdrawal tree update proof");

    previousWithdrawalRoot = _currentRoot;
    withdrawalRoot = _newRoot;
    lastProcessedWithdrawalLeaf = offset + CHUNK_SIZE;
  }

  function validateRoots(bytes32 _depositRoot, bytes32 _withdrawalRoot) public view {
    require(_depositRoot == depositRoot || _depositRoot == previousDepositRoot, "Incorrect deposit tree root");
    require(_withdrawalRoot == withdrawalRoot || _withdrawalRoot == previousWithdrawalRoot, "Incorrect withdrawal tree root");
  }

  function getRegisteredDeposits() external view returns (uint256 count, bytes32[] memory _deposits) {
    count = deposits.length - lastProcessedDepositLeaf;
    _deposits = new bytes32[](count);
    for (uint256 i = 0; i < count; i++) {
      _deposits[i] = deposits[lastProcessedDepositLeaf + i];
    }
  }

  function getRegisteredWithdrawals() external view returns (uint256 count, bytes32[] memory _withdrawals) {
    count = withdrawals.length - lastProcessedWithdrawalLeaf;
    _withdrawals = new bytes32[](count);
    for (uint256 i = 0; i < count; i++) {
      _withdrawals[i] = withdrawals[lastProcessedWithdrawalLeaf + i];
    }
  }

  function setTornadoProxyContract(address _tornadoProxy) external onlyGovernance {
    tornadoProxy = _tornadoProxy;
  }

  function blockNumber() public view virtual returns (uint256) {
    return block.number;
  }
}
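Note: `BYTES_SIZE` (32 + 32 + 4 header bytes plus 56 bytes per leaf) matches the SHA-256 preimage of `TreeUpdateArgsHasher` in circuits/Utils.circom bit for bit, and the three header `mstore`s run from the highest offset down so that each later 32-byte write only overwrites the zero padding of the previous one. A minimal call sketch with ethers.js, assuming a deployed contract and a proof produced via src/controller.js (the names `tornadoTrees`, `input`, `proof`, and `events` are illustrative):

```js
// Sketch: submit a deposit-tree batch update. `input` comes from
// batchTreeUpdate() and `proof` from prove() in src/controller.js;
// `events` must hold exactly CHUNK_SIZE leaves.
const { ethers } = require('hardhat')

const toFixedHex = (n, length = 32) =>
  '0x' + ethers.BigNumber.from(n).toHexString().slice(2).padStart(length * 2, '0')

async function submitBatch(tornadoTrees, input, proof, events) {
  const tx = await tornadoTrees.updateDepositTree(
    proof,
    toFixedHex(input.argsHash),
    toFixedHex(input.oldRoot),
    toFixedHex(input.newRoot),
    input.pathIndices,
    events.map((e) => ({
      hash: toFixedHex(e.hash),
      instance: toFixedHex(e.instance, 20),
      block: toFixedHex(e.block, 4),
    })),
  )
  await tx.wait()
}
```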
9 contracts/interfaces/ITornadoTrees.sol Normal file
@@ -0,0 +1,9 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.6.0;

interface ITornadoTrees {
  function registerDeposit(address instance, bytes32 commitment) external;

  function registerWithdrawal(address instance, bytes32 nullifier) external;
}
7 contracts/interfaces/IVerifier.sol Normal file
@@ -0,0 +1,7 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.6.0;

interface IVerifier {
  function verifyProof(bytes calldata proof, uint256[1] calldata input) external view returns (bool);
}
59 contracts/mocks/Pack.sol Normal file
@@ -0,0 +1,59 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.6.0;

contract Pack {
  uint256 public constant CHUNK_TREE_HEIGHT = 7;
  uint256 public constant CHUNK_SIZE = 2**CHUNK_TREE_HEIGHT;
  uint256 public constant ITEM_SIZE = 32 + 20 + 4;
  uint256 public constant BYTES_SIZE = CHUNK_SIZE * ITEM_SIZE;

  uint256 public gas1;
  uint256 public gas2;
  uint256 public gas3;
  uint256 public gas4;
  bytes32 public hash;

  event DepositData(address instance, bytes32 indexed hash, uint256 block, uint256 index);

  function pack2(bytes32[CHUNK_SIZE] memory hashes, address[CHUNK_SIZE] memory instances, uint32[CHUNK_SIZE] memory blocks) public {
    uint256 gasBefore = gasleft();
    bytes memory data = new bytes(BYTES_SIZE);
    for (uint256 i = 0; i < CHUNK_SIZE; i++) {
      (bytes32 leafHash, address instance, uint32 blockNumber) = (hashes[i], instances[i], blocks[i]);
      assembly {
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x38), blockNumber)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x34), instance)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x20), leafHash)
      }
    }
    uint256 gasHash = gasleft();
    bytes32 hash1 = sha256(data);
    uint256 gasEvents = gasleft();
    for (uint256 i = 0; i < CHUNK_SIZE; i++) {
      emit DepositData(instances[i], hashes[i], blocks[i], i);
    }
    gas1 = gasEvents - gasleft();
    gas2 = gasHash - gasEvents;
    gas3 = gasBefore - gasHash;
    gas4 = gasBefore;
    hash = hash1;
  }

  function pack3(bytes32[CHUNK_SIZE] memory hashes, address[CHUNK_SIZE] memory instances, uint32[CHUNK_SIZE] memory blocks) public view returns (uint256, uint256, bytes32) {
    uint256 gasBefore = gasleft();
    bytes memory data = new bytes(BYTES_SIZE);
    for (uint256 i = 0; i < CHUNK_SIZE; i++) {
      (bytes32 leafHash, address instance, uint32 blockNumber) = (hashes[i], instances[i], blocks[i]);
      assembly {
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x38), blockNumber)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x34), instance)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x20), leafHash)
      }
    }
    uint256 gasHash = gasleft();
    bytes32 batchHash = sha256(data);
    // hashing cost, packing cost, and the resulting hash
    return (gasHash - gasleft(), gasBefore - gasHash, batchHash);
  }
}
89 contracts/mocks/TornadoTreesMock.sol Normal file
@@ -0,0 +1,89 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.6.0;
pragma experimental ABIEncoderV2;

import "../TornadoTrees.sol";

contract TornadoTreesMock is TornadoTrees {
  uint256 public currentBlock;

  constructor(
    bytes32 _governance,
    bytes32 _tornadoProxy,
    bytes32 _treeUpdateVerifier,
    bytes32 _depositRoot,
    bytes32 _withdrawalRoot
  ) public TornadoTrees(_governance, _tornadoProxy, _treeUpdateVerifier, _depositRoot, _withdrawalRoot) {}

  function resolve(bytes32 _addr) public view override returns (address) {
    return address(uint160(uint256(_addr) >> (12 * 8)));
  }

  function setBlockNumber(uint256 _blockNumber) public {
    currentBlock = _blockNumber;
  }

  function blockNumber() public view override returns (uint256) {
    return currentBlock == 0 ? block.number : currentBlock;
  }

  function register(
    address _instance,
    bytes32 _commitment,
    bytes32 _nullifier,
    uint256 _depositBlockNumber,
    uint256 _withdrawBlockNumber
  ) public {
    setBlockNumber(_depositBlockNumber);
    deposits.push(keccak256(abi.encode(_instance, _commitment, blockNumber())));
    setBlockNumber(_withdrawBlockNumber);
    withdrawals.push(keccak256(abi.encode(_instance, _nullifier, blockNumber())));
  }

  function updateDepositTreeMock(
    bytes32 _oldRoot,
    bytes32 _newRoot,
    uint32 _pathIndices,
    TreeLeaf[] calldata _events
  ) public pure returns (uint256) {
    bytes memory data = new bytes(BYTES_SIZE);
    assembly {
      mstore(add(data, 0x44), _pathIndices)
      mstore(add(data, 0x40), _newRoot)
      mstore(add(data, 0x20), _oldRoot)
    }
    for (uint256 i = 0; i < CHUNK_SIZE; i++) {
      (bytes32 hash, address instance, uint32 depositBlock) = (_events[i].hash, _events[i].instance, _events[i].block);
      assembly {
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), depositBlock)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x78), instance)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x64), hash)
      }
    }
    return uint256(sha256(data)) % SNARK_FIELD;
  }

  function updateDepositTreeMock2(
    bytes32 _oldRoot,
    bytes32 _newRoot,
    uint32 _pathIndices,
    TreeLeaf[] calldata _events
  ) public pure returns (bytes memory) {
    bytes memory data = new bytes(BYTES_SIZE);
    assembly {
      mstore(add(data, 0x44), _pathIndices)
      mstore(add(data, 0x40), _newRoot)
      mstore(add(data, 0x20), _oldRoot)
    }
    for (uint256 i = 0; i < CHUNK_SIZE; i++) {
      (bytes32 hash, address instance, uint32 depositBlock) = (_events[i].hash, _events[i].instance, _events[i].block);
      assembly {
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), depositBlock)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x78), instance)
        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x64), hash)
      }
    }
    return data;
  }
}
170 contracts/verifiers/BatchTreeUpdateVerifier.sol Normal file
@@ -0,0 +1,170 @@
// SPDX-License-Identifier: MIT

pragma solidity ^0.6.0;

library Pairing {
  uint256 constant PRIME_Q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;

  struct G1Point {
    uint256 X;
    uint256 Y;
  }

  // Encoding of field elements is: X[0] * z + X[1]
  struct G2Point {
    uint256[2] X;
    uint256[2] Y;
  }

  /*
   * @return The negation of p, i.e. p.plus(p.negate()) should be zero
   */
  function negate(G1Point memory p) internal pure returns (G1Point memory) {
    // The prime q in the base field F_q for G1
    if (p.X == 0 && p.Y == 0) {
      return G1Point(0, 0);
    } else {
      return G1Point(p.X, PRIME_Q - (p.Y % PRIME_Q));
    }
  }

  /*
   * @return r the sum of two points of G1
   */
  function plus(
    G1Point memory p1,
    G1Point memory p2
  ) internal view returns (G1Point memory r) {
    uint256[4] memory input = [
      p1.X, p1.Y,
      p2.X, p2.Y
    ];
    bool success;

    // solium-disable-next-line security/no-inline-assembly
    assembly {
      success := staticcall(sub(gas(), 2000), 6, input, 0xc0, r, 0x60)
      // Use "invalid" to make gas estimation work
      switch success case 0 { invalid() }
    }

    require(success, "pairing-add-failed");
  }

  /*
   * @return r the product of a point on G1 and a scalar, i.e.
   * p == p.scalarMul(1) and p.plus(p) == p.scalarMul(2) for all
   * points p.
   */
  function scalarMul(G1Point memory p, uint256 s) internal view returns (G1Point memory r) {
    uint256[3] memory input = [p.X, p.Y, s];
    bool success;

    // solium-disable-next-line security/no-inline-assembly
    assembly {
      success := staticcall(sub(gas(), 2000), 7, input, 0x80, r, 0x60)
      // Use "invalid" to make gas estimation work
      switch success case 0 { invalid() }
    }

    require(success, "pairing-mul-failed");
  }

  /* @return The result of computing the pairing check
   * e(p1[0], p2[0]) * .... * e(p1[n], p2[n]) == 1
   * For example,
   * pairing([P1(), P1().negate()], [P2(), P2()]) should return true.
   */
  function pairing(
    G1Point memory a1,
    G2Point memory a2,
    G1Point memory b1,
    G2Point memory b2,
    G1Point memory c1,
    G2Point memory c2,
    G1Point memory d1,
    G2Point memory d2
  ) internal view returns (bool) {
    uint256[24] memory input = [
      a1.X, a1.Y, a2.X[0], a2.X[1], a2.Y[0], a2.Y[1],
      b1.X, b1.Y, b2.X[0], b2.X[1], b2.Y[0], b2.Y[1],
      c1.X, c1.Y, c2.X[0], c2.X[1], c2.Y[0], c2.Y[1],
      d1.X, d1.Y, d2.X[0], d2.X[1], d2.Y[0], d2.Y[1]
    ];
    uint256[1] memory out;
    bool success;

    // solium-disable-next-line security/no-inline-assembly
    assembly {
      success := staticcall(sub(gas(), 2000), 8, input, mul(24, 0x20), out, 0x20)
      // Use "invalid" to make gas estimation work
      switch success case 0 { invalid() }
    }

    require(success, "pairing-opcode-failed");
    return out[0] != 0;
  }
}

contract BatchTreeUpdateVerifier {
  uint256 constant SNARK_SCALAR_FIELD = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
  uint256 constant PRIME_Q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
  using Pairing for *;

  struct VerifyingKey {
    Pairing.G1Point alfa1;
    Pairing.G2Point beta2;
    Pairing.G2Point gamma2;
    Pairing.G2Point delta2;
    Pairing.G1Point[2] IC;
  }

  function verifyingKey() internal pure returns (VerifyingKey memory vk) {
    vk.alfa1 = Pairing.G1Point(uint256(20475789791681002364587166738311620805815985969091106757478379420262430093495), uint256(3034180384279528157431123624668892018871098425968640214767822771352219138078));
    vk.beta2 = Pairing.G2Point([uint256(347992840312110670849483472224503623225781749273259516677464742758581199694), uint256(16853081403278411985324640353650047676779142117029386935051386044282804346484)], [uint256(10461241566647602546027012417757263991485755060136522105605550609788790829933), uint256(16049761706815422591462572571571264938897676292217555774707799384732883004386)]);
    vk.gamma2 = Pairing.G2Point([uint256(5535450215937949788522672716791294482208969162172756729752675877422249461391), uint256(4537903555000997751027892507073556632992848536024556182449526590439971414042)], [uint256(6688278057604431581483695896713912024597719708930089928002132340517626404891), uint256(15745439923152020754042431613052318298038129099865656040309120795605091105487)]);
    vk.delta2 = Pairing.G2Point([uint256(10712491908603553476637447918495381165104059355722416702328240143919146641319), uint256(15855442659923189569787773688895011287546687523233653745264460947047886121140)], [uint256(18278088599243830423965796542892879791365910862597475788753708589843343437901), uint256(10765606859348375283724614934374540130725132299795942405716724739350245709734)]);
    vk.IC[0] = Pairing.G1Point(uint256(18147360875100520747353841225428915644191762631193821400291387675910597374366), uint256(17222433096548585553756828362569506045947134360392537102794184064340219776032));
    vk.IC[1] = Pairing.G1Point(uint256(3514632146136652297064638325657684436433185732623721288055192259268961814948), uint256(8363257337389338977321440370428118205387545635573906956020792115766452976369));

  }

  /*
   * @returns Whether the proof is valid given the hardcoded verifying key
   * above and the public inputs
   */
  function verifyProof(
    bytes memory proof,
    uint256[1] memory input
  ) public view returns (bool) {
    uint256[8] memory p = abi.decode(proof, (uint256[8]));
    for (uint8 i = 0; i < p.length; i++) {
      // Make sure that each element in the proof is less than the prime q
      require(p[i] < PRIME_Q, "verifier-proof-element-gte-prime-q");
    }
    Pairing.G1Point memory proofA = Pairing.G1Point(p[0], p[1]);
    Pairing.G2Point memory proofB = Pairing.G2Point([p[2], p[3]], [p[4], p[5]]);
    Pairing.G1Point memory proofC = Pairing.G1Point(p[6], p[7]);

    VerifyingKey memory vk = verifyingKey();
    // Compute the linear combination vkX
    Pairing.G1Point memory vkX = vk.IC[0];
    for (uint256 i = 0; i < input.length; i++) {
      // Make sure that every input is less than the snark scalar field
      require(input[i] < SNARK_SCALAR_FIELD, "verifier-input-gte-snark-scalar-field");
      vkX = Pairing.plus(vkX, Pairing.scalarMul(vk.IC[i + 1], input[i]));
    }

    return Pairing.pairing(
      Pairing.negate(proofA),
      proofB,
      vk.alfa1,
      vk.beta2,
      vkX,
      vk.gamma2,
      proofC,
      vk.delta2
    );
  }
}
22 hardhat.config.js Normal file
@@ -0,0 +1,22 @@
require("@nomiclabs/hardhat-waffle");

// This is a sample Hardhat task. To learn how to create your own go to
// https://hardhat.org/guides/create-task.html
task("accounts", "Prints the list of accounts", async () => {
  const accounts = await ethers.getSigners();

  for (const account of accounts) {
    console.log(account.address);
  }
});

// You need to export an object to set up your config
// Go to https://hardhat.org/config/ to learn more

/**
 * @type import('hardhat/config').HardhatUserConfig
 */
module.exports = {
  solidity: "0.6.12",
};
47 optimize/Dockerfile Normal file
@@ -0,0 +1,47 @@
FROM ubuntu

RUN apt-get update && \
    apt-get install -y python3 python3-distutils g++ make curl git && \
    rm -rf /var/lib/apt/lists/*

# Install nvm with node and npm
RUN rm /bin/sh && ln -s /bin/bash /bin/sh
ENV NVM_DIR /usr/local/nvm
ENV NODE_VERSION 14.8.0
RUN curl https://raw.githubusercontent.com/creationix/nvm/v0.30.1/install.sh | bash \
    && source $NVM_DIR/nvm.sh \
    && nvm install $NODE_VERSION \
    && nvm alias default $NODE_VERSION \
    && nvm use default
ENV NODE_PATH $NVM_DIR/v$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node/v$NODE_VERSION/bin:$PATH
RUN node --version

WORKDIR /root

RUN git clone https://github.com/nodejs/node.git
RUN git clone https://github.com/iden3/circom.git

COPY node.sh /tmp

RUN apt-get update && apt-get install -y ninja-build
RUN /tmp/node.sh

RUN cd circom && \
    git checkout v0.5.35 && \
    npm install

RUN git clone https://github.com/iden3/r1csoptimize
RUN cd r1csoptimize && \
    git checkout 8bc528b06c0f98818d1b5224e2078397f0bb7faf && \
    npm install

RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
RUN source $HOME/.cargo/env && cargo install zkutil
RUN npm install -g circom snarkjs

WORKDIR /root/test
RUN npm init -y && npm install circomlib
RUN apt-get update && apt-get install -y ne
RUN mkdir circuits
COPY sha/circuit.circom sha/input.js test.sh ./circuits/
34 optimize/node.sh Executable file
@@ -0,0 +1,34 @@
#!/bin/bash -e
cd node
git checkout 8beef5eeb82425b13d447b50beafb04ece7f91b1
patch -p1 <<EOL
index 0097683120..d35fd6e68d 100644
--- a/deps/v8/src/api/api.cc
+++ b/deps/v8/src/api/api.cc
@@ -7986,7 +7986,7 @@ void BigInt::ToWordsArray(int* sign_bit, int* word_count,
 void Isolate::ReportExternalAllocationLimitReached() {
   i::Heap* heap = reinterpret_cast<i::Isolate*>(this)->heap();
   if (heap->gc_state() != i::Heap::NOT_IN_GC) return;
-  heap->ReportExternalMemoryPressure();
+  // heap->ReportExternalMemoryPressure();
 }
 
 HeapProfiler* Isolate::GetHeapProfiler() {
diff --git a/deps/v8/src/objects/backing-store.cc b/deps/v8/src/objects/backing-store.cc
index bd9f39b7d3..c7d7e58ef3 100644
--- a/deps/v8/src/objects/backing-store.cc
+++ b/deps/v8/src/objects/backing-store.cc
@@ -34,7 +34,7 @@ constexpr bool kUseGuardRegions = false;
 // address space limits needs to be smaller.
 constexpr size_t kAddressSpaceLimit = 0x8000000000L;  // 512 GiB
 #elif V8_TARGET_ARCH_64_BIT
-constexpr size_t kAddressSpaceLimit = 0x10100000000L;  // 1 TiB + 4 GiB
+constexpr size_t kAddressSpaceLimit = 0x40100000000L;  // 4 TiB + 4 GiB
 #else
 constexpr size_t kAddressSpaceLimit = 0xC0000000;  // 3 GiB
 #endif
EOL
# ./configure --ninja
# JOBS=24 make
./configure
make -j12
30 package.json Normal file
@@ -0,0 +1,30 @@
{
  "name": "hardhat-project",
  "devDependencies": {
    "@nomiclabs/hardhat-ethers": "^2.0.1",
    "@nomiclabs/hardhat-waffle": "^2.0.1",
    "babel-eslint": "^10.1.0",
    "chai": "^4.2.0",
    "eslint": "^7.19.0",
    "eslint-config-prettier": "^7.2.0",
    "eslint-plugin-prettier": "^3.3.1",
    "ethereum-waffle": "^3.2.2",
    "ethers": "^5.0.26",
    "hardhat": "^2.0.8",
    "prettier": "^2.2.1",
    "prettier-plugin-solidity": "^1.0.0-beta.3",
    "solhint-plugin-prettier": "^0.0.5",
    "torn-token": "^1.0.0"
  },
  "dependencies": {
    "circom": "^0.5.38",
    "circom_runtime": "^0.1.12",
    "circomlib": "^0.4.1",
    "dotenv": "^8.2.0",
    "ffiasm": "^0.1.1",
    "fixed-merkle-tree": "^0.5.0",
    "jssha": "^3.2.0",
    "snarkjs": "^0.3.57",
    "tmp-promise": "^3.0.2"
  }
}
10 scripts/buildCircuit.sh Executable file
@@ -0,0 +1,10 @@
#!/bin/bash -e
if [ "$2" = "large" ]; then
  npx circom -v -f -r build/circuits/$1.r1cs -c build/circuits/$1.cpp -s build/circuits/$1.sym circuits/$1.circom
else
  npx circom -v -r build/circuits/$1.r1cs -w build/circuits/$1.wasm -s build/circuits/$1.sym circuits/$1.circom
fi
zkutil setup -c build/circuits/$1.r1cs -p build/circuits/$1.params
zkutil generate-verifier -p build/circuits/$1.params -v build/circuits/${1}Verifier.sol
sed -i.bak "s/contract Verifier/contract ${1}Verifier/g" build/circuits/${1}Verifier.sol
npx snarkjs info -r build/circuits/$1.r1cs
8 scripts/buildWitness.sh Executable file
@@ -0,0 +1,8 @@
#!/bin/bash -e
# required dependencies: libgmp-dev nlohmann-json3-dev nasm g++
cd build/circuits
node ../../node_modules/ffiasm/src/buildzqfield.js -q 21888242871839275222246405745257275088548364400416034343698204186575808495617 -n Fr
nasm -felf64 fr.asm
cp ../../node_modules/circom_runtime/c/*.cpp ./
cp ../../node_modules/circom_runtime/c/*.hpp ./
g++ -pthread main.cpp calcwit.cpp utils.cpp fr.cpp fr.o ${1}.cpp -o ${1} -lgmp -std=c++11 -O3 -fopenmp -DSANITY_CHECK
32 scripts/sample-script.js Normal file
@@ -0,0 +1,32 @@
// We require the Hardhat Runtime Environment explicitly here. This is optional
// but useful for running the script in a standalone fashion through `node <script>`.
//
// When running the script with `hardhat run <script>` you'll find the Hardhat
// Runtime Environment's members available in the global scope.
const hre = require("hardhat");

async function main() {
  // Hardhat always runs the compile task when running scripts with its command
  // line interface.
  //
  // If this script is run directly using `node` you may want to call compile
  // manually to make sure everything is compiled
  // await hre.run('compile');

  // We get the contract to deploy
  const Greeter = await hre.ethers.getContractFactory("Greeter");
  const greeter = await Greeter.deploy("Hello, Hardhat!");

  await greeter.deployed();

  console.log("Greeter deployed to:", greeter.address);
}

// We recommend this pattern to be able to use async/await everywhere
// and properly handle errors.
main()
  .then(() => process.exit(0))
  .catch(error => {
    console.error(error);
    process.exit(1);
  });
106 src/controller.js Normal file
@@ -0,0 +1,106 @@
const {
  bitsToNumber,
  toFixedHex,
  poseidonHash,
  poseidonHash2,
} = require('./utils')
const jsSHA = require('jssha')
const { toBN } = require('web3-utils')

const fs = require('fs')
const tmp = require('tmp-promise')
const util = require('util')
const exec = util.promisify(require('child_process').exec)

function hashInputs(input) {
  const sha = new jsSHA('SHA-256', 'ARRAYBUFFER')
  sha.update(toBN(input.oldRoot).toBuffer('be', 32))
  sha.update(toBN(input.newRoot).toBuffer('be', 32))
  sha.update(toBN(input.pathIndices).toBuffer('be', 4))

  for (let i = 0; i < input.instances.length; i++) {
    sha.update(toBN(input.hashes[i]).toBuffer('be', 32))
    sha.update(toBN(input.instances[i]).toBuffer('be', 20))
    sha.update(toBN(input.blocks[i]).toBuffer('be', 4))
  }

  const hash = sha.getHash('HEX')
  const result = toBN(hash).mod(toBN('21888242871839275222246405745257275088548364400416034343698204186575808495617')).toString()
  return result
}

function prove(input, keyBasePath) {
  return tmp.dir().then(async (dir) => {
    dir = dir.path
    fs.writeFileSync(`${dir}/input.json`, JSON.stringify(input, null, 2))
    let out

    try {
      if (fs.existsSync(`${keyBasePath}`)) {
        // native witness calc
        out = await exec(`${keyBasePath} ${dir}/input.json ${dir}/witness.json`)
      } else {
        out = await exec(`npx snarkjs wd ${keyBasePath}.wasm ${dir}/input.json ${dir}/witness.wtns`)
        out = await exec(`npx snarkjs wej ${dir}/witness.wtns ${dir}/witness.json`)
      }
      out = await exec(`zkutil prove -c ${keyBasePath}.r1cs -p ${keyBasePath}.params -w ${dir}/witness.json -r ${dir}/proof.json -o ${dir}/public.json`)
    } catch (e) {
      console.log(out, e)
      throw e
    }
    return '0x' + JSON.parse(fs.readFileSync(`${dir}/proof.json`)).proof
  })
}

function batchTreeUpdate(tree, events) {
  const batchHeight = 2 //await this.tornadoTreesContract.CHUNK_TREE_HEIGHT()
  if (events.length !== 1 << batchHeight) {
    throw new Error('events length does not match the batch size')
  }

  const oldRoot = tree.root().toString()
  const leaves = events.map((e) => poseidonHash([e.instance, e.hash, e.block]))
  tree.bulkInsert(leaves)
  const newRoot = tree.root().toString()
  let { pathElements, pathIndices } = tree.path(tree.elements().length - 1)
  pathElements = pathElements.slice(batchHeight).map((a) => toBN(a).toString())
  pathIndices = bitsToNumber(pathIndices.slice(batchHeight)).toString()

  const input = {
    oldRoot,
    newRoot,
    pathIndices,
    pathElements,
    instances: events.map((e) => toBN(e.instance).toString()),
    hashes: events.map((e) => toBN(e.hash).toString()),
    blocks: events.map((e) => toBN(e.block).toString()),
  }

  input.argsHash = hashInputs(input)
  return input
  // const proofData = await websnarkUtils.genWitnessAndProve(
  //   this.groth16,
  //   input,
  //   this.provingKeys.batchTreeUpdateCircuit,
  //   this.provingKeys.batchTreeUpdateProvingKey,
  // )
  // const { proof } = websnarkUtils.toSolidityInput(proofData)

  // const args = [
  //   toFixedHex(input.oldRoot),
  //   toFixedHex(input.newRoot),
  //   toFixedHex(input.pathIndices),
  //   events.map((e) => ({
  //     instance: toFixedHex(e.instance, 20),
  //     hash: toFixedHex(e.hash),
  //     block: toFixedHex(e.block),
  //   })),
  // ]

  // return {
  //   proof,
  //   args,
  // }
}

module.exports = { batchTreeUpdate, prove }
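Note: a minimal end-to-end sketch of the two exports, assuming src/utils.js exports `poseidonHash2` and that `yarn circuit` has produced `build/circuits/BatchTreeUpdate.{r1cs,params}` plus a wasm or native witness calculator (the sample events and paths are illustrative):

```js
// Sketch: build a 4-leaf batch update for a height-20 tree and prove it.
const MerkleTree = require('fixed-merkle-tree')
const { poseidonHash2 } = require('./src/utils')
const { batchTreeUpdate, prove } = require('./src/controller')

async function main() {
  const tree = new MerkleTree(20, [], { hashFunction: poseidonHash2 })
  const events = [...Array(4)].map((_, i) => ({
    instance: '0x' + '11'.repeat(20), // illustrative instance address
    hash: '0x' + '22'.repeat(31), // illustrative commitment, below the field size
    block: 100 + i,
  }))
  const input = batchTreeUpdate(tree, events) // also sets input.argsHash
  const proof = await prove(input, './build/circuits/BatchTreeUpdate')
  console.log(proof)
}

main()
```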
165
src/utils.js
Normal file
@ -0,0 +1,165 @@
const crypto = require('crypto')
const Decimal = require('decimal.js')
const { bigInt } = require('snarkjs')
const { toBN, BN, soliditySha3 } = require('web3-utils')
const Web3 = require('web3')
const web3 = new Web3()
const { babyJub, pedersenHash, mimcsponge, poseidon } = require('circomlib')

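// ABI type definitions mirroring the Solidity structs; used below with
// web3.eth.abi.encodeParameters so ext data hashes match the contract side.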
const RewardExtData = {
  RewardExtData: {
    relayer: 'address',
    encryptedAccount: 'bytes',
  },
}
const AccountUpdate = {
  AccountUpdate: {
    inputRoot: 'bytes32',
    inputNullifierHash: 'bytes32',
    outputRoot: 'bytes32',
    outputPathIndices: 'uint256',
    outputCommitment: 'bytes32',
  },
}
const RewardArgs = {
  RewardArgs: {
    rate: 'uint256',
    fee: 'uint256',
    instance: 'address',
    rewardNullifier: 'bytes32',
    extDataHash: 'bytes32',
    depositRoot: 'bytes32',
    withdrawalRoot: 'bytes32',
    extData: RewardExtData.RewardExtData,
    account: AccountUpdate.AccountUpdate,
  },
}

const WithdrawExtData = {
  WithdrawExtData: {
    fee: 'uint256',
    recipient: 'address',
    relayer: 'address',
    encryptedAccount: 'bytes',
  },
}

const pedersenHashBuffer = (buffer) => toBN(babyJub.unpackPoint(pedersenHash.hash(buffer))[0].toString())

const mimcHash = (items) => toBN(mimcsponge.multiHash(items.map((item) => bigInt(item))).toString())

const poseidonHash = (items) => toBN(poseidon(items).toString())

const poseidonHash2 = (a, b) => poseidonHash([a, b])

/** Generate random number of specified byte length */
const randomBN = (nbytes = 31) => new BN(crypto.randomBytes(nbytes))

/** BigNumber to hex string of specified length */
const toFixedHex = (number, length = 32) =>
  '0x' +
  (number instanceof Buffer ? number.toString('hex') : toBN(number).toString(16)).padStart(length * 2, '0')
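// e.g. toFixedHex('0xdead', 4) === '0x0000dead'; the default length is a full 32-byte word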

function getExtRewardArgsHash({ relayer, encryptedAccount }) {
  const encodedData = web3.eth.abi.encodeParameters(
    [RewardExtData],
    [{ relayer: toFixedHex(relayer, 20), encryptedAccount }],
  )
  const hash = soliditySha3({ t: 'bytes', v: encodedData })
  return '0x00' + hash.slice(4) // cut first byte to make it 31 bytes long to fit the snark field
}

function getExtWithdrawArgsHash({ fee, recipient, relayer, encryptedAccount }) {
  const encodedData = web3.eth.abi.encodeParameters(
    [WithdrawExtData],
    [
      {
        fee: toFixedHex(fee, 32),
        recipient: toFixedHex(recipient, 20),
        relayer: toFixedHex(relayer, 20),
        encryptedAccount,
      },
    ],
  )
  const hash = soliditySha3({ t: 'bytes', v: encodedData })
  return '0x00' + hash.slice(4) // cut first byte to make it 31 bytes long to fit the snark field
}

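// Packed message layout: 24-byte nonce | 32-byte ephemeral public key | ciphertext,
// which is what unpackEncryptedMessage below slices back apart at offsets 24 and 56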
function packEncryptedMessage(encryptedMessage) {
  const nonceBuf = Buffer.from(encryptedMessage.nonce, 'base64')
  const ephemPublicKeyBuf = Buffer.from(encryptedMessage.ephemPublicKey, 'base64')
  const ciphertextBuf = Buffer.from(encryptedMessage.ciphertext, 'base64')
  const messageBuff = Buffer.concat([
    Buffer.alloc(24 - nonceBuf.length),
    nonceBuf,
    Buffer.alloc(32 - ephemPublicKeyBuf.length),
    ephemPublicKeyBuf,
    ciphertextBuf,
  ])
  return '0x' + messageBuff.toString('hex')
}

function unpackEncryptedMessage(encryptedMessage) {
  if (encryptedMessage.slice(0, 2) === '0x') {
    encryptedMessage = encryptedMessage.slice(2)
  }
  const messageBuff = Buffer.from(encryptedMessage, 'hex')
  const nonceBuf = messageBuff.slice(0, 24)
  const ephemPublicKeyBuf = messageBuff.slice(24, 56)
  const ciphertextBuf = messageBuff.slice(56)
  return {
    version: 'x25519-xsalsa20-poly1305',
    nonce: nonceBuf.toString('base64'),
    ephemPublicKey: ephemPublicKeyBuf.toString('base64'),
    ciphertext: ciphertextBuf.toString('base64'),
  }
}

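// Interprets `bits` as little-endian, e.g. bitsToNumber([1, 0, 1]) === 5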
function bitsToNumber(bits) {
  let result = 0
  for (const item of bits.slice().reverse()) {
    result = (result << 1) + item
  }
  return result
}

// a = floor(10**18 * e^(-0.0000000001 * amount))
// yield = BalBefore - (BalBefore * a)/10**18
function tornadoFormula({ balance, amount, poolWeight = 1e10 }) {
  const decimals = new Decimal(10 ** 18)
  balance = new Decimal(balance.toString())
  amount = new Decimal(amount.toString())
  poolWeight = new Decimal(poolWeight.toString())

  const power = amount.div(poolWeight).negated()
  const exponent = Decimal.exp(power).mul(decimals)
  const newBalance = balance.mul(exponent).div(decimals)
  return toBN(balance.sub(newBalance).toFixed(0))
}

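// Inverse of tornadoFormula: tokens = B * (1 - e^(-amount / w)),
// so amount = w * ln(B / (B - tokens))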
function reverseTornadoFormula({ balance, tokens, poolWeight = 1e10 }) {
  balance = new Decimal(balance.toString())
  tokens = new Decimal(tokens.toString())
  poolWeight = new Decimal(poolWeight.toString())

  return toBN(poolWeight.times(Decimal.ln(balance.div(balance.sub(tokens)))).toFixed(0))
}

module.exports = {
  randomBN,
  pedersenHashBuffer,
  bitsToNumber,
  getExtRewardArgsHash,
  getExtWithdrawArgsHash,
  packEncryptedMessage,
  unpackEncryptedMessage,
  toFixedHex,
  mimcHash,
  poseidonHash,
  poseidonHash2,
  tornadoFormula,
  reverseTornadoFormula,
  RewardArgs,
  RewardExtData,
  AccountUpdate,
}
77
test/pack.test.js
Normal file
@ -0,0 +1,77 @@
/* global artifacts, web3, contract */
require('chai').use(require('bn-chai')(web3.utils.BN)).use(require('chai-as-promised')).should()
const { takeSnapshot, revertSnapshot } = require('../scripts/ganacheHelper')
const Controller = require('../src/controller')
const { toBN } = require('web3-utils')
const Pack = artifacts.require('Pack')
const jsSHA = require('jssha')

const { poseidonHash2 } = require('../src/utils')
const MerkleTree = require('fixed-merkle-tree')

const levels = 20
const CHUNK_TREE_HEIGHT = 7
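// each pack2 call below processes a full batch of 2 ** CHUNK_TREE_HEIGHT = 128 notes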
contract.skip('Pack', (accounts) => {
  let pack
  let snapshotId

  const instances = [
    '0xc6325fa78E0764993Bf2997116A3771bCbcb3fa9',
    '0xb70738422D0f9d1225300eE0Fc67e7392095567d',
    '0xA675B536203a123B0214cdf1EBb1298F440dA19A',
    '0xFA1835cf197C3281Dc993a63bb160026dAC98bF3',
  ]

  const hashes = [
    '0x6f44cd7458bf24f65851fa8097712e3a8d9a6f3e387c501b285338308a74b8f3',
    '0xafd3103939b7b0cd7a0ad1ddac57dd13af7f2825a21b47ae995b5bb0f767a106',
    '0x57f7b90a3cb4ea6860e6dd5fa44ac4f53ebe6ae3948af577a01ef51738313246',
  ]

  const notes = []

  before(async () => {
    pack = await Pack.new()

    for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
      notes[i] = {
        instance: instances[i % instances.length],
        hash: hashes[i % hashes.length],
        block: 1 + i,
      }
    }

    snapshotId = await takeSnapshot()
  })

  describe('#pack', () => {
    it('gastest', async () => {
      const receipt = await pack.pack2(
        notes.map((a) => a.hash),
        notes.map((a) => a.instance),
        notes.map((a) => a.block),
        { gas: 6e6 },
      )
      console.log('total', receipt.receipt.gasUsed)

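      // recompute the contract's sha256 on the client: each note is packed as
      // 32-byte hash | 20-byte instance | 4-byte block, matching pack2's encoding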
      const sha = new jsSHA('SHA-256', 'ARRAYBUFFER')
      for (let i = 0; i < notes.length; i++) {
        sha.update(toBN(notes[i].hash).toBuffer('be', 32))
        sha.update(toBN(notes[i].instance).toBuffer('be', 20))
        sha.update(toBN(notes[i].block).toBuffer('be', 4))
      }
      const hash = sha.getHash('HEX')

      const solHash = await pack.hash()
      solHash.should.be.equal('0x' + hash)
      console.log('batch size', notes.length)
      console.log('events', (await pack.gas1()).toString())
      console.log('hash', (await pack.gas2()).toString())
      console.log('bytes', (await pack.gas3()).toString())
      console.log('calldata', toBN(6e6).sub(await pack.gas4()).toString())
    })
  })

  afterEach(async () => {
    await revertSnapshot(snapshotId.result)
    // eslint-disable-next-line require-atomic-updates
    snapshotId = await takeSnapshot()
  })
})
14
test/sample-test.js
Normal file
@ -0,0 +1,14 @@
/* global ethers */
const { expect } = require("chai");

describe("Greeter", function () {
  it("Should return the new greeting once it's changed", async function () {
    const Greeter = await ethers.getContractFactory("Greeter");
    const greeter = await Greeter.deploy("Hello, world!");

    await greeter.deployed();
    expect(await greeter.greet()).to.equal("Hello, world!");

    await greeter.setGreeting("Hola, mundo!");
    expect(await greeter.greet()).to.equal("Hola, mundo!");
  });
});
23
test/snark.test.js
Normal file
@ -0,0 +1,23 @@
/* global artifacts, web3, contract */
require('chai').use(require('bn-chai')(web3.utils.BN)).use(require('chai-as-promised')).should()
const MerkleTree = require('fixed-merkle-tree')
const { poseidonHash2, randomBN } = require('../src/utils')
const { batchTreeUpdate, prove } = require('../src/controller')

const levels = 20
const CHUNK_TREE_HEIGHT = 2
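// end-to-end check: builds input for one batch insert and generates a real proof,
// so it needs the compiled circuit artifacts under ./build/circuits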
contract('Snark', () => {
  it('should work', async () => {
    const tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
    const events = []
    for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
      events.push({
        hash: randomBN(31).toString(),
        instance: randomBN(20).toString(),
        block: randomBN(4).toString(),
      })
    }
    const data = await batchTreeUpdate(tree, events)
    await prove(data, './build/circuits/BatchTreeUpdate')
  })
})
159
test/tornadoTrees.test.js
Normal file
@ -0,0 +1,159 @@
/* global artifacts, web3, contract */
require('chai').use(require('bn-chai')(web3.utils.BN)).use(require('chai-as-promised')).should()
const { takeSnapshot, revertSnapshot } = require('../scripts/ganacheHelper')
const controller = require('../src/controller')
const TornadoTrees = artifacts.require('TornadoTreesMock')
const BatchTreeUpdateVerifier = artifacts.require('BatchTreeUpdateVerifier')
const { toBN } = require('web3-utils')
const { toFixedHex, poseidonHash2, randomBN } = require('../src/utils')
const MerkleTree = require('fixed-merkle-tree')

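// Each helper registers a note on the mock contract and returns it in the
// { instance, hash, block } shape that controller.batchTreeUpdate expects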
async function registerDeposit(note, tornadoTrees, from) {
  await tornadoTrees.setBlockNumber(note.depositBlock)
  await tornadoTrees.registerDeposit(note.instance, toFixedHex(note.commitment), { from })
  return {
    instance: note.instance,
    hash: toFixedHex(note.commitment),
    block: toFixedHex(note.depositBlock),
  }
}

async function registerWithdrawal(note, tornadoTrees, from) {
  await tornadoTrees.setBlockNumber(note.withdrawalBlock)
  await tornadoTrees.registerWithdrawal(note.instance, toFixedHex(note.nullifierHash), { from })
  return {
    instance: note.instance,
    hash: toFixedHex(note.nullifierHash),
    block: toFixedHex(note.withdrawalBlock),
  }
}

async function register(note, tornadoTrees, from) {
  await tornadoTrees.register(
    note.instance,
    toFixedHex(note.commitment),
    toFixedHex(note.nullifierHash),
    note.depositBlock,
    note.withdrawalBlock,
    {
      from,
    },
  )
  return {
    instance: note.instance,
    hash: toFixedHex(note.nullifierHash),
    block: toFixedHex(note.withdrawalBlock),
  }
}

const levels = 20
const CHUNK_TREE_HEIGHT = 2
contract('TornadoTrees', (accounts) => {
  let tornadoTrees
  let verifier
  // let controller
  let snapshotId
  let tornadoProxy = accounts[0]
  let operator = accounts[0]

  const instances = [
    '0x1111000000000000000000000000000000001111',
    '0x2222000000000000000000000000000000002222',
    '0x3333000000000000000000000000000000003333',
    '0x4444000000000000000000000000000000004444',
  ]

  const blocks = ['0xaaaaaaaa', '0xbbbbbbbb', '0xcccccccc', '0xdddddddd']

  const notes = []

  before(async () => {
    const emptyTree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
    verifier = await BatchTreeUpdateVerifier.new()
    tornadoTrees = await TornadoTrees.new(
      operator,
      tornadoProxy,
      verifier.address,
      toFixedHex(emptyTree.root()),
      toFixedHex(emptyTree.root()),
    )

    // controller = new Controller({
    //   contract: '',
    //   tornadoTreesContract: tornadoTrees,
    //   merkleTreeHeight: levels,
    //   provingKeys,
    // })
    // await controller.init()

    for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
      // console.log('i', i)
      notes[i] = {
        instance: instances[i % instances.length],
        depositBlock: blocks[i % blocks.length],
        withdrawalBlock: 2 + i + i * 4 * 60 * 24,
        commitment: randomBN(),
        nullifierHash: randomBN(),
      }
      await register(notes[i], tornadoTrees, tornadoProxy)
    }

    snapshotId = await takeSnapshot()
  })

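  // the mock returns the contract-side sha256 of the update args, which should
  // match the argsHash computed in js by controller.batchTreeUpdate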
  describe('#updateDepositTree', () => {
    it('should check hash', async () => {
      const emptyTree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
      const events = notes.map((note) => ({
        hash: toFixedHex(note.commitment),
        instance: toFixedHex(note.instance, 20),
        block: toFixedHex(note.depositBlock, 4),
      }))
      const data = await controller.batchTreeUpdate(emptyTree, events)
      const solHash = await tornadoTrees.updateDepositTreeMock(
        toFixedHex(data.oldRoot),
        toFixedHex(data.newRoot),
        toFixedHex(data.pathIndices, 4),
        events,
      )
      toBN(data.argsHash).should.be.eq.BN(solHash)
    })

    it('should prove snark', async () => {
      const emptyTree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
      const events = notes.map((note) => ({
        hash: toFixedHex(note.commitment),
        instance: toFixedHex(note.instance, 20),
        block: toFixedHex(note.depositBlock, 4),
      }))
      const data = await controller.batchTreeUpdate(emptyTree, events)
      const proof = await controller.prove(data, './build/circuits/BatchTreeUpdate')
      await tornadoTrees.updateDepositTree(
        proof,
        toFixedHex(data.argsHash),
        toFixedHex(data.oldRoot),
        toFixedHex(data.newRoot),
        toFixedHex(data.pathIndices, 4),
        events,
      )

      const updatedRoot = await tornadoTrees.depositRoot()
      updatedRoot.should.be.eq.BN(toBN(toFixedHex(data.newRoot)))
    })

    it('should work for non-empty tree')
    it('should reject for partially filled tree')
    it('should reject for outdated deposit root')
    it('should reject for incorrect insert index')
    it('should reject for overflows of newRoot')
    it('should reject for invalid sha256 args')
  })

  afterEach(async () => {
    await revertSnapshot(snapshotId.result)
    // eslint-disable-next-line require-atomic-updates
    snapshotId = await takeSnapshot()
  })
})