Compare commits

...

32 Commits
v1.0.0 ... v2

Author        SHA1        Date                        Message
              5db3741ea9  2023-09-18 07:11:28 -07:00  Bump required node version to 16
              427ebea84b  2023-09-18 06:45:35 -07:00  Resolve dependencies via self-hosted registry & move package to tornado scope
h-ivor        2a26a90940  2021-09-23 21:16:55 +03:00  package.json 2.1.5
h-ivor        9d0262189e  2021-09-23 21:16:55 +03:00  lint
fried         b3277137ba  2021-09-23 21:16:55 +03:00  fix constructor visibility
Alexey        d93d7c8870  2021-08-23 13:20:32 +03:00  lint
Alexey        9f095bfcb2  2021-08-13 17:05:23 +03:00  updates all functions so they can be easily overriden
nikdementev   6d219e19ee  2021-04-02 21:35:59 +03:00  fix: optimism poseidon hash for events
Alexey        18518856ff  2021-04-01 12:04:38 +03:00  fix Note.fromString method parsing
poma          ca446c79cd  2021-03-29 09:10:32 +03:00  proxy light
poma          3ae6087f65  2021-03-22 20:38:23 +03:00  update dependency
poma          87babe41d3  2021-03-22 20:30:14 +03:00  update version
poma          e42a2afb43  2021-03-22 20:30:01 +03:00  Fix CVF-49
poma          adf675edd9  2021-03-21 01:01:44 +03:00  Rename event
poma          15eaac2f2e  2021-03-21 01:01:35 +03:00  Fix CVF-52
poma          cd2ea92084  2021-03-21 01:00:59 +03:00  Fix CVF-53
poma          6850d6a6c4  2021-03-21 00:57:22 +03:00  Fix CVF-55, CVF-54
poma          97b486b9ba  2021-03-16 23:51:35 +03:00  fix interface
poma          f02347500b  2021-03-16 22:39:22 +03:00  bump version
Alexey        a326f3e14f  2021-03-16 01:05:33 +04:00  backupNotes
Alexey        b9570ea3c3  2021-03-12 15:12:20 +03:00  _updateInstance updates
Alexey        44e9d7d645  2021-03-12 15:12:20 +03:00  refactoring
Alexey        5a82105f24  2021-03-12 15:12:20 +03:00  updates
poma          01ca620e7c  2021-03-06 15:44:17 +03:00  InstanceStateUpdate event
Alexey        435f30900b  2021-03-04 13:18:01 +03:00  fix tests
Alexey        aa16e436cd  2021-03-04 12:26:35 +03:00  update deps
Alexey        a4f06cfdca  2021-02-25 20:21:05 +03:00  bump version
Alexey        580d76958e  2021-02-19 20:17:18 +03:00  get rid of ens names for the instances
Alexey        c00c62cb5f  2021-02-18 18:51:29 +03:00  update reward and withdraw functions
Alexey        9581d0e72d  2021-02-12 15:53:54 +03:00  bump version
Alexey        c5287388f5  2021-02-10 23:32:30 +03:00  init
poma          95f6bbfe17  2020-12-16 00:53:24 +03:00  fix ci
29 changed files with 6544 additions and 3099 deletions


@@ -77,7 +77,7 @@ jobs:
       uses: appleboy/telegram-action@0.0.7
       if: failure()
       with:
-        message: ❗ Failed to publish [${{ steps.vars.outputs.repo_name }}](https://github.com/${{ github.repository }}/actions) because of ${{ env.GITHUB_ACTOR }}
+        message: ❗ Failed to publish [${{ steps.vars.outputs.repo_name }}](https://github.com/${{ github.repository }}/actions) because of ${{ github.actor }}
         format: markdown
         to: ${{ secrets.TELEGRAM_CHAT_ID }}
         token: ${{ secrets.TELEGRAM_BOT_TOKEN }}

.npmrc (new file, 1 line)

@@ -0,0 +1 @@
+@tornado:registry=https://git.tornado.ws/api/packages/tornado-packages/npm/


@@ -1,8 +1,12 @@
 # Tornado.cash anonymity mining [![Build Status](https://github.com/tornadocash/tornado-anonymity-mining/workflows/build/badge.svg)](https://github.com/tornadocash/tornado-anonymity-mining/actions) [![npm](https://img.shields.io/npm/v/tornado-anonymity-mining)](https://www.npmjs.com/package/tornado-anonymity-mining)
 
+## v2 changes
+
+`TornadoTrees.sol` is no longer part of this project. It migrated to [@tornado/trees](https://git.tornado.ws/tornado-packages/tornado-trees)
+
 ## Dependencies
 
-1. node 12
+1. node 16
 2. yarn
 3. zkutil (`brew install rust && cargo install zkutil`)


@@ -1,5 +1,5 @@
-include "../node_modules/circomlib/circuits/poseidon.circom";
-include "../node_modules/circomlib/circuits/bitify.circom";
+include "../node_modules/@tornado/circomlib/circuits/poseidon.circom";
+include "../node_modules/@tornado/circomlib/circuits/bitify.circom";
 
 // Computes Poseidon([left, right])
 template HashLeftRight() {


@@ -1,6 +1,6 @@
-include "../node_modules/circomlib/circuits/poseidon.circom";
-include "../node_modules/circomlib/circuits/bitify.circom";
-include "../node_modules/circomlib/circuits/comparators.circom";
+include "../node_modules/@tornado/circomlib/circuits/poseidon.circom";
+include "../node_modules/@tornado/circomlib/circuits/bitify.circom";
+include "../node_modules/@tornado/circomlib/circuits/comparators.circom";
 include "./Utils.circom";
 include "./MerkleTree.circom";
 include "./MerkleTreeUpdater.circom";


@@ -1,5 +1,5 @@
-include "../node_modules/circomlib/circuits/bitify.circom";
-include "../node_modules/circomlib/circuits/pedersen.circom";
+include "../node_modules/@tornado/circomlib/circuits/bitify.circom";
+include "../node_modules/@tornado/circomlib/circuits/pedersen.circom";
 
 // computes Pedersen(nullifier + secret)
 template TornadoCommitmentHasher() {


@@ -1,5 +1,5 @@
-include "../node_modules/circomlib/circuits/poseidon.circom";
-include "../node_modules/circomlib/circuits/bitify.circom";
+include "../node_modules/@tornado/circomlib/circuits/poseidon.circom";
+include "../node_modules/@tornado/circomlib/circuits/bitify.circom";
 include "./Utils.circom";
 include "./MerkleTree.circom";
 include "./MerkleTreeUpdater.circom";


@@ -1,35 +0,0 @@
// Generates Hasher artifact at compile-time using Truffle's external compiler
// mechanism
const path = require('path')
const fs = require('fs')
const genContract = require('circomlib/src/poseidon_gencontract.js')
// where Truffle will expect to find the results of the external compiler
// command
const outputPath = path.join(__dirname, 'build', 'contracts')
const outputPath2 = path.join(outputPath, 'Hasher2.json')
const outputPath3 = path.join(outputPath, 'Hasher3.json')
if (!fs.existsSync(outputPath)) {
fs.mkdirSync(outputPath, { recursive: true })
}
function main() {
const contract2 = {
contractName: 'Hasher2',
abi: genContract.generateABI(2),
bytecode: genContract.createCode(2),
}
fs.writeFileSync(outputPath2, JSON.stringify(contract2, null, 2))
const contract3 = {
contractName: 'Hasher3',
abi: genContract.generateABI(3),
bytecode: genContract.createCode(3),
}
fs.writeFileSync(outputPath3, JSON.stringify(contract3, null, 2))
}
main()


@@ -5,7 +5,7 @@ pragma experimental ABIEncoderV2;
 import "./interfaces/IVerifier.sol";
 import "./interfaces/IRewardSwap.sol";
-import "./TornadoTrees.sol";
+import "@tornado/trees/contracts/TornadoTrees.sol";
 import "@openzeppelin/contracts/token/ERC20/IERC20.sol";
 import "@openzeppelin/contracts/math/SafeMath.sol";
 import "torn-token/contracts/ENS.sol";
@@ -105,11 +105,9 @@ contract Miner is EnsResolve {
     _setRates(_rates);
     // prettier-ignore
-    _setVerifiers([
-      IVerifier(resolve(_verifiers[0])),
-      IVerifier(resolve(_verifiers[1])),
-      IVerifier(resolve(_verifiers[2]))
-    ]);
+    _setVerifiers(
+      [IVerifier(resolve(_verifiers[0])), IVerifier(resolve(_verifiers[1])), IVerifier(resolve(_verifiers[2]))]
+    );
   }
 
   function reward(bytes memory _proof, RewardArgs memory _args) public {
@@ -239,15 +237,15 @@ contract Miner is EnsResolve {
   // ------VIEW-------
 
   /**
-    @dev Whether the root is present in the root history
+   * @dev Whether the root is present in the root history
   */
   function isKnownAccountRoot(bytes32 _root, uint256 _index) public view returns (bool) {
     return _root != 0 && accountRoots[_index % ACCOUNT_ROOT_HISTORY_SIZE] == _root;
   }
 
   /**
-    @dev Returns the last root
+   * @dev Returns the last root
   */
   function getLastAccountRoot() public view returns (bytes32) {
     return accountRoots[accountCount % ACCOUNT_ROOT_HISTORY_SIZE];
   }


@@ -1,37 +1,53 @@
 // SPDX-License-Identifier: MIT
-pragma solidity ^0.6.0;
+pragma solidity >=0.6.0 <0.8.0;
+pragma experimental ABIEncoderV2;
 
 import "@openzeppelin/contracts/token/ERC20/IERC20.sol";
 import "@openzeppelin/contracts/token/ERC20/SafeERC20.sol";
 import "@openzeppelin/contracts/math/Math.sol";
 import "./interfaces/ITornadoInstance.sol";
 import "./interfaces/ITornadoTrees.sol";
-import "torn-token/contracts/ENS.sol";
 
-contract TornadoProxy is EnsResolve {
+contract TornadoProxy {
   using SafeERC20 for IERC20;
 
   event EncryptedNote(address indexed sender, bytes encryptedNote);
+  event InstanceStateUpdated(ITornadoInstance indexed instance, InstanceState state);
+  event TornadoTreesUpdated(ITornadoTrees addr);
 
-  ITornadoTrees public immutable tornadoTrees;
+  enum InstanceState { DISABLED, ENABLED, MINEABLE }
+
+  struct Instance {
+    bool isERC20;
+    IERC20 token;
+    InstanceState state;
+  }
+
+  struct Tornado {
+    ITornadoInstance addr;
+    Instance instance;
+  }
+
+  ITornadoTrees public tornadoTrees;
   address public immutable governance;
-  mapping(ITornadoInstance => bool) public instances;
+  mapping(ITornadoInstance => Instance) public instances;
 
   modifier onlyGovernance() {
     require(msg.sender == governance, "Not authorized");
     _;
   }
 
   constructor(
-    bytes32 _tornadoTrees,
-    bytes32 _governance,
-    bytes32[] memory _instances
+    address _tornadoTrees,
+    address _governance,
+    Tornado[] memory _instances
   ) public {
-    tornadoTrees = ITornadoTrees(resolve(_tornadoTrees));
-    governance = resolve(_governance);
+    tornadoTrees = ITornadoTrees(_tornadoTrees);
+    governance = _governance;
     for (uint256 i = 0; i < _instances.length; i++) {
-      instances[ITornadoInstance(resolve(_instances[i]))] = true;
+      _updateInstance(_instances[i]);
     }
   }
@@ -39,16 +55,19 @@ contract TornadoProxy is EnsResolve {
     ITornadoInstance _tornado,
     bytes32 _commitment,
     bytes calldata _encryptedNote
-  ) external payable {
-    require(instances[_tornado], "The instance is not supported");
+  ) public payable virtual {
+    Instance memory instance = instances[_tornado];
+    require(instance.state != InstanceState.DISABLED, "The instance is not supported");
+    if (instance.isERC20) {
+      instance.token.safeTransferFrom(msg.sender, address(this), _tornado.denomination());
+    }
     _tornado.deposit{ value: msg.value }(_commitment);
-    tornadoTrees.registerDeposit(address(_tornado), _commitment);
-    emit EncryptedNote(msg.sender, _encryptedNote);
-  }
-
-  function updateInstance(ITornadoInstance _instance, bool _update) external onlyGovernance {
-    instances[_instance] = _update;
+    if (instance.state == InstanceState.MINEABLE) {
+      tornadoTrees.registerDeposit(address(_tornado), _commitment);
+    }
+    emit EncryptedNote(msg.sender, _encryptedNote);
   }
 
   function withdraw(
@@ -60,32 +79,66 @@ contract TornadoProxy is EnsResolve {
     address payable _relayer,
     uint256 _fee,
     uint256 _refund
-  ) external payable {
-    require(instances[_tornado], "The instance is not supported");
+  ) public payable virtual {
+    Instance memory instance = instances[_tornado];
+    require(instance.state != InstanceState.DISABLED, "The instance is not supported");
     _tornado.withdraw{ value: msg.value }(_proof, _root, _nullifierHash, _recipient, _relayer, _fee, _refund);
-    tornadoTrees.registerWithdrawal(address(_tornado), _nullifierHash);
+    if (instance.state == InstanceState.MINEABLE) {
+      tornadoTrees.registerWithdrawal(address(_tornado), _nullifierHash);
+    }
+  }
+
+  function backupNotes(bytes[] calldata _encryptedNotes) external virtual {
+    for (uint256 i = 0; i < _encryptedNotes.length; i++) {
+      emit EncryptedNote(msg.sender, _encryptedNotes[i]);
+    }
+  }
+
+  function updateInstance(Tornado calldata _tornado) external virtual onlyGovernance {
+    _updateInstance(_tornado);
+  }
+
+  function setTornadoTreesContract(ITornadoTrees _tornadoTrees) external virtual onlyGovernance {
+    tornadoTrees = _tornadoTrees;
+    emit TornadoTreesUpdated(_tornadoTrees);
   }
 
   /// @dev Method to claim junk and accidentally sent tokens
   function rescueTokens(
     IERC20 _token,
     address payable _to,
-    uint256 _balance
-  ) external onlyGovernance {
+    uint256 _amount
+  ) external virtual onlyGovernance {
     require(_to != address(0), "TORN: can not send to zero address");
     if (_token == IERC20(0)) {
       // for Ether
       uint256 totalBalance = address(this).balance;
-      uint256 balance = _balance == 0 ? totalBalance : Math.min(totalBalance, _balance);
+      uint256 balance = Math.min(totalBalance, _amount);
       _to.transfer(balance);
     } else {
       // any other erc20
       uint256 totalBalance = _token.balanceOf(address(this));
-      uint256 balance = _balance == 0 ? totalBalance : Math.min(totalBalance, _balance);
+      uint256 balance = Math.min(totalBalance, _amount);
       require(balance > 0, "TORN: trying to send 0 balance");
       _token.safeTransfer(_to, balance);
     }
   }
+
+  function _updateInstance(Tornado memory _tornado) internal {
+    instances[_tornado.addr] = _tornado.instance;
+    if (_tornado.instance.isERC20) {
+      IERC20 token = IERC20(_tornado.addr.token());
+      require(token == _tornado.instance.token, "Incorrect token");
+      uint256 allowance = token.allowance(address(this), address(_tornado.addr));
+      if (_tornado.instance.state != InstanceState.DISABLED && allowance == 0) {
+        token.safeApprove(address(_tornado.addr), uint256(-1));
+      } else if (_tornado.instance.state == InstanceState.DISABLED && allowance != 0) {
+        token.safeApprove(address(_tornado.addr), 0);
+      }
+    }
+    emit InstanceStateUpdated(_tornado.addr, _tornado.instance.state);
+  }
 }
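The rewritten proxy replaces the boolean whitelist with a per-instance Instance record (ERC20 flag, token address, and a DISABLED/ENABLED/MINEABLE state) and only forwards MINEABLE traffic to tornadoTrees. Below is a minimal web3.js sketch of the new flow; the ABI bindings, addresses, and account names (proxyAbi, instanceAbi, erc20Abi, governanceAddress, userAddress, commitment, encryptedNote) are placeholders, not part of this diff.

```js
// Sketch only: all ABIs, addresses and accounts below are assumed placeholders.
const proxy = new web3.eth.Contract(proxyAbi, proxyAddress)

// Governance registers (or reconfigures) an instance. The Tornado struct is
// encoded as a nested array [addr, [isERC20, token, state]];
// state: 0 = DISABLED, 1 = ENABLED, 2 = MINEABLE.
await proxy.methods
  .updateInstance([daiInstanceAddress, [true, daiTokenAddress, 2]])
  .send({ from: governanceAddress })

// For ERC20 instances the proxy now pulls denomination() itself via
// safeTransferFrom, so the depositor approves the proxy rather than the pool.
const instance = new web3.eth.Contract(instanceAbi, daiInstanceAddress)
const denomination = await instance.methods.denomination().call()
const dai = new web3.eth.Contract(erc20Abi, daiTokenAddress)
await dai.methods.approve(proxyAddress, denomination).send({ from: userAddress })
await proxy.methods
  .deposit(daiInstanceAddress, commitment, encryptedNote)
  .send({ from: userAddress })
```

Only MINEABLE instances trigger registerDeposit/registerWithdrawal on tornadoTrees; ENABLED instances still accept deposits and withdrawals but earn no anonymity-mining credit, and DISABLED instances are rejected outright.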


@@ -0,0 +1,36 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
pragma experimental ABIEncoderV2;
import "./interfaces/ITornadoInstance.sol";
contract TornadoProxyLight {
event EncryptedNote(address indexed sender, bytes encryptedNote);
function deposit(
ITornadoInstance _tornado,
bytes32 _commitment,
bytes calldata _encryptedNote
) external payable {
_tornado.deposit{ value: msg.value }(_commitment);
emit EncryptedNote(msg.sender, _encryptedNote);
}
function withdraw(
ITornadoInstance _tornado,
bytes calldata _proof,
bytes32 _root,
bytes32 _nullifierHash,
address payable _recipient,
address payable _relayer,
uint256 _fee,
uint256 _refund
) external payable {
_tornado.withdraw{ value: msg.value }(_proof, _root, _nullifierHash, _recipient, _relayer, _fee, _refund);
}
function backupNotes(bytes[] calldata _encryptedNotes) external {
for (uint256 i = 0; i < _encryptedNotes.length; i++) {
emit EncryptedNote(msg.sender, _encryptedNotes[i]);
}
}
}
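TornadoProxyLight, added by the "proxy light" commit, strips the proxy down to pass-through deposit/withdraw plus backupNotes: no instance whitelist, no governance, and no tornadoTrees registration. A hedged usage sketch follows; the contract binding, addresses, and note data (lightProxyAbi, lightProxyAddress, instanceAddress, denomination, commitment, encryptedNote, userAddress) are assumptions.

```js
// Sketch only: all names below are assumed placeholders, not defined by this diff.
const lightProxy = new web3.eth.Contract(lightProxyAbi, lightProxyAddress)

// Deposit is forwarded verbatim to the instance (an ETH-denominated pool here);
// the proxy's only added behaviour is emitting EncryptedNote for note backup.
await lightProxy.methods
  .deposit(instanceAddress, commitment, encryptedNote)
  .send({ from: userAddress, value: denomination })

// backupNotes re-emits previously generated encrypted notes, so a wallet can be
// restored later by scanning the proxy's EncryptedNote events.
await lightProxy.methods.backupNotes([encryptedNote]).send({ from: userAddress })
```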


@@ -1,132 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
pragma experimental ABIEncoderV2;
import "torn-token/contracts/ENS.sol";
import "./utils/OwnableMerkleTree.sol";
import "./interfaces/ITornadoTrees.sol";
import "./interfaces/IHasher.sol";
contract TornadoTrees is ITornadoTrees, EnsResolve {
OwnableMerkleTree public immutable depositTree;
OwnableMerkleTree public immutable withdrawalTree;
IHasher public immutable hasher;
address public immutable tornadoProxy;
bytes32[] public deposits;
uint256 public lastProcessedDepositLeaf;
bytes32[] public withdrawals;
uint256 public lastProcessedWithdrawalLeaf;
event DepositData(address instance, bytes32 indexed hash, uint256 block, uint256 index);
event WithdrawalData(address instance, bytes32 indexed hash, uint256 block, uint256 index);
struct TreeLeaf {
address instance;
bytes32 hash;
uint256 block;
}
modifier onlyTornadoProxy {
require(msg.sender == tornadoProxy, "Not authorized");
_;
}
constructor(
bytes32 _tornadoProxy,
bytes32 _hasher2,
bytes32 _hasher3,
uint32 _levels
) public {
tornadoProxy = resolve(_tornadoProxy);
hasher = IHasher(resolve(_hasher3));
depositTree = new OwnableMerkleTree(_levels, IHasher(resolve(_hasher2)));
withdrawalTree = new OwnableMerkleTree(_levels, IHasher(resolve(_hasher2)));
}
function registerDeposit(address _instance, bytes32 _commitment) external override onlyTornadoProxy {
deposits.push(keccak256(abi.encode(_instance, _commitment, blockNumber())));
}
function registerWithdrawal(address _instance, bytes32 _nullifier) external override onlyTornadoProxy {
withdrawals.push(keccak256(abi.encode(_instance, _nullifier, blockNumber())));
}
function updateRoots(TreeLeaf[] calldata _deposits, TreeLeaf[] calldata _withdrawals) external {
if (_deposits.length > 0) updateDepositTree(_deposits);
if (_withdrawals.length > 0) updateWithdrawalTree(_withdrawals);
}
function updateDepositTree(TreeLeaf[] calldata _deposits) public {
bytes32[] memory leaves = new bytes32[](_deposits.length);
uint256 offset = lastProcessedDepositLeaf;
for (uint256 i = 0; i < _deposits.length; i++) {
TreeLeaf memory deposit = _deposits[i];
bytes32 leafHash = keccak256(abi.encode(deposit.instance, deposit.hash, deposit.block));
require(deposits[offset + i] == leafHash, "Incorrect deposit");
leaves[i] = hasher.poseidon([bytes32(uint256(deposit.instance)), deposit.hash, bytes32(deposit.block)]);
delete deposits[offset + i];
emit DepositData(deposit.instance, deposit.hash, deposit.block, offset + i);
}
lastProcessedDepositLeaf = offset + _deposits.length;
depositTree.bulkInsert(leaves);
}
function updateWithdrawalTree(TreeLeaf[] calldata _withdrawals) public {
bytes32[] memory leaves = new bytes32[](_withdrawals.length);
uint256 offset = lastProcessedWithdrawalLeaf;
for (uint256 i = 0; i < _withdrawals.length; i++) {
TreeLeaf memory withdrawal = _withdrawals[i];
bytes32 leafHash = keccak256(abi.encode(withdrawal.instance, withdrawal.hash, withdrawal.block));
require(withdrawals[offset + i] == leafHash, "Incorrect withdrawal");
leaves[i] = hasher.poseidon([bytes32(uint256(withdrawal.instance)), withdrawal.hash, bytes32(withdrawal.block)]);
delete withdrawals[offset + i];
emit WithdrawalData(withdrawal.instance, withdrawal.hash, withdrawal.block, offset + i);
}
lastProcessedWithdrawalLeaf = offset + _withdrawals.length;
withdrawalTree.bulkInsert(leaves);
}
function validateRoots(bytes32 _depositRoot, bytes32 _withdrawalRoot) public view {
require(depositTree.isKnownRoot(_depositRoot), "Incorrect deposit tree root");
require(withdrawalTree.isKnownRoot(_withdrawalRoot), "Incorrect withdrawal tree root");
}
function depositRoot() external view returns (bytes32) {
return depositTree.getLastRoot();
}
function withdrawalRoot() external view returns (bytes32) {
return withdrawalTree.getLastRoot();
}
function getRegisteredDeposits() external view returns (bytes32[] memory _deposits) {
uint256 count = deposits.length - lastProcessedDepositLeaf;
_deposits = new bytes32[](count);
for (uint256 i = 0; i < count; i++) {
_deposits[i] = deposits[lastProcessedDepositLeaf + i];
}
}
function getRegisteredWithdrawals() external view returns (bytes32[] memory _withdrawals) {
uint256 count = withdrawals.length - lastProcessedWithdrawalLeaf;
_withdrawals = new bytes32[](count);
for (uint256 i = 0; i < count; i++) {
_withdrawals[i] = withdrawals[lastProcessedWithdrawalLeaf + i];
}
}
function blockNumber() public view virtual returns (uint256) {
return block.number;
}
}


@@ -1,8 +1,12 @@
 // SPDX-License-Identifier: MIT
-pragma solidity ^0.6.0;
+pragma solidity >=0.6.0 <0.8.0;
 
 interface ITornadoInstance {
+  function token() external view returns (address);
+
+  function denomination() external view returns (uint256);
+
   function deposit(bytes32 commitment) external payable;
 
   function withdraw(


@@ -1,6 +1,6 @@
 // SPDX-License-Identifier: MIT
-pragma solidity ^0.6.0;
+pragma solidity >=0.6.0 <0.8.0;
 
 interface ITornadoTrees {
   function registerDeposit(address instance, bytes32 commitment) external;


@@ -1,18 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
pragma experimental ABIEncoderV2;
import "../utils/MerkleTreeWithHistory.sol";
contract MerkleTreeWithHistoryMock is MerkleTreeWithHistory {
constructor(uint32 _treeLevels, IHasher _hasher) public MerkleTreeWithHistory(_treeLevels, _hasher) {}
function insert(bytes32 _leaf) external returns (uint32 index) {
return _insert(_leaf);
}
function bulkInsert(bytes32[] memory _leaves) external {
_bulkInsert(_leaves);
}
}


@@ -3,28 +3,4 @@
 pragma solidity ^0.6.0;
 pragma experimental ABIEncoderV2;
 
-import "../TornadoTrees.sol";
-
-contract TornadoTreesMock is TornadoTrees {
-  uint256 public timestamp;
-  uint256 public currentBlock;
-
-  constructor(
-    bytes32 _tornadoProxy,
-    bytes32 _hasher2,
-    bytes32 _hasher3,
-    uint32 _levels
-  ) public TornadoTrees(_tornadoProxy, _hasher2, _hasher3, _levels) {}
-
-  function resolve(bytes32 _addr) public view override returns (address) {
-    return address(uint160(uint256(_addr) >> (12 * 8)));
-  }
-
-  function setBlockNumber(uint256 _blockNumber) public {
-    currentBlock = _blockNumber;
-  }
-
-  function blockNumber() public view override returns (uint256) {
-    return currentBlock == 0 ? block.number : currentBlock;
-  }
-}
+import "@tornado/trees/contracts/mocks/TornadoTreesMock.sol";


@@ -0,0 +1,5 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
import "@tornado/trees/contracts/mocks/TornadoTreesV1Mock.sol";


@@ -1,136 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
import "../interfaces/IHasher.sol";
contract MerkleTreeWithHistory {
uint256 public constant FIELD_SIZE = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
uint256 public constant ZERO_VALUE = 21663839004416932945382355908790599225266501822907911457504978515578255421292; // = keccak256("tornado") % FIELD_SIZE
uint32 public immutable levels;
IHasher public hasher; // todo immutable
bytes32[] public filledSubtrees;
bytes32[] public zeros;
uint32 public currentRootIndex = 0;
uint32 public nextIndex = 0;
uint32 public constant ROOT_HISTORY_SIZE = 10;
bytes32[ROOT_HISTORY_SIZE] public roots;
constructor(uint32 _treeLevels, IHasher _hasher) public {
require(_treeLevels > 0, "_treeLevels should be greater than zero");
require(_treeLevels < 32, "_treeLevels should be less than 32");
levels = _treeLevels;
hasher = _hasher;
bytes32 currentZero = bytes32(ZERO_VALUE);
zeros.push(currentZero);
filledSubtrees.push(currentZero);
for (uint32 i = 1; i < _treeLevels; i++) {
currentZero = hashLeftRight(currentZero, currentZero);
zeros.push(currentZero);
filledSubtrees.push(currentZero);
}
filledSubtrees.push(hashLeftRight(currentZero, currentZero));
roots[0] = filledSubtrees[_treeLevels];
}
/**
@dev Hash 2 tree leaves, returns poseidon(_left, _right)
*/
function hashLeftRight(bytes32 _left, bytes32 _right) public view returns (bytes32) {
return hasher.poseidon([_left, _right]);
}
function _insert(bytes32 _leaf) internal returns (uint32 index) {
uint32 currentIndex = nextIndex;
require(currentIndex != uint32(2)**levels, "Merkle tree is full. No more leaves can be added");
nextIndex = currentIndex + 1;
bytes32 currentLevelHash = _leaf;
bytes32 left;
bytes32 right;
for (uint32 i = 0; i < levels; i++) {
if (currentIndex % 2 == 0) {
left = currentLevelHash;
right = zeros[i];
filledSubtrees[i] = currentLevelHash;
} else {
left = filledSubtrees[i];
right = currentLevelHash;
}
currentLevelHash = hashLeftRight(left, right);
currentIndex /= 2;
}
currentRootIndex = (currentRootIndex + 1) % ROOT_HISTORY_SIZE;
roots[currentRootIndex] = currentLevelHash;
return nextIndex - 1;
}
function _bulkInsert(bytes32[] memory _leaves) internal {
uint32 insertIndex = nextIndex;
require(insertIndex + _leaves.length <= uint32(2)**levels, "Merkle doesn't have enough capacity to add specified leaves");
bytes32[] memory subtrees = new bytes32[](levels);
bool[] memory modifiedSubtrees = new bool[](levels);
for (uint32 j = 0; j < _leaves.length - 1; j++) {
uint256 index = insertIndex + j;
bytes32 currentLevelHash = _leaves[j];
for (uint32 i = 0; ; i++) {
if (index % 2 == 0) {
modifiedSubtrees[i] = true;
subtrees[i] = currentLevelHash;
break;
}
if (subtrees[i] == bytes32(0)) {
subtrees[i] = filledSubtrees[i];
}
currentLevelHash = hashLeftRight(subtrees[i], currentLevelHash);
index /= 2;
}
}
for (uint32 i = 0; i < levels; i++) {
// using local map to save on gas on writes if elements were not modified
if (modifiedSubtrees[i]) {
filledSubtrees[i] = subtrees[i];
}
}
nextIndex = uint32(insertIndex + _leaves.length - 1);
_insert(_leaves[_leaves.length - 1]);
}
/**
@dev Whether the root is present in the root history
*/
function isKnownRoot(bytes32 _root) public view returns (bool) {
if (_root == 0) {
return false;
}
uint32 i = currentRootIndex;
do {
if (_root == roots[i]) {
return true;
}
if (i == 0) {
i = ROOT_HISTORY_SIZE;
}
i--;
} while (i != currentRootIndex);
return false;
}
/**
@dev Returns the last root
*/
function getLastRoot() public view returns (bytes32) {
return roots[currentRootIndex];
}
}


@@ -1,17 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
import "@openzeppelin/contracts/access/Ownable.sol";
import "./MerkleTreeWithHistory.sol";
contract OwnableMerkleTree is Ownable, MerkleTreeWithHistory {
constructor(uint32 _treeLevels, IHasher _hasher) public MerkleTreeWithHistory(_treeLevels, _hasher) {}
function insert(bytes32 _leaf) external onlyOwner returns (uint32 index) {
return _insert(_leaf);
}
function bulkInsert(bytes32[] calldata _leaves) external onlyOwner {
_bulkInsert(_leaves);
}
}


@@ -1,8 +1,8 @@
 {
-  "name": "tornado-anonymity-mining",
-  "version": "1.0.0",
+  "name": "@tornado/anonymity-mining",
+  "version": "2.1.5",
   "main": "index.js",
-  "repository": "https://github.com/tornadocash/tornado-anonymity-mining.git",
+  "repository": "https://git.tornado.ws/tornado-packages/anonymity-mining.git",
   "author": "Tornadocash team <hello@tornado.cash>",
   "license": "MIT",
   "files": [
@@ -47,12 +47,13 @@
     "truffle-plugin-verify": "^0.3.11"
   },
   "dependencies": {
-    "circomlib": "git+https://github.com/tornadocash/circomlib.git#3b492f9801573eebcfe1b6c584afe8a3beecf2b4",
+    "@tornado/circomlib": "^0.0.21",
+    "@tornado/fixed-merkle-tree": "0.3.4",
+    "@tornado/snarkjs": "0.1.20",
+    "@tornado/trees": "^0.0.11",
+    "@tornado/websnark": "^0.0.4",
     "decimal.js": "^10.2.0",
     "eth-sig-util": "^2.5.3",
-    "fixed-merkle-tree": "^0.3.4",
-    "snarkjs": "git+https://github.com/tornadocash/snarkjs.git#869181cfaf7526fe8972073d31655493a04326d5",
-    "web3": "^1.2.11",
-    "websnark": "git+https://github.com/tornadocash/websnark.git#86a526718cd6f6f5d31bdb1fe26a9ec8819f633e"
+    "web3": "^1.2.11"
   }
 }
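The rename into the @tornado scope, together with the @tornado:registry line added in .npmrc, means a consumer has to update both its registry configuration and its requires. A minimal sketch of the require-level change, mirroring the edits in src/ shown below (the package's export surface is assumed unchanged by the rename):

```js
// Before v2: unscoped packages fetched from npm or GitHub forks
// const MerkleTree = require('fixed-merkle-tree')
// const { poseidon } = require('circomlib')

// After v2: the same modules under the @tornado scope, resolved through the
// self-hosted registry configured in .npmrc
const MerkleTree = require('@tornado/fixed-merkle-tree')
const { poseidon } = require('@tornado/circomlib')
```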


@@ -3,6 +3,6 @@ npx circom circuits/$1.circom -o build/circuits/$1.json
 npx snarkjs info -c build/circuits/$1.json
 zkutil setup -c build/circuits/$1.json -p build/circuits/$1.params
 zkutil export-keys -c build/circuits/$1.json -p build/circuits/$1.params --pk build/circuits/$1_proving_key.json --vk build/circuits/$1_verification_key.json
-node node_modules/websnark/tools/buildpkey.js -i build/circuits/$1_proving_key.json -o build/circuits/$1_proving_key.bin
+node node_modules/@tornado/websnark/tools/buildpkey.js -i build/circuits/$1_proving_key.json -o build/circuits/$1_proving_key.bin
 zkutil generate-verifier -p build/circuits/$1.params -v build/circuits/${1}Verifier.sol
 sed -i.bak "s/contract Verifier/contract ${1}Verifier/g" build/circuits/${1}Verifier.sol


@@ -11,9 +11,9 @@ const {
   RewardArgs,
 } = require('./utils')
 const Account = require('./account')
-const MerkleTree = require('fixed-merkle-tree')
-const websnarkUtils = require('websnark/src/utils')
-const buildGroth16 = require('websnark/src/groth16')
+const MerkleTree = require('@tornado/fixed-merkle-tree')
+const websnarkUtils = require('@tornado/websnark/src/utils')
+const buildGroth16 = require('@tornado/websnark/src/groth16')
 
 const web3 = new Web3()
@@ -97,8 +97,31 @@ class Controller {
     return { proofs, args }
   }
 
-  async reward({ account, note, publicKey, fee = 0, relayer = 0, accountCommitments = null }) {
-    const rate = await this.contract.methods.rates(note.instance).call()
+  /**
+   * Generates proof and args to claim AP (anonymity points) for a note
+   * @param {Account} account The account the AP will be added to
+   * @param {Note} note The target note
+   * @param {String} publicKey ETH public key for the Account encryption
+   * @param {Number} fee Fee for the relayer
+   * @param {String} relayer Relayer address
+   * @param {Number} rate How many AP is generated for the note in block time
+   * @param {String[]} accountCommitments An array of account commitments from miner contract
+   * @param {String[]} depositDataEvents An array of account commitments from miner contract
+   * @param {{instance: String, hash: String, block: Number, index: Number}[]} depositDataEvents An array of deposit objects from tornadoTrees contract. hash = commitment
+   * @param {{instance: String, hash: String, block: Number, index: Number}[]} withdrawalDataEvents An array of withdrawal objects from tornadoTrees contract. hash = nullifierHash
+   */
+  async reward({
+    account,
+    note,
+    publicKey,
+    fee = 0,
+    relayer = 0,
+    rate = null,
+    accountCommitments = null,
+    depositDataEvents = null,
+    withdrawalDataEvents = null,
+  }) {
+    rate = rate || (await this.contract.methods.rates(note.instance).call())
 
     const newAmount = account.amount.add(
       toBN(rate)
@@ -107,8 +130,14 @@ class Controller {
     )
     const newAccount = new Account({ amount: newAmount })
 
-    const depositDataEvents = await this._fetchDepositDataEvents()
-    const depositLeaves = depositDataEvents.map((x) => poseidonHash([x.instance, x.hash, x.block]))
+    depositDataEvents = depositDataEvents || (await this._fetchDepositDataEvents())
+    const depositLeaves = depositDataEvents.map((x) => {
+      if (x.poseidon) {
+        return x.poseidon
+      }
+      return poseidonHash([x.instance, x.hash, x.block])
+    })
     const depositTree = new MerkleTree(this.merkleTreeHeight, depositLeaves, { hashFunction: poseidonHash2 })
     const depositItem = depositDataEvents.filter((x) => x.hash === toFixedHex(note.commitment))
     if (depositItem.length === 0) {
@@ -116,8 +145,14 @@ class Controller {
     }
     const depositPath = depositTree.path(depositItem[0].index)
 
-    const withdrawalDataEvents = await this._fetchWithdrawalDataEvents()
-    const withdrawalLeaves = withdrawalDataEvents.map((x) => poseidonHash([x.instance, x.hash, x.block]))
+    withdrawalDataEvents = withdrawalDataEvents || (await this._fetchWithdrawalDataEvents())
+    const withdrawalLeaves = withdrawalDataEvents.map((x) => {
+      if (x.poseidon) {
+        return x.poseidon
+      }
+      return poseidonHash([x.instance, x.hash, x.block])
+    })
     const withdrawalTree = new MerkleTree(this.merkleTreeHeight, withdrawalLeaves, {
       hashFunction: poseidonHash2,
     })
@@ -135,7 +170,7 @@ class Controller {
       pathElements: new Array(this.merkleTreeHeight).fill(0),
       pathIndices: new Array(this.merkleTreeHeight).fill(0),
     }
-    const accountIndex = accountTree.indexOf(account.commitment, (a, b) => a.eq(b))
+    const accountIndex = accountTree.indexOf(account.commitment, (a, b) => toBN(a).eq(toBN(b)))
     const accountPath = accountIndex !== -1 ? accountTree.path(accountIndex) : zeroAccount
     const accountTreeUpdate = this._updateTree(accountTree, newAccount.commitment)
@@ -215,15 +250,15 @@ class Controller {
     }
   }
 
-  async withdraw({ account, amount, recipient, publicKey, fee = 0, relayer = 0 }) {
+  async withdraw({ account, amount, recipient, publicKey, fee = 0, relayer = 0, accountCommitments = null }) {
     const newAmount = account.amount.sub(toBN(amount)).sub(toBN(fee))
     const newAccount = new Account({ amount: newAmount })
 
-    const accountCommitments = await this._fetchAccountCommitments()
+    accountCommitments = accountCommitments || (await this._fetchAccountCommitments())
     const accountTree = new MerkleTree(this.merkleTreeHeight, accountCommitments, {
       hashFunction: poseidonHash2,
     })
-    const accountIndex = accountTree.indexOf(account.commitment, (a, b) => a.eq(b))
+    const accountIndex = accountTree.indexOf(account.commitment, (a, b) => toBN(a).eq(toBN(b)))
     if (accountIndex === -1) {
       throw new Error('The accounts tree does not contain such account commitment')
     }
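The new reward/withdraw signatures let a caller pass pre-fetched data (a cached rate, account commitments, and tornadoTrees deposit/withdrawal events, each optionally carrying a precomputed poseidon leaf) instead of having the Controller re-query the chain on every call. A hedged sketch of the v2 call shape, assuming an already constructed controller, a miner contract binding, and event caches prepared elsewhere:

```js
// Sketch only: controller, miner, account, note, publicKey and the cached data
// (rate, accountCommitments, depositDataEvents, withdrawalDataEvents) are assumed
// to be prepared elsewhere, e.g. by an indexer that also precomputes x.poseidon.
const { proof, args } = await controller.reward({
  account,               // Account the anonymity points are added to
  note,                  // Note being claimed
  publicKey,             // encryption public key for the updated account
  rate,                  // cached AP-per-block rate; falls back to rates(note.instance)
  accountCommitments,    // cached account commitments from the miner contract
  depositDataEvents,     // cached DepositData events: { instance, hash, block, index }
  withdrawalDataEvents,  // cached WithdrawalData events: { instance, hash, block, index }
})
await miner.reward(proof, args) // same call shape as in the tests further down
```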


@@ -25,11 +25,10 @@ class Note {
   }
 
   static fromString(note, instance, depositBlock, withdrawalBlock) {
-    note = note.split('-')
-    const [, currency, amount, netId] = note
-    const hexNote = note[4].slice(2)
-    const nullifier = new BN(hexNote.slice(0, 62), 16, 'le')
-    const secret = new BN(hexNote.slice(62), 16, 'le')
+    const [, currency, amount, netId, noteHex] = note.split('-')
+    const noteBuff = Buffer.from(noteHex.slice(2), 'hex')
+    const nullifier = new BN(noteBuff.slice(0, 31), 16, 'le')
+    const secret = new BN(noteBuff.slice(31), 16, 'le')
     return new Note({
       secret,
       nullifier,
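The corrected parser treats the note payload as a byte buffer rather than slicing the hex string. A short illustration of the layout it relies on, reusing the note string exercised in the tests further down: the 0x-prefixed payload is 124 hex characters, i.e. 62 bytes, split into a 31-byte little-endian nullifier followed by a 31-byte little-endian secret.

```js
// Illustration only, using the note string from the test suite in this compare.
const noteString =
  'tornado-eth-1-5-0x00b9787ae8b877cc612acfc3d46eb1303c618ef386cb4383b75be5b300ac9b94818f3f7a589b4667612e879dbfd44198231c86aaa412a3770fd86d8fc045'
const [, currency, amount, netId, noteHex] = noteString.split('-')
const noteBuff = Buffer.from(noteHex.slice(2), 'hex')

console.log(currency, amount, netId) // eth 1 5
console.log(noteBuff.length)         // 62
console.log(noteBuff.slice(0, 31).length, noteBuff.slice(31).length) // 31 31
```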


@@ -1,10 +1,10 @@
 const crypto = require('crypto')
 const Decimal = require('decimal.js')
-const { bigInt } = require('snarkjs')
+const { bigInt } = require('@tornado/snarkjs')
 const { toBN, soliditySha3 } = require('web3-utils')
 const Web3 = require('web3')
 const web3 = new Web3()
-const { babyJub, pedersenHash, mimcsponge, poseidon } = require('circomlib')
+const { babyJub, pedersenHash, mimcsponge, poseidon } = require('@tornado/circomlib')
 
 const RewardExtData = {
   RewardExtData: {


@@ -1,107 +0,0 @@
/* global artifacts, web3, contract */
require('chai').use(require('bn-chai')(web3.utils.BN)).use(require('chai-as-promised')).should()
const { takeSnapshot, revertSnapshot } = require('../scripts/ganacheHelper')
const { toFixedHex, randomBN } = require('../src/utils')
const MerkleTree = artifacts.require('MerkleTreeWithHistoryMock')
const Hasher = artifacts.require('Hasher2')
const levels = 16
contract('MerkleTree', () => {
let tree1
let tree2
let snapshotId
let hasher
before(async () => {
hasher = await Hasher.new()
tree1 = await MerkleTree.new(levels, hasher.address)
tree2 = await MerkleTree.new(levels, hasher.address)
snapshotId = await takeSnapshot()
})
describe('#tree', () => {
it('should bulk insert', async () => {
const elements = ['123', '456', '789'].map((e) => toFixedHex(e))
await tree1.bulkInsert(elements)
for (const e of elements) {
await tree2.insert(e)
}
const root1 = await tree1.getLastRoot()
const root2 = await tree2.getLastRoot()
root1.should.be.equal(root2)
})
it('almost full tree', async () => {
let tree = await MerkleTree.new(3, hasher.address)
let elements = ['1', '2', '3', '4', '5', '6', '7'].map((e) => toFixedHex(e))
await tree.bulkInsert(elements)
tree = await MerkleTree.new(3, hasher.address)
elements = ['1', '2', '3', '4', '5', '6', '7', '8'].map((e) => toFixedHex(e))
await tree.bulkInsert(elements)
tree = await MerkleTree.new(3, hasher.address)
elements = ['1', '2', '3', '4', '5', '6', '7', '8', '9'].map((e) => toFixedHex(e))
// prettier-ignore
await tree
.bulkInsert(elements)
.should.be.rejectedWith('Merkle doesn\'t have enough capacity to add specified leaves')
})
// it('estimate gas hasher', async () => {
// const gas = await tree1.test() // hasher.contract.methods.poseidon([1, 2]).estimateGas()
// console.log('gas', gas.toString())
// })
it('should bulk insert with initial state', async () => {
const initElements = [123, 456, 789].map((e) => toFixedHex(e))
const elements = [12, 34, 56, 78, 90].map((e) => toFixedHex(e))
for (const e of initElements) {
await tree1.insert(e)
await tree2.insert(e)
}
await tree1.bulkInsert(elements)
for (const e of elements) {
await tree2.insert(e)
}
const root1 = await tree1.getLastRoot()
const root2 = await tree2.getLastRoot()
root1.should.be.equal(root2)
})
it.skip('should pass the stress test', async () => {
const rounds = 40
const elementCount = 10
for (let i = 0; i < rounds; i++) {
const length = 1 + Math.floor(Math.random() * elementCount)
const elements = Array.from({ length }, () => randomBN()).map((e) => toFixedHex(e))
await tree1.bulkInsert(elements)
for (const e of elements) {
await tree2.insert(e)
}
const root1 = await tree1.getLastRoot()
const root2 = await tree2.getLastRoot()
root1.should.be.equal(root2)
}
})
})
afterEach(async () => {
await revertSnapshot(snapshotId.result)
// eslint-disable-next-line require-atomic-updates
snapshotId = await takeSnapshot()
})
})


@@ -11,6 +11,7 @@ const Account = require('../src/account')
 const Note = require('../src/note')
 const {
   toFixedHex,
+  poseidonHash,
   poseidonHash2,
   packEncryptedMessage,
   unpackEncryptedMessage,
@@ -19,6 +20,7 @@ const {
 const { getEncryptionPublicKey } = require('eth-sig-util')
 const Miner = artifacts.require('MinerMock')
 const TornadoTrees = artifacts.require('TornadoTreesMock')
+const TornadoTreesV1 = artifacts.require('TornadoTreesV1Mock')
 const Torn = artifacts.require('TORNMock')
 const RewardSwap = artifacts.require('RewardSwapMock')
 const RewardVerifier = artifacts.require('RewardVerifier')
@@ -32,9 +34,7 @@ const provingKeys = {
   withdrawProvingKey: fs.readFileSync('./build/circuits/Withdraw_proving_key.bin').buffer,
   treeUpdateProvingKey: fs.readFileSync('./build/circuits/TreeUpdate_proving_key.bin').buffer,
 }
-const MerkleTree = require('fixed-merkle-tree')
-const Hasher2 = artifacts.require('Hasher2')
-const Hasher3 = artifacts.require('Hasher3')
+const MerkleTree = require('@tornado/fixed-merkle-tree')
 
 // Set time to beginning of a second
 async function timeReset() {
@@ -55,11 +55,13 @@ async function getNextAddr(sender, offset = 0) {
 }
 
 async function registerNote(note, tornadoTrees) {
-  await tornadoTrees.setBlockNumber(note.depositBlock)
-  await tornadoTrees.registerDeposit(note.instance, toFixedHex(note.commitment))
-
-  await tornadoTrees.setBlockNumber(note.withdrawalBlock)
-  await tornadoTrees.registerWithdrawal(note.instance, toFixedHex(note.nullifierHash))
+  await tornadoTrees.register(
+    note.instance,
+    toFixedHex(note.commitment),
+    toFixedHex(note.nullifierHash),
+    note.depositBlock,
+    note.withdrawalBlock,
+  )
 
   return {
     depositLeaf: {
@@ -118,17 +120,31 @@ contract('Miner', (accounts) => {
   const privateKey = web3.eth.accounts.create().privateKey.slice(2)
   const publicKey = getEncryptionPublicKey(privateKey)
   const operator = accounts[0]
+  const verifier = accounts[1]
   const thirtyDays = 30 * 24 * 3600
   const poolWeight = 1e11
   const governance = accounts[9]
+  let depositTree
+  let withdrawalTree
 
   before(async () => {
     const rewardVerifier = await RewardVerifier.new()
    const withdrawVerifier = await WithdrawVerifier.new()
     const treeUpdateVerifier = await TreeUpdateVerifier.new()
-    const hasher2 = await Hasher2.new()
-    const hasher3 = await Hasher3.new()
-    tornadoTrees = await TornadoTrees.new(operator, hasher2.address, hasher3.address, levels)
+    const tornadoTreesV1 = await TornadoTreesV1.new(
+      0,
+      0,
+      toFixedHex(emptyTree.root()),
+      toFixedHex(emptyTree.root()),
+    )
+    tornadoTrees = await TornadoTrees.new(operator, tornadoTreesV1.address, {
+      depositsFrom: 0,
+      depositsStep: 0,
+      withdrawalsFrom: 0,
+      withdrawalsStep: 0,
+    })
+    await tornadoTrees.initialize(operator, verifier)
     const swapExpectedAddr = await getNextAddr(accounts[0], 1)
     const minerExpectedAddr = await getNextAddr(accounts[0], 2)
     torn = await Torn.new(sender, thirtyDays, [
@@ -151,15 +167,17 @@ contract('Miner', (accounts) => {
       [{ instance: tornado, value: RATE.toString() }],
     )
 
-    const depositData = []
-    const withdrawalData = []
+    depositTree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
+    withdrawalTree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
     for (const note of notes) {
       const { depositLeaf, withdrawalLeaf } = await registerNote(note, tornadoTrees)
-      depositData.push(depositLeaf)
-      withdrawalData.push(withdrawalLeaf)
+      depositTree.insert(poseidonHash([depositLeaf.instance, depositLeaf.hash, depositLeaf.block]))
+      withdrawalTree.insert(
+        poseidonHash([withdrawalLeaf.instance, withdrawalLeaf.hash, withdrawalLeaf.block]),
+      )
     }
-    await tornadoTrees.updateRoots(depositData, withdrawalData)
+    await tornadoTrees.updateRoots(toFixedHex(depositTree.root()), toFixedHex(withdrawalTree.root()))
 
     const anotherWeb3 = new AnotherWeb3(web3.currentProvider)
     contract = new anotherWeb3.eth.Contract(miner.abi, miner.address)
@@ -207,6 +225,22 @@ contract('Miner', (accounts) => {
       note.commitment.should.be.eq.BN(
         toBN('0x1a08fd10dae9806ce25b62582e44d237c1cb9f8d6bf73e756f18d0e5d7a7351a'),
       )
+      const note1 = Note.fromString(
+        'tornado-eth-1-5-0x00b9787ae8b877cc612acfc3d46eb1303c618ef386cb4383b75be5b300ac9b94818f3f7a589b4667612e879dbfd44198231c86aaa412a3770fd86d8fc045',
+        '0x8b3f5393bA08c24cc7ff5A66a832562aAB7bC95f',
+        10,
+        15,
+      )
+      note1.secret.should.be.eq.BN(toBN('0x45c08f6dd80f77a312a4aa861c239841d4bf9d872e6167469b587a3f8f8194'))
+      note1.nullifier.should.be.eq.BN(
+        toBN('0x9bac00b3e55bb78343cb86f38e613c30b16ed4c3cf2a61cc77b8e87a78b900'),
+      )
+      note1.nullifierHash.should.be.eq.BN(
+        toBN('0x1892018e434ed0476992c7e05e6022b69eacf746a978191117194d180104aed1'),
+      )
+      note1.commitment.should.be.eq.BN(
+        toBN('0x270a865e2bf7de26b0a6de08c527f4d13e2b49026f6aaeeea0866a9dd39e19f6'),
+      )
     })
   })
@@ -493,45 +527,6 @@ contract('Miner', (accounts) => {
         .reward(proof, args, tmp.proof, update.args)
         .should.be.rejectedWith('Invalid tree update proof')
     })
-
-    it('should work with outdated deposit or withdrawal merkle root', async () => {
-      const note0 = new Note({
-        instance: tornado,
-        depositBlock: 10,
-        withdrawalBlock: 55,
-      })
-      const note4 = new Note({
-        instance: tornado,
-        depositBlock: 10,
-        withdrawalBlock: 55,
-      })
-      const note5 = new Note({
-        instance: tornado,
-        depositBlock: 10,
-        withdrawalBlock: 65,
-      })
-
-      const claim1 = await controller.reward({ account: new Account(), note: note3, publicKey })
-      const note4Leaves = await registerNote(note4, tornadoTrees)
-      await tornadoTrees.updateRoots([note4Leaves.depositLeaf], [note4Leaves.withdrawalLeaf])
-
-      const claim2 = await controller.reward({ account: new Account(), note: note4, publicKey })
-      for (let i = 0; i < 9; i++) {
-        const note0Leaves = await registerNote(note0, tornadoTrees)
-        await tornadoTrees.updateRoots([note0Leaves.depositLeaf], [note0Leaves.withdrawalLeaf])
-      }
-
-      await miner.reward(claim1.proof, claim1.args).should.be.rejectedWith('Incorrect deposit tree root')
-      await miner.reward(claim2.proof, claim2.args).should.be.fulfilled
-
-      const note5Leaves = await registerNote(note5, tornadoTrees)
-      await tornadoTrees.updateRoots([note5Leaves.depositLeaf], [note5Leaves.withdrawalLeaf])
-
-      const claim3 = await controller.reward({ account: new Account(), note: note5, publicKey })
-      await miner.reward(claim3.proof, claim3.args).should.be.fulfilled
-    })
   })
 
   describe('#withdraw', () => {


@@ -1,142 +0,0 @@
/* global artifacts, web3, contract */
require('chai').use(require('bn-chai')(web3.utils.BN)).use(require('chai-as-promised')).should()
const { takeSnapshot, revertSnapshot } = require('../scripts/ganacheHelper')
const Note = require('../src/note')
const TornadoTrees = artifacts.require('TornadoTreesMock')
const OwnableMerkleTree = artifacts.require('OwnableMerkleTree')
const Hasher2 = artifacts.require('Hasher2')
const Hasher3 = artifacts.require('Hasher3')
const { toFixedHex, poseidonHash2, poseidonHash } = require('../src/utils')
const MerkleTree = require('fixed-merkle-tree')
async function registerDeposit(note, tornadoTrees) {
await tornadoTrees.setBlockNumber(note.depositBlock)
await tornadoTrees.registerDeposit(note.instance, toFixedHex(note.commitment))
return {
instance: note.instance,
hash: toFixedHex(note.commitment),
block: toFixedHex(note.depositBlock),
}
}
async function registerWithdrawal(note, tornadoTrees) {
await tornadoTrees.setBlockNumber(note.withdrawalBlock)
await tornadoTrees.registerWithdrawal(note.instance, toFixedHex(note.nullifierHash))
return {
instance: note.instance,
hash: toFixedHex(note.nullifierHash),
block: toFixedHex(note.withdrawalBlock),
}
}
const levels = 16
contract('TornadoTrees', (accounts) => {
let tornadoTrees
let snapshotId
let hasher2
let hasher3
let operator = accounts[0]
let depositTree
let withdrawalTree
const instances = {
one: '0x0000000000000000000000000000000000000001',
two: '0x0000000000000000000000000000000000000002',
three: '0x0000000000000000000000000000000000000003',
four: '0x0000000000000000000000000000000000000004',
}
const note1 = new Note({
instance: instances.one,
depositBlock: 10,
withdrawalBlock: 10 + 4 * 60 * 24,
})
const note2 = new Note({
instance: instances.two,
depositBlock: 10,
withdrawalBlock: 10 + 2 * 4 * 60 * 24,
})
const note3 = new Note({
instance: instances.three,
depositBlock: 10,
withdrawalBlock: 10 + 3 * 4 * 60 * 24,
})
before(async () => {
hasher2 = await Hasher2.new()
hasher3 = await Hasher3.new()
tornadoTrees = await TornadoTrees.new(operator, hasher2.address, hasher3.address, levels)
depositTree = await OwnableMerkleTree.at(await tornadoTrees.depositTree())
withdrawalTree = await OwnableMerkleTree.at(await tornadoTrees.withdrawalTree())
snapshotId = await takeSnapshot()
})
describe('#constructor', () => {
it('should be initialized', async () => {
const owner = await tornadoTrees.tornadoProxy()
owner.should.be.equal(operator)
})
})
describe('#updateRoots', () => {
it('should work for many instances', async () => {
const note1DepositLeaf = await registerDeposit(note1, tornadoTrees)
const note2DepositLeaf = await registerDeposit(note2, tornadoTrees)
const note2WithdrawalLeaf = await registerWithdrawal(note2, tornadoTrees)
const note3DepositLeaf = await registerDeposit(note3, tornadoTrees)
const note3WithdrawalLeaf = await registerWithdrawal(note3, tornadoTrees)
await tornadoTrees.updateRoots(
[note1DepositLeaf, note2DepositLeaf, note3DepositLeaf],
[note2WithdrawalLeaf, note3WithdrawalLeaf],
)
const localDepositTree = new MerkleTree(levels, [], {
hashFunction: poseidonHash2,
})
localDepositTree.insert(poseidonHash([note1.instance, note1.commitment, note1.depositBlock]))
localDepositTree.insert(poseidonHash([note2.instance, note2.commitment, note2.depositBlock]))
localDepositTree.insert(poseidonHash([note3.instance, note3.commitment, note3.depositBlock]))
const lastDepositRoot = await depositTree.getLastRoot()
toFixedHex(localDepositTree.root()).should.be.equal(lastDepositRoot.toString())
const localWithdrawalTree = new MerkleTree(levels, [], {
hashFunction: poseidonHash2,
})
localWithdrawalTree.insert(poseidonHash([note2.instance, note2.nullifierHash, note2.withdrawalBlock]))
localWithdrawalTree.insert(poseidonHash([note3.instance, note3.nullifierHash, note3.withdrawalBlock]))
const lastWithdrawalRoot = await withdrawalTree.getLastRoot()
toFixedHex(localWithdrawalTree.root()).should.be.equal(lastWithdrawalRoot.toString())
})
it('should work for empty arrays', async () => {
await tornadoTrees.updateRoots([], [])
})
})
describe('#getRegisteredDeposits', () => {
it('should work', async () => {
const note1DepositLeaf = await registerDeposit(note1, tornadoTrees)
let res = await tornadoTrees.getRegisteredDeposits()
res.length.should.be.equal(1)
// res[0].should.be.true
await tornadoTrees.updateRoots([note1DepositLeaf], [])
res = await tornadoTrees.getRegisteredDeposits()
res.length.should.be.equal(0)
await registerDeposit(note2, tornadoTrees)
res = await tornadoTrees.getRegisteredDeposits()
// res[0].should.be.true
})
})
afterEach(async () => {
await revertSnapshot(snapshotId.result)
// eslint-disable-next-line require-atomic-updates
snapshotId = await takeSnapshot()
})
})


@@ -34,17 +34,6 @@ module.exports = {
       },
     },
   },
-    external: {
-      command: 'node ./compileHasher.js',
-      targets: [
-        {
-          path: './build/contracts/Hasher2.json',
-        },
-        {
-          path: './build/contracts/Hasher3.json',
-        },
-      ],
-    },
   },
   plugins: ['truffle-plugin-verify', 'solidity-coverage'],
 }

yarn.lock (8624 changed lines)

File diff suppressed because it is too large.