updates

parent 6a52accbd6
commit 5fe2429b89
@@ -74,9 +74,9 @@ contract TornadoTrees is ITornadoTrees, EnsResolve {
     emit DepositData(_instance, _commitment, blockNumber(), deposits.length - 1);
   }

-  function registerWithdrawal(address _instance, bytes32 _nullifier) external override onlyTornadoProxy {
-    withdrawals.push(keccak256(abi.encode(_instance, _nullifier, blockNumber())));
-    emit WithdrawalData(_instance, _nullifier, blockNumber(), withdrawals.length - 1);
+  function registerWithdrawal(address _instance, bytes32 _nullifierHash) external override onlyTornadoProxy {
+    withdrawals.push(keccak256(abi.encode(_instance, _nullifierHash, blockNumber())));
+    emit WithdrawalData(_instance, _nullifierHash, blockNumber(), withdrawals.length - 1);
   }

   // todo !!! ensure that during migration the tree is filled evenly
@@ -92,7 +92,6 @@ contract TornadoTrees is ITornadoTrees, EnsResolve {
     require(_newRoot != previousDepositRoot, "Outdated deposit root");
     require(_currentRoot == depositRoot, "Proposed deposit root is invalid");
     require(_pathIndices == offset >> CHUNK_TREE_HEIGHT, "Incorrect insert index");
-    require(uint256(_newRoot) < SNARK_FIELD, "Proposed root is out of range"); // optional

     bytes memory data = new bytes(BYTES_SIZE);
     assembly {
@@ -101,12 +100,11 @@ contract TornadoTrees is ITornadoTrees, EnsResolve {
       mstore(add(data, 0x20), _currentRoot)
     }
     for (uint256 i = 0; i < CHUNK_SIZE; i++) {
-      (bytes32 hash, address instance, uint32 depositBlock) = (_events[i].hash, _events[i].instance, _events[i].block);
-      bytes32 leafHash = keccak256(abi.encode(instance, hash, depositBlock));
+      (bytes32 hash, address instance, uint32 blockNumber) = (_events[i].hash, _events[i].instance, _events[i].block);
+      bytes32 leafHash = keccak256(abi.encode(instance, hash, blockNumber));
       require(leafHash == deposits[offset + i], "Incorrect deposit");
-      require(uint256(hash) < SNARK_FIELD, "Hash out of range"); // optional
       assembly {
-        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), depositBlock)
+        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), blockNumber)
         mstore(add(add(data, mul(ITEM_SIZE, i)), 0x78), instance)
         mstore(add(add(data, mul(ITEM_SIZE, i)), 0x64), hash)
       }
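Reading the assembly in this loop, each event appears to be packed back-to-back into data as hash (32 bytes), then instance (20 bytes), then block number (4 bytes), which the ITEM_SIZE-strided offsets suggest. The following is an illustrative sketch only, not part of this commit; it assumes ethers v5 and that ITEM_SIZE is 32 + 20 + 4:

    // Illustrative: pack one event the way the loop's mstore calls appear to lay it out,
    // assuming the layout is hash (32 bytes) | instance (20 bytes) | blockNumber (4 bytes).
    const ethers = require('ethers')

    function packEvent({ hash, instance, block }) {
      // solidityPack keeps each type at its exact width: bytes32 -> 32, address -> 20, uint32 -> 4
      return ethers.utils.solidityPack(['bytes32', 'address', 'uint32'], [hash, instance, block])
    }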
@@ -143,12 +141,12 @@ contract TornadoTrees is ITornadoTrees, EnsResolve {
       mstore(add(data, 0x20), _currentRoot)
     }
     for (uint256 i = 0; i < CHUNK_SIZE; i++) {
-      (bytes32 hash, address instance, uint32 withdrawalBlock) = (_events[i].hash, _events[i].instance, _events[i].block);
-      bytes32 leafHash = keccak256(abi.encode(instance, hash, withdrawalBlock));
+      (bytes32 hash, address instance, uint32 blockNumber) = (_events[i].hash, _events[i].instance, _events[i].block);
+      bytes32 leafHash = keccak256(abi.encode(instance, hash, blockNumber));
       require(leafHash == withdrawals[offset + i], "Incorrect withdrawal");
       require(uint256(hash) < SNARK_FIELD, "Hash out of range");
       assembly {
-        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), withdrawalBlock)
+        mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), blockNumber)
         mstore(add(add(data, mul(ITEM_SIZE, i)), 0x78), instance)
         mstore(add(add(data, mul(ITEM_SIZE, i)), 0x64), hash)
       }
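For context, the leaves checked against deposits[offset + i] and withdrawals[offset + i] are plain keccak256 hashes of abi-encoded (instance, hash, blockNumber) tuples, so they can be recomputed off-chain. A minimal sketch, mirroring what the updated tests below do with ethers v5 (illustrative, not part of the commit):

    const ethers = require('ethers')
    const abi = new ethers.utils.AbiCoder()

    // Recompute a registered leaf: keccak256(abi.encode(instance, hash, blockNumber))
    function leafFor(instance, hash, blockNumber) {
      const encoded = abi.encode(['address', 'bytes32', 'uint256'], [instance, hash, blockNumber])
      return ethers.utils.keccak256(encoded)
    }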
@@ -1,7 +1,7 @@
 const ethers = require('ethers')
 const BigNumber = ethers.BigNumber

-const { bitsToNumber, toBuffer, poseidonHash } = require('./utils')
+const { bitsToNumber, toBuffer, toFixedHex, poseidonHash } = require('./utils')

 const jsSHA = require('jssha')

@@ -79,7 +79,19 @@ function batchTreeUpdate(tree, events) {
   }

   input.argsHash = hashInputs(input)
-  return input
+
+  const args = [
+    toFixedHex(input.argsHash),
+    toFixedHex(input.oldRoot),
+    toFixedHex(input.newRoot),
+    toFixedHex(input.pathIndices, 4),
+    events.map((e) => ({
+      hash: toFixedHex(e.hash),
+      instance: toFixedHex(e.instance, 20),
+      block: toFixedHex(e.block, 4),
+    })),
+  ]
+  return { input, args }
   // const proofData = await websnarkUtils.genWitnessAndProve(
   //   this.groth16,
   //   input,
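With this change, batchTreeUpdate returns both the circuit input and the ABI-ready call arguments, so callers prove against input and spread args into the contract call. A usage sketch following the pattern in the updated tests (the artifacts path is the one the tests use):

    // Inside an async test or script:
    const { input, args } = controller.batchTreeUpdate(tree, depositEvents)
    const proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
    await tornadoTrees.updateDepositTree(proof, ...args)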
@@ -16,8 +16,8 @@ describe('Snark', () => {
         block: randomBN(4).toString(),
       })
     }
-    const data = await batchTreeUpdate(tree, events)
-    const proof = await prove(data, './artifacts/circuits/BatchTreeUpdate')
+    const { input } = batchTreeUpdate(tree, events)
+    const proof = await prove(input, './artifacts/circuits/BatchTreeUpdate')

     expect(proof.length).to.be.gt(0)
   })
@@ -37,7 +37,8 @@ describe('TornadoTrees', function () {
   let verifier
   let tornadoTrees
   let notes
-  let events
+  const depositEvents = []
+  const withdrawalEvents = []

   beforeEach(async function () {
     tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
@@ -65,44 +66,99 @@ describe('TornadoTrees', function () {
         nullifierHash: randomBN(),
       }
       await register(notes[i], tornadoTrees, tornadoProxy)
+      depositEvents[i] = {
+        hash: toFixedHex(notes[i].commitment),
+        instance: toFixedHex(notes[i].instance, 20),
+        block: toFixedHex(notes[i].depositBlock, 4),
+      }
+      withdrawalEvents[i] = {
+        hash: toFixedHex(notes[i].nullifierHash),
+        instance: toFixedHex(notes[i].instance, 20),
+        block: toFixedHex(notes[i].withdrawalBlock, 4),
+      }
     }
-
-    events = notes.map((note) => ({
-      hash: toFixedHex(note.commitment),
-      instance: toFixedHex(note.instance, 20),
-      block: toFixedHex(note.depositBlock, 4),
-    }))
   })

-  it('Should calculate hash', async function () {
-    const data = await controller.batchTreeUpdate(tree, events)
-    const solHash = await tornadoTrees.updateDepositTreeMock(
-      toFixedHex(data.oldRoot),
-      toFixedHex(data.newRoot),
-      toFixedHex(data.pathIndices, 4),
-      events,
-    )
-    expect(solHash).to.be.equal(data.argsHash)
+  describe('#updateDepositTree', () => {
+    it('should check hash', async () => {
+      const { args } = controller.batchTreeUpdate(tree, depositEvents)
+      const solHash = await tornadoTrees.updateDepositTreeMock(...args.slice(1))
+      expect(solHash).to.be.equal(args[0])
     })

-  it('Should calculate hash', async function () {
-    const data = await controller.batchTreeUpdate(tree, events)
-    const proof = await controller.prove(data, './artifacts/circuits/BatchTreeUpdate')
-    await tornadoTrees.updateDepositTree(
-      proof,
-      toFixedHex(data.argsHash),
-      toFixedHex(data.oldRoot),
-      toFixedHex(data.newRoot),
-      toFixedHex(data.pathIndices, 4),
-      events,
-    )
-    expect(await tornadoTrees.depositRoot()).to.be.equal(tree.root())
+    it('should prove snark', async () => {
+      const { input, args } = controller.batchTreeUpdate(tree, depositEvents)
+      const proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
+      await tornadoTrees.updateDepositTree(proof, ...args)
+
+      const updatedRoot = await tornadoTrees.depositRoot()
+      expect(updatedRoot).to.be.equal(tree.root())
     })

-  it('should work for non-empty tree')
+    it('should work for non-empty tree', async () => {
+      let { input, args } = controller.batchTreeUpdate(tree, depositEvents)
+      let proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
+      await tornadoTrees.updateDepositTree(proof, ...args)
+      let updatedRoot = await tornadoTrees.depositRoot()
+      expect(updatedRoot).to.be.equal(tree.root())
+      //
+      for (let i = 0; i < notes.length; i++) {
+        await register(notes[i], tornadoTrees, tornadoProxy)
+      }
+      ;({ input, args } = controller.batchTreeUpdate(tree, depositEvents))
+      proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
+      await tornadoTrees.updateDepositTree(proof, ...args)
+      updatedRoot = await tornadoTrees.depositRoot()
+      expect(updatedRoot).to.be.equal(tree.root())
+    })
     it('should reject for partially filled tree')
     it('should reject for outdated deposit root')
     it('should reject for incorrect insert index')
     it('should reject for overflows of newRoot')
     it('should reject for invalid sha256 args')
+  })
+
+  describe('#getRegisteredDeposits', () => {
+    it('should work', async () => {
+      const abi = new ethers.utils.AbiCoder()
+      let { count, _deposits } = await tornadoTrees.getRegisteredDeposits()
+      expect(count).to.be.equal(notes.length)
+      _deposits.forEach((hash, i) => {
+        const encodedData = abi.encode(
+          ['address', 'bytes32', 'uint256'],
+          [notes[i].instance, toFixedHex(notes[i].commitment), notes[i].depositBlock],
+        )
+        const leaf = ethers.utils.keccak256(encodedData)
+
+        expect(leaf).to.be.equal(hash)
+      })
+      // res.length.should.be.equal(1)
+      // res[0].should.be.true
+      // await tornadoTrees.updateRoots([note1DepositLeaf], [])
+
+      // res = await tornadoTrees.getRegisteredDeposits()
+      // res.length.should.be.equal(0)
+
+      // await registerDeposit(note2, tornadoTrees)
+      // res = await tornadoTrees.getRegisteredDeposits()
+      // // res[0].should.be.true
+    })
+  })
+
+  describe('#getRegisteredWithdrawals', () => {
+    it('should work', async () => {
+      const abi = new ethers.utils.AbiCoder()
+      let { count, _withdrawals } = await tornadoTrees.getRegisteredWithdrawals()
+      expect(count).to.be.equal(notes.length)
+      _withdrawals.forEach((hash, i) => {
+        const encodedData = abi.encode(
+          ['address', 'bytes32', 'uint256'],
+          [notes[i].instance, toFixedHex(notes[i].nullifierHash), notes[i].withdrawalBlock],
+        )
+        const leaf = ethers.utils.keccak256(encodedData)
+
+        expect(leaf).to.be.equal(hash)
+      })
+    })
+  })
 })