diff --git a/powersoftau/src/batched_accumulator.rs b/powersoftau/src/batched_accumulator.rs index e7b026b..8751fd5 100644 --- a/powersoftau/src/batched_accumulator.rs +++ b/powersoftau/src/batched_accumulator.rs @@ -2,7 +2,6 @@ /// and then contributes to entropy in parts as well use bellman_ce::pairing::ff::{Field, PrimeField}; use bellman_ce::pairing::*; -use blake2::{Blake2b, Digest}; use log::{error, info}; use generic_array::GenericArray; @@ -46,24 +45,11 @@ pub struct BatchedAccumulator<'a, E: Engine> { /// Hash chain hash pub hash: GenericArray<u8, U64>, /// The parameters used for the setup of this accumulator - pub parameters: &'a CeremonyParams, + pub parameters: &'a CeremonyParams<E>, } impl<'a, E: Engine> BatchedAccumulator<'a, E> { - /// Calculate the contribution hash from the resulting file. Original powers of tau implementation - /// used a specially formed writer to write to the file and calculate a hash on the fly, but memory-constrained - /// implementation now writes without a particular order, so plain recalculation at the end - /// of the procedure is more efficient - pub fn calculate_hash(input_map: &Mmap) -> GenericArray<u8, U64> { - let chunk_size = 1 << 30; // read by 1GB from map - let mut hasher = Blake2b::default(); - for chunk in input_map.chunks(chunk_size) { - hasher.input(&chunk); - } - hasher.result() - } - - pub fn empty(parameters: &'a CeremonyParams) -> Self { + pub fn empty(parameters: &'a CeremonyParams<E>) -> Self { Self { tau_powers_g1: vec![], tau_powers_g2: vec![], @@ -297,7 +283,7 @@ impl<'a, E: Engine> BatchedAccumulator<'a, E> { output_is_compressed: UseCompression, check_input_for_correctness: CheckForCorrectness, check_output_for_correctness: CheckForCorrectness, - parameters: &'a CeremonyParams, + parameters: &'a CeremonyParams<E>, ) -> bool { use itertools::MinMaxResult::MinMax; assert_eq!(digest.len(), 64); @@ -557,7 +543,7 @@ impl<'a, E: Engine> BatchedAccumulator<'a, E> { input_map: &Mmap, output_map: &mut MmapMut, 
check_input_for_correctness: CheckForCorrectness, - parameters: &'a CeremonyParams, + parameters: &'a CeremonyParams, ) -> io::Result<()> { use itertools::MinMaxResult::MinMax; @@ -634,7 +620,7 @@ impl<'a, E: Engine> BatchedAccumulator<'a, E> { input_map: &Mmap, check_input_for_correctness: CheckForCorrectness, compression: UseCompression, - parameters: &'a CeremonyParams, + parameters: &'a CeremonyParams, ) -> io::Result> { use itertools::MinMaxResult::MinMax; @@ -734,7 +720,7 @@ impl<'a, E: Engine> BatchedAccumulator<'a, E> { &mut self, output_map: &mut MmapMut, compression: UseCompression, - parameters: &CeremonyParams, + parameters: &CeremonyParams, ) -> io::Result<()> { use itertools::MinMaxResult::MinMax; @@ -1136,7 +1122,7 @@ impl<'a, E: Engine> BatchedAccumulator<'a, E> { compress_the_output: UseCompression, check_input_for_correctness: CheckForCorrectness, key: &PrivateKey, - parameters: &'a CeremonyParams, + parameters: &'a CeremonyParams, ) -> io::Result<()> { /// Exponentiate a large number of points, with an optional coefficient to be applied to the /// exponent. 
@@ -1308,7 +1294,7 @@ impl<'a, E: Engine> BatchedAccumulator<'a, E> { pub fn generate_initial( output_map: &mut MmapMut, compress_the_output: UseCompression, - parameters: &'a CeremonyParams, + parameters: &'a CeremonyParams, ) -> io::Result<()> { use itertools::MinMaxResult::MinMax; diff --git a/powersoftau/src/bin/beacon_constrained.rs b/powersoftau/src/bin/beacon_constrained.rs index 3fede95..8cd16d0 100644 --- a/powersoftau/src/bin/beacon_constrained.rs +++ b/powersoftau/src/bin/beacon_constrained.rs @@ -1,7 +1,8 @@ use powersoftau::{ batched_accumulator::BatchedAccumulator, keypair::keypair, - parameters::{CeremonyParams, CheckForCorrectness, CurveKind, UseCompression}, + parameters::{CeremonyParams, CheckForCorrectness, UseCompression}, + utils::calculate_hash, }; use bellman_ce::pairing::bn256::Bn256; @@ -29,7 +30,7 @@ fn main() { let circuit_power = args[3].parse().expect("could not parse circuit power"); let batch_size = args[4].parse().expect("could not parse batch size"); - let parameters = CeremonyParams::new(CurveKind::Bn256, circuit_power, batch_size); + let parameters = CeremonyParams::::new(circuit_power, batch_size); println!( "Will contribute a random beacon to accumulator for 2^{} powers of tau", @@ -148,7 +149,7 @@ fn main() { println!("Calculating previous contribution hash..."); - let current_accumulator_hash = BatchedAccumulator::::calculate_hash(&readable_map); + let current_accumulator_hash = calculate_hash(&readable_map); { println!("Contributing on top of the hash:"); @@ -179,7 +180,7 @@ fn main() { println!("Computing and writing your contribution, this could take a while..."); // this computes a transformation and writes it - BatchedAccumulator::::transform( + BatchedAccumulator::transform( &readable_map, &mut writable_map, INPUT_IS_COMPRESSED, @@ -200,7 +201,7 @@ fn main() { let output_readonly = writable_map .make_read_only() .expect("must make a map readonly"); - let contribution_hash = 
BatchedAccumulator::::calculate_hash(&output_readonly); + let contribution_hash = calculate_hash(&output_readonly); print!( "Done!\n\n\ diff --git a/powersoftau/src/bin/compute_constrained.rs b/powersoftau/src/bin/compute_constrained.rs index 7016703..379b8c9 100644 --- a/powersoftau/src/bin/compute_constrained.rs +++ b/powersoftau/src/bin/compute_constrained.rs @@ -1,6 +1,9 @@ -use powersoftau::batched_accumulator::BatchedAccumulator; -use powersoftau::keypair::keypair; -use powersoftau::parameters::{CheckForCorrectness, UseCompression}; +use powersoftau::{ + batched_accumulator::BatchedAccumulator, + keypair::keypair, + parameters::{CeremonyParams, CheckForCorrectness, UseCompression}, + utils::calculate_hash, +}; use bellman_ce::pairing::bn256::Bn256; use memmap::*; @@ -8,8 +11,6 @@ use std::fs::OpenOptions; use std::io::{Read, Write}; -use powersoftau::parameters::{CeremonyParams, CurveKind}; - const INPUT_IS_COMPRESSED: UseCompression = UseCompression::No; const COMPRESS_THE_OUTPUT: UseCompression = UseCompression::Yes; const CHECK_INPUT_CORRECTNESS: CheckForCorrectness = CheckForCorrectness::No; @@ -25,7 +26,7 @@ fn main() { let circuit_power = args[3].parse().expect("could not parse circuit power"); let batch_size = args[4].parse().expect("could not parse batch size"); - let parameters = CeremonyParams::new(CurveKind::Bn256, circuit_power, batch_size); + let parameters = CeremonyParams::::new(circuit_power, batch_size); println!( "Will contribute to accumulator for 2^{} powers of tau", @@ -136,7 +137,7 @@ fn main() { UseCompression::No == INPUT_IS_COMPRESSED, "Hashing the compressed file in not yet defined" ); - let current_accumulator_hash = BatchedAccumulator::::calculate_hash(&readable_map); + let current_accumulator_hash = calculate_hash(&readable_map); { println!("`challenge` file contains decompressed points and has a hash:"); @@ -189,7 +190,7 @@ fn main() { println!("Computing and writing your contribution, this could take a while..."); // this 
computes a transformation and writes it - BatchedAccumulator::::transform( + BatchedAccumulator::transform( &readable_map, &mut writable_map, INPUT_IS_COMPRESSED, @@ -213,7 +214,7 @@ fn main() { let output_readonly = writable_map .make_read_only() .expect("must make a map readonly"); - let contribution_hash = BatchedAccumulator::::calculate_hash(&output_readonly); + let contribution_hash = calculate_hash(&output_readonly); print!( "Done!\n\n\ diff --git a/powersoftau/src/bin/new_constrained.rs b/powersoftau/src/bin/new_constrained.rs index c8ac223..3953e0d 100644 --- a/powersoftau/src/bin/new_constrained.rs +++ b/powersoftau/src/bin/new_constrained.rs @@ -1,13 +1,13 @@ use powersoftau::batched_accumulator::BatchedAccumulator; use powersoftau::parameters::UseCompression; -use powersoftau::utils::blank_hash; +use powersoftau::utils::{blank_hash, calculate_hash}; use bellman_ce::pairing::bn256::Bn256; use memmap::*; use std::fs::OpenOptions; use std::io::Write; -use powersoftau::parameters::{CeremonyParams, CurveKind}; +use powersoftau::parameters::CeremonyParams; const COMPRESS_NEW_CHALLENGE: UseCompression = UseCompression::No; @@ -21,7 +21,7 @@ fn main() { let circuit_power = args[2].parse().expect("could not parse circuit power"); let batch_size = args[3].parse().expect("could not parse batch size"); - let parameters = CeremonyParams::new(CurveKind::Bn256, circuit_power, batch_size); + let parameters = CeremonyParams::::new(circuit_power, batch_size); println!( "Will generate an empty accumulator for 2^{} powers of tau", @@ -74,12 +74,8 @@ fn main() { println!(); } - BatchedAccumulator::::generate_initial( - &mut writable_map, - COMPRESS_NEW_CHALLENGE, - ¶meters, - ) - .expect("generation of initial accumulator is successful"); + BatchedAccumulator::generate_initial(&mut writable_map, COMPRESS_NEW_CHALLENGE, ¶meters) + .expect("generation of initial accumulator is successful"); writable_map .flush() .expect("unable to flush memmap to disk"); @@ -88,7 +84,7 @@ fn 
main() { let output_readonly = writable_map .make_read_only() .expect("must make a map readonly"); - let contribution_hash = BatchedAccumulator::::calculate_hash(&output_readonly); + let contribution_hash = calculate_hash(&output_readonly); println!("Empty contribution is formed with a hash:"); diff --git a/powersoftau/src/bin/prepare_phase2.rs b/powersoftau/src/bin/prepare_phase2.rs index fdc1f83..7ecf206 100644 --- a/powersoftau/src/bin/prepare_phase2.rs +++ b/powersoftau/src/bin/prepare_phase2.rs @@ -2,7 +2,7 @@ use bellman_ce::pairing::bn256::Bn256; use bellman_ce::pairing::bn256::{G1, G2}; use bellman_ce::pairing::{CurveAffine, CurveProjective}; use powersoftau::batched_accumulator::*; -use powersoftau::parameters::{CeremonyParams, CurveKind}; +use powersoftau::parameters::CeremonyParams; use powersoftau::*; use crate::parameters::*; @@ -25,18 +25,16 @@ fn log_2(x: u64) -> u32 { } fn main() { - let parameters = CeremonyParams::new( - CurveKind::Bn256, - 28, // turn this to 10 for the small test - 21, // turn this to 8 for the small test - ); - let args: Vec = std::env::args().collect(); - if args.len() != 2 { - println!("Usage: \n"); + if args.len() != 4 { + println!("Usage: \n "); std::process::exit(exitcode::USAGE); } let response_filename = &args[1]; + let circuit_power = args[2].parse().expect("could not parse circuit power"); + let batch_size = args[3].parse().expect("could not parse batch size"); + + let parameters = CeremonyParams::::new(circuit_power, batch_size); // Try to load response file from disk. 
let reader = OpenOptions::new() @@ -49,7 +47,7 @@ fn main() { .expect("unable to create a memory map for input") }; - let current_accumulator = BatchedAccumulator::::deserialize( + let current_accumulator = BatchedAccumulator::deserialize( &response_readable_map, CheckForCorrectness::Yes, UseCompression::Yes, diff --git a/powersoftau/src/bin/reduce_powers.rs b/powersoftau/src/bin/reduce_powers.rs index 602d33b..7fb05aa 100644 --- a/powersoftau/src/bin/reduce_powers.rs +++ b/powersoftau/src/bin/reduce_powers.rs @@ -1,8 +1,8 @@ use bellman_ce::pairing::bn256::Bn256; use powersoftau::{ batched_accumulator::BatchedAccumulator, - parameters::{CeremonyParams, CheckForCorrectness, CurveKind, UseCompression}, - utils::reduced_hash, + parameters::{CeremonyParams, CheckForCorrectness, UseCompression}, + utils::{calculate_hash, reduced_hash}, }; use std::fs::OpenOptions; @@ -20,24 +20,31 @@ pub fn log_2(x: u64) -> u32 { } fn main() { - let parameters = CeremonyParams::new( - CurveKind::Bn256, - 10, // here we use 10 since it's the reduced ceremony - 21, - ); + let args: Vec = std::env::args().collect(); + if args.len() != 6 { + println!("Usage: \n "); + std::process::exit(exitcode::USAGE); + } + let challenge_filename = &args[1]; + let reduced_challenge_filename = &args[2]; + let original_circuit_power = args[3].parse().expect("could not parse original circuit power"); + let reduced_circuit_power = args[4].parse().expect("could not parse reduced circuit power"); + let batch_size = args[5].parse().expect("could not parse batch size"); - // Try to load `./challenge` from disk. + let parameters = CeremonyParams::::new(reduced_circuit_power, batch_size); + + // Try to load the challenge from disk. 
let reader = OpenOptions::new() .read(true) - .open("challenge") - .expect("unable open `./challenge` in this directory"); + .open(challenge_filename) + .expect("unable to open challenge in this directory"); let challenge_readable_map = unsafe { MmapOptions::new() .map(&reader) .expect("unable to create a memory map for input") }; - let current_accumulator = BatchedAccumulator::::deserialize( + let current_accumulator = BatchedAccumulator::deserialize( &challenge_readable_map, CheckForCorrectness::Yes, UseCompression::No, @@ -45,7 +52,7 @@ fn main() { ) .expect("unable to read compressed accumulator"); - let mut reduced_accumulator = BatchedAccumulator::::empty(¶meters); + let mut reduced_accumulator = BatchedAccumulator::empty(¶meters); reduced_accumulator.tau_powers_g1 = current_accumulator.tau_powers_g1[..parameters.powers_g1_length].to_vec(); reduced_accumulator.tau_powers_g2 = @@ -60,8 +67,8 @@ fn main() { .read(true) .write(true) .create_new(true) - .open("reduced_challenge") - .expect("unable to create `./reduced_challenge` in this directory"); + .open(reduced_challenge_filename) + .expect("unable to create the reduced challenge in this directory"); // Recomputation stips the public key and uses hashing to link with the previous contibution after decompression writer @@ -75,7 +82,7 @@ fn main() { }; let hash = reduced_hash( - 28, // this is the full size of the hash + original_circuit_power, parameters.size as u8, ); (&mut writable_map[0..]) @@ -83,7 +90,7 @@ fn main() { .expect("unable to write a default hash to mmap"); writable_map .flush() - .expect("unable to write reduced hash to `./reduced_challenge`"); + .expect("unable to write reduced hash to the reduced_challenge"); println!("Reduced hash for a reduced challenge:"); for line in hash.as_slice().chunks(16) { @@ -105,7 +112,7 @@ fn main() { let output_readonly = writable_map .make_read_only() .expect("must make a map readonly"); - let contribution_hash = 
BatchedAccumulator::::calculate_hash(&output_readonly); + let contribution_hash = calculate_hash(&output_readonly); println!("Reduced contribution is formed with a hash:"); diff --git a/powersoftau/src/bin/verify.rs b/powersoftau/src/bin/verify.rs index a890a70..0478f19 100644 --- a/powersoftau/src/bin/verify.rs +++ b/powersoftau/src/bin/verify.rs @@ -2,8 +2,8 @@ use bellman_ce::pairing::bn256::Bn256; use bellman_ce::pairing::bn256::{G1, G2}; use bellman_ce::pairing::{CurveAffine, CurveProjective}; use powersoftau::batched_accumulator::*; +use powersoftau::parameters::CeremonyParams; use powersoftau::*; -use powersoftau::parameters::{CeremonyParams, CurveKind}; use crate::keypair::*; use crate::parameters::*; @@ -71,14 +71,14 @@ impl Write for HashWriter { // Computes the hash of the challenge file for the player, // given the current state of the accumulator and the last // response file hash. -fn get_challenge_file_hash( - acc: &mut BatchedAccumulator, +fn get_challenge_file_hash( + acc: &mut BatchedAccumulator, last_response_file_hash: &[u8; 64], is_initial: bool, - parameters: &CeremonyParams, ) -> [u8; 64] { let sink = io::sink(); let mut sink = HashWriter::new(sink); + let parameters = acc.parameters; let file_name = "tmp_challenge_file_hash"; @@ -110,12 +110,8 @@ fn get_challenge_file_hash( .expect("unable to write blank hash to challenge file"); if is_initial { - BatchedAccumulator::::generate_initial( - &mut writable_map, - UseCompression::No, - parameters, - ) - .expect("generation of initial accumulator is successful"); + BatchedAccumulator::generate_initial(&mut writable_map, UseCompression::No, parameters) + .expect("generation of initial accumulator is successful"); } else { acc.serialize(&mut writable_map, UseCompression::No, parameters) .unwrap(); @@ -140,17 +136,19 @@ fn get_challenge_file_hash( tmp } +use bellman_ce::pairing::Engine; + // Computes the hash of the response file, given the new // accumulator, the player's public key, and the 
challenge // file's hash. -fn get_response_file_hash( - acc: &mut BatchedAccumulator, - pubkey: &PublicKey, +fn get_response_file_hash( + acc: &mut BatchedAccumulator, + pubkey: &PublicKey, last_challenge_file_hash: &[u8; 64], - parameters: &CeremonyParams, ) -> [u8; 64] { let sink = io::sink(); let mut sink = HashWriter::new(sink); + let parameters = acc.parameters; let file_name = "tmp_response_file_hash"; if Path::new(file_name).exists() { @@ -205,7 +203,7 @@ fn get_response_file_hash( tmp } -fn new_accumulator_for_verify(parameters: &CeremonyParams) -> BatchedAccumulator { +fn new_accumulator_for_verify(parameters: &CeremonyParams) -> BatchedAccumulator { let file_name = "tmp_initial_challenge"; { if Path::new(file_name).exists() { @@ -228,12 +226,8 @@ fn new_accumulator_for_verify(parameters: &CeremonyParams) -> BatchedAccumulator .map_mut(&file) .expect("unable to create a memory map") }; - BatchedAccumulator::::generate_initial( - &mut writable_map, - UseCompression::No, - ¶meters, - ) - .expect("generation of initial accumulator is successful"); + BatchedAccumulator::generate_initial(&mut writable_map, UseCompression::No, ¶meters) + .expect("generation of initial accumulator is successful"); writable_map .flush() .expect("unable to flush memmap to disk"); @@ -269,7 +263,7 @@ fn main() { let circuit_power = args[2].parse().expect("could not parse circuit power"); let batch_size = args[3].parse().expect("could not parse batch size"); - let parameters = CeremonyParams::new(CurveKind::Bn256, circuit_power, batch_size); + let parameters = CeremonyParams::::new(circuit_power, batch_size); // Try to load transcript file from disk. 
let reader = OpenOptions::new() @@ -329,12 +323,8 @@ fn main() { .make_read_only() .expect("must make a map readonly"); - let last_challenge_file_hash = get_challenge_file_hash( - &mut current_accumulator, - &last_response_file_hash, - i == 0, - ¶meters, - ); + let last_challenge_file_hash = + get_challenge_file_hash(&mut current_accumulator, &last_response_file_hash, i == 0); // Deserialize the accumulator provided by the player in // their response file. It's stored in the transcript in @@ -350,8 +340,7 @@ fn main() { .expect("unable to read uncompressed accumulator"); let response_file_pubkey = - PublicKey::::read(&response_readable_map, UseCompression::Yes, ¶meters) - .unwrap(); + PublicKey::read(&response_readable_map, UseCompression::Yes, ¶meters).unwrap(); // Compute the hash of the response file. (we had it in uncompressed // form in the transcript, but the response file is compressed to save // participants bandwidth.) @@ -359,7 +348,6 @@ fn main() { &mut response_file_accumulator, &response_file_pubkey, &last_challenge_file_hash, - ¶meters, ); // Verify the transformation from the previous accumulator to the new diff --git a/powersoftau/src/bin/verify_transform_constrained.rs b/powersoftau/src/bin/verify_transform_constrained.rs index a1b5879..ce5578a 100644 --- a/powersoftau/src/bin/verify_transform_constrained.rs +++ b/powersoftau/src/bin/verify_transform_constrained.rs @@ -1,6 +1,9 @@ -use powersoftau::batched_accumulator::BatchedAccumulator; -use powersoftau::keypair::PublicKey; -use powersoftau::parameters::{CheckForCorrectness, UseCompression}; +use powersoftau::{ + batched_accumulator::BatchedAccumulator, + keypair::PublicKey, + parameters::{CeremonyParams, CheckForCorrectness, UseCompression}, + utils::calculate_hash, +}; use bellman_ce::pairing::bn256::Bn256; use memmap::*; @@ -8,8 +11,6 @@ use std::fs::OpenOptions; use std::io::{Read, Write}; -use powersoftau::parameters::{CeremonyParams, CurveKind}; - const PREVIOUS_CHALLENGE_IS_COMPRESSED: 
UseCompression = UseCompression::No; const CONTRIBUTION_IS_COMPRESSED: UseCompression = UseCompression::Yes; const COMPRESS_NEW_CHALLENGE: UseCompression = UseCompression::No; @@ -26,7 +27,7 @@ fn main() { let circuit_power = args[4].parse().expect("could not parse circuit power"); let batch_size = args[5].parse().expect("could not parse batch size"); - let parameters = CeremonyParams::new(CurveKind::Bn256, circuit_power, batch_size); + let parameters = CeremonyParams::::new(circuit_power, batch_size); println!( "Will verify and decompress a contribution to accumulator for 2^{} powers of tau", @@ -95,8 +96,7 @@ fn main() { // Check that contribution is correct - let current_accumulator_hash = - BatchedAccumulator::::calculate_hash(&challenge_readable_map); + let current_accumulator_hash = calculate_hash(&challenge_readable_map); println!("Hash of the `challenge` file for verification:"); for line in current_accumulator_hash.as_slice().chunks(16) { @@ -137,7 +137,7 @@ fn main() { } } - let response_hash = BatchedAccumulator::::calculate_hash(&response_readable_map); + let response_hash = calculate_hash(&response_readable_map); println!("Hash of the response file for verification:"); for line in response_hash.as_slice().chunks(16) { @@ -152,7 +152,7 @@ fn main() { } // get the contributor's public key - let public_key = PublicKey::::read( + let public_key = PublicKey::read( &response_readable_map, CONTRIBUTION_IS_COMPRESSED, ¶meters, @@ -165,7 +165,7 @@ fn main() { "Verifying a contribution to contain proper powers and correspond to the public key..." 
); - let valid = BatchedAccumulator::::verify_transformation( + let valid = BatchedAccumulator::verify_transformation( &challenge_readable_map, &response_readable_map, &public_key, @@ -220,7 +220,7 @@ fn main() { .expect("unable to write hash to new challenge file"); } - BatchedAccumulator::::decompress( + BatchedAccumulator::decompress( &response_readable_map, &mut writable_map, CheckForCorrectness::No, @@ -234,8 +234,7 @@ fn main() { .make_read_only() .expect("must make a map readonly"); - let recompressed_hash = - BatchedAccumulator::::calculate_hash(&new_challenge_readable_map); + let recompressed_hash = calculate_hash(&new_challenge_readable_map); println!("Here's the BLAKE2b hash of the decompressed participant's response as new_challenge file:"); diff --git a/powersoftau/src/keypair.rs b/powersoftau/src/keypair.rs index 33ffe87..7b232ab 100644 --- a/powersoftau/src/keypair.rs +++ b/powersoftau/src/keypair.rs @@ -171,7 +171,7 @@ impl PublicKey { &self, output_map: &mut MmapMut, accumulator_was_compressed: UseCompression, - parameters: &CeremonyParams, + parameters: &CeremonyParams, ) -> io::Result<()> { let mut position = match accumulator_was_compressed { UseCompression::Yes => parameters.contribution_size - parameters.public_key_size, @@ -218,7 +218,7 @@ impl PublicKey { pub fn read( input_map: &Mmap, accumulator_was_compressed: UseCompression, - parameters: &CeremonyParams, + parameters: &CeremonyParams, ) -> Result { fn read_uncompressed>( input_map: &Mmap, @@ -291,12 +291,12 @@ mod tests { mod bn256 { use super::*; - use crate::parameters::{CurveKind, CurveParams}; + use crate::parameters::CurveParams; use bellman_ce::pairing::bn256::Bn256; #[test] fn test_pubkey_serialization() { - let curve = CurveParams::new(CurveKind::Bn256); + let curve = CurveParams::::new(); let public_key_size = 6 * curve.g1 + 3 * curve.g2; // Generate a random public key diff --git a/powersoftau/src/parameters.rs b/powersoftau/src/parameters.rs index f1e01f6..490e5c7 100644 --- 
a/powersoftau/src/parameters.rs +++ b/powersoftau/src/parameters.rs @@ -1,43 +1,44 @@ -use bellman_ce::pairing::GroupDecodingError; +use bellman_ce::pairing::{CurveAffine, EncodedPoint, Engine, GroupDecodingError}; use std::fmt; use std::io; +use std::marker::PhantomData; /// The sizes of the group elements of a curev -#[derive(Clone, PartialEq, Eq)] -pub struct CurveParams { +#[derive(Clone, PartialEq, Eq, Default)] +pub struct CurveParams<E: Engine> { + /// Size of a G1 Element pub g1: usize, + /// Size of a G2 Element pub g2: usize, + /// Size of a compressed G1 Element pub g1_compressed: usize, + /// Size of a compressed G2 Element pub g2_compressed: usize, + engine_type: PhantomData<E>, } -/// The types of curves we support -#[derive(Clone, PartialEq, Eq)] -pub enum CurveKind { - Bn256, -} - -impl CurveParams { - /// Creates a new curve based on the provided CurveKind - pub fn new(kind: CurveKind) -> Self { - let (g1, g2) = match kind { - CurveKind::Bn256 => (64, 128), - }; +impl<E: Engine> CurveParams<E> { + pub fn new() -> CurveParams<E> { + let g1 = <<E as Engine>::G1Affine as CurveAffine>::Uncompressed::size(); + let g2 = <<E as Engine>::G2Affine as CurveAffine>::Uncompressed::size(); + let g1_compressed = <<E as Engine>::G1Affine as CurveAffine>::Compressed::size(); + let g2_compressed = <<E as Engine>::G2Affine as CurveAffine>::Compressed::size(); CurveParams { g1, g2, - g1_compressed: g1 / 2, - g2_compressed: g2 / 2, + g1_compressed, + g2_compressed, + engine_type: PhantomData, } } } #[derive(Clone, PartialEq, Eq)] /// The parameters used for the trusted setup ceremony -pub struct CeremonyParams { +pub struct CeremonyParams<E: Engine> { /// The type of the curve being used (currently only supports BN256) - pub curve: CurveParams, + pub curve: CurveParams<E>, /// The number of Powers of Tau G1 elements which will be accumulated pub powers_g1_length: usize, /// The number of Powers of Tau Alpha/Beta/G2 elements which will be accumulated @@ -58,18 +59,18 @@ pub struct CeremonyParams<E: Engine> { pub hash_size: usize, } -impl CeremonyParams { +impl<E: Engine> CeremonyParams<E> 
{ /// Constructs a new ceremony parameters object from the type of provided curve - pub fn new(kind: CurveKind, size: usize, batch_size: usize) -> Self { + pub fn new(size: usize, batch_size: usize) -> Self { // create the curve - let curve = CurveParams::new(kind); + let curve = CurveParams::<E>::new(); Self::new_with_curve(curve, size, batch_size) } /// Constructs a new ceremony parameters object from the directly provided curve with parameters /// Consider using the `new` method if you want to use one of the pre-implemented curves - pub fn new_with_curve(curve: CurveParams, size: usize, batch_size: usize) -> Self { - // asume we're using a 64 byte long hash function such as Blake + pub fn new_with_curve(curve: CurveParams<E>, size: usize, batch_size: usize) -> Self { + // assume we're using a 64 byte long hash function such as Blake let hash_size = 64; // 2^{size} diff --git a/powersoftau/src/utils.rs b/powersoftau/src/utils.rs index 05601f7..027ba9c 100644 --- a/powersoftau/src/utils.rs +++ b/powersoftau/src/utils.rs @@ -6,12 +6,26 @@ use generic_array::GenericArray; use rand::chacha::ChaChaRng; use rand::{Rand, Rng, SeedableRng}; +use memmap::Mmap; use std::io::{self, Write}; use std::sync::Arc; use typenum::consts::U64; use super::parameters::UseCompression; +/// Calculate the contribution hash from the resulting file. Original powers of tau implementation +/// used a specially formed writer to write to the file and calculate a hash on the fly, but memory-constrained +/// implementation now writes without a particular order, so plain recalculation at the end +/// of the procedure is more efficient +pub fn calculate_hash(input_map: &Mmap) -> GenericArray<u8, U64> { + let chunk_size = 1 << 30; // read by 1GB from map + let mut hasher = Blake2b::default(); + for chunk in input_map.chunks(chunk_size) { + hasher.input(&chunk); + } + hasher.result() +} + /// Hashes to G2 using the first 32 bytes of `digest`. Panics if `digest` is less /// than 32 bytes. 
pub fn hash_to_g2<E: Engine>(mut digest: &[u8]) -> E::G2 {