,
}
-impl<E: Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
+impl<E: Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
/// Calculate the contribution hash from the resulting file. The original powers of tau implementation
/// used a specially formed writer to write to the file and calculate a hash on the fly, but the memory-constrained
/// implementation now writes without a particular order, so the hash is simply recalculated over the whole file at the end.
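The strategy this comment describes amounts to a single streaming BLAKE2b pass over the finished file. A minimal sketch of that idea, assuming the `blake2` crate's 0.10-style API (not necessarily the version this repository pins):

```rust
use blake2::{Blake2b512, Digest};

/// Hash a finished file (e.g. a read-only mmap viewed as `&[u8]`) in chunks.
fn recalculate_hash(map: &[u8]) -> [u8; 64] {
    let mut hasher = Blake2b512::new();
    for chunk in map.chunks(1 << 20) {
        // Feeding fixed-size chunks keeps peak memory flat for huge files.
        hasher.update(chunk);
    }
    let mut out = [0u8; 64];
    out.copy_from_slice(hasher.finalize().as_slice());
    out
}
```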
@@ -78,7 +78,7 @@ impl BachedAccumulator {
}
}
-impl<E: Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
+impl<E: Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
pub fn empty() -> Self {
Self {
tau_powers_g1: vec![],
@@ -92,7 +92,7 @@ impl BachedAccumulator {
}
}
-impl<E: Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
+impl<E: Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
fn g1_size(compression: UseCompression) -> usize {
match compression {
UseCompression::Yes => {
@@ -189,7 +189,7 @@ impl BachedAccumulator {
}
/// Verifies a transformation of the `Accumulator` with the `PublicKey`, given a 64-byte transcript `digest`.
-pub fn verify_transform<E: Engine, P: PowersOfTauParameters>(before: &BachedAccumulator<E, P>, after: &BachedAccumulator<E, P>, key: &PublicKey<E>, digest: &[u8]) -> bool
+pub fn verify_transform<E: Engine, P: PowersOfTauParameters>(before: &BatchedAccumulator<E, P>, after: &BatchedAccumulator<E, P>, key: &PublicKey<E>, digest: &[u8]) -> bool
{
assert_eq!(digest.len(), 64);
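All of these checks bottom out in one pairing-ratio primitive: two pairs of points are accepted iff they encode the same secret exponent. The helper is essentially the following (a sketch against the `pairing` crate's `CurveAffine` trait as re-exported by bellman_ce; exact trait paths may differ between versions):

```rust
use bellman_ce::pairing::CurveAffine;

/// Checks e(g1.0, g2.1) == e(g1.1, g2.0), i.e. (a, a^x) and (b, b^x)
/// hide the same exponent x.
fn same_ratio<G1: CurveAffine>(g1: (G1, G1), g2: (G1::Pair, G1::Pair)) -> bool {
    g1.0.pairing_with(&g2.1) == g1.1.pairing_with(&g2.0)
}
```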
@@ -253,7 +253,7 @@ pub fn verify_transform(before: &BachedAccu
true
}
-impl<E: Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
+impl<E: Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
/// Verifies a transformation of the `Accumulator` with the `PublicKey`, given a 64-byte transcript `digest`.
pub fn verify_transformation(
input_map: &Mmap,
@@ -449,7 +449,7 @@ impl BachedAccumulator {
input_map: &Mmap,
check_input_for_correctness: CheckForCorrectness,
compression: UseCompression,
- ) -> io::Result<BachedAccumulator<E, P>>
+ ) -> io::Result<BatchedAccumulator<E, P>>
{
use itertools::MinMaxResult::{MinMax};
@@ -494,7 +494,7 @@ impl BachedAccumulator {
}
}
- Ok(BachedAccumulator {
+ Ok(BatchedAccumulator {
tau_powers_g1: tau_powers_g1,
tau_powers_g2: tau_powers_g2,
alpha_tau_powers_g1: alpha_tau_powers_g1,
@@ -515,7 +515,7 @@ impl BachedAccumulator {
for chunk in &(0..P::TAU_POWERS_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) {
if let MinMax(start, end) = chunk.minmax() {
- let mut tmp_acc = BachedAccumulator::<E, P> {
+ let mut tmp_acc = BatchedAccumulator::<E, P> {
tau_powers_g1: (&self.tau_powers_g1[start..end+1]).to_vec(),
tau_powers_g2: (&self.tau_powers_g2[start..end+1]).to_vec(),
alpha_tau_powers_g1: (&self.alpha_tau_powers_g1[start..end+1]).to_vec(),
@@ -532,7 +532,7 @@ impl BachedAccumulator {
for chunk in &(P::TAU_POWERS_LENGTH..P::TAU_POWERS_G1_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) {
if let MinMax(start, end) = chunk.minmax() {
- let mut tmp_acc = BachedAccumulator::<E, P> {
+ let mut tmp_acc = BatchedAccumulator::<E, P> {
tau_powers_g1: (&self.tau_powers_g1[start..end+1]).to_vec(),
tau_powers_g2: vec![],
alpha_tau_powers_g1: vec![],
@@ -552,7 +552,7 @@ impl BachedAccumulator {
}
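The pattern in both loops above, walking an index range in `EMPIRICAL_BATCH_SIZE` chunks and recovering each chunk's bounds with `minmax()`, works standalone like this (the constants here are illustrative values, not the ceremony's real parameters):

```rust
use itertools::Itertools;
use itertools::MinMaxResult::MinMax;

fn main() {
    const TAU_POWERS_LENGTH: usize = 1 << 12;
    const EMPIRICAL_BATCH_SIZE: usize = 512;

    for chunk in &(0..TAU_POWERS_LENGTH).chunks(EMPIRICAL_BATCH_SIZE) {
        // `minmax()` yields `MinMax(a, b)` only for chunks with two or more
        // elements; the inclusive bounds are why the slices above use
        // `start..end + 1`.
        if let MinMax(start, end) = chunk.minmax() {
            println!("batch covers powers {}..={}", start, end);
        }
    }
}
```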
-impl<E: Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
+impl<E: Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
pub fn read_chunk (
&mut self,
from: usize,
@@ -721,7 +721,7 @@ impl BachedAccumulator {
}
}
-impl<E: Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
+impl<E: Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
fn write_all(
&mut self,
chunk_start: usize,
@@ -826,7 +826,7 @@ impl BachedAccumulator {
}
-impl<E: Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
+impl<E: Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
/// Transforms the accumulator with a private key.
/// Due to the large amount of data in the previous accumulator, even in compressed form,
/// this function can now work on compressed input. Output can be written in either form
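Concretely, "compressed input" changes only how each point is decoded: for BN256, a G1 point is 32 bytes compressed versus 64 bytes uncompressed, matching `g1_size` above. A sketch of both decode paths through the `pairing` crate's `EncodedPoint` trait (the helper name is ours; real deserialization also applies the validity checks this code gates on `CheckForCorrectness`):

```rust
use bellman_ce::pairing::bn256::G1Affine;
use bellman_ce::pairing::{CurveAffine, EncodedPoint, GroupDecodingError};

fn read_g1(bytes: &[u8], compressed: bool) -> Result<G1Affine, GroupDecodingError> {
    if compressed {
        let mut repr = <G1Affine as CurveAffine>::Compressed::empty();
        repr.as_mut().copy_from_slice(bytes); // expects 32 bytes for BN256 G1
        repr.into_affine() // rejects encodings that are not on the curve
    } else {
        let mut repr = <G1Affine as CurveAffine>::Uncompressed::empty();
        repr.as_mut().copy_from_slice(bytes); // expects 64 bytes for BN256 G1
        repr.into_affine()
    }
}
```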
@@ -970,7 +970,7 @@ impl BachedAccumulator {
}
}
-impl<E: Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
+impl<E: Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
/// Generates the initial accumulator state, with every power set to the group generator (tau = 1).
pub fn generate_initial(
output_map: &mut MmapMut,
diff --git a/powersoftau/src/bin/beacon_constrained.rs b/powersoftau/src/bin/beacon_constrained.rs
index 1edd6f4..8cf6efd 100644
--- a/powersoftau/src/bin/beacon_constrained.rs
+++ b/powersoftau/src/bin/beacon_constrained.rs
@@ -7,7 +7,7 @@ extern crate byteorder;
extern crate crypto;
use powersoftau::bn256::{Bn256CeremonyParameters};
-use powersoftau::batched_accumulator::{BachedAccumulator};
+use powersoftau::batched_accumulator::{BatchedAccumulator};
use powersoftau::keypair::{keypair};
use powersoftau::parameters::{UseCompression, CheckForCorrectness};
@@ -136,7 +136,7 @@ fn main() {
println!("Calculating previous contribution hash...");
- let current_accumulator_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&readable_map);
+ let current_accumulator_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&readable_map);
{
println!("Contributing on top of the hash:");
@@ -163,7 +163,7 @@ fn main() {
println!("Computing and writing your contribution, this could take a while...");
// this computes a transformation and writes it
- BachedAccumulator::<Bn256, Bn256CeremonyParameters>::transform(
+ BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::transform(
&readable_map,
&mut writable_map,
INPUT_IS_COMPRESSED,
@@ -178,7 +178,7 @@ fn main() {
// Get the hash of the contribution, so the user can compare later
let output_readonly = writable_map.make_read_only().expect("must make a map readonly");
- let contribution_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
+ let contribution_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
print!("Done!\n\n\
Your contribution has been written to response file\n\n\
diff --git a/powersoftau/src/bin/compute_constrained.rs b/powersoftau/src/bin/compute_constrained.rs
index 1668cf6..d62a8ae 100644
--- a/powersoftau/src/bin/compute_constrained.rs
+++ b/powersoftau/src/bin/compute_constrained.rs
@@ -7,7 +7,7 @@ extern crate byteorder;
extern crate exitcode;
use powersoftau::bn256::{Bn256CeremonyParameters};
-use powersoftau::batched_accumulator::{BachedAccumulator};
+use powersoftau::batched_accumulator::{BatchedAccumulator};
use powersoftau::keypair::{keypair};
use powersoftau::parameters::{UseCompression, CheckForCorrectness};
@@ -120,7 +120,7 @@ fn main() {
println!("Calculating previous contribution hash...");
assert!(UseCompression::No == INPUT_IS_COMPRESSED, "Hashing the compressed file is not yet defined");
- let current_accumulator_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&readable_map);
+ let current_accumulator_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&readable_map);
{
println!("`challenge` file contains decompressed points and has a hash:");
@@ -165,7 +165,7 @@ fn main() {
println!("Computing and writing your contribution, this could take a while...");
// this computes a transformation and writes it
- BachedAccumulator::<Bn256, Bn256CeremonyParameters>::transform(
+ BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::transform(
&readable_map,
&mut writable_map,
INPUT_IS_COMPRESSED,
@@ -183,7 +183,7 @@ fn main() {
// Get the hash of the contribution, so the user can compare later
let output_readonly = writable_map.make_read_only().expect("must make a map readonly");
- let contribution_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
+ let contribution_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
print!("Done!\n\n\
Your contribution has been written to response file\n\n\
diff --git a/powersoftau/src/bin/new_constrained.rs b/powersoftau/src/bin/new_constrained.rs
index 211afc5..1f6ac26 100644
--- a/powersoftau/src/bin/new_constrained.rs
+++ b/powersoftau/src/bin/new_constrained.rs
@@ -3,7 +3,7 @@ extern crate bellman_ce;
extern crate memmap;
use powersoftau::bn256::{Bn256CeremonyParameters};
-use powersoftau::batched_accumulator::{BachedAccumulator};
+use powersoftau::batched_accumulator::{BatchedAccumulator};
use powersoftau::parameters::{UseCompression};
use powersoftau::utils::{blank_hash};
@@ -64,12 +64,12 @@ fn main() {
println!();
}
- BachedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, COMPRESS_NEW_CHALLENGE).expect("generation of initial accumulator is successful");
+ BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, COMPRESS_NEW_CHALLENGE).expect("generation of initial accumulator is successful");
writable_map.flush().expect("unable to flush memmap to disk");
// Get the hash of the contribution, so the user can compare later
let output_readonly = writable_map.make_read_only().expect("must make a map readonly");
- let contribution_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
+ let contribution_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
println!("Empty contribution is formed with a hash:");
diff --git a/powersoftau/src/bin/prepare_phase2.rs b/powersoftau/src/bin/prepare_phase2.rs
index fe65fa4..b47e879 100644
--- a/powersoftau/src/bin/prepare_phase2.rs
+++ b/powersoftau/src/bin/prepare_phase2.rs
@@ -43,7 +43,7 @@ fn main() {
.expect("unable open response file in this directory");
let response_readable_map = unsafe { MmapOptions::new().map(&reader).expect("unable to create a memory map for input") };
- let current_accumulator = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::deserialize(
+ let current_accumulator = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::deserialize(
&response_readable_map,
CheckForCorrectness::Yes,
UseCompression::Yes,
diff --git a/powersoftau/src/bin/verify.rs b/powersoftau/src/bin/verify.rs
index 12417f5..e647739 100644
--- a/powersoftau/src/bin/verify.rs
+++ b/powersoftau/src/bin/verify.rs
@@ -36,7 +36,7 @@ fn log_2(x: u64) -> u32 {
// given the current state of the accumulator and the last
// response file hash.
fn get_challenge_file_hash(
- acc: &mut BachedAccumulator::<Bn256, Bn256CeremonyParameters>,
+ acc: &mut BatchedAccumulator::<Bn256, Bn256CeremonyParameters>,
last_response_file_hash: &[u8; 64],
is_initial: bool,
) -> [u8; 64]
@@ -64,7 +64,7 @@ fn get_challenge_file_hash(
writable_map.flush().expect("unable to write blank hash to challenge file");
if is_initial {
- BachedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, UseCompression::No).expect("generation of initial accumulator is successful");
+ BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, UseCompression::No).expect("generation of initial accumulator is successful");
} else {
acc.serialize(
&mut writable_map,
@@ -95,7 +95,7 @@ fn get_challenge_file_hash(
// accumulator, the player's public key, and the challenge
// file's hash.
fn get_response_file_hash(
- acc: &mut BachedAccumulator::<Bn256, Bn256CeremonyParameters>,
+ acc: &mut BatchedAccumulator::<Bn256, Bn256CeremonyParameters>,
pubkey: &PublicKey::<Bn256>,
last_challenge_file_hash: &[u8; 64]
) -> [u8; 64]
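Both helpers reconstruct a file with the layout the ceremony chains on: the 64-byte hash of the previous file comes first, followed by the serialized payload (the accumulator, plus the public key for responses), and hashing the whole buffer yields the next link in the chain. A purely illustrative sketch of that layout (the function is ours, not repository API):

```rust
/// Lay out a challenge/response file: previous file's hash, then payload.
/// Hashing the returned buffer produces the next hash in the ceremony chain.
fn chain_file(prev_hash: &[u8; 64], payload: &[u8]) -> Vec<u8> {
    let mut file = Vec::with_capacity(64 + payload.len());
    file.extend_from_slice(prev_hash);
    file.extend_from_slice(payload);
    file
}
```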
@@ -147,7 +147,7 @@ fn get_response_file_hash(
tmp
}
-fn new_accumulator_for_verify() -> BachedAccumulator<Bn256, Bn256CeremonyParameters> {
+fn new_accumulator_for_verify() -> BatchedAccumulator<Bn256, Bn256CeremonyParameters> {
let file_name = "tmp_initial_challenge";
{
if Path::new(file_name).exists() {
@@ -165,7 +165,7 @@ fn new_accumulator_for_verify() -> BachedAccumulator<Bn256, Bn256CeremonyParameters> {
- BachedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, UseCompression::No).expect("generation of initial accumulator is successful");
+ BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, UseCompression::No).expect("generation of initial accumulator is successful");
writable_map.flush().expect("unable to flush memmap to disk");
}
@@ -175,7 +175,7 @@ fn new_accumulator_for_verify() -> BachedAccumulator<Bn256, Bn256CeremonyParameters> {
- let current_accumulator_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&challenge_readable_map);
+ let current_accumulator_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&challenge_readable_map);
println!("Hash of the `challenge` file for verification:");
for line in current_accumulator_hash.as_slice().chunks(16) {
@@ -121,7 +121,7 @@ fn main() {
}
}
- let response_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&response_readable_map);
+ let response_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&response_readable_map);
println!("Hash of the response file for verification:");
for line in response_hash.as_slice().chunks(16) {
@@ -144,7 +144,7 @@ fn main() {
println!("Verifying a contribution to contain proper powers and correspond to the public key...");
- let valid = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::verify_transformation(
+ let valid = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::verify_transformation(
&challenge_readable_map,
&response_readable_map,
&public_key,
@@ -188,7 +188,7 @@ fn main() {
writable_map.flush().expect("unable to write hash to new challenge file");
}
- BachedAccumulator::<Bn256, Bn256CeremonyParameters>::decompress(
+ BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::decompress(
&response_readable_map,
&mut writable_map,
CheckForCorrectness::No).expect("must decompress a response for a new challenge");
@@ -197,7 +197,7 @@ fn main() {
let new_challenge_readable_map = writable_map.make_read_only().expect("must make a map readonly");
- let recompressed_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&new_challenge_readable_map);
+ let recompressed_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&new_challenge_readable_map);
println!("Here's the BLAKE2b hash of the decompressed participant's response as new_challenge file:");