fix BatchedAccumulator name typo

This commit is contained in:
poma 2020-01-17 22:07:04 +07:00
parent 4ccd5b1e0d
commit 7ba88b2e70
No known key found for this signature in database
GPG Key ID: BA20CB01FE165657
7 changed files with 39 additions and 39 deletions

@ -44,7 +44,7 @@ pub enum AccumulatorState{
///
/// * (τ, τ<sup>2</sup>, ..., τ<sup>2<sup>22</sup> - 2</sup>, α, ατ, ατ<sup>2</sup>, ..., ατ<sup>2<sup>21</sup> - 1</sup>, β, βτ, βτ<sup>2</sup>, ..., βτ<sup>2<sup>21</sup> - 1</sup>)<sub>1</sub>
/// * (β, τ, τ<sup>2</sup>, ..., τ<sup>2<sup>21</sup> - 1</sup>)<sub>2</sub>
pub struct BachedAccumulator<E: Engine, P: PowersOfTauParameters> {
pub struct BatchedAccumulator<E: Engine, P: PowersOfTauParameters> {
/// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_G1_LENGTH - 1}
pub tau_powers_g1: Vec<E::G1Affine>,
/// tau^0, tau^1, tau^2, ..., tau^{TAU_POWERS_LENGTH - 1}
@ -61,7 +61,7 @@ pub struct BachedAccumulator<E: Engine, P: PowersOfTauParameters> {
marker: std::marker::PhantomData<P>,
}
impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
impl<E:Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
/// Calculate the contribution hash from the resulting file. Original powers of tau implementation
/// used a specially formed writer to write to the file and calculate a hash on the fly, but memory-constrained
/// implementation now writes without a particular order, so plain recalculation at the end
@ -78,7 +78,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
}
}
impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
impl<E:Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
pub fn empty() -> Self {
Self {
tau_powers_g1: vec![],
@ -92,7 +92,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
}
}
impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
impl<E:Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
fn g1_size(compression: UseCompression) -> usize {
match compression {
UseCompression::Yes => {
@ -189,7 +189,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
}
/// Verifies a transformation of the `Accumulator` with the `PublicKey`, given a 64-byte transcript `digest`.
pub fn verify_transform<E: Engine, P: PowersOfTauParameters>(before: &BachedAccumulator<E, P>, after: &BachedAccumulator<E, P>, key: &PublicKey<E>, digest: &[u8]) -> bool
pub fn verify_transform<E: Engine, P: PowersOfTauParameters>(before: &BatchedAccumulator<E, P>, after: &BatchedAccumulator<E, P>, key: &PublicKey<E>, digest: &[u8]) -> bool
{
assert_eq!(digest.len(), 64);
@ -253,7 +253,7 @@ pub fn verify_transform<E: Engine, P: PowersOfTauParameters>(before: &BachedAccu
true
}
impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
impl<E:Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
/// Verifies a transformation of the `Accumulator` with the `PublicKey`, given a 64-byte transcript `digest`.
pub fn verify_transformation(
input_map: &Mmap,
@ -449,7 +449,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
input_map: &Mmap,
check_input_for_correctness: CheckForCorrectness,
compression: UseCompression,
) -> io::Result<BachedAccumulator<E, P>>
) -> io::Result<BatchedAccumulator<E, P>>
{
use itertools::MinMaxResult::{MinMax};
@ -494,7 +494,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
}
}
Ok(BachedAccumulator {
Ok(BatchedAccumulator {
tau_powers_g1: tau_powers_g1,
tau_powers_g2: tau_powers_g2,
alpha_tau_powers_g1: alpha_tau_powers_g1,
@ -515,7 +515,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
for chunk in &(0..P::TAU_POWERS_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) {
if let MinMax(start, end) = chunk.minmax() {
let mut tmp_acc = BachedAccumulator::<E,P> {
let mut tmp_acc = BatchedAccumulator::<E,P> {
tau_powers_g1: (&self.tau_powers_g1[start..end+1]).to_vec(),
tau_powers_g2: (&self.tau_powers_g2[start..end+1]).to_vec(),
alpha_tau_powers_g1: (&self.alpha_tau_powers_g1[start..end+1]).to_vec(),
@ -532,7 +532,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
for chunk in &(P::TAU_POWERS_LENGTH..P::TAU_POWERS_G1_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) {
if let MinMax(start, end) = chunk.minmax() {
let mut tmp_acc = BachedAccumulator::<E,P> {
let mut tmp_acc = BatchedAccumulator::<E,P> {
tau_powers_g1: (&self.tau_powers_g1[start..end+1]).to_vec(),
tau_powers_g2: vec![],
alpha_tau_powers_g1: vec![],
@ -552,7 +552,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
}
impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
impl<E:Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
pub fn read_chunk (
&mut self,
from: usize,
@ -721,7 +721,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
}
}
impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
impl<E:Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
fn write_all(
&mut self,
chunk_start: usize,
@ -826,7 +826,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
}
impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
impl<E:Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
/// Transforms the accumulator with a private key.
/// Due to large amount of data in a previous accumulator even in the compressed form
/// this function can now work on compressed input. Output can be made in any form
@ -970,7 +970,7 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
}
}
impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
impl<E:Engine, P: PowersOfTauParameters> BatchedAccumulator<E, P> {
/// Transforms the accumulator with a private key.
pub fn generate_initial(
output_map: &mut MmapMut,

@ -7,7 +7,7 @@ extern crate byteorder;
extern crate crypto;
use powersoftau::bn256::{Bn256CeremonyParameters};
use powersoftau::batched_accumulator::{BachedAccumulator};
use powersoftau::batched_accumulator::{BatchedAccumulator};
use powersoftau::keypair::{keypair};
use powersoftau::parameters::{UseCompression, CheckForCorrectness};
@ -136,7 +136,7 @@ fn main() {
println!("Calculating previous contribution hash...");
let current_accumulator_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&readable_map);
let current_accumulator_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&readable_map);
{
println!("Contributing on top of the hash:");
@ -163,7 +163,7 @@ fn main() {
println!("Computing and writing your contribution, this could take a while...");
// this computes a transformation and writes it
BachedAccumulator::<Bn256, Bn256CeremonyParameters>::transform(
BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::transform(
&readable_map,
&mut writable_map,
INPUT_IS_COMPRESSED,
@ -178,7 +178,7 @@ fn main() {
// Get the hash of the contribution, so the user can compare later
let output_readonly = writable_map.make_read_only().expect("must make a map readonly");
let contribution_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
let contribution_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
print!("Done!\n\n\
Your contribution has been written to response file\n\n\

@ -7,7 +7,7 @@ extern crate byteorder;
extern crate exitcode;
use powersoftau::bn256::{Bn256CeremonyParameters};
use powersoftau::batched_accumulator::{BachedAccumulator};
use powersoftau::batched_accumulator::{BatchedAccumulator};
use powersoftau::keypair::{keypair};
use powersoftau::parameters::{UseCompression, CheckForCorrectness};
@ -120,7 +120,7 @@ fn main() {
println!("Calculating previous contribution hash...");
assert!(UseCompression::No == INPUT_IS_COMPRESSED, "Hashing the compressed file is not yet defined");
let current_accumulator_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&readable_map);
let current_accumulator_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&readable_map);
{
println!("`challenge` file contains decompressed points and has a hash:");
@ -165,7 +165,7 @@ fn main() {
println!("Computing and writing your contribution, this could take a while...");
// this computes a transformation and writes it
BachedAccumulator::<Bn256, Bn256CeremonyParameters>::transform(
BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::transform(
&readable_map,
&mut writable_map,
INPUT_IS_COMPRESSED,
@ -183,7 +183,7 @@ fn main() {
// Get the hash of the contribution, so the user can compare later
let output_readonly = writable_map.make_read_only().expect("must make a map readonly");
let contribution_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
let contribution_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
print!("Done!\n\n\
Your contribution has been written to response file\n\n\

@ -3,7 +3,7 @@ extern crate bellman_ce;
extern crate memmap;
use powersoftau::bn256::{Bn256CeremonyParameters};
use powersoftau::batched_accumulator::{BachedAccumulator};
use powersoftau::batched_accumulator::{BatchedAccumulator};
use powersoftau::parameters::{UseCompression};
use powersoftau::utils::{blank_hash};
@ -64,12 +64,12 @@ fn main() {
println!();
}
BachedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, COMPRESS_NEW_CHALLENGE).expect("generation of initial accumulator is successful");
BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, COMPRESS_NEW_CHALLENGE).expect("generation of initial accumulator is successful");
writable_map.flush().expect("unable to flush memmap to disk");
// Get the hash of the contribution, so the user can compare later
let output_readonly = writable_map.make_read_only().expect("must make a map readonly");
let contribution_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
let contribution_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&output_readonly);
println!("Empty contribution is formed with a hash:");

@ -43,7 +43,7 @@ fn main() {
.expect("unable open response file in this directory");
let response_readable_map = unsafe { MmapOptions::new().map(&reader).expect("unable to create a memory map for input") };
let current_accumulator = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::deserialize(
let current_accumulator = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::deserialize(
&response_readable_map,
CheckForCorrectness::Yes,
UseCompression::Yes,

@ -36,7 +36,7 @@ fn log_2(x: u64) -> u32 {
// given the current state of the accumulator and the last
// response file hash.
fn get_challenge_file_hash(
acc: &mut BachedAccumulator::<Bn256, Bn256CeremonyParameters>,
acc: &mut BatchedAccumulator::<Bn256, Bn256CeremonyParameters>,
last_response_file_hash: &[u8; 64],
is_initial: bool,
) -> [u8; 64]
@ -64,7 +64,7 @@ fn get_challenge_file_hash(
writable_map.flush().expect("unable to write blank hash to challenge file");
if is_initial {
BachedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, UseCompression::No).expect("generation of initial accumulator is successful");
BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, UseCompression::No).expect("generation of initial accumulator is successful");
} else {
acc.serialize(
&mut writable_map,
@ -95,7 +95,7 @@ fn get_challenge_file_hash(
// accumulator, the player's public key, and the challenge
// file's hash.
fn get_response_file_hash(
acc: &mut BachedAccumulator::<Bn256, Bn256CeremonyParameters>,
acc: &mut BatchedAccumulator::<Bn256, Bn256CeremonyParameters>,
pubkey: &PublicKey::<Bn256>,
last_challenge_file_hash: &[u8; 64]
) -> [u8; 64]
@ -147,7 +147,7 @@ fn get_response_file_hash(
tmp
}
fn new_accumulator_for_verify() -> BachedAccumulator<Bn256, Bn256CeremonyParameters> {
fn new_accumulator_for_verify() -> BatchedAccumulator<Bn256, Bn256CeremonyParameters> {
let file_name = "tmp_initial_challenge";
{
if Path::new(file_name).exists() {
@ -165,7 +165,7 @@ fn new_accumulator_for_verify() -> BachedAccumulator<Bn256, Bn256CeremonyParamet
file.set_len(expected_challenge_length as u64).expect("unable to allocate large enough file");
let mut writable_map = unsafe { MmapOptions::new().map_mut(&file).expect("unable to create a memory map") };
BachedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, UseCompression::No).expect("generation of initial accumulator is successful");
BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::generate_initial(&mut writable_map, UseCompression::No).expect("generation of initial accumulator is successful");
writable_map.flush().expect("unable to flush memmap to disk");
}
@ -175,7 +175,7 @@ fn new_accumulator_for_verify() -> BachedAccumulator<Bn256, Bn256CeremonyParamet
.expect("unable open transcript file in this directory");
let readable_map = unsafe { MmapOptions::new().map(&reader).expect("unable to create a memory map for input") };
let initial_accumulator = BachedAccumulator::deserialize(
let initial_accumulator = BatchedAccumulator::deserialize(
&readable_map,
CheckForCorrectness::Yes,
UseCompression::No,
@ -245,7 +245,7 @@ fn main() {
// uncompressed form so that we can more efficiently
// deserialize it.
let mut response_file_accumulator = BachedAccumulator::deserialize(
let mut response_file_accumulator = BatchedAccumulator::deserialize(
&response_readable_map,
CheckForCorrectness::Yes,
UseCompression::Yes,

@ -6,7 +6,7 @@ extern crate blake2;
extern crate byteorder;
use powersoftau::bn256::{Bn256CeremonyParameters};
use powersoftau::batched_accumulator::{BachedAccumulator};
use powersoftau::batched_accumulator::{BatchedAccumulator};
use powersoftau::keypair::{PublicKey};
use powersoftau::parameters::{UseCompression, CheckForCorrectness};
@ -84,7 +84,7 @@ fn main() {
// Check that contribution is correct
let current_accumulator_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&challenge_readable_map);
let current_accumulator_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&challenge_readable_map);
println!("Hash of the `challenge` file for verification:");
for line in current_accumulator_hash.as_slice().chunks(16) {
@ -121,7 +121,7 @@ fn main() {
}
}
let response_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&response_readable_map);
let response_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&response_readable_map);
println!("Hash of the response file for verification:");
for line in response_hash.as_slice().chunks(16) {
@ -144,7 +144,7 @@ fn main() {
println!("Verifying a contribution to contain proper powers and correspond to the public key...");
let valid = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::verify_transformation(
let valid = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::verify_transformation(
&challenge_readable_map,
&response_readable_map,
&public_key,
@ -188,7 +188,7 @@ fn main() {
writable_map.flush().expect("unable to write hash to new challenge file");
}
BachedAccumulator::<Bn256, Bn256CeremonyParameters>::decompress(
BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::decompress(
&response_readable_map,
&mut writable_map,
CheckForCorrectness::No).expect("must decompress a response for a new challenge");
@ -197,7 +197,7 @@ fn main() {
let new_challenge_readable_map = writable_map.make_read_only().expect("must make a map readonly");
let recompressed_hash = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&new_challenge_readable_map);
let recompressed_hash = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::calculate_hash(&new_challenge_readable_map);
println!("Here's the BLAKE2b hash of the decompressed participant's response as new_challenge file:");