makes verify_transform verify intersection between chunks
commit c93a47f8ef
parent 1388c0a55e
@@ -355,9 +355,11 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
         // Read by parts and just verify same ratios. Cause of two fixed variables above with tau_powers_g2_1 = tau_powers_g2_0 ^ s
         // one does not need to care about some overlapping
 
+        let mut tau_powers_last_first_chunks = vec![E::G1Affine::zero(); 2];
         for chunk in &(0..P::TAU_POWERS_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) {
             if let MinMax(start, end) = chunk.minmax() {
-                let size = end - start + 1;
+                // extra 1 to ensure intersection between chunks and ensure we don't overflow
+                let size = end - start + 1 + if end == P::TAU_POWERS_LENGTH - 1 { 0 } else { 1 };
                 before.read_chunk(start, size, input_is_compressed, check_input_for_correctness, &input_map).expect(&format!("must read a chunk from {} to {} from `challenge`", start, end));
                 after.read_chunk(start, size, output_is_compressed, check_output_for_correctness, &output_map).expect(&format!("must read a chunk from {} to {} from `response`", start, end));
 
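Aside on the size change above: the extra element means every batch except the last one also reads the first power of the following batch, so the in-chunk ratio check covers each chunk boundary as well. Below is a minimal standalone sketch of that overlapping batching, not the project's code; it assumes the itertools crate the code already uses, and LEN and BATCH are made-up stand-ins for P::TAU_POWERS_LENGTH and P::EMPIRICAL_BATCH_SIZE.

// Standalone sketch (not project code): batches with a one-element overlap.
// Assumes the `itertools` crate; LEN and BATCH are made-up constants.
use itertools::Itertools;
use itertools::MinMaxResult::MinMax;

fn main() {
    const LEN: usize = 10;
    const BATCH: usize = 4;

    for chunk in &(0..LEN).chunks(BATCH) {
        if let MinMax(start, end) = chunk.minmax() {
            // Every batch except the last reads one extra element, so the
            // first power of the next batch is also checked against this one.
            let size = end - start + 1 + if end == LEN - 1 { 0 } else { 1 };
            println!("batch {}..={} reads indices {}..{}", start, end, start, start + size);
        }
    }
}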
@@ -378,6 +380,9 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
                     println!("Invalid ratio power_pairs(&after.beta_tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1)");
                     return false;
                 }
+                if end == P::TAU_POWERS_LENGTH - 1 {
+                    tau_powers_last_first_chunks[0] = after.tau_powers_g1[size - 1];
+                }
                 println!("Done processing {} powers of tau", end);
             } else {
                 panic!("Chunk does not have a min and max");
@@ -386,7 +391,8 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
 
         for chunk in &(P::TAU_POWERS_LENGTH..P::TAU_POWERS_G1_LENGTH).into_iter().chunks(P::EMPIRICAL_BATCH_SIZE) {
             if let MinMax(start, end) = chunk.minmax() {
-                let size = end - start + 1;
+                // extra 1 to ensure intersection between chunks and ensure we don't overflow
+                let size = end - start + 1 + if end == P::TAU_POWERS_G1_LENGTH - 1 { 0 } else { 1 };
                 before.read_chunk(start, size, input_is_compressed, check_input_for_correctness, &input_map).expect(&format!("must read a chunk from {} to {} from `challenge`", start, end));
                 after.read_chunk(start, size, output_is_compressed, check_output_for_correctness, &output_map).expect(&format!("must read a chunk from {} to {} from `response`", start, end));
 
@@ -398,12 +404,18 @@ impl<E:Engine, P: PowersOfTauParameters> BachedAccumulator<E, P> {
                     println!("Invalid ratio power_pairs(&after.tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1) in extra TauG1 contribution");
                     return false;
                 }
+                if start == P::TAU_POWERS_LENGTH {
+                    tau_powers_last_first_chunks[1] = after.tau_powers_g1[0];
+                }
                 println!("Done processing {} powers of tau", end);
             } else {
                 panic!("Chunk does not have a min and max");
             }
         }
 
+        if !same_ratio(power_pairs(&tau_powers_last_first_chunks), (tau_powers_g2_0, tau_powers_g2_1)) {
+            println!("Invalid ratio power_pairs(&after.tau_powers_g1), (tau_powers_g2_0, tau_powers_g2_1) in TauG1 contribution intersection");
+        }
         true
     }
 
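The overlap only ties together chunks within each loop; the boundary between the normal TauG1 range and the extra TauG1 range is covered by the final check added above: the last power kept from the first loop and the first power kept from the second loop must still be related by the factor tau, which same_ratio(power_pairs(...), (tau_powers_g2_0, tau_powers_g2_1)) verifies via pairings. Below is a toy model of that boundary check, using integers modulo a small prime instead of curve points; P, G, TAU and the boundary index are made-up constants for illustration only.

// Toy model (integers mod a prime instead of G1/G2 points) of the cross-range
// check: the element kept from the end of the first range and the element
// kept from the start of the extra range must still be consecutive powers,
// i.e. related by the same factor tau. P, G and TAU are made-up constants.
fn pow_mod(mut base: u64, mut exp: u64, modulus: u64) -> u64 {
    let mut acc: u64 = 1;
    base %= modulus;
    while exp > 0 {
        if exp & 1 == 1 {
            acc = acc * base % modulus;
        }
        base = base * base % modulus;
        exp >>= 1;
    }
    acc
}

fn main() {
    const P: u64 = 1_000_003; // small prime standing in for the field
    const G: u64 = 5;         // toy base point
    const TAU: u64 = 123_456; // toy secret

    // g^(tau^7): last power kept from the first range (toy boundary at index 8),
    // g^(tau^8): first power kept from the extra range.
    let last_of_first_range = pow_mod(G, pow_mod(TAU, 7, P - 1), P);
    let first_of_extra_range = pow_mod(G, pow_mod(TAU, 8, P - 1), P);

    // The toy "same ratio" test: multiplying the exponent by tau once more
    // must land exactly on the next stored element. In the real code this is
    // a pairing equation against (tau_powers_g2_0, tau_powers_g2_1).
    assert_eq!(pow_mod(last_of_first_range, TAU, P), first_of_extra_range);
    println!("boundary between ranges is consistent");
}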
@@ -25,16 +25,6 @@ use std::io::{self, Read, BufWriter, Write};
 
 use memmap::*;
 
-fn into_hex(h: &[u8]) -> String {
-    let mut f = String::new();
-
-    for byte in &h[..] {
-        f += &format!("{:02x}", byte);
-    }
-
-    f
-}
-
 // Computes the hash of the challenge file for the player,
 // given the current state of the accumulator and the last
 // response file hash.