Merge pull request #8 from tornadocash/master

pull all circom-related functionality from binaries into library
Kobi Gurkan 2020-02-12 08:24:45 +02:00 committed by GitHub
commit b3c18de8a6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 464 additions and 457 deletions
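The refactor moves the circom glue into phase2::circom_circuit, so each binary shrinks to a few library calls. A minimal sketch of the proving flow the binaries now reduce to (a sketch only; file names are illustrative, borrowed from the test script):

    extern crate bellman_ce;
    extern crate phase2;

    use bellman_ce::pairing::bn256::Bn256;
    use phase2::circom_circuit::{
        load_params_file, circuit_from_json_file, witness_from_json_file,
        create_rng, prove, verify, proof_to_json_file,
    };

    fn main() {
        // Load MPC parameters and the circom circuit, then attach the witness.
        let params = load_params_file("circom4.params");
        let mut circuit = circuit_from_json_file::<Bn256>("circuit.json");
        circuit.witness = Some(witness_from_json_file::<Bn256>("witness.json"));

        // Prove, self-verify against the circuit's public inputs, and dump JSON.
        let proof = prove(circuit.clone(), &params, create_rng()).unwrap();
        assert!(verify(&circuit, &params, &proof).unwrap(), "proof must verify");
        proof_to_json_file(&proof, "proof.json").unwrap();
    }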

@@ -1,60 +1,11 @@
extern crate bellman_ce;
extern crate rand;
extern crate phase2;
extern crate exitcode;
extern crate serde;
extern crate serde_json;
extern crate num_bigint;
extern crate num_traits;
extern crate itertools;
use std::fs;
use std::fs::OpenOptions;
use std::iter::repeat;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use phase2::parameters::MPCParameters;
use phase2::utils::{
p1_to_vec,
p2_to_vec,
pairing_to_vec,
use phase2::circom_circuit::{
proving_key_json_file,
verification_key_json_file,
load_params_file
};
use bellman_ce::pairing::{
Engine,
bn256::{
Bn256,
}
};
#[derive(Serialize, Deserialize)]
struct ProvingKeyJson {
#[serde(rename = "A")]
pub a: Vec<Vec<String>>,
#[serde(rename = "B1")]
pub b1: Vec<Vec<String>>,
#[serde(rename = "B2")]
pub b2: Vec<Vec<Vec<String>>>,
#[serde(rename = "C")]
pub c: Vec<Option<Vec<String>>>,
pub vk_alfa_1: Vec<String>,
pub vk_beta_1: Vec<String>,
pub vk_delta_1: Vec<String>,
pub vk_beta_2: Vec<Vec<String>>,
pub vk_delta_2: Vec<Vec<String>>,
#[serde(rename = "hExps")]
pub h: Vec<Vec<String>>,
}
#[derive(Serialize, Deserialize)]
struct VerifyingKeyJson {
#[serde(rename = "IC")]
pub ic: Vec<Vec<String>>,
pub vk_alfa_1: Vec<String>,
pub vk_beta_2: Vec<Vec<String>>,
pub vk_gamma_2: Vec<Vec<String>>,
pub vk_delta_2: Vec<Vec<String>>,
pub vk_alfabeta_12: Vec<Vec<Vec<String>>>,
}
fn main() {
let args: Vec<String> = std::env::args().collect();
@@ -65,44 +16,9 @@ fn main() {
let params_filename = &args[1];
let vk_filename = &args[2];
let pk_filename = &args[3];
let disallow_points_at_infinity = false;
println!("Exporting {}...", params_filename);
let reader = OpenOptions::new()
.read(true)
.open(params_filename)
.expect("unable to open.");
let params = MPCParameters::read(reader, disallow_points_at_infinity, true).expect("unable to read params");
let params = params.get_params();
let proving_key = ProvingKeyJson {
a: params.a.iter().map(|e| p1_to_vec(e)).collect_vec(),
b1: params.b_g1.iter().map(|e| p1_to_vec(e)).collect_vec(),
b2: params.b_g2.iter().map(|e| p2_to_vec(e)).collect_vec(),
c: repeat(None).take(params.vk.ic.len()).chain(params.l.iter().map(|e| Some(p1_to_vec(e)))).collect_vec(),
vk_alfa_1: p1_to_vec(&params.vk.alpha_g1),
vk_beta_1: p1_to_vec(&params.vk.beta_g1),
vk_delta_1: p1_to_vec(&params.vk.delta_g1),
vk_beta_2: p2_to_vec(&params.vk.beta_g2),
vk_delta_2: p2_to_vec(&params.vk.delta_g2),
h: params.h.iter().map(|e| p1_to_vec(e)).collect_vec(),
};
let verification_key = VerifyingKeyJson {
ic: params.vk.ic.iter().map(|e| p1_to_vec(e)).collect_vec(),
vk_alfa_1: p1_to_vec(&params.vk.alpha_g1),
vk_beta_2: p2_to_vec(&params.vk.beta_g2),
vk_gamma_2: p2_to_vec(&params.vk.gamma_g2),
vk_delta_2: p2_to_vec(&params.vk.delta_g2),
vk_alfabeta_12: pairing_to_vec(&Bn256::pairing(params.vk.alpha_g1, params.vk.beta_g2)),
};
let pk_json = serde_json::to_string(&proving_key).unwrap();
let vk_json = serde_json::to_string(&verification_key).unwrap();
fs::write(pk_filename, pk_json.as_bytes()).unwrap();
fs::write(vk_filename, vk_json.as_bytes()).unwrap();
let params = load_params_file(params_filename);
proving_key_json_file(&params, pk_filename).unwrap();
verification_key_json_file(&params, vk_filename).unwrap();
println!("Created {} and {}.", pk_filename, vk_filename);
}

@@ -1,26 +1,9 @@
#![allow(unused_imports)]
extern crate phase2;
extern crate bellman_ce;
extern crate num_bigint;
extern crate num_traits;
extern crate exitcode;
extern crate serde;
use std::fmt;
use std::fs;
use std::fs::OpenOptions;
use num_bigint::BigUint;
use num_traits::Num;
use phase2::utils::repr_to_big;
use phase2::parameters::MPCParameters;
use bellman_ce::pairing::{
Engine,
CurveAffine,
ff::PrimeField,
bn256::{
Bn256,
}
use phase2::circom_circuit::{
load_params_file,
create_verifier_sol_file
};
fn main() {
@@ -31,48 +14,7 @@ fn main() {
}
let params_filename = &args[1];
let verifier_filename = &args[2];
let should_filter_points_at_infinity = false;
let bytes = include_bytes!("../verifier_groth.sol");
let template = String::from_utf8_lossy(bytes);
let reader = OpenOptions::new()
.read(true)
.open(params_filename)
.expect("unable to open.");
let params = MPCParameters::read(reader, should_filter_points_at_infinity, true).expect("unable to read params");
let vk = &params.get_params().vk;
let p1_to_str = |p: &<Bn256 as Engine>::G1Affine| {
let x = repr_to_big(p.get_x().into_repr());
let y = repr_to_big(p.get_y().into_repr());
return format!("{}, {}", x, y)
};
let p2_to_str = |p: &<Bn256 as Engine>::G2Affine| {
let x = p.get_x();
let y = p.get_y();
let x_c0 = repr_to_big(x.c0.into_repr());
let x_c1 = repr_to_big(x.c1.into_repr());
let y_c0 = repr_to_big(y.c0.into_repr());
let y_c1 = repr_to_big(y.c1.into_repr());
format!("[{}, {}], [{}, {}]", x_c0, x_c1, y_c0, y_c1)
};
let template = template.replace("<%vk_alfa1%>", &*p1_to_str(&vk.alpha_g1));
let template = template.replace("<%vk_beta2%>", &*p2_to_str(&vk.beta_g2));
let template = template.replace("<%vk_gamma2%>", &*p2_to_str(&vk.gamma_g2));
let template = template.replace("<%vk_delta2%>", &*p2_to_str(&vk.delta_g2));
let template = template.replace("<%vk_ic_length%>", &*vk.ic.len().to_string());
let template = template.replace("<%vk_input_length%>", &*(vk.ic.len() - 1).to_string());
let mut vi = String::from("");
for i in 0..vk.ic.len() {
vi = format!("{}{}vk.IC[{}] = Pairing.G1Point({});\n", vi, if vi.len() == 0 { "" } else { " " }, i, &*p1_to_str(&vk.ic[i]));
}
let template = template.replace("<%vk_ic_pts%>", &*vi);
fs::write(verifier_filename, template.as_bytes()).unwrap();
let params = load_params_file(params_filename);
create_verifier_sol_file(&params, verifier_filename).unwrap();
println!("Created {}", verifier_filename);
}

@@ -4,7 +4,7 @@ extern crate exitcode;
use std::fs::File;
use phase2::parameters::MPCParameters;
use phase2::circom_circuit::CircomCircuit;
use phase2::circom_circuit::circuit_from_json_file;
fn main() {
let args: Vec<String> = std::env::args().collect();
@@ -20,7 +20,7 @@ fn main() {
// Import the circuit and create the initial parameters using phase 1
println!("Creating initial parameters for {}...", circuit_filename);
let params = {
let c = CircomCircuit::from_json_file(&circuit_filename);
let c = circuit_from_json_file(&circuit_filename);
MPCParameters::new(c, should_filter_points_at_infinity).unwrap()
};

@@ -7,27 +7,16 @@ extern crate num_traits;
extern crate itertools;
use std::fs;
use std::fs::OpenOptions;
use serde::{Deserialize, Serialize};
use itertools::Itertools;
use phase2::parameters::MPCParameters;
use phase2::circom_circuit::CircomCircuit;
use phase2::utils::{
repr_to_big,
p1_to_vec,
p2_to_vec,
use bellman_ce::pairing::bn256::Bn256;
use phase2::circom_circuit::{
load_params_file,
prove,
verify,
create_rng,
proof_to_json_file,
circuit_from_json_file,
witness_from_json_file
};
use bellman_ce::groth16::{prepare_verifying_key, create_random_proof, verify_proof};
use bellman_ce::pairing::ff::PrimeField;
#[derive(Serialize, Deserialize)]
struct ProofJson {
pub protocol: String,
pub pi_a: Vec<String>,
pub pi_b: Vec<Vec<String>>,
pub pi_c: Vec<String>,
}
fn main() {
let args: Vec<String> = std::env::args().collect();
@@ -41,48 +30,21 @@ fn main() {
let proof_filename = &args[4];
let public_filename = &args[5];
let should_filter_points_at_infinity = false;
let rng = &mut rand::XorShiftRng::new_unseeded(); // TODO: change this unsafe unseeded random (!)
let mut c = CircomCircuit::from_json_file(circuit_filename);
c.load_witness_json_file(witness_filename);
let input = c.inputs.to_vec();
let reader = OpenOptions::new()
.read(true)
.open(params_filename)
.expect("unable to open.");
let mut params = MPCParameters::read(reader, should_filter_points_at_infinity, true).expect("unable to read params");
params.filter_params();
let params = params.get_params();
let rng = create_rng();
let params = load_params_file(params_filename);
let mut circuit = circuit_from_json_file(circuit_filename);
circuit.witness = Some(witness_from_json_file::<Bn256>(witness_filename));
println!("Proving...");
let proof = create_random_proof(c, &*params, rng).unwrap();
let proof = prove(circuit.clone(), &params, rng).unwrap();
println!("Checking proof");
let pvk = prepare_verifying_key(&params.vk);
let result = verify_proof(
&pvk,
&proof,
&input[1..]
).unwrap();
assert!(result, "Proof is correct");
println!("Verifying proof");
let correct = verify(&circuit, &params, &proof).unwrap();
assert!(correct, "Proof is correct");
let proof = ProofJson {
protocol: "groth".to_string(),
pi_a: p1_to_vec(&proof.a),
pi_b: p2_to_vec(&proof.b),
pi_c: p1_to_vec(&proof.c),
};
let proof_json = serde_json::to_string(&proof).unwrap();
fs::write(proof_filename, proof_json.as_bytes()).unwrap();
let public_inputs = input[1..].iter().map(|x| repr_to_big(x.into_repr())).collect_vec();
let public_json = serde_json::to_string(&public_inputs).unwrap();
fs::write(public_filename, public_json.as_bytes()).unwrap();
println!("Saving {} and {}", proof_filename, public_filename);
proof_to_json_file(&proof, proof_filename).unwrap();
fs::write(public_filename, circuit.get_public_inputs_json().as_bytes()).unwrap();
println!("Done!")
}

@@ -4,7 +4,7 @@ extern crate exitcode;
use std::fs::OpenOptions;
use phase2::parameters::*;
use phase2::circom_circuit::CircomCircuit;
use phase2::circom_circuit::circuit_from_json_file;
fn main() {
let args: Vec<String> = std::env::args().collect();
@@ -34,7 +34,7 @@ fn main() {
let contribution = verify_contribution(&old_params, &new_params).expect("should verify");
let should_filter_points_at_infinity = false;
let verification_result = new_params.verify(CircomCircuit::from_json_file(&circuit_filename), should_filter_points_at_infinity).unwrap();
let verification_result = new_params.verify(circuit_from_json_file(&circuit_filename), should_filter_points_at_infinity).unwrap();
assert!(contains_contribution(&verification_result, &contribution));
println!("Contribution {} verified.", new_params_filename);
}

@@ -1,23 +1,16 @@
#![allow(unused_imports)]
extern crate bellman_ce;
extern crate rand;
use std::str;
use std::fs;
use std::fs::OpenOptions;
use std::fs::{OpenOptions, File};
use std::io::{Read, Write};
use std::collections::BTreeMap;
use std::iter::repeat;
use std::sync::Arc;
use itertools::Itertools;
use std::io::{
Read,
Write,
};
use bellman_ce::pairing::{
Engine,
ff::{
PrimeField,
},
};
use rand::{Rng, OsRng};
use parameters::MPCParameters;
use bellman_ce::{
Circuit,
@@ -26,8 +19,31 @@ use bellman_ce::{
Index,
ConstraintSystem,
LinearCombination,
groth16::{
Parameters,
Proof,
prepare_verifying_key,
create_random_proof,
verify_proof,
},
pairing::{
Engine,
CurveAffine,
ff::{
PrimeField,
},
bn256::{
Bn256,
}
}
};
use crate::utils::{
repr_to_big,
p1_to_vec,
p2_to_vec,
pairing_to_vec,
};
#[derive(Serialize, Deserialize)]
struct CircuitJson {
@@ -40,13 +56,54 @@ struct CircuitJson {
pub num_variables: usize,
}
#[derive(Serialize, Deserialize)]
struct ProofJson {
pub protocol: String,
pub pi_a: Vec<String>,
pub pi_b: Vec<Vec<String>>,
pub pi_c: Vec<String>,
}
#[derive(Serialize, Deserialize)]
struct ProvingKeyJson {
#[serde(rename = "A")]
pub a: Vec<Vec<String>>,
#[serde(rename = "B1")]
pub b1: Vec<Vec<String>>,
#[serde(rename = "B2")]
pub b2: Vec<Vec<Vec<String>>>,
#[serde(rename = "C")]
pub c: Vec<Option<Vec<String>>>,
pub vk_alfa_1: Vec<String>,
pub vk_beta_1: Vec<String>,
pub vk_delta_1: Vec<String>,
pub vk_beta_2: Vec<Vec<String>>,
pub vk_delta_2: Vec<Vec<String>>,
#[serde(rename = "hExps")]
pub h: Vec<Vec<String>>,
// Todo: add json fields: nPublic, nVars, polsA, polsB, polsC, protocol: groth
}
#[derive(Serialize, Deserialize)]
struct VerifyingKeyJson {
#[serde(rename = "IC")]
pub ic: Vec<Vec<String>>,
pub vk_alfa_1: Vec<String>,
pub vk_beta_2: Vec<Vec<String>>,
pub vk_gamma_2: Vec<Vec<String>>,
pub vk_delta_2: Vec<Vec<String>>,
pub vk_alfabeta_12: Vec<Vec<Vec<String>>>,
pub protocol: String,
#[serde(rename = "nPublic")]
pub inputs_count: usize,
}
#[derive(Clone)]
pub struct CircomCircuit<E: Engine> {
pub num_inputs: usize,
pub num_aux: usize,
pub num_constraints: usize,
pub inputs: Vec<E::Fr>,
pub aux: Vec<E::Fr>,
pub witness: Option<Vec<E::Fr>>,
pub constraints: Vec<(
Vec<(usize, E::Fr)>,
Vec<(usize, E::Fr)>,
@@ -55,51 +112,20 @@ pub struct CircomCircuit<E: Engine> {
}
impl<'a, E: Engine> CircomCircuit<E> {
pub fn load_witness_json_file(&mut self, filename: &str) {
let reader = OpenOptions::new()
.read(true)
.open(filename)
.expect("unable to open.");
self.load_witness_json(reader);
pub fn get_public_inputs(&self) -> Option<Vec<E::Fr>> {
return match self.witness.clone() {
None => None,
Some(w) => Some(w[1..self.num_inputs].to_vec()),
}
}
pub fn load_witness_json<R: Read>(&mut self, reader: R) {
let witness: Vec<String> = serde_json::from_reader(reader).unwrap();
let witness = witness.into_iter().map(|x| E::Fr::from_str(&x).unwrap()).collect::<Vec<E::Fr>>();
self.inputs = witness[..self.num_inputs].to_vec();
self.aux = witness[self.num_inputs..].to_vec();
}
pub fn from_json_file(filename: &str) -> CircomCircuit::<E> {
let reader = OpenOptions::new()
.read(true)
.open(filename)
.expect("unable to open.");
return CircomCircuit::from_json(reader);
}
pub fn from_json<R: Read>(reader: R) -> CircomCircuit::<E> {
let circuit_json: CircuitJson = serde_json::from_reader(reader).unwrap();
let num_inputs = circuit_json.num_inputs + circuit_json.num_outputs + 1;
let num_aux = circuit_json.num_variables - num_inputs;
let convert_constraint = |lc: &BTreeMap<String, String>| {
lc.iter().map(|(index, coeff)| (index.parse().unwrap(), E::Fr::from_str(coeff).unwrap())).collect_vec()
};
let constraints = circuit_json.constraints.iter().map(
|c| (convert_constraint(&c[0]), convert_constraint(&c[1]), convert_constraint(&c[2]))
).collect_vec();
return CircomCircuit {
num_inputs: num_inputs,
num_aux: num_aux,
num_constraints: circuit_json.num_variables,
inputs: vec![],
aux: vec![],
constraints: constraints,
pub fn get_public_inputs_json(&self) -> String {
let inputs = self.get_public_inputs();
let inputs = match inputs {
None => return String::from("[]"),
Some(inp) => inp.iter().map(|x| repr_to_big(x.into_repr())).collect_vec()
};
return serde_json::to_string(&inputs).unwrap();
}
}
@@ -112,17 +138,24 @@ impl<'a, E: Engine> Circuit<E> for CircomCircuit<E> {
cs: &mut CS
) -> Result<(), SynthesisError>
{
let witness = &self.witness.clone();
for i in 1..self.num_inputs {
cs.alloc_input(|| format!("variable {}", i),
|| {
Ok(if self.inputs.len() > 0 { self.inputs[i] } else { E::Fr::from_str("1").unwrap() })
Ok(match witness {
None => E::Fr::from_str("1").unwrap(),
Some(w) => w[i],
})
})?;
}
for i in 0..self.num_aux {
cs.alloc(|| format!("aux {}", i),
|| {
Ok(if self.aux.len() > 0 { self.aux[i] } else { E::Fr::from_str("1").unwrap() })
Ok(match witness {
None => E::Fr::from_str("1").unwrap(),
Some(w) => w[i + self.num_inputs],
})
})?;
}
@@ -147,3 +180,186 @@ impl<'a, E: Engine> Circuit<E> for CircomCircuit<E> {
Ok(())
}
}
pub fn prove<E: Engine, R: Rng>(circuit: CircomCircuit<E>, params: &Parameters<E>, mut rng: R) -> Result<Proof<E>, SynthesisError> {
let mut params2 = params.clone();
filter_params(&mut params2);
return create_random_proof(circuit, &params2, &mut rng);
}
pub fn verify<E: Engine>(circuit: &CircomCircuit<E>, params: &Parameters<E>, proof: &Proof<E>) -> Result<bool, SynthesisError> {
let inputs = match circuit.get_public_inputs() {
None => return Err(SynthesisError::AssignmentMissing),
Some(inp) => inp,
};
return verify_proof(
&prepare_verifying_key(&params.vk),
proof,
&inputs
);
}
pub fn create_verifier_sol(params: &Parameters<Bn256>) -> String {
// TODO: use a simple template engine
let bytes = include_bytes!("verifier_groth.sol");
let template = String::from_utf8_lossy(bytes);
let p1_to_str = |p: &<Bn256 as Engine>::G1Affine| {
let x = repr_to_big(p.get_x().into_repr());
let y = repr_to_big(p.get_y().into_repr());
return format!("uint256({}), uint256({})", x, y)
};
let p2_to_str = |p: &<Bn256 as Engine>::G2Affine| {
let x = p.get_x();
let y = p.get_y();
let x_c0 = repr_to_big(x.c0.into_repr());
let x_c1 = repr_to_big(x.c1.into_repr());
let y_c0 = repr_to_big(y.c0.into_repr());
let y_c1 = repr_to_big(y.c1.into_repr());
format!("[uint256({}), uint256({})], [uint256({}), uint256({})]", x_c1, x_c0, y_c1, y_c0)
};
let template = template.replace("<%vk_alfa1%>", &*p1_to_str(&params.vk.alpha_g1));
let template = template.replace("<%vk_beta2%>", &*p2_to_str(&params.vk.beta_g2));
let template = template.replace("<%vk_gamma2%>", &*p2_to_str(&params.vk.gamma_g2));
let template = template.replace("<%vk_delta2%>", &*p2_to_str(&params.vk.delta_g2));
let template = template.replace("<%vk_ic_length%>", &*params.vk.ic.len().to_string());
let template = template.replace("<%vk_input_length%>", &*(params.vk.ic.len() - 1).to_string());
let mut vi = String::from("");
for i in 0..params.vk.ic.len() {
vi = format!("{}{}vk.IC[{}] = Pairing.G1Point({});\n", vi, if vi.len() == 0 { "" } else { " " }, i, &*p1_to_str(&params.vk.ic[i]));
}
let template = template.replace("<%vk_ic_pts%>", &*vi);
return template;
}
pub fn create_verifier_sol_file(params: &Parameters<Bn256>, filename: &str) -> std::io::Result<()> {
return fs::write(filename, create_verifier_sol(params).as_bytes());
}
pub fn proof_to_json(proof: &Proof<Bn256>) -> Result<String, serde_json::error::Error> {
return serde_json::to_string(&ProofJson {
protocol: "groth".to_string(),
pi_a: p1_to_vec(&proof.a),
pi_b: p2_to_vec(&proof.b),
pi_c: p1_to_vec(&proof.c),
});
}
pub fn proof_to_json_file(proof: &Proof<Bn256>, filename: &str) -> std::io::Result<()> {
let str = proof_to_json(proof).unwrap(); // TODO: proper error handling
return fs::write(filename, str.as_bytes());
}
pub fn load_params_file(filename: &str) -> Parameters<Bn256> {
let reader = OpenOptions::new()
.read(true)
.open(filename)
.expect("unable to open.");
return load_params(reader);
}
pub fn load_params<R: Read>(reader: R) -> Parameters<Bn256> {
let should_filter_points_at_infinity = false;
let params = MPCParameters::read(reader, should_filter_points_at_infinity, true).expect("unable to read params");
return params.get_params().clone();
}
pub fn filter_params<E: Engine>(params: &mut Parameters<E>) {
params.vk.ic = params.vk.ic.clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>();
params.h = Arc::new((*params.h).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>());
params.a = Arc::new((*params.a).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>());
params.b_g1 = Arc::new((*params.b_g1).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>());
params.b_g2 = Arc::new((*params.b_g2).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>());
}
pub fn proving_key_json(params: &Parameters<Bn256>) -> Result<String, serde_json::error::Error> {
let proving_key = ProvingKeyJson {
a: params.a.iter().map(|e| p1_to_vec(e)).collect_vec(),
b1: params.b_g1.iter().map(|e| p1_to_vec(e)).collect_vec(),
b2: params.b_g2.iter().map(|e| p2_to_vec(e)).collect_vec(),
c: repeat(None).take(params.vk.ic.len()).chain(params.l.iter().map(|e| Some(p1_to_vec(e)))).collect_vec(),
vk_alfa_1: p1_to_vec(&params.vk.alpha_g1),
vk_beta_1: p1_to_vec(&params.vk.beta_g1),
vk_delta_1: p1_to_vec(&params.vk.delta_g1),
vk_beta_2: p2_to_vec(&params.vk.beta_g2),
vk_delta_2: p2_to_vec(&params.vk.delta_g2),
h: params.h.iter().map(|e| p1_to_vec(e)).collect_vec(),
};
return serde_json::to_string(&proving_key);
}
pub fn proving_key_json_file(params: &Parameters<Bn256>, filename: &str) -> std::io::Result<()> {
let str = proving_key_json(params).unwrap(); // TODO: proper error handling
return fs::write(filename, str.as_bytes());
}
pub fn verification_key_json(params: &Parameters<Bn256>) -> Result<String, serde_json::error::Error> {
let verification_key = VerifyingKeyJson {
ic: params.vk.ic.iter().map(|e| p1_to_vec(e)).collect_vec(),
vk_alfa_1: p1_to_vec(&params.vk.alpha_g1),
vk_beta_2: p2_to_vec(&params.vk.beta_g2),
vk_gamma_2: p2_to_vec(&params.vk.gamma_g2),
vk_delta_2: p2_to_vec(&params.vk.delta_g2),
vk_alfabeta_12: pairing_to_vec(&Bn256::pairing(params.vk.alpha_g1, params.vk.beta_g2)),
inputs_count: params.vk.ic.len() - 1,
protocol: String::from("groth"),
};
return serde_json::to_string(&verification_key);
}
pub fn verification_key_json_file(params: &Parameters<Bn256>, filename: &str) -> std::io::Result<()> {
let str = verification_key_json(params).unwrap(); // TODO: proper error handling
return fs::write(filename, str.as_bytes());
}
pub fn witness_from_json_file<E: Engine>(filename: &str) -> Vec<E::Fr> {
let reader = OpenOptions::new()
.read(true)
.open(filename)
.expect("unable to open.");
return witness_from_json::<E, File>(reader);
}
pub fn witness_from_json<E: Engine, R: Read>(reader: R) -> Vec<E::Fr>{
let witness: Vec<String> = serde_json::from_reader(reader).unwrap();
return witness.into_iter().map(|x| E::Fr::from_str(&x).unwrap()).collect::<Vec<E::Fr>>();
}
pub fn circuit_from_json_file<E: Engine>(filename: &str) -> CircomCircuit::<E> {
let reader = OpenOptions::new()
.read(true)
.open(filename)
.expect("unable to open.");
return circuit_from_json(reader);
}
pub fn circuit_from_json<E: Engine, R: Read>(reader: R) -> CircomCircuit::<E> {
let circuit_json: CircuitJson = serde_json::from_reader(reader).unwrap();
let num_inputs = circuit_json.num_inputs + circuit_json.num_outputs + 1;
let num_aux = circuit_json.num_variables - num_inputs;
let convert_constraint = |lc: &BTreeMap<String, String>| {
lc.iter().map(|(index, coeff)| (index.parse().unwrap(), E::Fr::from_str(coeff).unwrap())).collect_vec()
};
let constraints = circuit_json.constraints.iter().map(
|c| (convert_constraint(&c[0]), convert_constraint(&c[1]), convert_constraint(&c[2]))
).collect_vec();
return CircomCircuit {
num_inputs: num_inputs,
num_aux: num_aux,
num_constraints: circuit_json.num_variables,
witness: None,
constraints: constraints,
};
}
pub fn create_rng() -> Box<dyn Rng> {
return Box::new(OsRng::new().unwrap())
}

@@ -401,14 +401,6 @@ impl MPCParameters {
&self.params
}
pub fn filter_params(&mut self) {
self.params.vk.ic = self.params.vk.ic.clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>();
self.params.h = Arc::new((*self.params.h).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>());
self.params.a = Arc::new((*self.params.a).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>());
self.params.b_g1 = Arc::new((*self.params.b_g1).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>());
self.params.b_g2 = Arc::new((*self.params.b_g2).clone().into_iter().filter(|x| !x.is_zero()).collect::<Vec<_>>());
}
/// Contributes some randomness to the parameters. Only one
/// contributor needs to be honest for the parameters to be
/// secure.

@@ -16,9 +16,7 @@ use bellman_ce::pairing::{
CurveAffine,
CurveProjective,
Wnaf,
Engine,
bn256::{
Bn256,
G2,
G1Affine,
G2Affine,

@@ -1,223 +1,210 @@
//
// Copyright 2017 Christian Reitwiessner
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
// 2019 OKIMS
// ported to solidity 0.5
// fixed linter warnings
// added require error messages
//
pragma solidity ^0.5.0;
pragma solidity ^0.6.0;
library Pairing {
uint256 constant PRIME_Q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
struct G1Point {
uint X;
uint Y;
uint256 X;
uint256 Y;
}
// Encoding of field elements is: X[0] * z + X[1]
struct G2Point {
uint[2] X;
uint[2] Y;
uint256[2] X;
uint256[2] Y;
}
/// @return the generator of G1
function P1() internal pure returns (G1Point memory) {
return G1Point(1, 2);
}
/// @return the generator of G2
function P2() internal pure returns (G2Point memory) {
// Original code point
return G2Point(
[11559732032986387107991004021392285783925812861821192530917403151452391805634,
10857046999023057135944570762232829481370756359578518086990519993285655852781],
[4082367875863433681332203403145435568316851327593401208105741076214120093531,
8495653923123431417604973247489272438418190587263600148770280649306958101930]
);
/*
// Changed by Jordi point
return G2Point(
[10857046999023057135944570762232829481370756359578518086990519993285655852781,
11559732032986387107991004021392285783925812861821192530917403151452391805634],
[8495653923123431417604973247489272438418190587263600148770280649306958101930,
4082367875863433681332203403145435568316851327593401208105741076214120093531]
);
*/
}
/// @return the negation of p, i.e. p.addition(p.negate()) should be zero.
/*
* @return The negation of p, i.e. p.plus(p.negate()) should be zero.
*/
function negate(G1Point memory p) internal pure returns (G1Point memory) {
// The prime q in the base field F_q for G1
uint q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
if (p.X == 0 && p.Y == 0)
if (p.X == 0 && p.Y == 0) {
return G1Point(0, 0);
return G1Point(p.X, q - (p.Y % q));
} else {
return G1Point(p.X, PRIME_Q - (p.Y % PRIME_Q));
}
/// @return the sum of two points of G1
function addition(G1Point memory p1, G1Point memory p2) internal view returns (G1Point memory r) {
uint[4] memory input;
}
/*
* @return r the sum of two points of G1
*/
function plus(
G1Point memory p1,
G1Point memory p2
) internal view returns (G1Point memory r) {
uint256[4] memory input;
input[0] = p1.X;
input[1] = p1.Y;
input[2] = p2.X;
input[3] = p2.Y;
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 6, input, 0xc0, r, 0x60)
success := staticcall(sub(gas(), 2000), 6, input, 0xc0, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success,"pairing-add-failed");
require(success, "pairing-add-failed");
}
/// @return the product of a point on G1 and a scalar, i.e.
/// p == p.scalar_mul(1) and p.addition(p) == p.scalar_mul(2) for all points p.
function scalar_mul(G1Point memory p, uint s) internal view returns (G1Point memory r) {
uint[3] memory input;
/*
* @return r the product of a point on G1 and a scalar, i.e.
* p == p.scalar_mul(1) and p.plus(p) == p.scalar_mul(2) for all
* points p.
*/
function scalar_mul(G1Point memory p, uint256 s) internal view returns (G1Point memory r) {
uint256[3] memory input;
input[0] = p.X;
input[1] = p.Y;
input[2] = s;
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 7, input, 0x80, r, 0x60)
success := staticcall(sub(gas(), 2000), 7, input, 0x80, r, 0x60)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require (success,"pairing-mul-failed");
require(success, "pairing-mul-failed");
}
/// @return the result of computing the pairing check
/// e(p1[0], p2[0]) * .... * e(p1[n], p2[n]) == 1
/// For example pairing([P1(), P1().negate()], [P2(), P2()]) should
/// return true.
function pairing(G1Point[] memory p1, G2Point[] memory p2) internal view returns (bool) {
require(p1.length == p2.length,"pairing-lengths-failed");
uint elements = p1.length;
uint inputSize = elements * 6;
uint[] memory input = new uint[](inputSize);
for (uint i = 0; i < elements; i++)
{
input[i * 6 + 0] = p1[i].X;
input[i * 6 + 1] = p1[i].Y;
input[i * 6 + 2] = p2[i].X[0];
input[i * 6 + 3] = p2[i].X[1];
input[i * 6 + 4] = p2[i].Y[0];
input[i * 6 + 5] = p2[i].Y[1];
/* @return The result of computing the pairing check
* e(p1[0], p2[0]) * .... * e(p1[n], p2[n]) == 1
* For example,
* pairing([P1(), P1().negate()], [P2(), P2()]) should return true.
*/
function pairing(
G1Point memory a1,
G2Point memory a2,
G1Point memory b1,
G2Point memory b2,
G1Point memory c1,
G2Point memory c2,
G1Point memory d1,
G2Point memory d2
) internal view returns (bool) {
G1Point[4] memory p1 = [a1, b1, c1, d1];
G2Point[4] memory p2 = [a2, b2, c2, d2];
uint256 inputSize = 24;
uint256[] memory input = new uint256[](inputSize);
for (uint256 i = 0; i < 4; i++) {
uint256 j = i * 6;
input[j + 0] = p1[i].X;
input[j + 1] = p1[i].Y;
input[j + 2] = p2[i].X[0];
input[j + 3] = p2[i].X[1];
input[j + 4] = p2[i].Y[0];
input[j + 5] = p2[i].Y[1];
}
uint[1] memory out;
uint256[1] memory out;
bool success;
// solium-disable-next-line security/no-inline-assembly
assembly {
success := staticcall(sub(gas, 2000), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20)
success := staticcall(sub(gas(), 2000), 8, add(input, 0x20), mul(inputSize, 0x20), out, 0x20)
// Use "invalid" to make gas estimation work
switch success case 0 { invalid() }
}
require(success,"pairing-opcode-failed");
require(success, "pairing-opcode-failed");
return out[0] != 0;
}
/// Convenience method for a pairing check for two pairs.
function pairingProd2(G1Point memory a1, G2Point memory a2, G1Point memory b1, G2Point memory b2) internal view returns (bool) {
G1Point[] memory p1 = new G1Point[](2);
G2Point[] memory p2 = new G2Point[](2);
p1[0] = a1;
p1[1] = b1;
p2[0] = a2;
p2[1] = b2;
return pairing(p1, p2);
}
/// Convenience method for a pairing check for three pairs.
function pairingProd3(
G1Point memory a1, G2Point memory a2,
G1Point memory b1, G2Point memory b2,
G1Point memory c1, G2Point memory c2
) internal view returns (bool) {
G1Point[] memory p1 = new G1Point[](3);
G2Point[] memory p2 = new G2Point[](3);
p1[0] = a1;
p1[1] = b1;
p1[2] = c1;
p2[0] = a2;
p2[1] = b2;
p2[2] = c2;
return pairing(p1, p2);
}
/// Convenience method for a pairing check for four pairs.
function pairingProd4(
G1Point memory a1, G2Point memory a2,
G1Point memory b1, G2Point memory b2,
G1Point memory c1, G2Point memory c2,
G1Point memory d1, G2Point memory d2
) internal view returns (bool) {
G1Point[] memory p1 = new G1Point[](4);
G2Point[] memory p2 = new G2Point[](4);
p1[0] = a1;
p1[1] = b1;
p1[2] = c1;
p1[3] = d1;
p2[0] = a2;
p2[1] = b2;
p2[2] = c2;
p2[3] = d2;
return pairing(p1, p2);
}
}
contract Verifier {
uint256 constant SNARK_SCALAR_FIELD = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
uint256 constant PRIME_Q = 21888242871839275222246405745257275088696311157297823662689037894645226208583;
using Pairing for *;
struct VerifyingKey {
Pairing.G1Point alfa1;
Pairing.G2Point beta2;
Pairing.G2Point gamma2;
Pairing.G2Point delta2;
Pairing.G1Point[] IC;
Pairing.G1Point[<%vk_ic_length%>] IC;
}
struct Proof {
Pairing.G1Point A;
Pairing.G2Point B;
Pairing.G1Point C;
}
function verifyingKey() internal pure returns (VerifyingKey memory vk) {
vk.alfa1 = Pairing.G1Point(<%vk_alfa1%>);
vk.beta2 = Pairing.G2Point(<%vk_beta2%>);
vk.gamma2 = Pairing.G2Point(<%vk_gamma2%>);
vk.delta2 = Pairing.G2Point(<%vk_delta2%>);
vk.IC = new Pairing.G1Point[](<%vk_ic_length%>);
<%vk_ic_pts%>
}
function verify(uint[] memory input, Proof memory proof) internal view returns (uint) {
uint256 snark_scalar_field = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
/*
* @returns Whether the proof is valid given the hardcoded verifying key
* above and the public inputs
*/
function verifyProof(
bytes memory proof,
uint256[<%vk_input_length%>] memory input
) public view returns (bool r) {
uint256[8] memory p = abi.decode(proof, (uint256[8]));
// Make sure that each element in the proof is less than the prime q
for (uint8 i = 0; i < p.length; i++) {
require(p[i] < PRIME_Q, "verifier-proof-element-gte-prime-q");
}
Proof memory _proof;
_proof.A = Pairing.G1Point(p[0], p[1]);
_proof.B = Pairing.G2Point([p[2], p[3]], [p[4], p[5]]);
_proof.C = Pairing.G1Point(p[6], p[7]);
VerifyingKey memory vk = verifyingKey();
require(input.length + 1 == vk.IC.length,"verifier-bad-input");
// Compute the linear combination vk_x
Pairing.G1Point memory vk_x = Pairing.G1Point(0, 0);
for (uint i = 0; i < input.length; i++) {
require(input[i] < snark_scalar_field,"verifier-gte-snark-scalar-field");
vk_x = Pairing.addition(vk_x, Pairing.scalar_mul(vk.IC[i + 1], input[i]));
vk_x = Pairing.plus(vk_x, vk.IC[0]);
// Make sure that every input is less than the snark scalar field
for (uint256 i = 0; i < input.length; i++) {
require(input[i] < SNARK_SCALAR_FIELD, "verifier-gte-snark-scalar-field");
vk_x = Pairing.plus(vk_x, Pairing.scalar_mul(vk.IC[i + 1], input[i]));
}
vk_x = Pairing.addition(vk_x, vk.IC[0]);
if (!Pairing.pairingProd4(
Pairing.negate(proof.A), proof.B,
vk.alfa1, vk.beta2,
vk_x, vk.gamma2,
proof.C, vk.delta2
)) return 1;
return 0;
}
function verifyProof(
uint[2] memory a,
uint[2][2] memory b,
uint[2] memory c,
uint[<%vk_input_length%>] memory input
) public view returns (bool r) {
Proof memory proof;
proof.A = Pairing.G1Point(a[0], a[1]);
proof.B = Pairing.G2Point([b[0][0], b[0][1]], [b[1][0], b[1][1]]);
proof.C = Pairing.G1Point(c[0], c[1]);
return verify(input, proof);
}
function verifyProof(bytes memory proof, uint[<%vk_input_length%>] memory inputs) public view returns (bool r) {
uint[8] memory p = abi.decode(proof, (uint[8]));
Proof memory proof;
proof.A = Pairing.G1Point(p[0], p[1]);
proof.B = Pairing.G2Point([p[2], p[3]], [p[4], p[5]]);
proof.C = Pairing.G1Point(p[6], p[7]);
return verify(inputs, proof) == 0;
return Pairing.pairing(
Pairing.negate(_proof.A),
_proof.B,
vk.alfa1,
vk.beta2,
vk_x,
vk.gamma2,
_proof.C,
vk.delta2
);
}
}

@@ -26,14 +26,13 @@ cargo run --release --bin verify_contribution circuit.json circom2.params circom
cargo run --release --bin contribute circom3.params circom4.params askldfjklasdf
cargo run --release --bin verify_contribution circuit.json circom3.params circom4.params
# generate resulting keys
cargo run --release --bin export_keys circom4.params vk.json pk.json
# create dummy keys in circom format
echo "Generating dummy key files..."
npx snarkjs setup --protocol groth
# generate resulting keys
cargo run --release --bin export_keys circom4.params vk.json pk.json
# patch dummy keys with actual keys params
cargo run --release --bin copy_json proving_key.json pk.json transformed_pk.json
cargo run --release --bin copy_json verification_key.json vk.json transformed_vk.json
# generate solidity verifier
cargo run --release --bin generate_verifier circom4.params verifier.sol
@@ -41,4 +40,4 @@ cargo run --release --bin generate_verifier circom4.params verifier.sol
# try to generate and verify proof
npx snarkjs calculatewitness
cargo run --release --bin prove circuit.json witness.json circom4.params proof.json public.json
npx snarkjs verify --vk transformed_vk.json --proof proof.json
npx snarkjs verify --vk vk.json --proof proof.json
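Internally, export_keys and generate_verifier now reduce to the same circom_circuit helpers; roughly (a sketch, reusing the file names from the script above):

    use phase2::circom_circuit::{
        load_params_file, proving_key_json_file,
        verification_key_json_file, create_verifier_sol_file,
    };

    let params = load_params_file("circom4.params");
    proving_key_json_file(&params, "pk.json").unwrap();          // snarkjs-style proving key
    verification_key_json_file(&params, "vk.json").unwrap();     // snarkjs-style verification key
    create_verifier_sol_file(&params, "verifier.sol").unwrap();  // Solidity verifier from the template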

@@ -4,22 +4,17 @@ extern crate blake2;
extern crate byteorder;
extern crate bellman_ce;
use bellman_ce::pairing::{CurveAffine, CurveProjective};
use bellman_ce::pairing::bn256::Bn256;
use bellman_ce::pairing::bn256::{G1, G2};
use powersoftau::small_bn256::{Bn256CeremonyParameters};
use powersoftau::bn256::Bn256CeremonyParameters;
use powersoftau::batched_accumulator::*;
use powersoftau::parameters::{UseCompression};
use powersoftau::utils::{reduced_hash};
use powersoftau::parameters::UseCompression;
use powersoftau::utils::reduced_hash;
use powersoftau::*;
use crate::parameters::*;
use bellman_ce::multicore::Worker;
use bellman_ce::domain::{EvaluationDomain, Point};
use std::fs::OpenOptions;
use std::io::{BufWriter, Write};
use std::io::Write;
use memmap::*;
@@ -40,7 +35,7 @@ impl PowersOfTauParameters for Bn256ReducedCeremonyParameters {
const fn num_bits<T>() -> usize { std::mem::size_of::<T>() * 8 }
fn log_2(x: u64) -> u32 {
pub fn log_2(x: u64) -> u32 {
assert!(x > 0);
num_bits::<u64>() as u32 - x.leading_zeros() - 1
}
@@ -53,13 +48,13 @@ fn main() {
.expect("unable open `./challenge` in this directory");
let challenge_readable_map = unsafe { MmapOptions::new().map(&reader).expect("unable to create a memory map for input") };
let current_accumulator = BachedAccumulator::<Bn256, Bn256CeremonyParameters>::deserialize(
let current_accumulator = BatchedAccumulator::<Bn256, Bn256CeremonyParameters>::deserialize(
&challenge_readable_map,
CheckForCorrectness::Yes,
UseCompression::No,
).expect("unable to read compressed accumulator");
let mut reduced_accumulator = BachedAccumulator::<Bn256, Bn256ReducedCeremonyParameters>::empty();
let mut reduced_accumulator = BatchedAccumulator::<Bn256, Bn256ReducedCeremonyParameters>::empty();
reduced_accumulator.tau_powers_g1 = current_accumulator.tau_powers_g1[..Bn256ReducedCeremonyParameters::TAU_POWERS_G1_LENGTH].to_vec();
reduced_accumulator.tau_powers_g2 = current_accumulator.tau_powers_g2[..Bn256ReducedCeremonyParameters::TAU_POWERS_LENGTH].to_vec();
reduced_accumulator.alpha_tau_powers_g1 = current_accumulator.alpha_tau_powers_g1[..Bn256ReducedCeremonyParameters::TAU_POWERS_LENGTH].to_vec();
@@ -95,11 +90,11 @@ fn main() {
println!("");
}
reduced_accumulator.serialize(&mut writable_map, UseCompression::No);
reduced_accumulator.serialize(&mut writable_map, UseCompression::No).unwrap();
// Get the hash of the contribution, so the user can compare later
let output_readonly = writable_map.make_read_only().expect("must make a map readonly");
let contribution_hash = BachedAccumulator::<Bn256, Bn256ReducedCeremonyParameters>::calculate_hash(&output_readonly);
let contribution_hash = BatchedAccumulator::<Bn256, Bn256ReducedCeremonyParameters>::calculate_hash(&output_readonly);
println!("Reduced contribution is formed with a hash:");