phase2 verification done

Jordi Baylina 2020-06-14 17:37:22 +02:00
parent 7965dc5d6b
commit a4da81f0a7
No known key found for this signature in database
GPG Key ID: 7480C80C1BE43112
32 changed files with 1475 additions and 327 deletions

cli.js (194 changes)

@@ -30,7 +30,6 @@ const {stringifyBigInts, unstringifyBigInts} = require("ffjavascript").utils;
 const loadR1cs = require("r1csfile").load;
 const WitnessCalculatorBuilder = require("circom_runtime").WitnessCalculatorBuilder;
-const zkeyFile = require("./src/zkeyfile");
 const wtnsFile = require("./src/wtnsfile");
 const loadSyms = require("./src/loadsyms");
@@ -43,12 +42,12 @@ const powersOfTaw = require("./src/powersoftau");
 const bn128 = require("ffjavascript").bn128;
 const solidityGenerator = require("./src/soliditygenerator.js");
-const phase2 = require("./src/phase2");
 const Scalar = require("ffjavascript").Scalar;
 const assert = require("assert");
 const groth16Prover = require("./src/groth16_prover");
+const zkey = require("./src/zkey");
 const commands = [
     {
@@ -97,12 +96,6 @@ const commands = [
         alias: ["zv", "verify -vk|verificationkey -pub|public -p|proof"],
         action: zksnarkVerify
     },
-    {
-        cmd: "zkey export vkey [circuit.zkey] [verification_key.json]",
-        description: "Exports a verification key to JSON",
-        alias: ["zkev"],
-        action: zKeyExportVKey
-    },
     {
         cmd: "solidity genverifier <verificationKey.json> <verifier.sol>",
         description: "Creates a verifier in solidity",
@@ -125,7 +118,7 @@ const commands = [
     {
         cmd: "powersoftau export challange <powersoftau_0000.ptau> [challange]",
         description: "Creates a challange",
-        alias: ["pte"],
+        alias: ["ptec"],
         options: "-verbose|v",
         action: powersOfTawExportChallange
     },
@@ -159,7 +152,7 @@ const commands = [
     },
     {
         cmd: "powersoftau contribute <powersoftau.ptau> <new_powersoftau.ptau>",
-        description: "verifies a powers of tau file",
+        description: "creates a ptau file with a new contribution",
         alias: ["ptc"],
         options: "-verbose|v -name|n -entropy|e",
         action: powersOfTawContribute
@@ -173,11 +166,59 @@ const commands = [
         action: powersOfTawPreparePhase2
     },
     {
-        cmd: "phase2 new [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]",
-        description: "Creates an initial pkey file with zero contributions ",
-        alias: ["p2n"],
-        options: "-verbose|v",
-        action: phase2new
+        cmd: "powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
+        description: "Exports a power of tau file to a JSON",
+        alias: ["ptej"],
+        options: "-verbose|v",
+        action: powersOfTawExportJson
+    },
+    {
+        cmd: "zkey new [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]",
+        description: "Creates an initial pkey file with zero contributions ",
+        alias: ["zkn"],
+        options: "-verbose|v",
+        action: zkeyNew
+    },
+    {
+        cmd: "zkey export bellman [circuit.zkey] [circuit.mpcparams]",
+        description: "Export a zKey to a MPCParameters file compatible with kobi/phase2 (Bellman)",
+        alias: ["zkeb"],
+        options: "-verbose|v",
+        action: zkeyExportBellman
+    },
+    {
+        cmd: "zkey import bellman <circuit_old.zkey> <circuit.mpcparams> <circuit_new.zkey>",
+        description: "Imports a MPCParameters file compatible with kobi/phase2 (Bellman) into a new zKey",
+        alias: ["zkib"],
+        options: "-verbose|v",
+        action: zkeyImportBellman
+    },
+    {
+        cmd: "zkey verify [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]",
+        description: "Verify zkey file contributions and verify that matches with the original circuit.r1cs and ptau",
+        alias: ["zkv"],
+        options: "-verbose|v",
+        action: zkeyVerify
+    },
+    {
+        cmd: "zkey contribute <circuit_old.zkey> <circuit_new.zkey>",
+        description: "creates a zkey file with a new contribution",
+        alias: ["zkc"],
+        options: "-verbose|v",
+        action: zkeyContribute
+    },
+    {
+        cmd: "zkey export vkey [circuit.zkey] [verification_key.json]",
+        description: "Exports a verification key",
+        alias: ["zkev"],
+        action: zkeyExportVKey
+    },
+    {
+        cmd: "zkey export json [circuit.zkey] [circuit.zkey.json]",
+        description: "Exports a circuit key to a JSON file",
+        alias: ["zkej"],
+        options: "-verbose|v",
+        action: zkeyExportJson
+    },
     },
 ];
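
The net effect of this table is that the old phase2 command family becomes a zkey family. A minimal sketch of the same flow driven programmatically, assuming the entry points that src/zkey.js exports later in this commit mirror the CLI wrapper signatures below (file names are placeholders, not part of the commit):

// Minimal sketch, assuming the src/zkey.js entry points shown later in this
// diff take the same arguments as the cli.js wrappers. File names are placeholders.
const zkey = require("./src/zkey");

async function phase2Flow() {
    await zkey.new("circuit.r1cs", "powersoftau.ptau", "circuit_0000.zkey", false);               // zkn
    await zkey.contribute("circuit_0000.zkey", "circuit_0001.zkey", false);                       // zkc
    const ok = await zkey.verify("circuit.r1cs", "powersoftau.ptau", "circuit_0001.zkey", false); // zkv
    console.log(ok === true ? "zKey OK!" : "INVALID zKey");
}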
@@ -224,12 +265,6 @@ TODO COMMANDS
         action: witnessVerify
     },
-    ptau new Starts a ceremony with a new challange for the powes of Tau ceremony
-    ptau contribute Contribute in the ceremony of powers of tau
-    ptau beacon Apply a beacon random to the ceremony
-    ptau verify Verify the powers of tau ceremony
-    ptau preparePhase2 Prepare Powers of Taus for a phase 2
-    phase2 new Starts a second phase ceremony for a given circuit with a first challange and a reference Hash.
     phase2 constribute Contribute in the seconf phase ceremony
     phase2 beacon Contribute in the seconf phase ceremony with a Powers of Tau
     phase2 verify Verify the Powers of tau
@@ -374,7 +409,7 @@ async function zksnarkSetup(params, options) {
     if (!zkSnark[protocol]) throw new Error("Invalid protocol");
     const setup = zkSnark[protocol].setup(cir, options.verbose);
-    await zkeyFile.write(zkeyName, setup.vk_proof);
+    await zkey.utils.write(zkeyName, setup.vk_proof);
     // await fs.promises.writeFile(provingKeyName, JSON.stringify(stringifyBigInts(setup.vk_proof), null, 1), "utf-8");
     await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(setup.vk_verifier), null, 1), "utf-8");
@@ -453,12 +488,11 @@ async function zksnarkVerify(params, options) {
 }
 // zkey export vkey [circuit.zkey] [verification_key.json]",
-async function zKeyExportVKey(params) {
+async function zkeyExportVKey(params) {
     const zkeyName = params[0] || "circuit.zkey";
     const verificationKeyName = params[2] || "verification_key.json";
-    const zKey = await zkeyFile.read(zkeyName);
+    const zKey = await zkey.utils.read(zkeyName);
     let curve;
     if (Scalar.eq(zKey.q, bn128.q)) {
@@ -472,19 +506,26 @@ async function zKeyExportVKey(params) {
         IC: zKey.IC,
-        vk_alfa_1: zKey.vk_alfa_1,
+        vk_alpha_1: zKey.vk_alpha_1,
         vk_beta_2: zKey.vk_beta_2,
         vk_gamma_2: zKey.vk_gamma_2,
         vk_delta_2: zKey.vk_delta_2,
-        vk_alfabeta_12: curve.pairing( zKey.vk_alfa_1 , zKey.vk_beta_2 )
+        vk_alphabeta_12: curve.pairing( zKey.vk_alpha_1 , zKey.vk_beta_2 )
     };
     await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(vKey), null, 1), "utf-8");
 }
+// zkey export json [circuit.zkey] [circuit.zkey.json]",
+async function zkeyExportJson(params, options) {
+    const zkeyName = params[0] || "circuit.zkey";
+    const zkeyJsonName = params[1] || "circuit.zkey.json";
+    return await zkey.exportJson(zkeyName, zkeyJsonName, options.verbose);
+}
 // solidity genverifier <verificationKey.json> <verifier.sol>
 async function solidityGenVerifier(params, options) {
@@ -638,7 +679,7 @@ async function powersOfTawImport(params, options) {
     if (options.nopoints) importPoints = false;
     if (options.nocheck) doCheck = false;
-    const res = await powersOfTaw.impoertResponse(oldPtauName, response, newPtauName, options.name, importPoints, options.verbose);
+    const res = await powersOfTaw.importResponse(oldPtauName, response, newPtauName, options.name, importPoints, options.verbose);
     if (res) return res;
     if (!doCheck) return;
@@ -652,7 +693,7 @@ async function powersOfTawVerify(params, options) {
     ptauName = params[0];
     const res = await powersOfTaw.verify(ptauName, options.verbose);
-    if (res) {
+    if (res === true) {
         console.log("Powers of tau OK!");
         return 0;
     } else {
@@ -695,9 +736,20 @@ async function powersOfTawPreparePhase2(params, options) {
     return await powersOfTaw.preparePhase2(oldPtauName, newPtauName, options.verbose);
 }
+// powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
+async function powersOfTawExportJson(params, options) {
+    let ptauName;
+    let jsonName;
+    ptauName = params[0];
+    jsonName = params[1];
+    return await powersOfTaw.exportJson(ptauName, jsonName, options.verbose);
+}
 // phase2 new <circuit.r1cs> <powersoftau.ptau> <circuit.zkey>
-async function phase2new(params, options) {
+async function zkeyNew(params, options) {
     let r1csName;
     let ptauName;
     let zkeyName;
@@ -722,3 +774,87 @@ async function phase2new(params, options) {
     return phase2.new(r1csName, ptauName, zkeyName, options.verbose);
 }
+// zkey export bellman [circuit.zkey] [circuit.mpcparams]
+async function zkeyExportBellman(params, options) {
+    let zkeyName;
+    let mpcparamsName;
+    if (params.length < 1) {
+        zkeyName = "circuit.zkey";
+    } else {
+        zkeyName = params[0];
+    }
+    if (params.length < 2) {
+        mpcparamsName = "circuit.mpcparams";
+    } else {
+        mpcparamsName = params[1];
+    }
+    return phase2.exportMPCParams(zkeyName, mpcparamsName, options.verbose);
+}
+// zkey import bellman <circuit_old.zkey> <circuit.mpcparams> <circuit_new.zkey>
+async function zkeyImportBellman(params, options) {
+    let zkeyNameOld;
+    let mpcParamsName;
+    let zkeyNameNew;
+    zkeyNameOld = params[0];
+    mpcParamsName = params[1];
+    zkeyNameNew = params[2];
+    return zkey.importBellman(zkeyNameOld, mpcParamsName, zkeyNameNew, options.verbose);
+}
+// phase2 verify [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]
+async function zkeyVerify(params, options) {
+    let r1csName;
+    let ptauName;
+    let zkeyName;
+    if (params.length < 1) {
+        r1csName = "circuit.r1cs";
+    } else {
+        r1csName = params[0];
+    }
+    if (params.length < 2) {
+        ptauName = "powersoftau.ptau";
+    } else {
+        ptauName = params[1];
+    }
+    if (params.length < 3) {
+        zkeyName = "circuit.zkey";
+    } else {
+        zkeyName = params[2];
+    }
+    const res = await zkey.verify(r1csName, ptauName, zkeyName, options.verbose);
+    if (res === true) {
+        console.log("zKey OK!");
+        return 0;
+    } else {
+        console.log("=======>INVALID zKey<==========");
+        return 1;
+    }
+}
+// phase2 contribute <circuit_old.zkey> <circuit_new.zkey>
+async function zkeyContribute(params, options) {
+    let zkeyOldName;
+    let zkeyNewName;
+    zkeyOldName = params[0];
+    zkeyNewName = params[1];
+    return phase2.contribute(zkeyOldName, zkeyNewName, options.verbose);
+}

src/binfileutils.js

@@ -118,6 +118,22 @@ async function readFullSection(fd, sections, idSection) {
     return res;
 }
+async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
+    const MAX_BUFF_SIZE = fd1.pageSize * 16;
+    await startReadUniqueSection(fd1, sections1, idSection);
+    await startReadUniqueSection(fd2, sections2, idSection);
+    if (sections1[idSection][0].size != sections2[idSection][0].size) return false;
+    const totalBytes = sections1[idSection][0].size;
+    for (let i=0; i<totalBytes; i+= MAX_BUFF_SIZE) {
+        const n = Math.min(totalBytes-i, MAX_BUFF_SIZE);
+        const buff1 = await fd1.read(n);
+        const buff2 = await fd2.read(n);
+        // Compare with a local index; reusing the outer i here would corrupt the chunk counter.
+        for (let j=0; j<n; j++) if (buff1[j] != buff2[j]) return false;
+    }
+    await endReadSection(fd1);
+    await endReadSection(fd2);
+    return true;
+}
 module.exports.readBinFile = readBinFile;
@@ -130,3 +146,4 @@ module.exports.startReadUniqueSection = startReadUniqueSection;
 module.exports.endReadSection = endReadSection;
 module.exports.copySection = copySection;
 module.exports.readFullSection = readFullSection;
+module.exports.sectionIsEqual = sectionIsEqual;
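
sectionIsEqual gives callers a streaming equality check over one section of two binary files. A usage sketch, assuming two ptau files opened with the same readBinFile call used elsewhere in this commit (section id 2 is tauG1 per the format notes later in the diff):

// Sketch: compare the tauG1 section (id 2) of two ptau files chunk by chunk.
const binFileUtils = require("./binfileutils");

async function tauG1IsEqual(fileA, fileB) {
    const {fd: fd1, sections: s1} = await binFileUtils.readBinFile(fileA, "ptau", 1);
    const {fd: fd2, sections: s2} = await binFileUtils.readBinFile(fileB, "ptau", 1);
    const eq = await binFileUtils.sectionIsEqual(fd1, s1, fd2, s2, 2);
    await fd1.close();
    await fd2.close();
    return eq;
}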

src/groth16_prover.js

@@ -1,5 +1,5 @@
 const binFileUtils = require("./binfileutils");
-const zkeyFile = require("./zkeyfile");
+const zkeyUtils = require("./zkey").utils;
 const wtnsFile = require("./wtnsfile");
 const getCurve = require("./curves").getCurveFromQ;
 const {log2} = require("./misc");
@@ -12,7 +12,7 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
     const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2);
-    const zkey = await zkeyFile.readHeader(fdZKey, sectionsZKey, "groth16");
+    const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey, "groth16");
     if (!Scalar.eq(zkey.r, wtns.q)) {
         throw new Error("Curve of the witness does not match the curve of the proving key");
@@ -66,7 +66,7 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
     const s = curve.Fr.random();
-    proof.pi_a = G1.add( proof.pi_a, zkey.vk_alfa_1 );
+    proof.pi_a = G1.add( proof.pi_a, zkey.vk_alpha_1 );
     proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( zkey.vk_delta_1, r ));
     proof.pi_b = G2.add( proof.pi_b, zkey.vk_beta_2 );

src/keypair.js

@@ -6,6 +6,20 @@ const blake2b = require("blake2b");
 const ChaCha = require("ffjavascript").ChaCha;
+function hashToG2(hash) {
+    const hashV = new DataView(hash.buffer);
+    const seed = [];
+    for (let i=0; i<8; i++) {
+        seed[i] = hashV.getUint32(i*4);
+    }
+    const rng = new ChaCha(seed);
+    const g2_sp = bn128.G2.fromRng(rng);
+    return g2_sp;
+}
 function getG2sp(persinalization, challange, g1s, g1sx) {
     const h = blake2b(64);
@@ -17,17 +31,7 @@ function getG2sp(persinalization, challange, g1s, g1sx) {
     h.update( utils.beInt2Buff(g1sx[1],32));
     const hash = Buffer.from(h.digest());
-    const seed = [];
-    for (let i=0; i<8; i++) {
-        seed[i] = hash.readUInt32BE(i*4);
-    }
-    const rng = new ChaCha(seed);
-    const g2_sp = bn128.G2.fromRng(rng);
-    return g2_sp;
+    return hashToG2(hash);
 }
 function calculatePubKey(k, curve, personalization, challangeHash, rng ) {
@@ -55,3 +59,4 @@ function createPTauKey(curve, challangeHash, rng) {
 module.exports.createPTauKey = createPTauKey;
 module.exports.getG2sp = getG2sp;
+module.exports.hashToG2 = hashToG2;

src/misc.js

@@ -1,3 +1,4 @@
+const Blake2b = require("blake2b-wasm");
 const _revTable = [];
 for (let i=0; i<256; i++) {
@@ -30,5 +31,52 @@ function log2( V )
     return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) | ( ( V & 0xAAAAAAAA ) !== 0 ) );
 }
+function formatHash(b) {
+    const a = new DataView(b.buffer);
+    let S = "";
+    for (let i=0; i<4; i++) {
+        if (i>0) S += "\n";
+        S += "\t\t";
+        for (let j=0; j<4; j++) {
+            if (j>0) S += " ";
+            S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0");
+        }
+    }
+    return S;
+}
+function hashIsEqual(h1, h2) {
+    if (h1.byteLength != h2.byteLength) return false;
+    var dv1 = new Int8Array(h1);
+    var dv2 = new Int8Array(h2);
+    for (var i = 0 ; i != h1.byteLength ; i++)
+    {
+        if (dv1[i] != dv2[i]) return false;
+    }
+    return true;
+}
+function cloneHasher(h) {
+    const ph = h.getPartialHash();
+    const res = Blake2b(64);
+    res.setPartialHash(ph);
+    return res;
+}
+async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
+    if (curve.G1.isZero(g1s)) return false;
+    if (curve.G1.isZero(g1sx)) return false;
+    if (curve.G2.isZero(g2s)) return false;
+    if (curve.G2.isZero(g2sx)) return false;
+    // return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s));
+    const res = await curve.pairingEq(g1s, g2sx, curve.G1.neg(g1sx), g2s);
+    return res;
+}
 module.exports.bitReverse = bitReverse;
 module.exports.log2 = log2;
+module.exports.formatHash = formatHash;
+module.exports.hashIsEqual = hashIsEqual;
+module.exports.cloneHasher = cloneHasher;
+module.exports.sameRatio = sameRatio;
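
The commented-out line in sameRatio and the pairingEq call below it compute the same predicate. With $g1s = a\,G_1$, $g1sx = a x\,G_1$, $g2s = b\,G_2$, $g2sx = b x'\,G_2$ (all non-zero):

$$e(g1s,\, g2sx) = e(g1sx,\, g2s) \;\Longleftrightarrow\; e(g1s,\, g2sx)\cdot e(-g1sx,\, g2s) = 1_{\mathbb{F}_{12}} \;\Longleftrightarrow\; x = x'.$$

The batched pairingEq form lets the backend multiply the Miller-loop results and share a single final exponentiation, which is presumably why it replaces the two-pairing comparison.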

src/phase2.js (deleted)

@@ -1,3 +0,0 @@
-module.exports.new = require("./phase2_new.js");

src/powersoftau.js

@@ -1,10 +1,11 @@
 module.exports.newAccumulator = require("./powersoftau_new");
-module.exports.exportChallange = require("./powersoftau_export");
+module.exports.exportChallange = require("./powersoftau_exportchallange");
 module.exports.challangeContribute = require("./powersoftau_challangecontribute");
-module.exports.impoertResponse = require("./powersoftau_import");
+module.exports.importResponse = require("./powersoftau_import");
 module.exports.verify = require("./powersoftau_verify");
 module.exports.challangeContribute = require("./powersoftau_challangecontribute");
 module.exports.beacon = require("./powersoftau_beacon");
 module.exports.contribute = require("./powersoftau_contribute");
 module.exports.preparePhase2 = require("./powersoftau_preparephase2");
+module.exports.exportJson = require("./powersoftau_exportjson");

src/powersoftau_beacon.js

@@ -1,6 +1,7 @@
 const Blake2b = require("blake2b-wasm");
 const utils = require("./powersoftau_utils");
-const applyKey = require("./mpc_applykey");
+const misc = require("./misc");
+const binFileUtils = require("./binfileutils");
 function hex2ByteArray(s) {
     return new Uint8Array(s.match(/[\da-f]{2}/gi).map(function (h) {
@@ -27,12 +28,19 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
         return false;
     }
     await Blake2b.ready();
-    const {fd: fdOld, sections} = await utils.readBinFile(oldPtauFilename, "ptau", 1);
-    const {curve, power} = await utils.readPTauHeader(fdOld, sections);
+    const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
+    const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);
+    if (power != ceremonyPower) {
+        throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
+    }
+    if (sections[12]) {
+        console.log("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
+    }
     const contributions = await utils.readContributions(fdOld, curve, sections);
-    const currentContribution = {
+    const curContribution = {
         name: name,
         type: 1, // Beacon
         numIterationsExp: numIterationsExp,
@@ -44,124 +52,130 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
     if (contributions.length>0) {
         lastChallangeHash = contributions[contributions.length-1].nextChallange;
     } else {
-        lastChallangeHash = utils.calculateFirstChallangeHash(curve, power);
+        lastChallangeHash = utils.calculateFirstChallangeHash(curve, power, verbose);
     }
-    currentContribution.key = utils.keyFromBeacon(curve, lastChallangeHash, beaconHash, numIterationsExp);
-    const fdNew = await utils.createBinFile(newPTauFilename, "ptau", 1, 7);
-    await utils.writePTauHeader(fdNew, curve, power);
-    const newChallangeHasher = new Blake2b(64);
-    newChallangeHasher.update(lastChallangeHash);
+    curContribution.key = utils.keyFromBeacon(curve, lastChallangeHash, beaconHash, numIterationsExp);
     const responseHasher = new Blake2b(64);
     responseHasher.update(lastChallangeHash);
-    currentContribution.tauG1 = (await applyKey({
-        fdFrom: fdOld,
-        sections,
-        curve,
-        fdTo: fdNew,
-        sectionId: 2,
-        NPoints: (1 << power) * 2 -1,
-        G: "G1",
-        first: curve.Fr.one,
-        inc: currentContribution.key.tau.prvKey,
-        newChallangeHasher,
-        responseHasher,
-        returnPoints: [1],
-        sectionName: "tauG1",
-        verbose
-    }))[0];
-    currentContribution.tauG2 = (await applyKey({
-        fdFrom: fdOld,
-        sections,
-        curve,
-        fdTo: fdNew,
-        sectionId: 3,
-        NPoints: 1 << power,
-        G: "G2",
-        first: curve.Fr.one,
-        inc: currentContribution.key.tau.prvKey,
-        newChallangeHasher,
-        responseHasher,
-        returnPoints: [1],
-        sectionName: "tauG2",
-        verbose
-    }))[0];
-    currentContribution.alphaG1 = (await applyKey({
-        fdFrom: fdOld,
-        sections,
-        curve,
-        fdTo: fdNew,
-        sectionId: 4,
-        NPoints: 1 << power,
-        G: "G1",
-        first: currentContribution.key.alpha.prvKey,
-        inc: currentContribution.key.tau.prvKey,
-        newChallangeHasher,
-        responseHasher,
-        returnPoints: [0],
-        sectionName: "alphaTauG1",
-        verbose
-    }))[0];
-    currentContribution.betaG1 = (await applyKey({
-        fdFrom: fdOld,
-        sections,
-        curve,
-        fdTo: fdNew,
-        sectionId: 5,
-        NPoints: 1 << power,
-        G: "G1",
-        first: currentContribution.key.beta.prvKey,
-        inc: currentContribution.key.tau.prvKey,
-        newChallangeHasher,
-        responseHasher,
-        returnPoints: [0],
-        sectionName: "betaTauG1",
-        verbose
-    }))[0];
-    currentContribution.betaG2 = (await applyKey({
-        fdFrom: fdOld,
-        sections,
-        curve,
-        fdTo: fdNew,
-        sectionId: 6,
-        NPoints: 1,
-        G: "G2",
-        first: currentContribution.key.beta.prvKey,
-        inc: currentContribution.key.tau.prvKey,
-        newChallangeHasher,
-        responseHasher,
-        returnPoints: [0],
-        sectionName: "betaG2",
-        verbose
-    }))[0];
-    currentContribution.nextChallange = newChallangeHasher.digest();
-    currentContribution.partialHash = responseHasher.getPartialHash();
+    const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 7);
+    await utils.writePTauHeader(fdNew, curve, power);
+    const startSections = [];
+    let firstPoints;
+    firstPoints = await processSection(2, "G1", (1<<power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1" );
+    curContribution.tauG1 = firstPoints[1];
+    firstPoints = await processSection(3, "G2", (1<<power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2" );
+    curContribution.tauG2 = firstPoints[1];
+    firstPoints = await processSection(4, "G1", (1<<power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1" );
+    curContribution.alphaG1 = firstPoints[0];
+    firstPoints = await processSection(5, "G1", (1<<power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1" );
+    curContribution.betaG1 = firstPoints[0];
+    firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2" );
+    curContribution.betaG2 = firstPoints[0];
+    curContribution.partialHash = responseHasher.getPartialHash();
     const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
-    utils.toPtauPubKeyRpr(buffKey, 0, curve, currentContribution.key, false);
+    utils.toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);
     responseHasher.update(new Uint8Array(buffKey));
     const hashResponse = responseHasher.digest();
     console.log("Contribution Response Hash imported: ");
-    console.log(utils.formatHash(hashResponse));
-    contributions.push(currentContribution);
+    console.log(misc.formatHash(hashResponse));
+    const nextChallangeHasher = new Blake2b(64);
+    nextChallangeHasher.update(hashResponse);
+    await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1");
+    await hashSection(fdNew, "G2", 3, (1 << power) , "tauG2");
+    await hashSection(fdNew, "G1", 4, (1 << power) , "alphaTauG1");
+    await hashSection(fdNew, "G1", 5, (1 << power) , "betaTauG1");
+    await hashSection(fdNew, "G2", 6, 1 , "betaG2");
+    curContribution.nextChallange = nextChallangeHasher.digest();
+    console.log("Next Challange Hash: ");
+    console.log(misc.formatHash(curContribution.nextChallange));
+    contributions.push(curContribution);
     await utils.writeContributions(fdNew, curve, contributions);
     await fdOld.close();
     await fdNew.close();
+    return;
+    async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) {
+        const res = [];
+        fdOld.pos = sections[sectionId][0].p;
+        await binFileUtils.startWriteSection(fdNew, sectionId);
+        startSections[sectionId] = fdNew.pos;
+        const G = curve[groupName];
+        const sG = G.F.n8*2;
+        const chunkSize = Math.floor((1<<20) / sG);   // 128Mb chunks
+        let t = first;
+        for (let i=0 ; i<NPoints ; i+= chunkSize) {
+            if ((verbose)&&i) console.log(`${sectionName}: ` + i);
+            const n= Math.min(NPoints-i, chunkSize );
+            const buffIn = await fdOld.read(n * sG);
+            const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
+            /* Code to test the case where we don't have the 2^m-2 component
+            if (sectionName== "tauG1") {
+                const bz = new Uint8Array(64);
+                buffOutLEM.set(bz, 64*((1 << power) - 1 ));
+            }
+            */
+            const promiseWrite = fdNew.write(buffOutLEM);
+            const buffOutC = await G.batchLEMtoC(buffOutLEM);
+            responseHasher.update(buffOutC);
+            await promiseWrite;
+            if (i==0)   // Return the 2 first points.
+                for (let j=0; j<Math.min(2, NPoints); j++)
+                    res.push(G.fromRprLEM(buffOutLEM, j*sG));
+            t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
+        }
+        await binFileUtils.endWriteSection(fdNew);
+        return res;
+    }
+    async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName) {
+        const G = curve[groupName];
+        const sG = G.F.n8*2;
+        const nPointsChunk = Math.floor((1<<24)/sG);
+        const oldPos = fdTo.pos;
+        fdTo.pos = startSections[sectionId];
+        for (let i=0; i< nPoints; i += nPointsChunk) {
+            if ((verbose)&&i) console.log(`Hashing ${sectionName}: ` + i);
+            const n = Math.min(nPoints-i, nPointsChunk);
+            const buffLEM = await fdTo.read(n * sG);
+            const buffU = await G.batchLEMtoU(buffLEM);
+            nextChallangeHasher.update(buffU);
+        }
+        fdTo.pos = oldPos;
+    }
 }
 module.exports = beacon;
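
The rewritten beacon mirrors the flow in powersoftau_contribute: processSection streams each section once, applying the key while hashing the compressed output into the response, and hashSection then re-reads what was written to build the next challenge. Since batchApplyKey(buffIn, t, inc) scales the j-th point of a chunk by $t \cdot inc^{j}$ (consistent with t advancing by $inc^{n}$ between chunks), the i-th point of a section is multiplied by $first \cdot inc^{i}$ overall; for tauG1 ($first = 1$, $inc = \tau$):

$$P_i \;\mapsto\; [\tau^{i}]\,P_i, \qquad i = 0,\dots,2^{power+1}-2,$$

and for alphaTauG1 ($first = \alpha$, $inc = \tau$), $P_i \mapsto [\alpha\,\tau^{i}]\,P_i$.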

src/powersoftau_challangecontribute.js

@@ -24,7 +24,7 @@ const crypto = require("crypto");
 const ChaCha = require("ffjavascript").ChaCha;
 const fs = require("fs");
 const utils = require("./powersoftau_utils");
+const misc = require("./misc");
 const keyPair = require("./keypair");
@@ -78,11 +78,11 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
     const claimedHash = await fdFrom.read(64, 0);
     console.log("Claimed Previus Challange Hash: ");
-    console.log(utils.formatHash(claimedHash));
+    console.log(misc.formatHash(claimedHash));
     const challangeHash = challangeHasher.digest();
     console.log("Current Challange Hash: ");
-    console.log(utils.formatHash(challangeHash));
+    console.log(misc.formatHash(challangeHash));
     const hasher = Blake2b(64);
@@ -139,7 +139,7 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
     responseHasher.update(buffKey);
     const responseHash = responseHasher.digest();
     console.log("Contribution Response Hash: ");
-    console.log(utils.formatHash(responseHash));
+    console.log(misc.formatHash(responseHash));
     await fdTo.close();
     await fdFrom.close();

src/powersoftau_contribute.js

@@ -12,7 +12,7 @@ const crypto = require("crypto");
 const keyPair = require("./keypair");
 const readline = require("readline");
 const binFileUtils = require("./binfileutils");
+const misc = require("./misc");
 const rl = readline.createInterface({
     input: process.stdin,
@@ -34,7 +34,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
         throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
     }
     if (sections[12]) {
-        console.log("WARNING: Contributing into a fle that has phase2 calculated. You will have to prepare phase2 again.");
+        console.log("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
     }
     const contributions = await utils.readContributions(fdOld, curve, sections);
     const curContribution = {
@@ -98,7 +98,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
     const hashResponse = responseHasher.digest();
     console.log("Contribution Response Hash imported: ");
-    console.log(utils.formatHash(hashResponse));
+    console.log(misc.formatHash(hashResponse));
     const nextChallangeHasher = new Blake2b(64);
     nextChallangeHasher.update(hashResponse);
@@ -112,7 +112,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
     curContribution.nextChallange = nextChallangeHasher.digest();
     console.log("Next Challange Hash: ");
-    console.log(utils.formatHash(curContribution.nextChallange));
+    console.log(misc.formatHash(curContribution.nextChallange));
     contributions.push(curContribution);

src/powersoftau_exportchallange.js

@@ -10,6 +10,7 @@ const fastFile = require("fastfile");
 const Blake2b = require("blake2b-wasm");
 const utils = require("./powersoftau_utils");
 const binFileUtils = require("./binfileutils");
+const misc = require("./misc");
 async function exportChallange(pTauFilename, challangeFilename, verbose) {
     await Blake2b.ready();
@@ -28,10 +29,10 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
     }
     console.log("Last Response Hash: ");
-    console.log(utils.formatHash(lastResponseHash));
+    console.log(misc.formatHash(lastResponseHash));
     console.log("New Challange Hash: ");
-    console.log(utils.formatHash(curChallangeHash));
+    console.log(misc.formatHash(curChallangeHash));
     const fdTo = await fastFile.createOverride(challangeFilename);
@@ -51,9 +52,9 @@ async function exportChallange(pTauFilename, challangeFilename, verbose) {
     const calcCurChallangeHash = toHash.digest();
-    if (!utils.hashIsEqual (curChallangeHash, calcCurChallangeHash)) {
+    if (!misc.hashIsEqual (curChallangeHash, calcCurChallangeHash)) {
         console.log("Calc Curret Challange Hash: ");
-        console.log(utils.formatHash(calcCurChallangeHash));
+        console.log(misc.formatHash(calcCurChallangeHash));
         throw new Error("PTau file is corrupted. Calculated new challange hash does not match with the eclared one");
     }

src/powersoftau_exportjson.js (new file, 73 lines)

@@ -0,0 +1,73 @@
+const utils = require("./powersoftau_utils");
+const binFileUtils = require("./binfileutils");
+const {stringifyBigInts} = require("ffjavascript").utils;
+const fs = require("fs");
+async function exportJson(pTauFilename, jsonFileName, verbose) {
+    const {fd, sections} = await binFileUtils.readBinFile(pTauFilename, "ptau", 1);
+    const {curve, power} = await utils.readPTauHeader(fd, sections);
+    const pTau = {};
+    pTau.q = curve.q;
+    pTau.power = power;
+    pTau.contributions = await utils.readContributions(fd, curve, sections);
+    pTau.tauG1 = await exportSection(2, "G1", (1 << power)*2 -1, "tauG1");
+    pTau.tauG2 = await exportSection(3, "G2", (1 << power), "tauG2");
+    pTau.alphaTauG1 = await exportSection(4, "G1", (1 << power), "alphaTauG1");
+    pTau.betaTauG1 = await exportSection(5, "G1", (1 << power), "betaTauG1");
+    pTau.betaG2 = await exportSection(6, "G2", 1, "betaG2");
+    pTau.lTauG1 = await exportLagrange(12, "G1", "lTauG1");
+    pTau.lTauG2 = await exportLagrange(13, "G2", "lTauG2");
+    pTau.lAlphaTauG1 = await exportLagrange(14, "G1", "lAlphaTauG1");
+    pTau.lBetaTauG1 = await exportLagrange(15, "G1", "lBetaTauG1");
+    await fd.close();
+    const S = JSON.stringify(stringifyBigInts(pTau), null, 1);
+    await fs.promises.writeFile(jsonFileName, S);
+    async function exportSection(sectionId, groupName, nPoints, sectionName) {
+        const G = curve[groupName];
+        const sG = G.F.n8*2;
+        const res = [];
+        await binFileUtils.startReadUniqueSection(fd, sections, sectionId);
+        for (let i=0; i< nPoints; i++) {
+            if ((verbose)&&i&&(i%10000 == 0)) console.log(`${sectionName}: ` + i);
+            const buff = await fd.read(sG);
+            res.push(G.fromRprLEM(buff, 0));
+        }
+        await binFileUtils.endReadSection(fd);
+        return res;
+    }
+    async function exportLagrange(sectionId, groupName, sectionName) {
+        const G = curve[groupName];
+        const sG = G.F.n8*2;
+        const res = [];
+        await binFileUtils.startReadUniqueSection(fd, sections, sectionId);
+        for (let p=0; p<=power; p++) {
+            if (verbose) console.log(`${sectionName}: Power: ${p}`);
+            res[p] = [];
+            const nPoints = (1<<p);
+            for (let i=0; i<nPoints; i++) {
+                if ((verbose)&&i&&(i%10000 == 0)) console.log(`${sectionName}: ${i}/${nPoints}`);
+                const buff = await fd.read(sG);
+                res[p].push(G.fromRprLEM(buff, 0));
+            }
+        }
+        await binFileUtils.endReadSection(fd);
+        return res;
+    }
+}
+module.exports = exportJson;
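
A usage sketch mirroring the powersOfTawExportJson wrapper in cli.js; file names are placeholders:

// Sketch: dump every section of a ptau file to JSON. Note sections 12-15
// (the Lagrange sections) are written by preparePhase2, so the input must
// be a prepared ptau file. File names are placeholders.
const powersOfTaw = require("./src/powersoftau");

async function dumpPtau() {
    await powersOfTaw.exportJson("powersoftau_0000.ptau", "powersoftau_0000.json", true);
}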

src/powersoftau_import.js

@@ -4,6 +4,7 @@ const Blake2b = require("blake2b-wasm");
 const fs = require("fs");
 const utils = require("./powersoftau_utils");
 const binFileUtils = require("./binfileutils");
+const misc = require("./misc");
 async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
@@ -45,7 +46,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
     const fdResponse = await fastFile.readExisting(contributionFilename);
     const contributionPreviousHash = await fdResponse.read(64);
-    assert(utils.hashIsEqual(contributionPreviousHash,lastChallangeHash),
+    assert(misc.hashIsEqual(contributionPreviousHash,lastChallangeHash),
         "Wrong contribution. this contribution is not based on the previus hash");
     const hasherResponse = new Blake2b(64);
@@ -75,7 +76,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
     const hashResponse = hasherResponse.digest();
     console.log("Contribution Response Hash imported: ");
-    console.log(utils.formatHash(hashResponse));
+    console.log(misc.formatHash(hashResponse));
     const nextChallangeHasher = new Blake2b(64);
     nextChallangeHasher.update(hashResponse);
@@ -89,7 +90,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
     currentContribution.nextChallange = nextChallangeHasher.digest();
     console.log("Next Challange Hash: ");
-    console.log(utils.formatHash(currentContribution.nextChallange));
+    console.log(misc.formatHash(currentContribution.nextChallange));
     contributions.push(currentContribution);

src/powersoftau_new.js

@@ -11,7 +11,7 @@ tauG2(3)
 {1<<power}[
     G2, tau*G2, tau^2 * G2, ...
 ]
-alfaTauG1(4)
+alphaTauG1(4)
 {1<<power}[
     alpha*G1, alpha*tau*G1, alpha*tau^2*G1,....
 ]
@@ -35,9 +35,9 @@ contributions(7)
     tau_g1s
     tau_g1sx
     tau_g2spx
-    alfa_g1s
-    alfa_g1sx
-    alfa_g1spx
+    alpha_g1s
+    alpha_g1sx
+    alpha_g1spx
     beta_g1s
     beta_g1sx
     beta_g1spx
@@ -50,6 +50,7 @@ const ptauUtils = require("./powersoftau_utils");
 const binFileUtils = require("./binfileutils");
 const utils = require("./powersoftau_utils");
 const Blake2b = require("blake2b-wasm");
+const misc = require("./misc");
 async function newAccumulator(curve, power, fileName, verbose) {
@@ -84,13 +85,13 @@ async function newAccumulator(curve, power, fileName, verbose) {
     }
     await binFileUtils.endWriteSection(fd);
-    // Write alfaTauG1
+    // Write alphaTauG1
     ///////////
     await binFileUtils.startWriteSection(fd, 4);
     const nAlfaTauG1 = (1 << power);
     for (let i=0; i< nAlfaTauG1; i++) {
         await fd.write(buffG1);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("alfaTauG1: " + i);
+        if ((verbose)&&((i%100000) == 0)&&i) console.log("alphaTauG1: " + i);
     }
     await binFileUtils.endWriteSection(fd);
@@ -121,10 +122,10 @@ async function newAccumulator(curve, power, fileName, verbose) {
     const firstChallangeHash = utils.calculateFirstChallangeHash(curve, power, verbose);
     console.log("Blank Contribution Hash:");
-    console.log(utils.formatHash(Blake2b(64).digest()));
+    console.log(misc.formatHash(Blake2b(64).digest()));
     console.log("First Contribution Hash:");
-    console.log(utils.formatHash(firstChallangeHash));
+    console.log(misc.formatHash(firstChallangeHash));
 }

src/powersoftau_utils.js

@@ -297,32 +297,6 @@ async function writeContributions(fd, curve, contributions) {
     fd.pos = oldPos;
 }
-function formatHash(b) {
-    const a = new DataView(b.buffer);
-    let S = "";
-    for (let i=0; i<4; i++) {
-        if (i>0) S += "\n";
-        S += "\t\t";
-        for (let j=0; j<4; j++) {
-            if (j>0) S += " ";
-            S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0");
-        }
-    }
-    return S;
-}
-function hashIsEqual(h1, h2) {
-    if (h1.byteLength != h2.byteLength) return false;
-    var dv1 = new Int8Array(h1);
-    var dv2 = new Int8Array(h2);
-    for (var i = 0 ; i != h1.byteLength ; i++)
-    {
-        if (dv1[i] != dv2[i]) return false;
-    }
-    return true;
-}
 function calculateFirstChallangeHash(curve, power, verbose) {
     if (verbose) console.log("Calculating First Challange Hash");
@@ -405,10 +379,8 @@ module.exports.readPTauHeader = readPTauHeader;
 module.exports.writePTauHeader = writePTauHeader;
 module.exports.readPtauPubKey = readPtauPubKey;
 module.exports.writePtauPubKey = writePtauPubKey;
-module.exports.formatHash = formatHash;
 module.exports.readContributions = readContributions;
 module.exports.writeContributions = writeContributions;
-module.exports.hashIsEqual = hashIsEqual;
 module.exports.calculateFirstChallangeHash = calculateFirstChallangeHash;
 module.exports.toPtauPubKeyRpr = toPtauPubKeyRpr;
 module.exports.fromPtauPubKeyRpr = fromPtauPubKeyRpr;

src/powersoftau_verify.js

@@ -5,16 +5,8 @@ const assert = require("assert");
 const crypto = require("crypto");
 const binFileUtils = require("./binfileutils");
 const ChaCha = require("ffjavascript").ChaCha;
+const misc = require("./misc");
-async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
-    if (curve.G1.isZero(g1s)) return false;
-    if (curve.G1.isZero(g1sx)) return false;
-    if (curve.G2.isZero(g2s)) return false;
-    if (curve.G2.isZero(g2sx)) return false;
-    // return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s));
-    const res = await curve.pairingEq(g1s, g2sx, curve.G1.neg(g1sx), g2s);
-    return res;
-}
+const sameRatio = misc.sameRatio;
 async function verifyContribution(curve, cur, prev) {
     let sr;
@@ -235,14 +227,14 @@ async function verify(tauFilename, verbose) {
     const nextContributionHash = nextContributionHasher.digest();
     // Check the nextChallangeHash
-    if (!utils.hashIsEqual(nextContributionHash,curContr.nextChallange)) {
+    if (!misc.hashIsEqual(nextContributionHash,curContr.nextChallange)) {
         console.log("Hash of the values does not match the next challange of the last contributor in the contributions section");
         return false;
     }
     if (verbose) {
         console.log("Next challange hash: ");
-        console.log(utils.formatHash(nextContributionHash));
+        console.log(misc.formatHash(nextContributionHash));
     }
     // Verify Previous contributions
@@ -280,7 +272,7 @@ async function verify(tauFilename, verbose) {
     console.log("-----------------------------------------------------");
     console.log(`Contribution #${curContr.id}: ${curContr.name ||""}`);
     console.log("\tBased on challange");
-    console.log(utils.formatHash(prevContr.nextChallange));
+    console.log(misc.formatHash(prevContr.nextChallange));
     const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
     utils.toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false);
@@ -291,10 +283,10 @@ async function verify(tauFilename, verbose) {
     const responseHash = responseHasher.digest();
     console.log("\tResponse Hash");
-    console.log(utils.formatHash(responseHash));
+    console.log(misc.formatHash(responseHash));
     console.log("\tNext Challange");
-    console.log(utils.formatHash(curContr.nextChallange));
+    console.log(misc.formatHash(curContr.nextChallange));
 }
 async function processSectionBetaG2() {

@@ -70,7 +70,7 @@ module.exports = function genProof(vk_proof, witness, verbose) {
         if ((verbose)&&(s%1000 == 1)) console.log("C: ", s);
     }
-    proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alfa_1 );
+    proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alpha_1 );
     proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( vk_proof.vk_delta_1, r ));
     proof.pi_b = G2.add( proof.pi_b, vk_proof.vk_beta_2 );

@@ -73,10 +73,10 @@ module.exports = function genProof(vk_proof, witness) {
         proof.pi_c = G1.add( proof.pi_c, G1.mulScalar( vk_proof.C[s], witness[s]));
     }
-    proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alfa_1 );
+    proof.pi_a = G1.add( proof.pi_a, vk_proof.vk_alpha_1 );
     proof.pi_a = G1.add( proof.pi_a, G1.mulScalar( G1.g, r ));
-    piadelta = G1.add( piadelta, vk_proof.vk_alfadelta_1);
+    piadelta = G1.add( piadelta, vk_proof.vk_alphadelta_1);
     piadelta = G1.add( piadelta, G1.mulScalar( vk_proof.vk_delta_1, r ));
     proof.pi_b = G2.add( proof.pi_b, vk_proof.vk_beta_2 );

@@ -53,13 +53,13 @@ module.exports = function setup(circuit, verbose) {
         IC: setup.vk_proof.IC,
-        vk_alfa_1: setup.vk_proof.vk_alfa_1,
+        vk_alpha_1: setup.vk_proof.vk_alpha_1,
         vk_beta_2: setup.vk_proof.vk_beta_2,
         vk_gamma_2: setup.vk_proof.vk_gamma_2,
         vk_delta_2: setup.vk_proof.vk_delta_2,
-        vk_alfabeta_12: bn128.pairing( setup.vk_proof.vk_alfa_1 , setup.vk_proof.vk_beta_2 )
+        vk_alphabeta_12: bn128.pairing( setup.vk_proof.vk_alpha_1 , setup.vk_proof.vk_beta_2 )
     };
     return setup;
@@ -157,7 +157,7 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
     setup.vk_proof.C = new Array(circuit.nVars);
     setup.vk_proof.IC = new Array(circuit.nPubInputs + circuit.nOutputs + 1);
-    setup.toxic.kalfa = F.random();
+    setup.toxic.kalpha = F.random();
     setup.toxic.kbeta = F.random();
     setup.toxic.kgamma = F.random();
     setup.toxic.kdelta = F.random();
@@ -165,7 +165,7 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
     let invDelta = F.inv(setup.toxic.kdelta);
     let invGamma = F.inv(setup.toxic.kgamma);
-    setup.vk_proof.vk_alfa_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalfa));
+    setup.vk_proof.vk_alpha_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalpha));
     setup.vk_proof.vk_beta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kbeta));
     setup.vk_proof.vk_delta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kdelta));
@@ -199,7 +199,7 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
         F.add(
             F.add(
                 F.mul(v.a_t[s], setup.toxic.kbeta),
-                F.mul(v.b_t[s], setup.toxic.kalfa)),
+                F.mul(v.b_t[s], setup.toxic.kalpha)),
             v.c_t[s]));
     const IC = G1.mulScalar(G1.g, ps);
@@ -213,7 +213,7 @@ function calculateEncriptedValuesAtT(setup, circuit, verbose) {
         F.add(
             F.add(
                 F.mul(v.a_t[s], setup.toxic.kbeta),
-                F.mul(v.b_t[s], setup.toxic.kalfa)),
+                F.mul(v.b_t[s], setup.toxic.kalpha)),
             v.c_t[s]));
     const C = G1.mulScalar(G1.g, ps);
     setup.vk_proof.C[s]=C;

@@ -128,28 +128,28 @@ function calculateEncriptedValuesAtT(setup, circuit) {
     setup.vk_proof.C = new Array(circuit.nVars);
     setup.vk_verifier.IC = new Array(circuit.nPubInputs + circuit.nOutputs + 1);
-    setup.toxic.kalfa = F.random();
+    setup.toxic.kalpha = F.random();
     setup.toxic.kbeta = F.random();
     setup.toxic.kgamma = F.random();
     setup.toxic.kdelta = F.random();
     const gammaSquare = F.mul(setup.toxic.kgamma, setup.toxic.kgamma);
-    setup.vk_proof.vk_alfa_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalfa));
+    setup.vk_proof.vk_alpha_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalpha));
     setup.vk_proof.vk_beta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kbeta));
     setup.vk_proof.vk_delta_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kdelta));
-    setup.vk_proof.vk_alfadelta_1 = G1.affine(G1.mulScalar( G1.g, F.mul(setup.toxic.kalfa, setup.toxic.kdelta)));
+    setup.vk_proof.vk_alphadelta_1 = G1.affine(G1.mulScalar( G1.g, F.mul(setup.toxic.kalpha, setup.toxic.kdelta)));
     setup.vk_proof.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
-    setup.vk_verifier.vk_alfa_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalfa));
+    setup.vk_verifier.vk_alpha_1 = G1.affine(G1.mulScalar( G1.g, setup.toxic.kalpha));
     setup.vk_verifier.vk_beta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kbeta));
     setup.vk_verifier.vk_gamma_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kgamma));
     setup.vk_verifier.vk_delta_2 = G2.affine(G2.mulScalar( G2.g, setup.toxic.kdelta));
-    setup.vk_verifier.vk_alfabeta_12 = bn128.pairing( setup.vk_verifier.vk_alfa_1 , setup.vk_verifier.vk_beta_2 );
+    setup.vk_verifier.vk_alphabeta_12 = bn128.pairing( setup.vk_verifier.vk_alpha_1 , setup.vk_verifier.vk_beta_2 );
     for (let s=0; s<circuit.nVars; s++) {
@@ -181,7 +181,7 @@ function calculateEncriptedValuesAtT(setup, circuit) {
                     v.a_t[s]
                 ),
                 F.mul(
-                    setup.toxic.kalfa,
+                    setup.toxic.kalpha,
                     v.b_t[s]
                 )
             )
@@ -204,7 +204,7 @@ function calculateEncriptedValuesAtT(setup, circuit) {
                     v.a_t[s]
                 ),
                 F.mul(
-                    F.mul(setup.toxic.kalfa, setup.toxic.kgamma),
+                    F.mul(setup.toxic.kalpha, setup.toxic.kgamma),
                     v.b_t[s]
                 )
             )

src/soliditygenerator.js

@@ -69,9 +69,9 @@ function generateVerifier_groth16(verificationKey) {
     let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_groth16.sol"), "utf-8");
-    const vkalfa1_str = `${verificationKey.vk_alfa_1[0].toString()},`+
-        `${verificationKey.vk_alfa_1[1].toString()}`;
-    template = template.replace("<%vk_alfa1%>", vkalfa1_str);
+    const vkalpha1_str = `${verificationKey.vk_alpha_1[0].toString()},`+
+        `${verificationKey.vk_alpha_1[1].toString()}`;
+    template = template.replace("<%vk_alpha1%>", vkalpha1_str);
     const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
         `${verificationKey.vk_beta_2[0][0].toString()}], `+
@@ -112,9 +112,9 @@ function generateVerifier_kimleeoh(verificationKey) {
     let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_groth16.sol"), "utf-8");
-    const vkalfa1_str = `${verificationKey.vk_alfa_1[0].toString()},`+
-        `${verificationKey.vk_alfa_1[1].toString()}`;
-    template = template.replace("<%vk_alfa1%>", vkalfa1_str);
+    const vkalpha1_str = `${verificationKey.vk_alpha_1[0].toString()},`+
+        `${verificationKey.vk_alpha_1[1].toString()}`;
+    template = template.replace("<%vk_alpha1%>", vkalpha1_str);
     const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
         `${verificationKey.vk_beta_2[0][0].toString()}], `+

@@ -34,7 +34,7 @@ module.exports = function isValid(vk_verifier, proof, publicSignals) {
     if (! bn128.F12.eq(
         bn128.pairing( proof.pi_a , proof.pi_b ),
         bn128.F12.mul(
-            vk_verifier.vk_alfabeta_12,
+            vk_verifier.vk_alphabeta_12,
             bn128.F12.mul(
                 bn128.pairing( cpub , vk_verifier.vk_gamma_2 ),
                 bn128.pairing( proof.pi_c , vk_verifier.vk_delta_2 )
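
The renamed vk_alphabeta_12 is the cached left factor of the standard Groth16 check this function evaluates, with cpub the public-input combination:

$$e(\pi_A,\,\pi_B) \;=\; e(\alpha_1,\,\beta_2)\cdot e(\mathrm{cpub},\,\gamma_2)\cdot e(\pi_C,\,\delta_2), \qquad \mathrm{cpub} = \sum_i s_i\,IC_i,$$

so precomputing $e(\alpha_1,\beta_2)$ at setup time saves one pairing per verification.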

@@ -63,7 +63,7 @@ module.exports = function isValid(vk_verifier, proof, publicSignals) {
             G2.add(proof.pi_b, G2.mulScalar(vk_verifier.vk_delta_2, h2))
         ),
         bn128.F12.mul(
-            vk_verifier.vk_alfabeta_12,
+            vk_verifier.vk_alphabeta_12,
             bn128.F12.mul(
                 bn128.pairing( cpub , vk_verifier.vk_gamma_2 ),
                 bn128.pairing( proof.pi_c , G2.g )

src/zkey.js (new file, 10 lines)

@@ -0,0 +1,10 @@
+module.exports.new = require("./zkey_new.js");
+module.exports.exportBellman = require("./zkey_export_bellman.js");
+module.exports.importBellman = require("./zkey_import_bellman.js");
+module.exports.verify = require("./zkey_verify.js");
+module.exports.contribute = require("./zkey_contribute.js");
+module.exports.beacon = require("./zkey_beacon.js");
+module.exports.exportJson = require("./zkey_export_json.js");
+module.exports.utils = require("./zkey_utils.js");
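
A round-trip sketch of the Bellman interop this module exposes, assuming exportBellman and importBellman keep the argument orders of the cli.js wrappers above; file names are placeholders and the middle step is an external kobi/phase2 (Bellman) contributor:

// Sketch: zkey -> MPCParameters -> external contribution -> new zkey.
// Assumes (zkeyName, mpcparamsName, verbose) and
// (zkeyOld, mpcParams, zkeyNew, verbose) argument orders, as in cli.js.
const zkey = require("./src/zkey");

async function bellmanRoundTrip() {
    await zkey.exportBellman("circuit_0000.zkey", "circuit.mpcparams", false);
    // ... run the external Bellman/kobi-phase2 contributor on circuit.mpcparams ...
    await zkey.importBellman("circuit_0000.zkey", "circuit.mpcparams", "circuit_0001.zkey", false);
}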

src/zkey_beacon.js (new file, 5 lines)

@@ -0,0 +1,5 @@
+async function beacon(oldZKeyFileName, newZKeyFileName, name, numIterationsExp, beaconHashStr, verbose) {
+}

src/zkey_contribute.js (new file, 4 lines)

@@ -0,0 +1,4 @@
+module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, verbose) {
+};

137
src/zkey_export_bellman.js Normal file

@@ -0,0 +1,137 @@
const binFileUtils = require("./binfileutils");
const zkeyUtils = require("./zkey_utils");
const fastFile = require("fastfile");
const getCurve = require("./curves").getCurveFromQ;
module.exports = async function phase2exportMPCParams(zkeyName, mpcparamsName, verbose) {
const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey, "groth16");
const curve = getCurve(zkey.q);
await curve.loadEngine();
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
const mpcParams = await zkeyUtils.readMPCParams(fdZKey, curve, sectionsZKey);
const fdMPCParams = await fastFile.createOverride(mpcparamsName);
/////////////////////
// Verification Key Section
/////////////////////
await writeG1(zkey.vk_alpha_1);
await writeG1(zkey.vk_beta_1);
await writeG2(zkey.vk_beta_2);
await writeG2(zkey.vk_gamma_2);
await writeG1(zkey.vk_delta_1);
await writeG2(zkey.vk_delta_2);
// IC
let buffBasesIC;
buffBasesIC = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 3);
buffBasesIC = await curve.G1.batchLEMtoU(buffBasesIC);
await writePointArray("G1", buffBasesIC);
/////////////////////
// h Section
/////////////////////
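// Note: the zkey stores the H bases over a shifted evaluation (Lagrange) basis,
// while Bellman expects plain powers of tau. The fft below moves the bases back
// to the tau basis, and batchApplyKey scales point i by (-2)*w^(i*(power+1));
// zkey_import_bellman.js appears to apply the exact inverse on the way back.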
const buffBasesH_Lodd = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 9);
let buffBasesH_Tau;
buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", verbose ? console.log : undefined);
buffBasesH_Tau = await curve.G1.batchApplyKey(buffBasesH_Tau, curve.Fr.neg(curve.Fr.e(2)), curve.PFr.w[zkey.power+1], "jacobian", "affine", verbose ? console.log : undefined);
// Remove the last element. (The degree of H will always be m-2.)
buffBasesH_Tau = buffBasesH_Tau.slice(0, buffBasesH_Tau.byteLength - sG1);
buffBasesH_Tau = await curve.G1.batchLEMtoU(buffBasesH_Tau);
await writePointArray("G1", buffBasesH_Tau);
/////////////////////
// C section (l section in some notations)
/////////////////////
let buffBasesC;
buffBasesC = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 8);
buffBasesC = await curve.G1.batchLEMtoU(buffBasesC);
await writePointArray("G1", buffBasesC);
/////////////////////
// A Section
/////////////////////
let buffBasesA;
buffBasesA = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 5);
buffBasesA = await curve.G1.batchLEMtoU(buffBasesA);
await writePointArray("G1", buffBasesA);
/////////////////////
// B1 Section
/////////////////////
let buffBasesB1;
buffBasesB1 = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 6);
buffBasesB1 = await curve.G1.batchLEMtoU(buffBasesB1);
await writePointArray("G1", buffBasesB1);
/////////////////////
// B2 Section
/////////////////////
let buffBasesB2;
buffBasesB2 = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 7);
buffBasesB2 = await curve.G2.batchLEMtoU(buffBasesB2);
await writePointArray("G2", buffBasesB2);
await fdMPCParams.write(mpcParams.csHash);
await writeU32(mpcParams.contributions.length);
for (let i=0; i<mpcParams.contributions.length; i++) {
const c = mpcParams.contributions[i];
await writeG1(c.deltaAfter);
await writeG1(c.delta.g1_s);
await writeG1(c.delta.g1_sx);
await writeG2(c.delta.g2_spx);
await fdMPCParams.write(c.transcript);
}
await fdZKey.close();
await fdMPCParams.close();
async function writeG1(P) {
const buff = new Uint8Array(sG1);
curve.G1.toRprBE(buff, 0, P);
await fdMPCParams.write(buff);
}
async function writeG2(P) {
const buff = new Uint8Array(sG2);
curve.G2.toRprBE(buff, 0, P);
await fdMPCParams.write(buff);
}
async function writePointArray(groupName, buff) {
let sG;
if (groupName == "G1") {
sG = sG1;
} else {
sG = sG2;
}
const buffSize = new Uint8Array(4);
const buffSizeV = new DataView(buffSize.buffer);
buffSizeV.setUint32(0, buff.byteLength / sG, false);
await fdMPCParams.write(buffSize);
await fdMPCParams.write(buff);
}
async function writeU32(n) {
const buffSize = new Uint8Array(4);
const buffSizeV = new DataView(buffSize.buffer);
buffSizeV.setUint32(0, n, false);
await fdMPCParams.write(buffSize);
}
};
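A hedged usage sketch for the exporter above (file names hypothetical):

const exportBellman = require("./src/zkey_export_bellman.js");
exportBellman("circuit_0001.zkey", "circuit.mpcparams", /*verbose*/ true)
    .then(() => console.log("MPC params exported"));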

12
src/zkey_export_json.js Normal file

@@ -0,0 +1,12 @@
const {stringifyBigInts} = require("ffjavascript").utils;
const fs = require("fs");
const readZKey = require("./zkey_utils").read;
module.exports = zkeyExportJson;
async function zkeyExportJson(zkeyFileName, jsonFileName, verbose) {
const zKey = await readZKey(zkeyFileName);
const S = JSON.stringify(stringifyBigInts(zKey), null, 1);
await fs.promises.writeFile(jsonFileName, S);
}
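Since everything goes through stringifyBigInts, the resulting JSON holds plain decimal strings, so it is safe to diff or inspect by hand. A usage sketch (file names hypothetical):

const zkeyExportJson = require("./src/zkey_export_json.js");
zkeyExportJson("circuit_0001.zkey", "circuit_0001.json")
    .then(() => console.log("done"));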

181
src/zkey_import_bellman.js Normal file

@@ -0,0 +1,181 @@
const zkeyUtils = require("./zkey_utils");
const binFileUtils = require("./binfileutils");
const fastFile = require("fastfile");
const getCurve = require("./curves").getCurveFromQ;
const misc = require("./misc");
module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, verbose) {
const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
const zkeyHeader = await zkeyUtils.readHeader(fdZKeyOld, sectionsZKeyOld, "groth16");
const curve = getCurve(zkeyHeader.q);
await curve.loadEngine();
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
const oldMPCParams = await zkeyUtils.readMPCParams(fdZKeyOld, curve, sectionsZKeyOld);
const newMPCParams = {};
const fdMPCParams = await fastFile.readExisting(mpcparamsName);
fdMPCParams.pos =
sG1*3 + sG2*3 + // vKey
8 + sG1*zkeyHeader.nVars + // IC + C
4 + sG1*(zkeyHeader.domainSize-1) + // H
4 + sG1*zkeyHeader.nVars + // A
4 + sG1*zkeyHeader.nVars + // B1
4 + sG2*zkeyHeader.nVars; // B2
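// The seek above jumps over the parameter data to where the contributions
// start: vk (3*G1 + 3*G2), IC and C (two u32 counts, nVars G1 points total),
// then H, A, B1 and B2, each prefixed by a u32 count. What follows is the
// 64-byte csHash and the contribution list read below.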
// csHash
newMPCParams.csHash = await fdMPCParams.read(64);
const nContributions = await fdMPCParams.readUBE32();
newMPCParams.contributions = [];
for (let i=0; i<nContributions; i++) {
const c = { delta:{} };
c.deltaAfter = await readG1(fdMPCParams);
c.delta.g1_s = await readG1(fdMPCParams);
c.delta.g1_sx = await readG1(fdMPCParams);
c.delta.g2_spx = await readG2(fdMPCParams);
c.transcript = await fdMPCParams.read(64);
newMPCParams.contributions.push(c);
}
if (!misc.hashIsEqual(newMPCParams.csHash, oldMPCParams.csHash)) {
console.log("Hash of the original circuit does not match with the MPC one");
return false;
}
if (oldMPCParams.contributions.length > newMPCParams.contributions.length) {
console.log("The impoerted file does not include new contributions");
return false;
}
for (let i=0; i<oldMPCParams.contributions.length; i++) {
if (!contributionIsEqual(oldMPCParams.contributions[i], newMPCParams.contributions[i])) {
console.log(`Previous contribution ${i} does not match`);
return false;
}
}
const fdZKeyNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);
fdMPCParams.pos = 0;
// Header
fdMPCParams.pos += sG1; // ignore alpha1 (keep original)
fdMPCParams.pos += sG1; // ignore beta1
fdMPCParams.pos += sG2; // ignore beta2
fdMPCParams.pos += sG2; // ignore gamma2
zkeyHeader.vk_delta_1 = await readG1(fdMPCParams);
zkeyHeader.vk_delta_2 = await readG2(fdMPCParams);
await zkeyUtils.writeHeader(fdZKeyNew, zkeyHeader);
// IC (Keep original)
const nIC = await fdMPCParams.readUBE32();
if (nIC != zkeyHeader.nPublic +1) {
console.log("Invalid number of points in IC");
await fdZKeyNew.discard();
return false;
}
fdMPCParams.pos += sG1*(zkeyHeader.nPublic+1);
await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 3);
// Coeffs (Keep original)
await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 4);
// H Section
const nH = await fdMPCParams.readUBE32();
if (nH != zkeyHeader.domainSize-1) {
console.log("Invalid number of points in H");
await fdZKeyNew.discard();
return false;
}
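// Inverse of the transform in zkey_export_bellman.js: scale point i by
// (-1/2)*w^(-i*(power+1)) to undo the export's (-2)*w^(i*(power+1)) key,
// then ifft back from the tau-power basis to the basis the zkey stores.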
let buffH;
const buffTauU = await fdMPCParams.read(sG1*(zkeyHeader.domainSize-1));
const buffTauLEM = await curve.G1.batchUtoLEM(buffTauU);
buffH = new Uint8Array(zkeyHeader.domainSize*sG1);
buffH.set(buffTauLEM); // Leave the last one as zero.
const n2Inv = curve.Fr.neg(curve.Fr.inv(curve.Fr.e(2)));
const wInv = curve.Fr.inv(curve.PFr.w[zkeyHeader.power+1]);
buffH = await curve.G1.batchApplyKey(buffH, n2Inv, wInv, "affine", "jacobian", verbose ? console.log : undefined);
buffH = await curve.G1.ifft(buffH, "jacobian", "affine", verbose ? console.log : undefined);
await binFileUtils.startWriteSection(fdZKeyNew, 9);
await fdZKeyNew.write(buffH);
await binFileUtils.endWriteSection(fdZKeyNew);
// C Section (L section)
const nL = await fdMPCParams.readUBE32();
if (nL != (zkeyHeader.nVars-zkeyHeader.nPublic-1)) {
console.log("Invalid number of points in L");
await fdZKeyNew.discard();
return false;
}
let buffL;
buffL = await fdMPCParams.read(sG1*(zkeyHeader.nVars-zkeyHeader.nPublic-1));
buffL = await curve.G1.batchUtoLEM(buffL);
await binFileUtils.startWriteSection(fdZKeyNew, 8);
await fdZKeyNew.write(buffL);
await binFileUtils.endWriteSection(fdZKeyNew);
// A Section
const nA = await fdMPCParams.readUBE32();
if (nA != zkeyHeader.nVars) {
console.log("Invalid number of points in A");
await fdZKeyNew.discard();
return false;
}
fdMPCParams.pos += sG1*(zkeyHeader.nVars);
await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 5);
// B1 Section
const nB1 = await fdMPCParams.readUBE32();
if (nB1 != zkeyHeader.nVars) {
console.log("Invalid number of points in B1");
await fdZKeyNew.discard();
return false;
}
fdMPCParams.pos += sG1*(zkeyHeader.nVars);
await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 6);
// B2 Section
const nB2 = await fdMPCParams.readUBE32();
if (nB2 != zkeyHeader.nVars) {
console.log("Invalid number of points in B2");
await fdZKeyNew.discard();
return false;
}
fdMPCParams.pos += sG2*(zkeyHeader.nVars);
await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 7);
await zkeyUtils.writeMPCParams(fdZKeyNew, curve, newMPCParams);
await fdMPCParams.close();
await fdZKeyNew.close();
await fdZKeyOld.close();
return true;
async function readG1(fd) {
const buff = await fd.read(curve.G1.F.n8*2);
return curve.G1.fromRprUncompressed(buff, 0);
}
async function readG2(fd) {
const buff = await fd.read(curve.G2.F.n8*2);
return curve.G2.fromRprUncompressed(buff, 0);
}
function contributionIsEqual(c1, c2) {
if (!curve.G1.eq(c1.deltaAfter , c2.deltaAfter)) return false;
if (!curve.G1.eq(c1.delta.g1_s , c2.delta.g1_s)) return false;
if (!curve.G1.eq(c1.delta.g1_sx , c2.delta.g1_sx)) return false;
if (!curve.G2.eq(c1.delta.g2_spx , c2.delta.g2_spx)) return false;
if (!misc.hashIsEqual(c1.transcript, c2.transcript)) return false;
return true;
}
};
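A usage sketch for the importer (file names hypothetical; it resolves to false and discards the new zkey on any mismatch):

const importBellman = require("./src/zkey_import_bellman.js");
importBellman("circuit_0000.zkey", "circuit.mpcparams", "circuit_0001.zkey", true)
    .then((ok) => { if (!ok) process.exit(1); });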

@@ -5,9 +5,13 @@ const binFileUtils = require("./binfileutils");
const assert = require("assert");
const {log2} = require("./misc");
const Scalar = require("ffjavascript").Scalar;
+const Blake2b = require("blake2b-wasm");
+const misc = require("./misc");
module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose) {
+await Blake2b.ready();
+const csHasher = Blake2b(64);
const {fd: fdR1cs, sections: sectionsR1cs} = await binFileUtils.readBinFile(r1csName, "r1cs", 1);
const r1cs = await r1csFile.loadHeader(fdR1cs, sectionsR1cs);
@@ -17,7 +21,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
await curve.loadEngine();
-const fdZKey = await binFileUtils.createBinFile(zkeyName, "zkey", 1, 9);
+const fdZKey = await binFileUtils.createBinFile(zkeyName, "zkey", 1, 10);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
@@ -68,24 +72,39 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
await fdZKey.writeULE32(nPublic); // Total number of public vars (not including ONE)
await fdZKey.writeULE32(domainSize); // domainSize
-const bAlpha1 = await fdPTau.read(sG1, sectionsPTau[4][0].p);
+let bAlpha1;
+bAlpha1 = await fdPTau.read(sG1, sectionsPTau[4][0].p);
await fdZKey.write(bAlpha1);
+bAlpha1 = await curve.G1.batchLEMtoU(bAlpha1);
+csHasher.update(bAlpha1);
-const bBeta1 = await fdPTau.read(sG1, sectionsPTau[5][0].p);
+let bBeta1;
+bBeta1 = await fdPTau.read(sG1, sectionsPTau[5][0].p);
await fdZKey.write(bBeta1);
+bBeta1 = await curve.G1.batchLEMtoU(bBeta1);
+csHasher.update(bBeta1);
+let bBeta2;
+bBeta2 = await fdPTau.read(sG2, sectionsPTau[6][0].p);
+await fdZKey.write(bBeta2);
+bBeta2 = await curve.G2.batchLEMtoU(bBeta2);
+csHasher.update(bBeta2);
const bg1 = new Uint8Array(sG1);
curve.G1.toRprLEM(bg1, 0, curve.G1.g);
-await fdZKey.write(bg1); // delta1
-const bBeta2 = await fdPTau.read(sG2, sectionsPTau[6][0].p);
-await fdZKey.write(bBeta2);
const bg2 = new Uint8Array(sG2);
curve.G2.toRprLEM(bg2, 0, curve.G2.g);
-await fdZKey.write(bg2); // gamma2
-await fdZKey.write(bg2); // delta2
+const bg1U = new Uint8Array(sG1);
+curve.G1.toRprBE(bg1U, 0, curve.G1.g);
+const bg2U = new Uint8Array(sG2);
+curve.G2.toRprBE(bg2U, 0, curve.G2.g);
+await fdZKey.write(bg2); // gamma2
+await fdZKey.write(bg1); // delta1
+await fdZKey.write(bg2); // delta2
+csHasher.update(bg2U); // gamma2
+csHasher.update(bg1U); // delta1
+csHasher.update(bg2U); // delta2
await binFileUtils.endWriteSection(fdZKey);
@@ -208,10 +227,6 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
*/
await composeAndWritePoints(3, "G1", IC, "IC");
-await composeAndWritePoints(5, "G1", A, "A");
-await composeAndWritePoints(6, "G1", B1, "B1");
-await composeAndWritePoints(7, "G2", B2, "B2");
-await composeAndWritePoints(8, "G1", C, "C");
// Write Hs
await binFileUtils.startWriteSection(fdZKey, 9);
@@ -221,6 +236,23 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
await fdZKey.write(buff);
}
await binFileUtils.endWriteSection(fdZKey);
+await hashHPoints();
+await composeAndWritePoints(8, "G1", C, "C");
+await composeAndWritePoints(5, "G1", A, "A");
+await composeAndWritePoints(6, "G1", B1, "B1");
+await composeAndWritePoints(7, "G2", B2, "B2");
+const csHash = csHasher.digest();
+// Contributions section
+await binFileUtils.startWriteSection(fdZKey, 10);
+await fdZKey.write(csHash);
+await fdZKey.writeULE32(0);
+await binFileUtils.endWriteSection(fdZKey);
+console.log("Circuit hash: ");
+console.log(misc.formatHash(csHash));
await fdZKey.close();
await fdPTau.close();
@@ -239,6 +271,8 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
async function composeAndWritePoints(idSection, groupName, arr, sectionName) {
const CHUNK_SIZE= 1<<18;
+hashU32(arr.length);
await binFileUtils.startWriteSection(fdZKey, idSection);
for (let i=0; i<arr.length; i+= CHUNK_SIZE) {
@@ -254,6 +288,7 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
const concurrency= curve.engine.concurrency;
const nElementsPerThread = Math.floor(arr.length / concurrency);
const opPromises = [];
+const G = curve[groupName];
for (let i=0; i<concurrency; i++) {
let n;
if (i< concurrency-1) {
@@ -271,6 +306,8 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
for (let i=0; i<result.length; i++) {
await fdZKey.write(result[i][0]);
+const buff = await G.batchLEMtoU(result[i][0]);
+csHasher.update(buff);
}
}
@@ -357,6 +394,88 @@ module.exports = async function phase2new(r1csName, ptauName, zkeyName, verbose
}
async function hashHPoints() {
const CHUNK_SIZE = 1<<20;
hashU32(domainSize-1);
for (let i=0; i<domainSize-1; i+= CHUNK_SIZE) {
if (verbose) console.log(`Hashing H Points: ${i}/${domainSize-1}`);
const n = Math.min(domainSize-1 - i, CHUNK_SIZE);
await hashHPointsChunk(i, n);
}
}
async function hashHPointsChunk(offset, nPoints) {
const buff1 = await fdPTau.read(nPoints *sG1, sectionsPTau[2][0].p + (offset + domainSize)*sG1);
const buff2 = await fdPTau.read(nPoints *sG1, sectionsPTau[2][0].p + offset*sG1);
const concurrency= curve.engine.concurrency;
const nPointsPerThread = Math.floor(nPoints / concurrency);
const opPromises = [];
for (let i=0; i<concurrency; i++) {
let n;
if (i< concurrency-1) {
n = nPointsPerThread;
} else {
n = nPoints - i*nPointsPerThread;
}
if (n==0) continue;
const subBuff1 = buff1.slice(i*nPointsPerThread*sG1, (i*nPointsPerThread+n)*sG1);
const subBuff2 = buff2.slice(i*nPointsPerThread*sG1, (i*nPointsPerThread+n)*sG1);
opPromises.push(hashHPointsThread(subBuff1, subBuff2));
}
const result = await Promise.all(opPromises);
for (let i=0; i<result.length; i++) {
csHasher.update(result[i][0]);
}
}
async function hashHPointsThread(buff1, buff2) {
const nPoints = buff1.byteLength/sG1;
const sGmid = curve.G1.F.n8*3;
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: buff1});
task.push({cmd: "ALLOCSET", var: 1, buff: buff2});
task.push({cmd: "ALLOC", var: 2, len: nPoints*sGmid});
for (let i=0; i<nPoints; i++) {
task.push({
cmd: "CALL",
fnName: "g1m_subAffine",
params: [
{var: 0, offset: i*sG1},
{var: 1, offset: i*sG1},
{var: 2, offset: i*sGmid},
]
});
}
task.push({cmd: "CALL", fnName: "g1m_batchToAffine", params: [
{var: 2},
{val: nPoints},
{var: 2},
]});
task.push({cmd: "CALL", fnName: "g1m_batchLEMtoU", params: [
{var: 2},
{val: nPoints},
{var: 2},
]});
task.push({cmd: "GET", out: 0, var: 2, len: nPoints*sG1});
const res = await curve.engine.queueAction(task);
return res;
}
function hashU32(n) {
const buff = new Uint8Array(4);
const buffV = new DataView(buff.buffer);
buffV.setUint32(0, n, false);
csHasher.update(buff);
}
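// csHash therefore commits, in order, to: alpha1, beta1, beta2, gamma2,
// delta1, delta2 (big-endian, with the group generator standing in for the
// deltas), then the IC, H, C, A, B1 and B2 point arrays, each prefixed by a
// big-endian u32 count. This appears to mirror the layout that
// zkey_export_bellman.js writes, so the two hashes can be compared directly.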
};

@@ -1,9 +1,9 @@
// Format
// ======
-// Header
+// Header(1)
// Prover Type 1 Groth
-// HeaderGroth
+// HeaderGroth(2)
// n8q
// q
// n8r
@@ -11,36 +11,31 @@
// NVars
// NPub
// DomainSize (multiple of 2)
-// alfa1
+// alpha1
// beta1
// delta1
// beta2
// gamma2
// delta2
-// IC
-// PolA
-// PolB
-// PointsA
-// PointsB1
-// PointsB2
-// PointsC
-// PointsH
-// Contributions
+// IC(3)
+// Coefs(4)
+// PointsA(5)
+// PointsB1(6)
+// PointsB2(7)
+// PointsC(8)
+// PointsH(9)
+// Contributions(10)
const Scalar = require("ffjavascript").Scalar;
const F1Field = require("ffjavascript").F1Field;
const assert = require("assert");
const binFileUtils = require("./binfileutils");
-const bn128 = require("ffjavascript").bn128;
const getCurve = require("./curves").getCurveFromQ;
+const {log2} = require("./misc");
-async function writeZKey(fileName, zkey) {
-let curve = getCurve(zkey.q);
-const fd = await binFileUtils.createBinFile(fileName,"zkey", 1, 9);
+async function writeHeader(fd, zkey) {
// Write the header
///////////
@@ -51,14 +46,14 @@ async function writeZKey(fileName, zkey) {
// Write the Groth header section
///////////
+const curve = getCurve(zkey.q);
await binFileUtils.startWriteSection(fd, 2);
-const primeQ = zkey.q;
+const primeQ = curve.q;
const n8q = (Math.floor( (Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
-const primeR = zkey.r;
+const primeR = curve.r;
const n8r = (Math.floor( (Scalar.bitLength(primeR) - 1) / 64) +1)*8;
-const Rr = Scalar.mod(Scalar.shl(1, n8r*8), primeR);
-const R2r = Scalar.mod(Scalar.mul(Rr,Rr), primeR);
await fd.writeULE32(n8q);
await binFileUtils.writeBigInt(fd, primeQ, n8q);
@@ -67,16 +62,28 @@ async function writeZKey(fileName, zkey) {
await fd.writeULE32(zkey.nVars); // Total number of vars
await fd.writeULE32(zkey.nPublic); // Total number of public vars (not including ONE)
await fd.writeULE32(zkey.domainSize); // domainSize
-await writePointG1(zkey.vk_alfa_1);
-await writePointG1(zkey.vk_beta_1);
-await writePointG1(zkey.vk_delta_1);
-await writePointG2(zkey.vk_beta_2);
-await writePointG2(zkey.vk_gamma_2);
-await writePointG2(zkey.vk_delta_2);
+await writeG1(fd, curve, zkey.vk_alpha_1);
+await writeG1(fd, curve, zkey.vk_beta_1);
+await writeG2(fd, curve, zkey.vk_beta_2);
+await writeG2(fd, curve, zkey.vk_gamma_2);
+await writeG1(fd, curve, zkey.vk_delta_1);
+await writeG2(fd, curve, zkey.vk_delta_2);
await binFileUtils.endWriteSection(fd);
+}
+async function writeZKey(fileName, zkey) {
+let curve = getCurve(zkey.q);
+const fd = await binFileUtils.createBinFile(fileName,"zkey", 1, 9);
+await writeHeader(fd, zkey);
+const n8r = (Math.floor( (Scalar.bitLength(zkey.r) - 1) / 64) +1)*8;
+const Rr = Scalar.mod(Scalar.shl(1, n8r*8), zkey.r);
+const R2r = Scalar.mod(Scalar.mul(Rr,Rr), zkey.r);
// Write Pols (A and B (C can be omitted))
///////////
@@ -99,7 +106,7 @@ async function writeZKey(fileName, zkey) {
///////////
await binFileUtils.startWriteSection(fd, 3);
for (let i=0; i<= zkey.nPublic; i++) {
-await writePointG1(zkey.IC[i] );
+await writeG1(fd, curve, zkey.IC[i] );
}
await binFileUtils.endWriteSection(fd);
@@ -108,7 +115,7 @@ async function writeZKey(fileName, zkey) {
///////////
await binFileUtils.startWriteSection(fd, 5);
for (let i=0; i<zkey.nVars; i++) {
-await writePointG1(zkey.A[i]);
+await writeG1(fd, curve, zkey.A[i]);
}
await binFileUtils.endWriteSection(fd);
@@ -116,7 +123,7 @@ async function writeZKey(fileName, zkey) {
///////////
await binFileUtils.startWriteSection(fd, 6);
for (let i=0; i<zkey.nVars; i++) {
-await writePointG1(zkey.B1[i]);
+await writeG1(fd, curve, zkey.B1[i]);
}
await binFileUtils.endWriteSection(fd);
@@ -124,7 +131,7 @@ async function writeZKey(fileName, zkey) {
///////////
await binFileUtils.startWriteSection(fd, 7);
for (let i=0; i<zkey.nVars; i++) {
-await writePointG2(zkey.B2[i]);
+await writeG2(fd, curve, zkey.B2[i]);
}
await binFileUtils.endWriteSection(fd);
@@ -132,7 +139,7 @@ async function writeZKey(fileName, zkey) {
///////////
await binFileUtils.startWriteSection(fd, 8);
for (let i=zkey.nPublic+1; i<zkey.nVars; i++) {
-await writePointG1(zkey.C[i]);
+await writeG1(fd, curve, zkey.C[i]);
}
await binFileUtils.endWriteSection(fd);
@@ -141,7 +148,7 @@ async function writeZKey(fileName, zkey) {
///////////
await binFileUtils.startWriteSection(fd, 9);
for (let i=0; i<zkey.domainSize; i++) {
-await writePointG1(zkey.hExps[i]);
+await writeG1(fd, curve, zkey.hExps[i]);
}
await binFileUtils.endWriteSection(fd);
@@ -149,24 +156,36 @@ async function writeZKey(fileName, zkey) {
async function writeFr2(n) {
// Convert to montgomery
-n = Scalar.mod( Scalar.mul(n, R2r), primeR);
+n = Scalar.mod( Scalar.mul(n, R2r), zkey.r);
await binFileUtils.writeBigInt(fd, n, n8r);
}
-async function writePointG1(p) {
-const buff = new Uint8Array(curve.G1.F.n8*2);
-curve.G1.toRprLEM(buff, 0, p);
-await fd.write(buff);
-}
-async function writePointG2(p) {
-const buff = new Uint8Array(curve.G2.F.n8*2);
-curve.G2.toRprLEM(buff, 0, p);
-await fd.write(buff);
-}
}
+async function writeG1(fd, curve, p) {
+const buff = new Uint8Array(curve.G1.F.n8*2);
+curve.G1.toRprLEM(buff, 0, p);
+await fd.write(buff);
+}
+async function writeG2(fd, curve, p) {
+const buff = new Uint8Array(curve.G2.F.n8*2);
+curve.G2.toRprLEM(buff, 0, p);
+await fd.write(buff);
+}
+async function readG1(fd, curve) {
+const buff = await fd.read(curve.G1.F.n8*2);
+return curve.G1.fromRprLEM(buff, 0);
+}
+async function readG2(fd, curve) {
+const buff = await fd.read(curve.G2.F.n8*2);
+return curve.G2.fromRprLEM(buff, 0);
+}
async function readHeader(fd, sections, protocol) {
if (protocol != "groth16") throw new Error("Protocol not supported: "+protocol);
@@ -197,26 +216,17 @@ async function readHeader(fd, sections, protocol) {
zkey.nVars = await fd.readULE32();
zkey.nPublic = await fd.readULE32();
zkey.domainSize = await fd.readULE32();
-zkey.vk_alfa_1 = await readG1();
-zkey.vk_beta_1 = await readG1();
-zkey.vk_delta_1 = await readG1();
-zkey.vk_beta_2 = await readG2();
-zkey.vk_gamma_2 = await readG2();
-zkey.vk_delta_2 = await readG2();
+zkey.power = log2(zkey.domainSize);
+zkey.vk_alpha_1 = await readG1(fd, curve);
+zkey.vk_beta_1 = await readG1(fd, curve);
+zkey.vk_beta_2 = await readG2(fd, curve);
+zkey.vk_gamma_2 = await readG2(fd, curve);
+zkey.vk_delta_1 = await readG1(fd, curve);
+zkey.vk_delta_2 = await readG2(fd, curve);
await binFileUtils.endReadSection(fd);
return zkey;
-async function readG1() {
-const buff = await fd.read(curve.G1.F.n8*2);
-return curve.G1.fromRprLEM(buff, 0);
-}
-async function readG2() {
-const buff = await fd.read(curve.G2.F.n8*2);
-return curve.G2.fromRprLEM(buff, 0);
-}
}
async function readZKey(fileName) {
@@ -236,7 +246,7 @@ async function readZKey(fileName) {
await binFileUtils.startReadUniqueSection(fd, sections, 3);
zkey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
-const P = await readG1();
+const P = await readG1(fd, curve);
zkey.IC.push(P);
}
await binFileUtils.endReadSection(fd);
@@ -266,7 +276,7 @@ async function readZKey(fileName) {
await binFileUtils.startReadUniqueSection(fd, sections, 5);
zkey.A = [];
for (let i=0; i<zkey.nVars; i++) {
-const A = await readG1();
+const A = await readG1(fd, curve);
zkey.A[i] = A;
}
await binFileUtils.endReadSection(fd);
@@ -277,7 +287,7 @@ async function readZKey(fileName) {
await binFileUtils.startReadUniqueSection(fd, sections, 6);
zkey.B1 = [];
for (let i=0; i<zkey.nVars; i++) {
-const B1 = await readG1();
+const B1 = await readG1(fd, curve);
zkey.B1[i] = B1;
}
@@ -289,7 +299,7 @@ async function readZKey(fileName) {
await binFileUtils.startReadUniqueSection(fd, sections, 7);
zkey.B2 = [];
for (let i=0; i<zkey.nVars; i++) {
-const B2 = await readG2();
+const B2 = await readG2(fd, curve);
zkey.B2[i] = B2;
}
await binFileUtils.endReadSection(fd);
@@ -300,7 +310,7 @@ async function readZKey(fileName) {
await binFileUtils.startReadUniqueSection(fd, sections, 8);
zkey.C = [];
for (let i=zkey.nPublic+1; i<zkey.nVars; i++) {
-const C = await readG1();
+const C = await readG1(fd, curve);
zkey.C[i] = C;
}
@@ -312,7 +322,7 @@ async function readZKey(fileName) {
await binFileUtils.startReadUniqueSection(fd, sections, 9);
zkey.hExps = [];
for (let i=0; i<zkey.domainSize; i++) {
-const H = await readG1();
+const H = await readG1(fd, curve);
zkey.hExps.push(H);
}
await binFileUtils.endReadSection(fd);
@@ -326,19 +336,55 @@ async function readZKey(fileName) {
return Fr.mul(n, Rri2);
}
-async function readG1() {
-const buff = await fd.read(curve.G1.F.n8*2);
-return curve.G1.fromRprLEM(buff, 0);
-}
-async function readG2() {
-const buff = await fd.read(curve.G2.F.n8*2);
-return curve.G2.fromRprLEM(buff, 0);
-}
}
+async function readContribution(fd, curve) {
+const c = {delta:{}};
+c.deltaAfter = await readG1(fd, curve);
+c.delta.g1_s = await readG1(fd, curve);
+c.delta.g1_sx = await readG1(fd, curve);
+c.delta.g2_spx = await readG2(fd, curve);
+c.transcript = await fd.read(64);
+return c;
+}
+async function readMPCParams(fd, curve, sections) {
+await binFileUtils.startReadUniqueSection(fd, sections, 10);
+const res = { contributions: []};
+res.csHash = await fd.read(64);
+const n = await fd.readULE32();
+for (let i=0; i<n; i++) {
+const c = await readContribution(fd, curve);
+res.contributions.push(c);
+}
+await binFileUtils.endReadSection(fd);
+return res;
+}
+async function writeContribution(fd, curve, c) {
+await writeG1(fd, curve, c.deltaAfter);
+await writeG1(fd, curve, c.delta.g1_s);
+await writeG1(fd, curve, c.delta.g1_sx);
+await writeG2(fd, curve, c.delta.g2_spx);
+await fd.write(c.transcript);
+}
+async function writeMPCParams(fd, curve, mpcParams) {
+await binFileUtils.startWriteSection(fd, 10);
+await fd.write(mpcParams.csHash);
+await fd.writeULE32(mpcParams.contributions.length);
+for (let i=0; i<mpcParams.contributions.length; i++) {
+await writeContribution(fd, curve, mpcParams.contributions[i]);
+}
+await binFileUtils.endWriteSection(fd);
+}
module.exports.readHeader = readHeader;
+module.exports.writeHeader = writeHeader;
module.exports.read = readZKey;
module.exports.write = writeZKey;
+module.exports.readMPCParams = readMPCParams;
+module.exports.writeMPCParams = writeMPCParams;

376
src/zkey_verify.js Normal file

@@ -0,0 +1,376 @@
const binFileUtils = require("./binfileutils");
const zkeyUtils = require("./zkey_utils");
const getCurve = require("./curves").getCurveFromQ;
const Blake2b = require("blake2b-wasm");
const misc = require("./misc");
const Scalar = require("ffjavascript").Scalar;
const hashToG2 = require("./keypair").hashToG2;
const sameRatio = misc.sameRatio;
const crypto = require("crypto");
const ChaCha = require("ffjavascript").ChaCha;
const newZKey = require("./zkey_new");
module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, verbose) {
let sr;
await Blake2b.ready();
const {fd, sections} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fd, sections, "groth16");
const curve = getCurve(zkey.q);
await curve.loadEngine();
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
const mpcParams = await zkeyUtils.readMPCParams(fd, curve, sections);
const responses = [];
const accumulatedHasher = Blake2b(64);
accumulatedHasher.update(mpcParams.csHash);
let curDelta = curve.G1.g;
for (let i=0; i<mpcParams.contributions.length; i++) {
const c = mpcParams.contributions[i];
const ourHasher = misc.cloneHasher(accumulatedHasher);
hashG1(ourHasher, c.delta.g1_s);
hashG1(ourHasher, c.delta.g1_sx);
if (!misc.hashIsEqual(ourHasher.digest(), c.transcript)) {
console.log(`INVALID(${i}): Inconsistent transcript `);
return false;
}
const delta_g2_sp = hashToG2(c.transcript);
sr = await sameRatio(curve, c.delta.g1_s, c.delta.g1_sx, delta_g2_sp, c.delta.g2_spx);
if (sr !== true) {
console.log(`INVALID(${i}): public key G1 and G2 do not have the same ratio`);
return false;
}
sr = await sameRatio(curve, curDelta, c.deltaAfter, delta_g2_sp, c.delta.g2_spx);
if (sr !== true) {
console.log(`INVALID(${i}): deltaAfter does not follow the public key`);
return false;
}
hashPubKey(accumulatedHasher, c);
const contributionHasher = Blake2b(64);
hashPubKey(contributionHasher, c);
responses.push(contributionHasher.digest());
curDelta = c.deltaAfter;
}
const initFileName = "~" + zkeyFileName + ".init";
await newZKey(r1csFileName, pTauFileName, initFileName);
const {fd: fdInit, sections: sectionsInit} = await binFileUtils.readBinFile(initFileName, "zkey", 2);
const zkeyInit = await zkeyUtils.readHeader(fdInit, sectionsInit, "groth16");
if ( (!Scalar.eq(zkeyInit.q, zkey.q))
||(!Scalar.eq(zkeyInit.r, zkey.r))
||(zkeyInit.n8q != zkey.n8q)
||(zkeyInit.n8r != zkey.n8r))
{
console.log("INVALID: Different curves");
return false;
}
if ( (zkeyInit.nVars != zkey.nVars)
||(zkeyInit.nPublic != zkey.nPublic)
||(zkeyInit.domainSize != zkey.domainSize))
{
console.log("INVALID: Different circuit parameters");
return false;
}
if (!curve.G1.eq(zkey.vk_alpha_1, zkeyInit.vk_alpha_1)) {
console.log("INVALID: Invalid alpha1");
return false;
}
if (!curve.G1.eq(zkey.vk_beta_1, zkeyInit.vk_beta_1)) {
console.log("INVALID: Invalid beta1");
return false;
}
if (!curve.G2.eq(zkey.vk_beta_2, zkeyInit.vk_beta_2)) {
console.log("INVALID: Invalid beta2");
return false;
}
if (!curve.G2.eq(zkey.vk_gamma_2, zkeyInit.vk_gamma_2)) {
console.log("INVALID: Invalid gamma2");
return false;
}
if (!curve.G1.eq(zkey.vk_delta_1, curDelta)) {
console.log("INVALID: Invalud delta1");
return false;
}
sr = await sameRatio(curve, curve.G1.g, curDelta, curve.G2.g, zkey.vk_delta_2);
if (sr !== true) {
console.log("INVALID: Invalud delta2");
return false;
}
const mpcParamsInit = await zkeyUtils.readMPCParams(fdInit, curve, sectionsInit);
if (!misc.hashIsEqual(mpcParams.csHash, mpcParamsInit.csHash)) {
console.log("INVALID: Circuit does not match");
return false;
}
// Check sizes of sections
if (sections[8][0].size != sG1*(zkey.nVars-zkey.nPublic-1)) {
console.log("INVALID: Invalid L section size");
return false;
}
if (sections[9][0].size != sG1*(zkey.domainSize)) {
console.log("INVALID: Invalid H section size");
return false;
}
let ss;
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 3);
if (!ss) {
console.log("INVALID: IC section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 4);
if (!ss) {
console.log("Coeffs section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 5);
if (!ss) {
console.log("A section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 6);
if (!ss) {
console.log("B1 section is not identical");
return false;
}
ss = await binFileUtils.sectionIsEqual(fd, sections, fdInit, sectionsInit, 7);
if (!ss) {
console.log("B2 section is not identical");
return false;
}
// Check L
sr = await sectionHasSameRatio("G1", fdInit, sectionsInit, fd, sections, 8, zkey.vk_delta_2, zkeyInit.vk_delta_2, "L section");
if (sr!==true) {
console.log("L section does not match");
return false;
}
// Check H
sr = await sameRatioH();
if (sr!==true) {
console.log("H section does not match");
return false;
}
return true;
function hashG1(hasher, p) {
const buff = new Uint8Array(sG1);
curve.G1.toRprUncompressed(buff, 0, p);
hasher.update(buff);
}
function hashG2(hasher, p) {
const buff = new Uint8Array(sG2);
curve.G2.toRprUncompressed(buff, 0, p);
hasher.update(buff);
}
function hashPubKey(hasher, c) {
hashG1(hasher, c.deltaAfter);
hashG1(hasher, c.delta.g1_s);
hashG1(hasher, c.delta.g1_sx);
hashG2(hasher, c.delta.g2_spx);
hasher.update(c.transcript);
}
async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
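// Strategy: rather than checking sameRatio point by point, both sections are
// folded with one set of random 32-bit coefficients via multiExpAffine; if any
// pair of corresponding points had a different ratio, the folded pair would
// pass the final pairing check only with negligible probability.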
const MAX_CHUNK_SIZE = 1<<20;
const G = curve[groupName];
const sG = G.F.n8*2;
await binFileUtils.startReadUniqueSection(fd1, sections1, idSection);
await binFileUtils.startReadUniqueSection(fd2, sections2, idSection);
let R1 = G.zero;
let R2 = G.zero;
const nPoints = sections1[idSection][0].size / sG;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
const bases1 = await fd1.read(n*sG);
const bases2 = await fd2.read(n*sG);
const scalars = new Uint8Array(4*n);
crypto.randomFillSync(scalars);
const r1 = await G.multiExpAffine(bases1, scalars);
const r2 = await G.multiExpAffine(bases2, scalars);
R1 = G.add(R1, r1);
R2 = G.add(R2, r2);
}
await binFileUtils.endReadSection(fd1);
await binFileUtils.endReadSection(fd2);
sr = await sameRatio(curve, R1, R2, g2sp, g2spx);
if (sr !== true) return false;
return true;
}
async function sameRatioH() {
const MAX_CHUNK_SIZE = 1<<20;
const G = curve.G1;
const sG = G.F.n8*2;
const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(pTauFileName, "ptau", 1);
let buff_r = new Uint8Array(zkey.domainSize * zkey.n8r);
const seed= new Array(8);
for (let i=0; i<8; i++) {
seed[i] = crypto.randomBytes(4).readUInt32BE(0);
}
const rng = new ChaCha(seed);
for (let i=0; i<zkey.domainSize-1; i++) { // Note that last one is zero
const e = curve.Fr.fromRng(rng);
curve.Fr.toRprLE(buff_r, i*zkey.n8r, e);
}
let R1 = G.zero;
for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`H Verificaition(tau): ${i}/${zkey.domainSize}`);
const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);
const buff1 = await fdPTau.read(sG*n, sectionsPTau[2][0].p + zkey.domainSize*sG + i*MAX_CHUNK_SIZE*sG);
const buff2 = await fdPTau.read(sG*n, sectionsPTau[2][0].p + i*MAX_CHUNK_SIZE*sG);
const buffB = await batchSubstract(buff1, buff2);
const buffS = buff_r.slice((i*MAX_CHUNK_SIZE)*zkey.n8r, (i*MAX_CHUNK_SIZE+n)*zkey.n8r);
const r = await G.multiExpAffine(buffB, buffS);
R1 = G.add(R1, r);
}
// Calculate odd coefficients in transformed domain
buff_r = await curve.Fr.batchToMontgomery(buff_r);
// const first = curve.Fr.neg(curve.Fr.inv(curve.Fr.e(2)));
// Works*2 const first = curve.Fr.neg(curve.Fr.e(2));
const first = curve.Fr.neg(curve.Fr.e(2));
// const inc = curve.Fr.inv(curve.PFr.w[zkey.power+1]);
const inc = curve.PFr.w[zkey.power+1];
buff_r = await curve.Fr.batchApplyKey(buff_r, first, inc);
buff_r = await curve.Fr.fft(buff_r);
buff_r = await curve.Fr.batchFromMontgomery(buff_r);
await binFileUtils.startReadUniqueSection(fd, sections, 9);
let R2 = G.zero;
for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`H Verificaition(lagrange): ${i}/${zkey.domainSize}`);
const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);
const buff = await fd.read(sG*n);
const buffS = buff_r.slice((i*MAX_CHUNK_SIZE)*zkey.n8r, (i*MAX_CHUNK_SIZE+n)*zkey.n8r);
const r = await G.multiExpAffine(buff, buffS);
R2 = G.add(R2, r);
}
await binFileUtils.endReadSection(fd);
sr = await sameRatio(curve, R1, R2, zkey.vk_delta_2, zkeyInit.vk_delta_2);
if (sr !== true) return false;
return true;
}
async function batchSubstract(buff1, buff2) {
const sG = curve.G1.F.n8*2;
const nPoints = buff1.byteLength / sG;
const concurrency= curve.engine.concurrency;
const nPointsPerThread = Math.floor(nPoints / concurrency);
const opPromises = [];
for (let i=0; i<concurrency; i++) {
let n;
if (i< concurrency-1) {
n = nPointsPerThread;
} else {
n = nPoints - i*nPointsPerThread;
}
if (n==0) continue;
const subBuff1 = buff1.slice(i*nPointsPerThread*sG, (i*nPointsPerThread+n)*sG);
const subBuff2 = buff2.slice(i*nPointsPerThread*sG, (i*nPointsPerThread+n)*sG);
opPromises.push(batchSubstractThread(subBuff1, subBuff2));
}
const result = await Promise.all(opPromises);
const fullBuffOut = new Uint8Array(nPoints*sG);
let p =0;
for (let i=0; i<result.length; i++) {
fullBuffOut.set(result[i][0], p);
p+=result[i][0].byteLength;
}
return fullBuffOut;
}
async function batchSubstractThread(buff1, buff2) {
const sG1 = curve.G1.F.n8*2;
const sGmid = curve.G1.F.n8*3;
const nPoints = buff1.byteLength/sG1;
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: buff1});
task.push({cmd: "ALLOCSET", var: 1, buff: buff2});
task.push({cmd: "ALLOC", var: 2, len: nPoints*sGmid});
for (let i=0; i<nPoints; i++) {
task.push({
cmd: "CALL",
fnName: "g1m_subAffine",
params: [
{var: 0, offset: i*sG1},
{var: 1, offset: i*sG1},
{var: 2, offset: i*sGmid},
]
});
}
task.push({cmd: "CALL", fnName: "g1m_batchToAffine", params: [
{var: 2},
{val: nPoints},
{var: 2},
]});
task.push({cmd: "GET", out: 0, var: 2, len: nPoints*sG1});
const res = await curve.engine.queueAction(task);
return res;
}
};
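Finally, a usage sketch for the verifier itself (file names hypothetical):

const phase2verify = require("./src/zkey_verify.js");
phase2verify("circuit.r1cs", "pot_final.ptau", "circuit_0002.zkey", true)
    .then((ok) => console.log(ok ? "zkey OK" : "zkey INVALID"));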