beacon phase2, verification and challange contribution

Jordi Baylina 2020-06-18 19:14:06 +02:00
parent 0e32995f26
commit b45eb248e2
No known key found for this signature in database
GPG Key ID: 7480C80C1BE43112
19 changed files with 565 additions and 154 deletions

78
cli.js

@ -46,8 +46,9 @@ const Scalar = require("ffjavascript").Scalar;
const assert = require("assert");
-const groth16Prover = require("./src/groth16_prover");
const zkey = require("./src/zkey");
+const zksnark = require("./src/zksnark");
+const curves = require("./src/curves");
const commands = [
{
@ -123,7 +124,7 @@ const commands = [
action: powersOfTawExportChallange
},
{
-cmd: "powersoftau challange contribute <challange> [response]",
+cmd: "powersoftau challange contribute <curve> <challange> [response]",
description: "Contribute to a challange",
alias: ["ptcc"],
options: "-verbose|v -entropy|e",
@ -207,6 +208,20 @@ const commands = [
options: "-verbose|v -entropy|e -name|n", options: "-verbose|v -entropy|e -name|n",
action: zkeyContribute action: zkeyContribute
}, },
{
cmd: "zkey beacon <circuit_old.zkey> <circuit_new.zkey> <beaconHash(Hex)> <numIterationsExp>",
description: "adds a beacon",
alias: ["zkb"],
options: "-verbose|v -name|n",
action: zkeyBeacon
},
{
cmd: "zkey challange contribute <curve> <challange> [response]",
description: "contributes to a llallange file in bellman format",
alias: ["zkcc"],
options: "-verbose|v -entropy|e",
action: zkeyChallangeContribute
},
{
cmd: "zkey export vkey [circuit.zkey] [verification_key.json]",
description: "Exports a verification key",
@ -454,7 +469,7 @@ async function zksnarkProve(params, options) {
const publicName = params[3] || "public.json";
-const {proof, publicSignals} = await groth16Prover(zkeyName, witnessName, options.verbose);
+const {proof, publicSignals} = await zksnark.groth16.prover(zkeyName, witnessName, options.verbose);
await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
@ -473,10 +488,14 @@ async function zksnarkVerify(params, options) {
const pub = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8")));
const proof = unstringifyBigInts(JSON.parse(fs.readFileSync(proofName, "utf8")));
/*
const protocol = verificationKey.protocol;
if (!zkSnark[protocol]) throw new Error("Invalid protocol");
const isValid = zkSnark[protocol].isValid(verificationKey, proof, pub);
*/
const isValid = await zksnark.groth16.verifier(verificationKey, proof, pub);
if (isValid) {
console.log("OK");
@ -512,7 +531,7 @@ async function zkeyExportVKey(params) {
vk_gamma_2: zKey.vk_gamma_2,
vk_delta_2: zKey.vk_delta_2,
-vk_alphabeta_12: curve.pairing( zKey.vk_alpha_1 , zKey.vk_beta_2 )
+vk_alphabeta_12: await curve.pairing( zKey.vk_alpha_1 , zKey.vk_beta_2 )
};
await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(vKey), null, 1), "utf-8");
@ -648,20 +667,22 @@ async function powersOfTawExportChallange(params, options) {
return await powersOfTaw.exportChallange(ptauName, challangeName, options.verbose);
}
+// powersoftau challange contribute <curve> <challange> [response]
async function powersOfTawChallangeContribute(params, options) {
let challangeName;
let responseName;
-challangeName = params[0];
-if (params.length < 2) {
+const curve = curves.getCurveFromName(params[0]);
+challangeName = params[1];
+if (params.length < 3) {
responseName = changeExt(challangeName, "response");
} else {
-responseName = params[1];
+responseName = params[2];
}
-return await powersOfTaw.challangeContribute(bn128, challangeName, responseName, options.entropy, options.verbose);
+return await powersOfTaw.challangeContribute(curve, challangeName, responseName, options.entropy, options.verbose);
}
@ -766,7 +787,7 @@ async function zkeyNew(params, options) {
ptauName = params[1];
}
-if (params.length < 2) {
+if (params.length < 3) {
zkeyName = "circuit.zkey";
} else {
zkeyName = params[2];
@ -792,7 +813,7 @@ async function zkeyExportBellman(params, options) {
mpcparamsName = params[1];
}
-return zkey.exportMPCParams(zkeyName, mpcparamsName, options.verbose);
+return zkey.exportBellman(zkeyName, mpcparamsName, options.verbose);
}
@ -857,3 +878,38 @@ async function zkeyContribute(params, options) {
return zkey.contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, options.verbose);
}
// zkey beacon <circuit_old.zkey> <circuit_new.zkey> <beaconHash(Hex)> <numIterationsExp>
async function zkeyBeacon(params, options) {
let zkeyOldName;
let zkeyNewName;
let beaconHashStr;
let numIterationsExp;
zkeyOldName = params[0];
zkeyNewName = params[1];
beaconHashStr = params[2];
numIterationsExp = params[3];
return await zkey.beacon(zkeyOldName, zkeyNewName, options.name ,numIterationsExp, beaconHashStr, options.verbose);
}
// zkey challange contribute <curve> <challange> [response]
async function zkeyChallangeContribute(params, options) {
let challangeName;
let responseName;
const curve = curves.getCurveFromName(params[0]);
challangeName = params[1];
if (params.length < 3) {
responseName = changeExt(challangeName, "response");
} else {
responseName = params[2];
}
return zkey.challangeContribute(curve, challangeName, responseName, options.entropy, options.verbose);
}
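For orientation, the two new actions map onto the zkey API like this (a sketch, not part of the diff; the file names, the contribution name, the entropy text and the 32-byte beacon hash are placeholder values):

const zkey = require("./src/zkey");
const curves = require("./src/curves");

async function example() {
    // "zkey beacon": a deterministic contribution derived from a public beacon hash,
    // here hashed 2^10 times with SHA-256
    await zkey.beacon("circuit_old.zkey", "circuit_new.zkey", "Final Beacon", 10,
        "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", true);

    // "zkey challange contribute": contribute to a challange file in bellman format
    const curve = curves.getCurveFromName("bn128");
    await zkey.challangeContribute(curve, "circuit.challange", "circuit.response",
        "some random text", true);
}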

@ -10,3 +10,20 @@ module.exports.getCurveFromQ = function getCurveFromQ(q) {
}
return curve;
};
module.exports.getCurveFromName = function getCurveFromName(name) {
let curve;
const normName = normalizeName(name);
if (["BN128", "BN254", "ALTBN128"].indexOf(normName) >= 0) {
curve = bn128;
} else {
throw new Error(`Curve not supported: ${name}`);
}
return curve;
function normalizeName(n) {
return n.toUpperCase().match(/[A-Za-z0-9]+/g).join("");
}
};
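A minimal usage sketch for the new helper (not part of the diff; the alternative spellings are assumptions about what normalizeName accepts, based on the regex above):

const curves = require("./src/curves");
// normalizeName uppercases and strips non-alphanumerics, so all of these resolve to bn128:
curves.getCurveFromName("bn128");
curves.getCurveFromName("alt_bn128"); // -> "ALTBN128"
curves.getCurveFromName("Bn-254");    // -> "BN254"
// any other name throws: Curve not supported: <name>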

@ -109,6 +109,47 @@ async function getRandomRng(entropy) {
return rng;
}
function rngFromBeaconParams(beaconHash, numIterationsExp) {
let nIterationsInner;
let nIterationsOuter;
if (numIterationsExp<32) {
nIterationsInner = (1 << numIterationsExp) >>> 0;
nIterationsOuter = 1;
} else {
nIterationsInner = 0x100000000;
nIterationsOuter = (1 << (numIterationsExp-32)) >>> 0;
}
let curHash = beaconHash;
for (let i=0; i<nIterationsOuter; i++) {
for (let j=0; j<nIterationsInner; j++) {
curHash = crypto.createHash("sha256").update(curHash).digest();
}
}
const curHashV = new DataView(curHash.buffer, curHash.byteOffset, curHash.byteLength);
const seed = [];
for (let i=0; i<8; i++) {
seed[i] = curHashV.getUint32(i*4, false);
}
const rng = new ChaCha(seed);
return rng;
}
function hex2ByteArray(s) {
return new Uint8Array(s.match(/[\da-f]{2}/gi).map(function (h) {
return parseInt(h, 16);
}));
}
function byteArray2hex(byteArray) {
return Array.prototype.map.call(byteArray, function(byte) {
return ("0" + (byte & 0xFF).toString(16)).slice(-2);
}).join("");
}
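A small sketch of the property this enables (not part of the diff; the 32-byte hex value is an arbitrary example and the require paths assume the repo root): the stream depends only on (beaconHash, numIterationsExp), so a verifier can later re-derive exactly the same key.

const misc = require("./src/misc");
const bn128 = require("ffjavascript").bn128;

const beaconHash = misc.hex2ByteArray("0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20");
// 2^10 chained SHA-256 iterations of the beacon hash seed a ChaCha stream
const rngA = misc.rngFromBeaconParams(beaconHash, 10);
const rngB = misc.rngFromBeaconParams(beaconHash, 10);
// identical parameters give identical streams, hence identical derived values
console.log(bn128.Fr.eq(bn128.Fr.fromRng(rngA), bn128.Fr.fromRng(rngB))); // true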
module.exports.bitReverse = bitReverse;
module.exports.log2 = log2;
module.exports.formatHash = formatHash;
@ -116,3 +157,6 @@ module.exports.hashIsEqual = hashIsEqual;
module.exports.cloneHasher = cloneHasher;
module.exports.sameRatio = sameRatio;
module.exports.getRandomRng = getRandomRng;
module.exports.rngFromBeaconParams = rngFromBeaconParams;
module.exports.hex2ByteArray = hex2ByteArray;
module.exports.byteArray2hex = byteArray2hex;

@ -31,14 +31,34 @@ async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, group
await binFileUtils.endWriteSection(fdNew);
await binFileUtils.endReadSection(fdOld);
}

-async function applyKeyToBinFile(fdOld, fdNew, curve, groupName, nPoints, first, inc, sectionName, verbose) {
-}
+async function applyKeyToChallangeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, verbose) {
const G = curve[groupName];
const sG = G.F.n8*2;
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first;
for (let i=0 ; i<nPoints ; i+= chunkSize) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
const n= Math.min(nPoints-i, chunkSize );
const buffInU = await fdOld.read(n * sG);
const buffInLEM = await G.batchUtoLEM(buffInU);
const buffOutLEM = await G.batchApplyKey(buffInLEM, t, inc);
let buffOut;
if (formatOut == "COMPRESSED") {
buffOut = await G.batchLEMtoC(buffOutLEM);
} else {
buffOut = await G.batchLEMtoU(buffOutLEM);
}
if (responseHasher) responseHasher.update(buffOut);
await fdNew.write(buffOut);
t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
}
}
-module.exports.applyKeyToBinFile = applyKeyToBinFile;
+module.exports.applyKeyToChallangeSection = applyKeyToChallangeSection;
module.exports.applyKeyToSection = applyKeyToSection;
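A short note on what the new helper computes (not part of the diff; inferred from batchApplyKey and the running factor t above): for a section of nPoints points P_0 ... P_(nPoints-1) it writes P'_i = [ first * inc^i ] * P_i in the requested output format, and feeds the written bytes into responseHasher when one is supplied. This is the same transformation the per-file contributeSection helper (removed further down) performed inline.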

@ -3,14 +3,10 @@ const utils = require("./powersoftau_utils");
const misc = require("./misc");
const binFileUtils = require("./binfileutils");
-function hex2ByteArray(s) {
-return new Uint8Array(s.match(/[\da-f]{2}/gi).map(function (h) {
-return parseInt(h, 16);
-}));
-}
async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp, beaconHashStr, verbose) {
-const beaconHash = hex2ByteArray(beaconHashStr);
+const beaconHash = misc.hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length))
{

@ -19,28 +19,12 @@
const fastFile = require("fastfile");
const assert = require("assert");
const Blake2b = require("blake2b-wasm");
-const readline = require("readline");
-const crypto = require("crypto");
-const ChaCha = require("ffjavascript").ChaCha;
const fs = require("fs");
const utils = require("./powersoftau_utils");
const misc = require("./misc");
+const { applyKeyToChallangeSection } = require("./mpc_applykey");
const keyPair = require("./keypair");
-const rl = readline.createInterface({
-input: process.stdin,
-output: process.stdout
-});
-function askEntropy() {
-return new Promise((resolve) => {
-rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
-});
-}
async function challangeContribute(curve, challangeFilename, responesFileName, entropy, verbose) {
await Blake2b.ready();
@ -63,10 +47,6 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
const fdTo = await fastFile.createOverride(responesFileName);
-while (!entropy) {
-entropy = await askEntropy();
-}
// Calculate the hash
console.log("Hashing challange");
const challangeHasher = Blake2b(64);
@ -84,21 +64,7 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
console.log("Current Challange Hash: "); console.log("Current Challange Hash: ");
console.log(misc.formatHash(challangeHash)); console.log(misc.formatHash(challangeHash));
const hasher = Blake2b(64); const rng = await misc.getRandomRng(entropy);
hasher.update(crypto.randomBytes(64));
const enc = new TextEncoder(); // always utf-8
hasher.update(enc.encode(entropy));
const hash = Buffer.from(hasher.digest());
const seed = [];
for (let i=0;i<8;i++) {
seed[i] = hash.readUInt32BE(i*4);
}
const rng = new ChaCha(seed);
const key = keyPair.createPTauKey(curve, challangeHash, rng); const key = keyPair.createPTauKey(curve, challangeHash, rng);
@ -125,12 +91,11 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
await fdTo.write(challangeHash);
responseHasher.update(challangeHash);
-await contributeSection("G1", (1<<power)*2-1, curve.Fr.one, key.tau.prvKey, "tauG1" );
-await contributeSection("G2", (1<<power) , curve.Fr.one, key.tau.prvKey, "tauG2" );
-await contributeSection("G1", (1<<power) , key.alpha.prvKey, key.tau.prvKey, "alphaTauG1" );
-await contributeSection("G1", (1<<power) , key.beta.prvKey, key.tau.prvKey, "betaTauG1" );
-await contributeSection("G2", 1 , key.beta.prvKey, key.tau.prvKey, "betaG2" );
+await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power)*2-1, curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG1" , verbose );
+await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G2", (1<<power) , curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG2" , verbose );
+await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power) , key.alpha.prvKey, key.tau.prvKey, "COMPRESSED", "alphaTauG1", verbose );
+await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<<power) , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG1" , verbose );
+await applyKeyToChallangeSection(fdFrom, fdTo, responseHasher, curve, "G2", 1 , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG2" , verbose );
// Write and hash key
const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
@ -143,26 +108,6 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
await fdTo.close();
await fdFrom.close();
-async function contributeSection(groupName, nPoints, first, inc, sectionName) {
-const G = curve[groupName];
-const sG = G.F.n8*2;
-const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
-let t = first;
-for (let i=0 ; i<nPoints ; i+= chunkSize) {
-if ((verbose)&&i) console.log(`${sectionName}: ` + i);
-const n= Math.min(nPoints-i, chunkSize );
-const buffInU = await fdFrom.read(n * sG);
-const buffInLEM = await G.batchUtoLEM(buffInU);
-const buffOutLEM = await G.batchApplyKey(buffInLEM, t, inc);
-const buffOutC = await G.batchLEMtoC(buffOutLEM);
-responseHasher.update(buffOutC);
-await fdTo.write(buffOutC);
-t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
-}
-}
}
module.exports = challangeContribute;

@ -7,24 +7,10 @@
const Blake2b = require("blake2b-wasm");
const utils = require("./powersoftau_utils");
-const ChaCha = require("ffjavascript").ChaCha;
-const crypto = require("crypto");
const keyPair = require("./keypair");
-const readline = require("readline");
const binFileUtils = require("./binfileutils");
const misc = require("./misc");
-const rl = readline.createInterface({
-input: process.stdin,
-output: process.stdout
-});
-function askEntropy() {
-return new Promise((resolve) => {
-rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
-});
-}
async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbose) {
await Blake2b.ready();
@ -51,20 +37,9 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
}
// Generate a random key
-while (!entropy) {
-entropy = await askEntropy();
-}
-const hasher = Blake2b(64);
-hasher.update(crypto.randomBytes(64));
-const enc = new TextEncoder(); // always utf-8
-hasher.update(enc.encode(entropy));
-const hash = Buffer.from(hasher.digest());
-const seed = [];
-for (let i=0;i<8;i++) {
-seed[i] = hash.readUInt32BE(i*4);
-}
-const rng = new ChaCha(seed);
+const rng = await misc.getRandomRng(entropy);
curContribution.key = keyPair.createPTauKey(curve, lastChallangeHash, rng);

@ -1,11 +1,9 @@
const fastFile = require("fastfile");
const assert = require("assert"); const assert = require("assert");
const Scalar = require("ffjavascript").Scalar; const Scalar = require("ffjavascript").Scalar;
const bn128 = require("ffjavascript").bn128; const bn128 = require("ffjavascript").bn128;
const Blake2b = require("blake2b-wasm"); const Blake2b = require("blake2b-wasm");
const ChaCha = require("ffjavascript").ChaCha;
const keyPair = require("./keypair"); const keyPair = require("./keypair");
const crypto = require("crypto"); const misc = require("./misc");
async function writePTauHeader(fd, curve, power, ceremonyPower) { async function writePTauHeader(fd, curve, power, ceremonyPower) {
// Write the header // Write the header
@ -345,30 +343,8 @@ function calculateFirstChallangeHash(curve, power, verbose) {
function keyFromBeacon(curve, challangeHash, beaconHash, numIterationsExp) {
-let nIterationsInner;
-let nIterationsOuter;
-if (numIterationsExp<32) {
-nIterationsInner = (1 << numIterationsExp) >>> 0;
-nIterationsOuter = 1;
-} else {
-nIterationsInner = 0x100000000;
-nIterationsOuter = (1 << (numIterationsExp-32)) >>> 0;
-}
-let curHash = beaconHash;
-for (let i=0; i<nIterationsOuter; i++) {
-for (let j=0; j<nIterationsInner; j++) {
-curHash = crypto.createHash("sha256").update(curHash).digest();
-}
-}
-const curHashV = new DataView(curHash.buffer, curHash.byteOffset, curHash.byteLength);
-const seed = [];
-for (let i=0; i<8; i++) {
-seed[i] = curHashV.getUint32(i*4, false);
-}
-const rng = new ChaCha(seed);
+const rng = misc.rngFromBeaconParams(beaconHash, numIterationsExp);
const key = keyPair.createPTauKey(curve, challangeHash, rng);

@ -271,8 +271,9 @@ async function verify(tauFilename, verbose) {
function printContribution(curContr, prevContr) {
console.log("-----------------------------------------------------");
console.log(`Contribution #${curContr.id}: ${curContr.name ||""}`);
-console.log("\tBased on challange");
-console.log(misc.formatHash(prevContr.nextChallange));
+console.log("\tNext Challange");
+console.log(misc.formatHash(curContr.nextChallange));
const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
utils.toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false);
@ -285,8 +286,14 @@ async function verify(tauFilename, verbose) {
console.log("\tResponse Hash"); console.log("\tResponse Hash");
console.log(misc.formatHash(responseHash)); console.log(misc.formatHash(responseHash));
console.log("\tNext Challange"); console.log("\tBased on challange");
console.log(misc.formatHash(curContr.nextChallange)); console.log(misc.formatHash(prevContr.nextChallange));
if (curContr.type == 1) {
console.log(`Beacon generator: ${misc.byteArray2hex(curContr.beaconHash)}`);
console.log(`Beacon iterations Exp: ${curContr.numIterationsExp}`);
}
}
async function processSectionBetaG2() {

@ -8,3 +8,4 @@ module.exports.contribute = require("./zkey_contribute.js");
module.exports.beacon = require("./zkey_beacon.js");
module.exports.exportJson = require("./zkey_export_json.js");
module.exports.utils = require("./zkey_utils.js");
module.exports.challangeContribute = require("./zkey_challangecontribute.js");

@ -1,5 +1,110 @@
const binFileUtils = require("./binfileutils");
const zkeyUtils = require("./zkey_utils");
const getCurve = require("./curves").getCurveFromQ;
const misc = require("./misc");
const Blake2b = require("blake2b-wasm");
const utils = require("./zkey_utils");
const hashToG2 = require("./keypair").hashToG2;
const {applyKeyToSection} = require("./mpc_applykey");
-async function beacon(oldZKeyFileName, newZKeyFileName, name, numIterationsExp, beaconHashStr, verbose) {
+module.exports = async function beacon(zkeyNameOld, zkeyNameNew, name, numIterationsExp, beaconHashStr, verbose) {
await Blake2b.ready();
-}
+const beaconHash = misc.hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length))
{
console.log("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
return false;
}
if (beaconHash.length>=256) {
console.log("Maximum lenght of beacon hash is 255 bytes");
return false;
}
numIterationsExp = parseInt(numIterationsExp);
if ((numIterationsExp<10)||(numIterationsExp>63)) {
console.log("Invalid numIterationsExp. (Must be between 10 and 63)");
return false;
}
const {fd: fdOld, sections: sections} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fdOld, sections, "groth16");
const curve = getCurve(zkey.q);
await curve.loadEngine();
const mpcParams = await zkeyUtils.readMPCParams(fdOld, curve, sections);
const fdNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);
const rng = await misc.rngFromBeaconParams(beaconHash, numIterationsExp);
const transcriptHasher = Blake2b(64);
transcriptHasher.update(mpcParams.csHash);
for (let i=0; i<mpcParams.contributions.length; i++) {
utils.hashPubKey(transcriptHasher, curve, mpcParams.contributions[i]);
}
const curContribution = {};
curContribution.delta = {};
curContribution.delta.prvKey = curve.Fr.fromRng(rng);
curContribution.delta.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.affine(curve.G1.mulScalar(curContribution.delta.g1_s, curContribution.delta.prvKey));
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
curContribution.transcript = transcriptHasher.digest();
curContribution.delta.g2_sp = hashToG2(curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.affine(curve.G2.mulScalar(curContribution.delta.g2_sp, curContribution.delta.prvKey));
zkey.vk_delta_1 = curve.G1.mulScalar(zkey.vk_delta_1, curContribution.delta.prvKey);
zkey.vk_delta_2 = curve.G2.mulScalar(zkey.vk_delta_2, curContribution.delta.prvKey);
curContribution.deltaAfter = zkey.vk_delta_1;
curContribution.type = 1;
curContribution.numIterationsExp = numIterationsExp;
curContribution.beaconHash = beaconHash;
if (name) curContribution.name = name;
mpcParams.contributions.push(curContribution);
await zkeyUtils.writeHeader(fdNew, zkey);
// IC
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
// Coeffs (Keep original)
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
// A Section
await binFileUtils.copySection(fdOld, sections, fdNew, 5);
// B1 Section
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
// B2 Section
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", verbose);
await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", verbose);
await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams);
await fdOld.close();
await fdNew.close();
const contributionHasher = Blake2b(64);
utils.hashPubKey(contributionHasher, curve, curContribution);
const contribuionHash = contributionHasher.digest();
console.log("Contribution Hash: ");
console.log(misc.formatHash(contribuionHash));
return true;
};

@ -0,0 +1,181 @@
// Format of the output
// Hash of the last contribution 64 Bytes
// 2^N*2-1 TauG1 Points (compressed)
// 2^N TauG2 Points (compressed)
// 2^N AlphaTauG1 Points (compressed)
// 2^N BetaTauG1 Points (compressed)
// Public Key
// BetaG2 (compressed)
// G1*s (compressed)
// G1*s*tau (compressed)
// G1*t (compressed)
// G1*t*alpha (compressed)
// G1*u (compressed)
// G1*u*beta (compressed)
// G2*sp*tau (compressed)
// G2*tp*alpha (compressed)
// G2*up*beta (compressed)
const fastFile = require("fastfile");
const Blake2b = require("blake2b-wasm");
const utils = require("./zkey_utils");
const misc = require("./misc");
const { applyKeyToChallangeSection } = require("./mpc_applykey");
const {hashPubKey} = require("./zkey_utils");
const hashToG2 = require("./keypair").hashToG2;
async function challangeContribute(curve, challangeFilename, responesFileName, entropy, verbose) {
await Blake2b.ready();
const rng = await misc.getRandomRng(entropy);
const delta = curve.Fr.fromRng(rng);
const invDelta = curve.Fr.inv(delta);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
const fdFrom = await fastFile.readExisting(challangeFilename);
const fdTo = await fastFile.createOverride(responesFileName);
await copy(sG1); // alpha1
await copy(sG1); // beta1
await copy(sG2); // beta2
await copy(sG2); // gamma2
const oldDelta1 = await readG1();
const delta1 = curve.G1.mulScalar(oldDelta1, delta);
await writeG1(delta1);
const oldDelta2 = await readG2();
const delta2 = curve.G2.mulScalar(oldDelta2, delta);
await writeG2(delta2);
// IC
const nIC = await fdFrom.readUBE32();
await fdTo.writeUBE32(nIC);
await copy(nIC*sG1);
// H
const nH = await fdFrom.readUBE32();
await fdTo.writeUBE32(nH);
await applyKeyToChallangeSection(fdFrom, fdTo, null, curve, "G1", nH, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "H", verbose);
// L
const nL = await fdFrom.readUBE32();
await fdTo.writeUBE32(nL);
await applyKeyToChallangeSection(fdFrom, fdTo, null, curve, "G1", nL, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "L", verbose);
// A
const nA = await fdFrom.readUBE32();
await fdTo.writeUBE32(nA);
await copy(nA*sG1);
// B1
const nB1 = await fdFrom.readUBE32();
await fdTo.writeUBE32(nB1);
await copy(nB1*sG1);
// B2
const nB2 = await fdFrom.readUBE32();
await fdTo.writeUBE32(nB2);
await copy(nB2*sG2);
//////////
/// Read contributions
//////////
const transcriptHasher = Blake2b(64);
const mpcParams = {};
// csHash
mpcParams.csHash = await fdFrom.read(64);
transcriptHasher.update(mpcParams.csHash);
const nConttributions = await fdFrom.readUBE32();
mpcParams.contributions = [];
for (let i=0; i<nConttributions; i++) {
const c = { delta:{} };
c.deltaAfter = await readG1();
c.delta.g1_s = await readG1();
c.delta.g1_sx = await readG1();
c.delta.g2_spx = await readG2();
c.transcript = await fdFrom.read(64);
mpcParams.contributions.push(c);
hashPubKey(transcriptHasher, curve, c);
}
const curContribution = {};
curContribution.delta = {};
curContribution.delta.prvKey = delta;
curContribution.delta.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
curContribution.delta.g1_sx = curve.G1.affine(curve.G1.mulScalar(curContribution.delta.g1_s, delta));
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
utils.hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
curContribution.transcript = transcriptHasher.digest();
curContribution.delta.g2_sp = hashToG2(curContribution.transcript);
curContribution.delta.g2_spx = curve.G2.affine(curve.G2.mulScalar(curContribution.delta.g2_sp, delta));
curContribution.deltaAfter = delta1;
curContribution.type = 0;
mpcParams.contributions.push(curContribution);
//////////
/// Write Contribution
//////////
await fdTo.write(mpcParams.csHash);
await fdTo.writeUBE32(mpcParams.contributions.length);
for (let i=0; i<mpcParams.contributions.length; i++) {
const c = mpcParams.contributions[i];
await writeG1(c.deltaAfter);
await writeG1(c.delta.g1_s);
await writeG1(c.delta.g1_sx);
await writeG2(c.delta.g2_spx);
await fdTo.write(c.transcript);
}
const contributionHasher = Blake2b(64);
hashPubKey(contributionHasher, curve, curContribution);
console.log("Contribution Hash: ");
console.log(misc.formatHash(contributionHasher.digest()));
await fdTo.close();
await fdFrom.close();
async function copy(nBytes) {
const CHUNK_SIZE = fdFrom.pageSize*2;
for (let i=0; i<nBytes; i+= CHUNK_SIZE) {
const n = Math.min(nBytes -i, CHUNK_SIZE);
const buff = await fdFrom.read(n);
await fdTo.write(buff);
}
}
async function readG1() {
const buff = await fdFrom.read(curve.G1.F.n8*2);
return curve.G1.fromRprUncompressed(buff, 0);
}
async function readG2() {
const buff = await fdFrom.read(curve.G2.F.n8*2);
return curve.G2.fromRprUncompressed(buff, 0);
}
async function writeG1(P) {
const buff = new Uint8Array(sG1);
curve.G1.toRprUncompressed(buff, 0, P);
await fdTo.write(buff);
}
async function writeG2(P) {
const buff = new Uint8Array(sG2);
curve.G2.toRprUncompressed(buff, 0, P);
await fdTo.write(buff);
}
}
module.exports = challangeContribute;

@ -21,7 +21,6 @@ module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name
const fdNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);
-const curContribution = {};
const rng = await misc.getRandomRng(entropy);
@ -30,6 +29,7 @@ module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name
utils.hashPubKey(transcriptHasher, curve, mpcParams.contributions[i]);
}
const curContribution = {};
curContribution.delta = {};
curContribution.delta.prvKey = curve.Fr.fromRng(rng);
curContribution.delta.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
@ -44,11 +44,12 @@ module.exports = async function phase2contribute(zkeyNameOld, zkeyNameNew, name
zkey.vk_delta_2 = curve.G2.mulScalar(zkey.vk_delta_2, curContribution.delta.prvKey);
curContribution.deltaAfter = zkey.vk_delta_1;
-mpcParams.contributions.push(curContribution);
curContribution.type = 0;
if (name) curContribution.name = name;
mpcParams.contributions.push(curContribution);
await zkeyUtils.writeHeader(fdNew, zkey);
// IC

@ -14,7 +14,7 @@ module.exports = async function phase2exportMPCParams(zkeyName, mpcparamsName,
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
-const mpcParams = await zkeyUtils.readMPCParams(fdZKey, sectionsZKey);
+const mpcParams = await zkeyUtils.readMPCParams(fdZKey, curve, sectionsZKey);
const fdMPCParams = await fastFile.createOverride(mpcparamsName);
@ -50,7 +50,7 @@ module.exports = async function phase2exportMPCParams(zkeyName, mpcparamsName,
await writePointArray("G1", buffBasesH_Tau); await writePointArray("G1", buffBasesH_Tau);
///////////////////// /////////////////////
// C section (l section in some notations) // L section
///////////////////// /////////////////////
let buffBasesC; let buffBasesC;
buffBasesC = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 8); buffBasesC = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 8);
@ -89,7 +89,7 @@ module.exports = async function phase2exportMPCParams(zkeyName, mpcparamsName,
await writeG1(c.deltaAfter);
await writeG1(c.delta.g1_s);
await writeG1(c.delta.g1_sx);
-await writeG1(c.delta.g2_spx);
+await writeG2(c.delta.g2_spx);
await fdMPCParams.write(c.transcript);
}
@ -98,13 +98,13 @@ module.exports = async function phase2exportMPCParams(zkeyName, mpcparamsName,
async function writeG1(P) {
const buff = new Uint8Array(sG1);
-curve.G1.toRprBE(buff, 0, P);
+curve.G1.toRprUncompressed(buff, 0, P);
await fdMPCParams.write(buff);
}
async function writeG2(P) {
const buff = new Uint8Array(sG2);
-curve.G2.toRprBE(buff, 0, P);
+curve.G2.toRprUncompressed(buff, 0, P);
await fdMPCParams.write(buff);
}

@ -167,6 +167,7 @@ module.exports = async function phase2importMPCParams(zkeyNameOld, mpcparamsNam
return curve.G2.fromRprUncompressed(buff, 0);
}
function contributionIsEqual(c1, c2) {
if (!curve.G1.eq(c1.deltaAfter , c2.deltaAfter)) return false;
if (!curve.G1.eq(c1.delta.g1_s , c2.delta.g1_s)) return false;

@ -27,7 +27,6 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
const mpcParams = await zkeyUtils.readMPCParams(fd, curve, sections);
-const responses = [];
const accumulatedHasher = Blake2b(64);
accumulatedHasher.update(mpcParams.csHash);
let curDelta = curve.G1.g;
@ -57,11 +56,27 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
return false;
}
if (c.type == 1) {
const rng = misc.rngFromBeaconParams(c.beaconHash, c.numIterationsExp);
const expected_prvKey = curve.Fr.fromRng(rng);
const expected_g1_s = curve.G1.affine(curve.G1.fromRng(rng));
const expected_g1_sx = curve.G1.affine(curve.G1.mulScalar(expected_g1_s, expected_prvKey));
if (curve.G1.eq(expected_g1_s, c.delta.g1_s) !== true) {
console.log(`INVALID(${i}): Key of the beacon does not match. g1_s `);
return false;
}
if (curve.G1.eq(expected_g1_sx, c.delta.g1_sx) !== true) {
console.log(`INVALID(${i}): Key of the beacon does not match. g1_sx `);
return false;
}
}
hashPubKey(accumulatedHasher, curve, c);
const contributionHasher = Blake2b(64);
hashPubKey(contributionHasher, curve, c);
-responses.push(contributionHasher.digest());
+c.contributionHash = contributionHasher.digest();
curDelta = c.deltaAfter;
}
@ -179,6 +194,18 @@ module.exports = async function phase2verify(r1csFileName, pTauFileName, zkeyFi
return false;
}
for (let i=mpcParams.contributions.length-1; i>=0; i--) {
const c = mpcParams.contributions[i];
console.log("-------------------------");
console.log(`contribution #${i+1}${c.name ? " " + c.name : ""}:`);
console.log(misc.formatHash(c.contributionHash));
if (c.type == 1) {
console.log(`Beacon generator: ${misc.byteArray2hex(c.beaconHash)}`);
console.log(`Beacon iterations Exp: ${c.numIterationsExp}`);
}
}
console.log("-------------------------");
return true;

8
src/zksnark.js Normal file

@ -0,0 +1,8 @@
module.exports = {
groth16: {
prover: module.require("./zksnark_groth16_prover"),
verifier: module.require("./zksnark_groth16_verifier")
}
};
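A minimal sketch of how the new wrapper is consumed, mirroring the cli.js changes above (not part of the diff; file names are placeholders):

const zksnark = require("./src/zksnark");

async function example(verificationKey) {
    // prove: zkey + witness -> { proof, publicSignals }
    const {proof, publicSignals} = await zksnark.groth16.prover("circuit.zkey", "witness.wtns", false);
    // verify: resolves to true or false
    return await zksnark.groth16.verifier(verificationKey, proof, publicSignals);
}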

@ -95,6 +95,9 @@ async function groth16Prover(zkeyFileName, witnessFileName, verbose) {
proof.protocol = "groth"; proof.protocol = "groth";
await fdZKey.close();
await fdWtns.close();
return {proof, publicSignals};
}

@ -0,0 +1,48 @@
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
/* Implementation of this paper: https://eprint.iacr.org/2016/260.pdf */
const bn128 = require("ffjavascript").bn128;
const G1 = bn128.G1;
module.exports = async function isValid(vk_verifier, proof, publicSignals) {
/*
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.mulScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
*/
let cpub = await G1.multiExp(vk_verifier.IC.slice(1), publicSignals);
cpub = G1.add(cpub, vk_verifier.IC[0]);
const res = await bn128.pairingEq(
bn128.G1.neg(proof.pi_a) , proof.pi_b,
cpub , vk_verifier.vk_gamma_2,
proof.pi_c , vk_verifier.vk_delta_2,
vk_verifier.vk_alpha_1, vk_verifier.vk_beta_2
);
if (! res) return false;
return true;
};
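For reference (not part of the diff): with A = proof.pi_a, B = proof.pi_b, C = proof.pi_c and public signals s_i, the pairingEq call above checks the Groth16 verification equation

    e(-A, B) * e(cpub, vk_gamma_2) * e(C, vk_delta_2) * e(vk_alpha_1, vk_beta_2) == 1,   where cpub = IC[0] + sum_i s_i * IC[i+1]

which is the usual e(A, B) = e(alpha_1, beta_2) * e(cpub, gamma_2) * e(C, delta_2) from the paper cited in the header.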