#! /usr/bin/env node
'use strict';
var fs = require('fs');
var r1csfile = require('r1csfile');
var fastFile = require('fastfile');
var ffjavascript = require('ffjavascript');
var path = require('path');
var Blake2b = require('blake2b-wasm');
var readline = require('readline');
var crypto = require('crypto');
var binFileUtils = require('@iden3/binfileutils');
var ejs = require('ejs');
var circom_runtime = require('circom_runtime');
var jsSha3 = require('js-sha3');
var Logger = require('logplease');
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
function _interopNamespace(e) {
if (e && e.__esModule) return e;
var n = Object.create(null);
if (e) {
Object.keys(e).forEach(function (k) {
if (k !== 'default') {
var d = Object.getOwnPropertyDescriptor(e, k);
Object.defineProperty(n, k, d.get ? d : {
enumerable: true,
get: function () { return e[k]; }
});
}
});
}
n["default"] = e;
return Object.freeze(n);
}
var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
var fastFile__namespace = /*#__PURE__*/_interopNamespace(fastFile);
var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
var Blake2b__default = /*#__PURE__*/_interopDefaultLegacy(Blake2b);
var readline__default = /*#__PURE__*/_interopDefaultLegacy(readline);
var crypto__default = /*#__PURE__*/_interopDefaultLegacy(crypto);
var binFileUtils__namespace = /*#__PURE__*/_interopNamespace(binFileUtils);
var ejs__default = /*#__PURE__*/_interopDefaultLegacy(ejs);
var jsSha3__default = /*#__PURE__*/_interopDefaultLegacy(jsSha3);
var Logger__default = /*#__PURE__*/_interopDefaultLegacy(Logger);
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
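// loadSymbols reads a circom .sym file and builds lookup tables from label/variable/component
// indexes to signal names. Each line is expected to look roughly like the following
// (illustrative example, not taken from a real circuit):
//   4,3,1,main.in[0]
// i.e. labelIdx,varIdx,componentIdx,fullName.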
async function loadSymbols(symFileName) {
const sym = {
labelIdx2Name: [ "one" ],
varIdx2Name: [ "one" ],
componentIdx2Name: []
};
const fd = await fastFile__namespace.readExisting(symFileName);
const buff = await fd.read(fd.totalSize);
const symsStr = new TextDecoder("utf-8").decode(buff);
const lines = symsStr.split("\n");
for (let i=0; i<lines.length; i++) {
const arr = lines[i].split(",");
if (arr.length != 4) continue;
if (sym.labelIdx2Name[arr[0]]) {
sym.labelIdx2Name[arr[0]] += "|" + arr[3];
} else {
sym.labelIdx2Name[arr[0]] = arr[3];
}
if (sym.varIdx2Name[arr[1]]) {
sym.varIdx2Name[arr[1]] += "|" + arr[3];
} else {
sym.varIdx2Name[arr[1]] = arr[3];
}
if (!sym.componentIdx2Name[arr[2]]) {
sym.componentIdx2Name[arr[2]] = extractComponent(arr[3]);
}
}
await fd.close();
return sym;
function extractComponent(name) {
const arr = name.split(".");
arr.pop(); // Remove the last element (the signal name itself)
return arr.join(".");
}
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
function r1csPrint$1(r1cs, syms, logger) {
for (let i=0; i<r1cs.constraints.length; i++) {
printConstraint(r1cs.constraints[i]);
}
function printConstraint(c) {
const lc2str = (lc) => {
let S = "";
const keys = Object.keys(lc);
keys.forEach( (k) => {
let name = syms.varIdx2Name[k];
if (name == "one") name = "";
let vs = r1cs.curve.Fr.toString(lc[k]);
if (vs == "1") vs = ""; // Do not show ones
if (vs == "-1") vs = "-"; // Do not show ones
if ((S!="")&&(vs[0]!="-")) vs = "+"+vs;
if (S!="") vs = " "+vs;
S= S + vs + name;
});
return S;
};
const S = `[ ${lc2str(c[0])} ] * [ ${lc2str(c[1])} ] - [ ${lc2str(c[2])} ] = 0`;
if (logger) logger.info(S);
}
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
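// bls12381r and bn128r below are the prime orders (r) of the BLS12-381 and BN-128/BN-254
// scalar fields; r1csInfo compares them against the prime recorded in the .r1cs header to
// report which curve the circuit was compiled for. Typical CLI usage (for illustration):
//   snarkjs r1cs info circuit.r1cs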
const bls12381r = ffjavascript.Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
const bn128r = ffjavascript.Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
async function r1csInfo$1(r1csName, logger) {
const cir = await r1csfile.readR1cs(r1csName);
if (ffjavascript.Scalar.eq(cir.prime, bn128r)) {
if (logger) logger.info("Curve: bn-128");
} else if (ffjavascript.Scalar.eq(cir.prime, bls12381r)) {
if (logger) logger.info("Curve: bls12-381");
} else {
if (logger) logger.info(`Unknown Curve. Prime: ${ffjavascript.Scalar.toString(cir.prime)}`);
}
if (logger) logger.info(`# of Wires: ${cir.nVars}`);
if (logger) logger.info(`# of Constraints: ${cir.nConstraints}`);
if (logger) logger.info(`# of Private Inputs: ${cir.nPrvInputs}`);
if (logger) logger.info(`# of Public Inputs: ${cir.nPubInputs}`);
if (logger) logger.info(`# of Labels: ${cir.nLabels}`);
if (logger) logger.info(`# of Outputs: ${cir.nOutputs}`);
return cir;
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
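// stringifyBigInts converts field elements (Uint8Array / bigint) into decimal strings so the
// parsed .r1cs structure can be serialized with JSON.stringify. Illustrative usage, assuming a
// circuit.r1cs file is present:
//   const json = await r1csExportJson("circuit.r1cs", logger);
//   fs.writeFileSync("circuit.r1cs.json", JSON.stringify(json, null, 1));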
function stringifyBigInts$4(Fr, o) {
if (o instanceof Uint8Array) {
return Fr.toString(o);
} else if (Array.isArray(o)) {
return o.map(stringifyBigInts$4.bind(null, Fr));
} else if (typeof o == "object") {
const res = {};
const keys = Object.keys(o);
keys.forEach( (k) => {
res[k] = stringifyBigInts$4(Fr, o[k]);
});
return res;
} else if ((typeof(o) == "bigint") || o.eq !== undefined) {
return o.toString(10);
} else {
return o;
}
}
async function r1csExportJson(r1csFileName, logger) {
const cir = await r1csfile.readR1cs(r1csFileName, true, true, true, logger);
const Fr=cir.curve.Fr;
delete cir.curve;
return stringifyBigInts$4(Fr, cir);
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
const __dirname$2 = path__default["default"].dirname(new URL((typeof document === 'undefined' ? new (require('u' + 'rl').URL)('file:' + __filename).href : (document.currentScript && document.currentScript.src || new URL('cli.cjs', document.baseURI).href))).pathname);
let pkgS;
try {
pkgS = fs__default["default"].readFileSync(path__default["default"].join(__dirname$2, "package.json"));
} catch (err) {
pkgS = fs__default["default"].readFileSync(path__default["default"].join(__dirname$2, "..","package.json"));
}
const pkg = JSON.parse(pkgS);
const version = pkg.version;
let selectedCommand = null;
async function clProcessor(commands) {
const cl = [];
const argv = {};
for (let i=2; i<process.argv.length; i++) {
if (process.argv[i][0] == "-") {
let S = process.argv[i];
while (S[0] == "-") S = S.slice(1);
const arr = S.split("=");
if (arr.length > 1) {
argv[arr[0]] = arr.slice(1).join("=");
} else {
argv[arr[0]] = true;
}
} else {
cl.push(process.argv[i]);
}
}
for (let i=0; i<commands.length; i++) {
const cmd = commands[i];
const m = calculateMatch(commands[i], cl);
if (m) {
selectedCommand = cmd;
if ((argv.h) || (argv.help)) {
helpCmd(cmd);
return;
}
if (areParamsValid(cmd.cmd, m)) {
if (cmd.options) {
const options = getOptions(cmd.options);
await cmd.action(m, options);
} else {
await cmd.action(m, {});
}
} else {
if (m.length>0) console.log("Invalid number of parameters");
helpCmd(cmd);
return 99;
}
return;
}
}
if (cl.length>0) console.log("Invalid command");
helpAll();
return 99;
function calculateMatch(cmd, cl) {
const alias = [];
const m = parseLine(cmd.cmd);
alias.push(m);
if (cmd.alias) {
if (Array.isArray(cmd.alias)) {
for (let i=0; i1) ? arr1[1] : null
};
}
function areParamsValid(cmd, params) {
while ((params.length)&&(!params[params.length-1])) params.pop();
const pl = parseLine(cmd);
if (params.length > pl.params.length) return false;
let minParams = pl.params.length;
while ((minParams>0)&&(pl.params[minParams-1][0] == "[")) minParams --;
if (params.length < minParams) return false;
for (let i=0; (i< pl.params.length)&&(pl.params[i][0]=="<"); i++) {
if (typeof params[i] == "undefined") return false;
}
return true;
}
function getOptions(options) {
const res = {};
const opts = options.match(/(\S+)/g);
for (let i=0; i ... ");
console.log(" or snarkjs ... ");
console.log("");
console.log("Type snarkjs --help to get more information for that command");
console.log("");
console.log("Full Command Description");
console.log("============ =================");
for (let i=0; i.
*/
function hashToG2(curve, hash) {
const hashV = new DataView(hash.buffer, hash.byteOffset, hash.byteLength);
const seed = [];
for (let i=0; i<8; i++) {
seed[i] = hashV.getUint32(i*4);
}
const rng = new ffjavascript.ChaCha(seed);
const g2_sp = curve.G2.fromRng(rng);
return g2_sp;
}
function getG2sp(curve, personalization, challenge, g1s, g1sx) {
const h = Blake2b__default["default"](64);
const b1 = new Uint8Array([personalization]);
h.update(b1);
h.update(challenge);
const b3 = curve.G1.toUncompressed(g1s);
h.update( b3);
const b4 = curve.G1.toUncompressed(g1sx);
h.update( b4);
const hash =h.digest();
return hashToG2(curve, hash);
}
function calculatePubKey(k, curve, personalization, challengeHash, rng ) {
k.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
k.g1_sx = curve.G1.toAffine(curve.G1.timesFr(k.g1_s, k.prvKey));
k.g2_sp = curve.G2.toAffine(getG2sp(curve, personalization, challengeHash, k.g1_s, k.g1_sx));
k.g2_spx = curve.G2.toAffine(curve.G2.timesFr(k.g2_sp, k.prvKey));
return k;
}
function createPTauKey(curve, challengeHash, rng) {
const key = {
tau: {},
alpha: {},
beta: {}
};
key.tau.prvKey = curve.Fr.fromRng(rng);
key.alpha.prvKey = curve.Fr.fromRng(rng);
key.beta.prvKey = curve.Fr.fromRng(rng);
calculatePubKey(key.tau, curve, 0, challengeHash, rng);
calculatePubKey(key.alpha, curve, 1, challengeHash, rng);
calculatePubKey(key.beta, curve, 2, challengeHash, rng);
return key;
}
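// A PTau key holds, for each of tau, alpha and beta, a private scalar plus a proof of knowledge:
// g1_s is a random G1 point, g1_sx = prvKey*g1_s, g2_sp is a G2 point derived by hashing
// (personalization, challenge, g1_s, g1_sx), and g2_spx = prvKey*g2_sp. Verifiers later check the
// shared ratio with a pairing, roughly e(g1_s, g2_spx) == e(g1_sx, g2_sp), without learning prvKey.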
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
const _revTable = [];
for (let i=0; i<256; i++) {
_revTable[i] = _revSlow(i, 8);
}
function _revSlow(idx, bits) {
let res =0;
let a = idx;
for (let i=0; i<bits; i++) {
res <<= 1;
res = res | (a & 1);
a >>=1;
}
return res;
}
function log2( V )
{
return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) | ( ( V & 0xAAAAAAAA ) !== 0 ) );
}
function formatHash(b, title) {
const a = new DataView(b.buffer, b.byteOffset, b.byteLength);
let S = "";
for (let i=0; i<4; i++) {
if (i>0) S += "\n";
S += "\t\t";
for (let j=0; j<4; j++) {
if (j>0) S += " ";
S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0");
}
}
if (title) S = title + "\n" + S;
return S;
}
function hashIsEqual(h1, h2) {
if (h1.byteLength != h2.byteLength) return false;
var dv1 = new Int8Array(h1);
var dv2 = new Int8Array(h2);
for (var i = 0 ; i != h1.byteLength ; i++)
{
if (dv1[i] != dv2[i]) return false;
}
return true;
}
function cloneHasher(h) {
const ph = h.getPartialHash();
const res = Blake2b__default["default"](64);
res.setPartialHash(ph);
return res;
}
async function sameRatio$2(curve, g1s, g1sx, g2s, g2sx) {
if (curve.G1.isZero(g1s)) return false;
if (curve.G1.isZero(g1sx)) return false;
if (curve.G2.isZero(g2s)) return false;
if (curve.G2.isZero(g2sx)) return false;
// return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s));
const res = await curve.pairingEq(g1s, g2sx, curve.G1.neg(g1sx), g2s);
return res;
}
function askEntropy() {
if (process.browser) {
return window.prompt("Enter a random text. (Entropy): ", "");
} else {
const rl = readline__default["default"].createInterface({
input: process.stdin,
output: process.stdout
});
return new Promise((resolve) => {
rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
});
}
}
async function getRandomRng(entropy) {
// Generate a random Rng
while (!entropy) {
entropy = await askEntropy();
}
const hasher = Blake2b__default["default"](64);
hasher.update(crypto__default["default"].randomBytes(64));
const enc = new TextEncoder(); // always utf-8
hasher.update(enc.encode(entropy));
const hash = Buffer.from(hasher.digest());
const seed = [];
for (let i=0;i<8;i++) {
seed[i] = hash.readUInt32BE(i*4);
}
const rng = new ffjavascript.ChaCha(seed);
return rng;
}
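// The beacon RNG below is deterministic: it hashes beaconHash with SHA-256 2^numIterationsExp
// times (split into inner/outer loops to avoid 32-bit overflow) and seeds a ChaCha stream with
// the final digest, so anyone can reproduce a beacon contribution from the published hash and
// iteration count.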
function rngFromBeaconParams(beaconHash, numIterationsExp) {
let nIterationsInner;
let nIterationsOuter;
if (numIterationsExp<32) {
nIterationsInner = (1 << numIterationsExp) >>> 0;
nIterationsOuter = 1;
} else {
nIterationsInner = 0x100000000;
nIterationsOuter = (1 << (numIterationsExp-32)) >>> 0;
}
let curHash = beaconHash;
for (let i=0; i<nIterationsOuter; i++) {
for (let j=0; j<nIterationsInner; j++) {
curHash = crypto__default["default"].createHash("sha256").update(curHash).digest();
}
}
const curHashV = new DataView(curHash.buffer, curHash.byteOffset, curHash.byteLength);
const seed = [];
for (let i=0; i<8; i++) {
seed[i] = curHashV.getUint32(i*4, false);
}
const rng = new ffjavascript.ChaCha(seed);
return rng;
}
function hex2ByteArray(s) {
if (s instanceof Uint8Array) return s;
if (s.slice(0,2) == "0x") s = s.slice(2);
return new Uint8Array(s.match(/[\da-f]{2}/gi).map(function (h) {
return parseInt(h, 16);
}));
}
function byteArray2hex(byteArray) {
return Array.prototype.map.call(byteArray, function(byte) {
return ("0" + (byte & 0xFF).toString(16)).slice(-2);
}).join("");
}
const bls12381q = ffjavascript.Scalar.e("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16);
const bn128q = ffjavascript.Scalar.e("21888242871839275222246405745257275088696311157297823662689037894645226208583");
async function getCurveFromQ(q) {
let curve;
if (ffjavascript.Scalar.eq(q, bn128q)) {
curve = await ffjavascript.buildBn128();
} else if (ffjavascript.Scalar.eq(q, bls12381q)) {
curve = await ffjavascript.buildBls12381();
} else {
throw new Error(`Curve not supported: ${ffjavascript.Scalar.toString(q)}`);
}
return curve;
}
async function getCurveFromName(name) {
let curve;
const normName = normalizeName(name);
if (["BN128", "BN254", "ALTBN128"].indexOf(normName) >= 0) {
curve = await ffjavascript.buildBn128();
} else if (["BLS12381"].indexOf(normName) >= 0) {
curve = await ffjavascript.buildBls12381();
} else {
throw new Error(`Curve not supported: ${name}`);
}
return curve;
function normalizeName(n) {
return n.toUpperCase().match(/[A-Za-z0-9]+/g).join("");
}
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function writePTauHeader(fd, curve, power, ceremonyPower) {
// Write the header
///////////
if (! ceremonyPower) ceremonyPower = power;
await fd.writeULE32(1); // Header type
const pHeaderSize = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
await fd.writeULE32(curve.F1.n64*8);
const buff = new Uint8Array(curve.F1.n8);
ffjavascript.Scalar.toRprLE(buff, 0, curve.q, curve.F1.n8);
await fd.write(buff);
await fd.writeULE32(power); // power
await fd.writeULE32(ceremonyPower); // power
const headerSize = fd.pos - pHeaderSize - 8;
const oldPos = fd.pos;
await fd.writeULE64(headerSize, pHeaderSize);
fd.pos = oldPos;
}
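// Sketch of the ptau header layout written above (section id 1): a ULE64 section length,
// then ULE32 n8 (field element size in bytes), the base field prime q as n8 little-endian
// bytes, and two ULE32 values: the file's power and the original ceremony power.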
async function readPTauHeader(fd, sections) {
if (!sections[1]) throw new Error(fd.fileName + ": File has no header");
if (sections[1].length>1) throw new Error(fd.fileName +": File has more than one header");
fd.pos = sections[1][0].p;
const n8 = await fd.readULE32();
const buff = await fd.read(n8);
const q = ffjavascript.Scalar.fromRprLE(buff);
const curve = await getCurveFromQ(q);
if (curve.F1.n64*8 != n8) throw new Error(fd.fileName +": Invalid size");
const power = await fd.readULE32();
const ceremonyPower = await fd.readULE32();
if (fd.pos-sections[1][0].p != sections[1][0].size) throw new Error("Invalid PTau header size");
return {curve, power, ceremonyPower};
}
async function readPtauPubKey(fd, curve, montgomery) {
const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3);
return fromPtauPubKeyRpr(buff, 0, curve, montgomery);
}
function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
const key = {
tau: {},
alpha: {},
beta: {}
};
key.tau.g1_s = readG1();
key.tau.g1_sx = readG1();
key.alpha.g1_s = readG1();
key.alpha.g1_sx = readG1();
key.beta.g1_s = readG1();
key.beta.g1_sx = readG1();
key.tau.g2_spx = readG2();
key.alpha.g2_spx = readG2();
key.beta.g2_spx = readG2();
return key;
function readG1() {
let p;
if (montgomery) {
p = curve.G1.fromRprLEM( buff, pos );
} else {
p = curve.G1.fromRprUncompressed( buff, pos );
}
pos += curve.G1.F.n8*2;
return p;
}
function readG2() {
let p;
if (montgomery) {
p = curve.G2.fromRprLEM( buff, pos );
} else {
p = curve.G2.fromRprUncompressed( buff, pos );
}
pos += curve.G2.F.n8*2;
return p;
}
}
function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
writeG1(key.tau.g1_s);
writeG1(key.tau.g1_sx);
writeG1(key.alpha.g1_s);
writeG1(key.alpha.g1_sx);
writeG1(key.beta.g1_s);
writeG1(key.beta.g1_sx);
writeG2(key.tau.g2_spx);
writeG2(key.alpha.g2_spx);
writeG2(key.beta.g2_spx);
async function writeG1(p) {
if (montgomery) {
curve.G1.toRprLEM(buff, pos, p);
} else {
curve.G1.toRprUncompressed(buff, pos, p);
}
pos += curve.F1.n8*2;
}
async function writeG2(p) {
if (montgomery) {
curve.G2.toRprLEM(buff, pos, p);
} else {
curve.G2.toRprUncompressed(buff, pos, p);
}
pos += curve.F2.n8*2;
}
return buff;
}
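// Serialized PTau public-key layout assumed by the reader/writer above: six G1 points
// (tau.g1_s, tau.g1_sx, alpha.g1_s, alpha.g1_sx, beta.g1_s, beta.g1_sx) followed by three
// G2 points (tau.g2_spx, alpha.g2_spx, beta.g2_spx), each either in Montgomery LE form or
// uncompressed, for a total of 6*sG1 + 3*sG2 bytes.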
async function writePtauPubKey(fd, curve, key, montgomery) {
const buff = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);
toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
await fd.write(buff);
}
async function readContribution$1(fd, curve) {
const c = {};
c.tauG1 = await readG1();
c.tauG2 = await readG2();
c.alphaG1 = await readG1();
c.betaG1 = await readG1();
c.betaG2 = await readG2();
c.key = await readPtauPubKey(fd, curve, true);
c.partialHash = await fd.read(216);
c.nextChallenge = await fd.read(64);
c.type = await fd.readULE32();
const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
toPtauPubKeyRpr(buffV, 0, curve, c.key, false);
const responseHasher = Blake2b__default["default"](64);
responseHasher.setPartialHash(c.partialHash);
responseHasher.update(buffV);
c.responseHash = responseHasher.digest();
const paramLength = await fd.readULE32();
const curPos = fd.pos;
let lastType =0;
while (fd.pos-curPos < paramLength) {
const buffType = await readDV(1);
if (buffType[0]<= lastType) throw new Error("Parameters in the contribution must be sorted");
lastType = buffType[0];
if (buffType[0]==1) { // Name
const buffLen = await readDV(1);
const buffStr = await readDV(buffLen[0]);
c.name = new TextDecoder().decode(buffStr);
} else if (buffType[0]==2) {
const buffExp = await readDV(1);
c.numIterationsExp = buffExp[0];
} else if (buffType[0]==3) {
const buffLen = await readDV(1);
c.beaconHash = await readDV(buffLen[0]);
} else {
throw new Error("Parameter not recognized");
}
}
if (fd.pos != curPos + paramLength) {
throw new Error("Parametes do not match");
}
return c;
async function readG1() {
const pBuff = await fd.read(curve.G1.F.n8*2);
return curve.G1.fromRprLEM( pBuff );
}
async function readG2() {
const pBuff = await fd.read(curve.G2.F.n8*2);
return curve.G2.fromRprLEM( pBuff );
}
async function readDV(n) {
const b = await fd.read(n);
return new Uint8Array(b);
}
}
async function readContributions(fd, curve, sections) {
if (!sections[7]) throw new Error(fd.fileName + ": File has no contributions");
if (sections[7].length>1) throw new Error(fd.fileName +": File has more than one contributions section");
fd.pos = sections[7][0].p;
const nContributions = await fd.readULE32();
const contributions = [];
for (let i=0; i<nContributions; i++) {
const c = await readContribution$1(fd, curve);
c.id = i+1;
contributions.push(c);
}
return contributions;
}
async function writeContribution$1(fd, curve, contribution) {
const buffG1 = new Uint8Array(curve.F1.n8*2);
const buffG2 = new Uint8Array(curve.F2.n8*2);
await writeG1(contribution.tauG1);
await writeG2(contribution.tauG2);
await writeG1(contribution.alphaG1);
await writeG1(contribution.betaG1);
await writeG2(contribution.betaG2);
await writePtauPubKey(fd, curve, contribution.key, true);
await fd.write(contribution.partialHash);
await fd.write(contribution.nextChallenge);
await fd.writeULE32(contribution.type || 0);
const params = [];
if (contribution.name) {
params.push(1); // Param type 1: name
const nameData = new TextEncoder().encode(contribution.name.substring(0, 64));
params.push(nameData.byteLength);
for (let i=0; i<nameData.byteLength; i++) params.push(nameData[i]);
}
if (contribution.type == 1) {
params.push(2); // Param type 2: numIterationsExp
params.push(contribution.numIterationsExp);
params.push(3); // Param type 3: beaconHash
params.push(contribution.beaconHash.byteLength);
for (let i=0; i<contribution.beaconHash.byteLength; i++) params.push(contribution.beaconHash[i]);
}
if (params.length>0) {
const paramsBuff = new Uint8Array(params);
await fd.writeULE32(paramsBuff.byteLength);
await fd.write(paramsBuff);
} else {
await fd.writeULE32(0);
}
async function writeG1(p) {
curve.G1.toRprLEM(buffG1, 0, p);
await fd.write(buffG1);
}
async function writeG2(p) {
curve.G2.toRprLEM(buffG2, 0, p);
await fd.write(buffG2);
}
}
async function writeContributions(fd, curve, contributions) {
await fd.writeULE32(7); // Header type
const pContributionsSize = fd.pos;
await fd.writeULE64(0); // Temporarily set to 0 length
await fd.writeULE32(contributions.length);
for (let i=0; i< contributions.length; i++) {
await writeContribution$1(fd, curve, contributions[i]);
}
const contributionsSize = fd.pos - pContributionsSize - 8;
const oldPos = fd.pos;
await fd.writeULE64(contributionsSize, pContributionsSize);
fd.pos = oldPos;
}
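// Contributions section (id 7) layout implied by the code above: a ULE32 contribution count,
// then for each contribution its tauG1/tauG2/alphaG1/betaG1/betaG2 points, the public key,
// a 216-byte partial hash state, the 64-byte next challenge, a ULE32 type, and an optional
// sorted parameter block (1 = name, 2 = numIterationsExp, 3 = beaconHash).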
function calculateFirstChallengeHash(curve, power, logger) {
if (logger) logger.debug("Calculating First Challenge Hash");
const hasher = new Blake2b__default["default"](64);
const vG1 = new Uint8Array(curve.G1.F.n8*2);
const vG2 = new Uint8Array(curve.G2.F.n8*2);
curve.G1.toRprUncompressed(vG1, 0, curve.G1.g);
curve.G2.toRprUncompressed(vG2, 0, curve.G2.g);
hasher.update(Blake2b__default["default"](64).digest());
let n;
n=(2 ** power)*2 -1;
if (logger) logger.debug("Calculate Initial Hash: tauG1");
hashBlock(vG1, n);
n= 2 ** power;
if (logger) logger.debug("Calculate Initial Hash: tauG2");
hashBlock(vG2, n);
if (logger) logger.debug("Calculate Initial Hash: alphaTauG1");
hashBlock(vG1, n);
if (logger) logger.debug("Calculate Initial Hash: betaTauG1");
hashBlock(vG1, n);
hasher.update(vG2);
return hasher.digest();
function hashBlock(buff, n) {
const blockSize = 500000;
const nBlocks = Math.floor(n / blockSize);
const rem = n % blockSize;
const bigBuff = new Uint8Array(blockSize * buff.byteLength);
for (let i=0; i<blockSize; i++) bigBuff.set(buff, i*buff.byteLength);
for (let i=0; i<nBlocks; i++) {
hasher.update(bigBuff);
if (logger) logger.debug("Initial hash: " + i*blockSize);
}
for (let i=0; i<rem; i++) hasher.update(buff);
}
}
function keyFromBeacon(curve, challengeHash, beaconHash, numIterationsExp) {
const rng = rngFromBeaconParams(beaconHash, numIterationsExp);
const key = createPTauKey(curve, challengeHash, rng);
return key;
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function newAccumulator(curve, power, fileName, logger) {
await Blake2b__default["default"].ready();
const fd = await binFileUtils__namespace.createBinFile(fileName, "ptau", 1, 7);
await writePTauHeader(fd, curve, power, 0);
const buffG1 = curve.G1.oneAffine;
const buffG2 = curve.G2.oneAffine;
// Write tauG1
///////////
await binFileUtils__namespace.startWriteSection(fd, 2);
const nTauG1 = (2 ** power) * 2 -1;
for (let i=0; i< nTauG1; i++) {
await fd.write(buffG1);
if ((logger)&&((i%100000) == 0)&&i) logger.log("tauG1: " + i);
}
await binFileUtils__namespace.endWriteSection(fd);
// Write tauG2
///////////
await binFileUtils__namespace.startWriteSection(fd, 3);
const nTauG2 = (2 ** power);
for (let i=0; i< nTauG2; i++) {
await fd.write(buffG2);
if ((logger)&&((i%100000) == 0)&&i) logger.log("tauG2: " + i);
}
await binFileUtils__namespace.endWriteSection(fd);
// Write alphaTauG1
///////////
await binFileUtils__namespace.startWriteSection(fd, 4);
const nAlfaTauG1 = (2 ** power);
for (let i=0; i< nAlfaTauG1; i++) {
await fd.write(buffG1);
if ((logger)&&((i%100000) == 0)&&i) logger.log("alphaTauG1: " + i);
}
await binFileUtils__namespace.endWriteSection(fd);
// Write betaTauG1
///////////
await binFileUtils__namespace.startWriteSection(fd, 5);
const nBetaTauG1 = (2 ** power);
for (let i=0; i< nBetaTauG1; i++) {
await fd.write(buffG1);
if ((logger)&&((i%100000) == 0)&&i) logger.log("betaTauG1: " + i);
}
await binFileUtils__namespace.endWriteSection(fd);
// Write betaG2
///////////
await binFileUtils__namespace.startWriteSection(fd, 6);
await fd.write(buffG2);
await binFileUtils__namespace.endWriteSection(fd);
// Contributions
///////////
await binFileUtils__namespace.startWriteSection(fd, 7);
await fd.writeULE32(0); // 0 Contributions
await binFileUtils__namespace.endWriteSection(fd);
await fd.close();
const firstChallengeHash = calculateFirstChallengeHash(curve, power, logger);
if (logger) logger.debug(formatHash(Blake2b__default["default"](64).digest(), "Blank Contribution Hash:"));
if (logger) logger.info(formatHash(firstChallengeHash, "First Contribution Hash:"));
return firstChallengeHash;
}
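// Typical CLI entry point for the function above (illustrative; power 12 is just an example):
//   snarkjs powersoftau new bn128 12 pot12_0000.ptau -v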
// Format of the output
async function exportChallenge(pTauFilename, challengeFilename, logger) {
await Blake2b__default["default"].ready();
const {fd: fdFrom, sections} = await binFileUtils__namespace.readBinFile(pTauFilename, "ptau", 1);
const {curve, power} = await readPTauHeader(fdFrom, sections);
const contributions = await readContributions(fdFrom, curve, sections);
let lastResponseHash, curChallengeHash;
if (contributions.length == 0) {
lastResponseHash = Blake2b__default["default"](64).digest();
curChallengeHash = calculateFirstChallengeHash(curve, power);
} else {
lastResponseHash = contributions[contributions.length-1].responseHash;
curChallengeHash = contributions[contributions.length-1].nextChallenge;
}
if (logger) logger.info(formatHash(lastResponseHash, "Last Response Hash: "));
if (logger) logger.info(formatHash(curChallengeHash, "New Challenge Hash: "));
const fdTo = await fastFile__namespace.createOverride(challengeFilename);
const toHash = Blake2b__default["default"](64);
await fdTo.write(lastResponseHash);
toHash.update(lastResponseHash);
await exportSection(2, "G1", (2 ** power) * 2 -1, "tauG1");
await exportSection(3, "G2", (2 ** power) , "tauG2");
await exportSection(4, "G1", (2 ** power) , "alphaTauG1");
await exportSection(5, "G1", (2 ** power) , "betaTauG1");
await exportSection(6, "G2", 1 , "betaG2");
await fdFrom.close();
await fdTo.close();
const calcCurChallengeHash = toHash.digest();
if (!hashIsEqual (curChallengeHash, calcCurChallengeHash)) {
if (logger) logger.info(formatHash(calcCurChallengeHash, "Calculated Current Challenge Hash: "));
if (logger) logger.error("PTau file is corrupted. The calculated new challenge hash does not match the declared one");
throw new Error("PTau file is corrupted. The calculated new challenge hash does not match the declared one");
}
return curChallengeHash;
async function exportSection(sectionId, groupName, nPoints, sectionName) {
const G = curve[groupName];
const sG = G.F.n8*2;
const nPointsChunk = Math.floor((1<<24)/sG);
await binFileUtils__namespace.startReadUniqueSection(fdFrom, sections, sectionId);
for (let i=0; i< nPoints; i+= nPointsChunk) {
if (logger) logger.debug(`Exporting ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
let buff;
buff = await fdFrom.read(n*sG);
buff = await G.batchLEMtoU(buff);
await fdTo.write(buff);
toHash.update(buff);
}
await binFileUtils__namespace.endReadSection(fdFrom);
}
}
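// The exported challenge file is the previous response hash followed by the tauG1, tauG2,
// alphaTauG1, betaTauG1 and betaG2 sections converted to uncompressed form. Illustrative CLI
// (file names are examples only):
//   snarkjs powersoftau export challenge pot12_0000.ptau challenge_0001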
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, logger) {
await Blake2b__default["default"].ready();
const noHash = new Uint8Array(64);
for (let i=0; i<64; i++) noHash[i] = 0xFF;
const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await readPTauHeader(fdOld, sections);
const contributions = await readContributions(fdOld, curve, sections);
const currentContribution = {};
if (name) currentContribution.name = name;
const sG1 = curve.F1.n8*2;
const scG1 = curve.F1.n8; // Compressed size
const sG2 = curve.F2.n8*2;
const scG2 = curve.F2.n8; // Compressed size
const fdResponse = await fastFile__namespace.readExisting(contributionFilename);
if (fdResponse.totalSize !=
64 + // Old Hash
((2 ** power)*2-1)*scG1 +
(2 ** power)*scG2 +
(2 ** power)*scG1 +
(2 ** power)*scG1 +
scG2 +
sG1*6 + sG2*3)
throw new Error("Size of the contribution is invalid");
let lastChallengeHash;
if (contributions.length>0) {
lastChallengeHash = contributions[contributions.length-1].nextChallenge;
} else {
lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
}
const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, importPoints ? 7: 2);
await writePTauHeader(fdNew, curve, power);
const contributionPreviousHash = await fdResponse.read(64);
if (hashIsEqual(noHash,lastChallengeHash)) {
lastChallengeHash = contributionPreviousHash;
contributions[contributions.length-1].nextChallenge = lastChallengeHash;
}
if(!hashIsEqual(contributionPreviousHash,lastChallengeHash))
throw new Error("Wrong contribution. this contribution is not based on the previus hash");
const hasherResponse = new Blake2b__default["default"](64);
hasherResponse.update(contributionPreviousHash);
const startSections = [];
let res;
res = await processSection(fdResponse, fdNew, "G1", 2, (2 ** power) * 2 -1, [1], "tauG1");
currentContribution.tauG1 = res[0];
res = await processSection(fdResponse, fdNew, "G2", 3, (2 ** power) , [1], "tauG2");
currentContribution.tauG2 = res[0];
res = await processSection(fdResponse, fdNew, "G1", 4, (2 ** power) , [0], "alphaG1");
currentContribution.alphaG1 = res[0];
res = await processSection(fdResponse, fdNew, "G1", 5, (2 ** power) , [0], "betaG1");
currentContribution.betaG1 = res[0];
res = await processSection(fdResponse, fdNew, "G2", 6, 1 , [0], "betaG2");
currentContribution.betaG2 = res[0];
currentContribution.partialHash = hasherResponse.getPartialHash();
const buffKey = await fdResponse.read(curve.F1.n8*2*6+curve.F2.n8*2*3);
currentContribution.key = fromPtauPubKeyRpr(buffKey, 0, curve, false);
hasherResponse.update(new Uint8Array(buffKey));
const hashResponse = hasherResponse.digest();
if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));
if (importPoints) {
const nextChallengeHasher = new Blake2b__default["default"](64);
nextChallengeHasher.update(hashResponse);
await hashSection(nextChallengeHasher, fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1", logger);
await hashSection(nextChallengeHasher, fdNew, "G2", 3, (2 ** power) , "tauG2", logger);
await hashSection(nextChallengeHasher, fdNew, "G1", 4, (2 ** power) , "alphaTauG1", logger);
await hashSection(nextChallengeHasher, fdNew, "G1", 5, (2 ** power) , "betaTauG1", logger);
await hashSection(nextChallengeHasher, fdNew, "G2", 6, 1 , "betaG2", logger);
currentContribution.nextChallenge = nextChallengeHasher.digest();
if (logger) logger.info(formatHash(currentContribution.nextChallenge, "Next Challenge Hash: "));
} else {
currentContribution.nextChallenge = noHash;
}
contributions.push(currentContribution);
await writeContributions(fdNew, curve, contributions);
await fdResponse.close();
await fdNew.close();
await fdOld.close();
return currentContribution.nextChallenge;
async function processSection(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
if (importPoints) {
return await processSectionImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName);
} else {
return await processSectionNoImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName);
}
}
async function processSectionImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
const G = curve[groupName];
const scG = G.F.n8;
const sG = G.F.n8*2;
const singularPoints = [];
await binFileUtils__namespace.startWriteSection(fdTo, sectionId);
const nPointsChunk = Math.floor((1<<24)/sG);
startSections[sectionId] = fdTo.pos;
for (let i=0; i< nPoints; i += nPointsChunk) {
if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffC = await fdFrom.read(n * scG);
hasherResponse.update(buffC);
const buffLEM = await G.batchCtoLEM(buffC);
await fdTo.write(buffLEM);
for (let j=0; j<singularPointIndexes.length; j++) {
const sp = singularPointIndexes[j];
if ((sp >=i) && (sp < i+n)) {
const P = G.fromRprLEM(buffLEM, (sp-i)*sG);
singularPoints.push(P);
}
}
}
await binFileUtils__namespace.endWriteSection(fdTo);
return singularPoints;
}
async function processSectionNoImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
const G = curve[groupName];
const scG = G.F.n8;
const singularPoints = [];
const nPointsChunk = Math.floor((1<<24)/scG);
for (let i=0; i< nPoints; i += nPointsChunk) {
if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffC = await fdFrom.read(n * scG);
hasherResponse.update(buffC);
for (let j=0; j<singularPointIndexes.length; j++) {
const sp = singularPointIndexes[j];
if ((sp >=i) && (sp < i+n)) {
const P = G.fromRprCompressed(buffC, (sp-i)*scG);
singularPoints.push(P);
}
}
}
return singularPoints;
}
async function hashSection(nextChallengeHasher, fdTo, groupName, sectionId, nPoints, sectionName, logger) {
const G = curve[groupName];
const sG = G.F.n8*2;
const nPointsChunk = Math.floor((1<<24)/sG);
const oldPos = fdTo.pos;
fdTo.pos = startSections[sectionId];
for (let i=0; i< nPoints; i += nPointsChunk) {
if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffLEM = await fdTo.read(n * sG);
const buffU = await G.batchLEMtoU(buffLEM);
nextChallengeHasher.update(buffU);
}
fdTo.pos = oldPos;
}
}
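// Illustrative CLI usage for importing a response produced by a contributor
// (file names are examples only):
//   snarkjs powersoftau import response pot12_0000.ptau response_0001 pot12_0001.ptau -n="First contribution"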
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
const sameRatio$1 = sameRatio$2;
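// verifyContribution relies on the same-ratio pairing check defined above: given pairs (A, x*A)
// in G1 and (B, x*B) in G2, sameRatio verifies e(A, x*B) * e(-x*A, B) == 1, which holds only when
// both pairs are scaled by the same secret x. This links each contributor's key to the updated powers.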
async function verifyContribution(curve, cur, prev, logger) {
let sr;
if (cur.type == 1) { // Verify the beacon.
const beaconKey = keyFromBeacon(curve, prev.nextChallenge, cur.beaconHash, cur.numIterationsExp);
if (!curve.G1.eq(cur.key.tau.g1_s, beaconKey.tau.g1_s)) {
if (logger) logger.error(`BEACON key (tauG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.tau.g1_sx, beaconKey.tau.g1_sx)) {
if (logger) logger.error(`BEACON key (tauG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.tau.g2_spx, beaconKey.tau.g2_spx)) {
if (logger) logger.error(`BEACON key (tauG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.alpha.g1_s, beaconKey.alpha.g1_s)) {
if (logger) logger.error(`BEACON key (alphaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.alpha.g1_sx, beaconKey.alpha.g1_sx)) {
if (logger) logger.error(`BEACON key (alphaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.alpha.g2_spx, beaconKey.alpha.g2_spx)) {
if (logger) logger.error(`BEACON key (alphaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.beta.g1_s, beaconKey.beta.g1_s)) {
if (logger) logger.error(`BEACON key (betaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.beta.g1_sx, beaconKey.beta.g1_sx)) {
if (logger) logger.error(`BEACON key (betaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.beta.g2_spx, beaconKey.beta.g2_spx)) {
if (logger) logger.error(`BEACON key (betaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` );
return false;
}
}
cur.key.tau.g2_sp = curve.G2.toAffine(getG2sp(curve, 0, prev.nextChallenge, cur.key.tau.g1_s, cur.key.tau.g1_sx));
cur.key.alpha.g2_sp = curve.G2.toAffine(getG2sp(curve, 1, prev.nextChallenge, cur.key.alpha.g1_s, cur.key.alpha.g1_sx));
cur.key.beta.g2_sp = curve.G2.toAffine(getG2sp(curve, 2, prev.nextChallenge, cur.key.beta.g1_s, cur.key.beta.g1_sx));
sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID key (tau) in challenge #"+cur.id);
return false;
}
sr = await sameRatio$1(curve, cur.key.alpha.g1_s, cur.key.alpha.g1_sx, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID key (alpha) in challenge #"+cur.id);
return false;
}
sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID key (beta) in challenge #"+cur.id);
return false;
}
sr = await sameRatio$1(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID tau*G1. challenge #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2);
if (sr !== true) {
if (logger) logger.error("INVALID tau*G2. challenge #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio$1(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID alpha*G1. challenge #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio$1(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
if (sr !== true) {
if (logger) logger.error("INVALID beta*G1. challenge #"+cur.id+" It does not follow the previous contribution");
return false;
}
sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, prev.betaG2, cur.betaG2);
if (sr !== true) {
if (logger) logger.error("INVALID beta*G2. challenge #"+cur.id+"It does not follow the previous contribution");
return false;
}
if (logger) logger.info("Powers Of tau file OK!");
return true;
}
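// Illustrative CLI usage for the full file verification below (file name is an example):
//   snarkjs powersoftau verify pot12_final.ptau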
async function verify(tauFilename, logger) {
let sr;
await Blake2b__default["default"].ready();
const {fd, sections} = await binFileUtils__namespace.readBinFile(tauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await readPTauHeader(fd, sections);
const contrs = await readContributions(fd, curve, sections);
if (logger) logger.debug("power: 2**" + power);
// Verify Last contribution
if (logger) logger.debug("Computing initial contribution hash");
const initialContribution = {
tauG1: curve.G1.g,
tauG2: curve.G2.g,
alphaG1: curve.G1.g,
betaG1: curve.G1.g,
betaG2: curve.G2.g,
nextChallenge: calculateFirstChallengeHash(curve, ceremonyPower, logger),
responseHash: Blake2b__default["default"](64).digest()
};
if (contrs.length == 0) {
if (logger) logger.error("This file has no contribution! It cannot be used in production");
return false;
}
let prevContr;
if (contrs.length>1) {
prevContr = contrs[contrs.length-2];
} else {
prevContr = initialContribution;
}
const curContr = contrs[contrs.length-1];
if (logger) logger.debug("Validating contribution #"+contrs[contrs.length-1].id);
const res = await verifyContribution(curve, curContr, prevContr, logger);
if (!res) return false;
const nextContributionHasher = Blake2b__default["default"](64);
nextContributionHasher.update(curContr.responseHash);
// Verify powers and compute nextChallengeHash
// await test();
// Verify Section tau*G1
if (logger) logger.debug("Verifying powers in tau*G1 section");
const rTau1 = await processSection(2, "G1", "tauG1", (2 ** power)*2-1, [0, 1], logger);
sr = await sameRatio$1(curve, rTau1.R1, rTau1.R2, curve.G2.g, curContr.tauG2);
if (sr !== true) {
if (logger) logger.error("tauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) {
if (logger) logger.error("First element of tau*G1 section must be the generator");
return false;
}
if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) {
if (logger) logger.error("Second element of tau*G1 section does not match the one in the contribution section");
return false;
}
// await test();
// Verify Section tau*G2
if (logger) logger.debug("Verifying powers in tau*G2 section");
const rTau2 = await processSection(3, "G2", "tauG2", 2 ** power, [0, 1], logger);
sr = await sameRatio$1(curve, curve.G1.g, curContr.tauG1, rTau2.R1, rTau2.R2);
if (sr !== true) {
if (logger) logger.error("tauG2 section. Powers do not match");
return false;
}
if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) {
if (logger) logger.error("First element of tau*G2 section must be the generator");
return false;
}
if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) {
if (logger) logger.error("Second element of tau*G2 section does not match the one in the contribution section");
return false;
}
// Verify Section alpha*tau*G1
if (logger) logger.debug("Verifying powers in alpha*tau*G1 section");
const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 2 ** power, [0], logger);
sr = await sameRatio$1(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, curve.G2.g, curContr.tauG2);
if (sr !== true) {
if (logger) logger.error("alphaTauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) {
if (logger) logger.error("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section");
return false;
}
// Verify Section beta*tau*G1
if (logger) logger.debug("Verifying powers in beta*tau*G1 section");
const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 2 ** power, [0], logger);
sr = await sameRatio$1(curve, rBetaTauG1.R1, rBetaTauG1.R2, curve.G2.g, curContr.tauG2);
if (sr !== true) {
if (logger) logger.error("betaTauG1 section. Powers do not match");
return false;
}
if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) {
if (logger) logger.error("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section");
return false;
}
//Verify Beta G2
const betaG2 = await processSectionBetaG2(logger);
if (!curve.G2.eq(curContr.betaG2, betaG2)) {
if (logger) logger.error("betaG2 element in betaG2 section does not match the one in the contribution section");
return false;
}
const nextContributionHash = nextContributionHasher.digest();
// Check the nextChallengeHash
if (power == ceremonyPower) {
if (!hashIsEqual(nextContributionHash,curContr.nextChallenge)) {
if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
return false;
}
}
if (logger) logger.info(formatHash(nextContributionHash, "Next challenge hash: "));
// Verify Previous contributions
printContribution(curContr, prevContr);
for (let i = contrs.length-2; i>=0; i--) {
const curContr = contrs[i];
const prevContr = (i>0) ? contrs[i-1] : initialContribution;
const res = await verifyContribution(curve, curContr, prevContr, logger);
if (!res) return false;
printContribution(curContr, prevContr);
}
if (logger) logger.info("-----------------------------------------------------");
if ((!sections[12]) || (!sections[13]) || (!sections[14]) || (!sections[15])) {
if (logger) logger.warn(
"this file does not contain phase2 precalculated values. Please run: \n" +
" snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony."
);
} else {
let res;
res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1", logger);
if (!res) return false;
res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2", logger);
if (!res) return false;
res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1", logger);
if (!res) return false;
res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1", logger);
if (!res) return false;
}
await fd.close();
if (logger) logger.info("Powers of Tau Ok!");
return true;
function printContribution(curContr, prevContr) {
if (!logger) return;
logger.info("-----------------------------------------------------");
logger.info(`Contribution #${curContr.id}: ${curContr.name ||""}`);
logger.info(formatHash(curContr.nextChallenge, "Next Challenge: "));
const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false);
const responseHasher = Blake2b__default["default"](64);
responseHasher.setPartialHash(curContr.partialHash);
responseHasher.update(buffV);
const responseHash = responseHasher.digest();
logger.info(formatHash(responseHash, "Response Hash:"));
logger.info(formatHash(prevContr.nextChallenge, "Based on Challenge Hash:"));
if (curContr.type == 1) {
logger.info(`Beacon generator: ${byteArray2hex(curContr.beaconHash)}`);
logger.info(`Beacon iterations Exp: ${curContr.numIterationsExp}`);
}
}
async function processSectionBetaG2(logger) {
const G = curve.G2;
const sG = G.F.n8*2;
const buffUv = new Uint8Array(sG);
if (!sections[6]) {
logger.error("File has no BetaG2 section");
throw new Error("File has no BetaG2 section");
}
if (sections[6].length>1) {
logger.error("File has no BetaG2 section");
throw new Error("File has more than one GetaG2 section");
}
fd.pos = sections[6][0].p;
const buff = await fd.read(sG);
const P = G.fromRprLEM(buff);
G.toRprUncompressed(buffUv, 0, P);
nextContributionHasher.update(buffUv);
return P;
}
async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes, logger) {
const MAX_CHUNK_SIZE = 1<<16;
const G = curve[groupName];
const sG = G.F.n8*2;
await binFileUtils__namespace.startReadUniqueSection(fd, sections, idSection);
const singularPoints = [];
let R1 = G.zero;
let R2 = G.zero;
let lastBase = G.zero;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if (logger) logger.debug(`points relations: ${sectionName}: ${i}/${nPoints} `);
const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
const bases = await fd.read(n*sG);
const basesU = await G.batchLEMtoU(bases);
nextContributionHasher.update(basesU);
const scalars = crypto__default["default"].randomBytes(4*(n-1));
if (i>0) {
const firstBase = G.fromRprLEM(bases, 0);
const r = crypto__default["default"].randomBytes(4).readUInt32BE(0, true);
R1 = G.add(R1, G.timesScalar(lastBase, r));
R2 = G.add(R2, G.timesScalar(firstBase, r));
}
const r1 = await G.multiExpAffine(bases.slice(0, (n-1)*sG), scalars);
const r2 = await G.multiExpAffine(bases.slice(sG), scalars);
R1 = G.add(R1, r1);
R2 = G.add(R2, r2);
lastBase = G.fromRprLEM( bases, (n-1)*sG);
for (let j=0; j<singularPointIndexes.length; j++) {
const sp = singularPointIndexes[j];
if ((sp >=i) && (sp < i+n)) {
const P = G.fromRprLEM(bases, (sp-i)*sG);
singularPoints.push(P);
}
}
}
await binFileUtils__namespace.endReadSection(fd);
return {
R1: R1,
R2: R2,
singularPoints: singularPoints
};
}
async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName, logger) {
if (logger) logger.debug(`Verifying phase2 calculated values ${sectionName}...`);
const G = curve[gName];
const sG = G.F.n8*2;
const seed= new Array(8);
for (let i=0; i<8; i++) {
seed[i] = crypto__default["default"].randomBytes(4).readUInt32BE(0, true);
}
for (let p=0; p<= power; p ++) {
const res = await verifyPower(p);
if (!res) return false;
}
if (tauSection == 2) {
const res = await verifyPower(power+1);
if (!res) return false;
}
return true;
async function verifyPower(p) {
if (logger) logger.debug(`Power ${p}...`);
const n8r = curve.Fr.n8;
const nPoints = 2 ** p;
let buff_r = new Uint32Array(nPoints);
let buffG;
let rng = new ffjavascript.ChaCha(seed);
if (logger) logger.debug(`Creating random numbers Powers${p}...`);
for (let i=0; i.
*/
/*
This function creates a new section in the fdTo file with id idSection.
It multiplies the points in fdFrom by first, first*inc, first*inc^2, ...,
nPoints times.
It also updates the newChallengeHasher with the new points
*/
async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, logger) {
const MAX_CHUNK_SIZE = 1 << 16;
const G = curve[groupName];
const sG = G.F.n8*2;
const nPoints = sections[idSection][0].size / sG;
await binFileUtils__namespace.startReadUniqueSection(fdOld, sections,idSection );
await binFileUtils__namespace.startWriteSection(fdNew, idSection);
let t = first;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if (logger) logger.debug(`Applying key: ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
let buff;
buff = await fdOld.read(n*sG);
buff = await G.batchApplyKey(buff, t, inc);
await fdNew.write(buff);
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
await binFileUtils__namespace.endWriteSection(fdNew);
await binFileUtils__namespace.endReadSection(fdOld);
}
async function applyKeyToChallengeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, logger) {
const G = curve[groupName];
const sG = G.F.n8*2;
const chunkSize = Math.floor((1<<20) / sG);
let t = first;
for (let i=0 ; i<nPoints ; i += chunkSize) {
if (logger) logger.debug(`Applying key ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, chunkSize);
const buffInU = await fdOld.read(n * sG);
const buffInLEM = await G.batchUtoLEM(buffInU);
const buffOutLEM = await G.batchApplyKey(buffInLEM, t, inc);
let buffOut;
if (formatOut == "COMPRESSED") {
buffOut = await G.batchLEMtoC(buffOutLEM);
} else {
buffOut = await G.batchLEMtoU(buffOutLEM);
}
if (responseHasher) responseHasher.update(buffOut);
await fdNew.write(buffOut);
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function challengeContribute(curve, challengeFilename, responesFileName, entropy, logger) {
await Blake2b__default["default"].ready();
const fdFrom = await fastFile__namespace.readExisting(challengeFilename);
const sG1 = curve.F1.n64*8*2;
const sG2 = curve.F2.n64*8*2;
const domainSize = (fdFrom.totalSize + sG1 - 64 - sG2) / (4*sG1 + sG2);
let e = domainSize;
let power = 0;
while (e>1) {
e = e /2;
power += 1;
}
if (2 ** power != domainSize) throw new Error("Invalid file size");
if (logger) logger.debug("Power to tau size: "+power);
const rng = await getRandomRng(entropy);
const fdTo = await fastFile__namespace.createOverride(responesFileName);
// Calculate the hash
const challengeHasher = Blake2b__default["default"](64);
for (let i=0; i<fdFrom.totalSize; i += fdFrom.pageSize) {
if (logger) logger.debug(`Hashing challenge ${i}/${fdFrom.totalSize}`);
const s = Math.min(fdFrom.totalSize - i, fdFrom.pageSize);
const buff = await fdFrom.read(s);
challengeHasher.update(buff);
}
const claimedHash = await fdFrom.read(64, 0);
if (logger) logger.info(formatHash(claimedHash, "Claimed Previous Response Hash: "));
const challengeHash = challengeHasher.digest();
if (logger) logger.info(formatHash(challengeHash, "Current Challenge Hash: "));
const key = createPTauKey(curve, challengeHash, rng);
if (logger) {
["tau", "alpha", "beta"].forEach( (k) => {
logger.debug(k + ".g1_s: " + curve.G1.toString(key[k].g1_s, 16));
logger.debug(k + ".g1_sx: " + curve.G1.toString(key[k].g1_sx, 16));
logger.debug(k + ".g2_sp: " + curve.G2.toString(key[k].g2_sp, 16));
logger.debug(k + ".g2_spx: " + curve.G2.toString(key[k].g2_spx, 16));
logger.debug("");
});
}
const responseHasher = Blake2b__default["default"](64);
await fdTo.write(challengeHash);
responseHasher.update(challengeHash);
await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power)*2-1, curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG1" , logger );
await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G2", (2 ** power) , curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG2" , logger );
await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power) , key.alpha.prvKey, key.tau.prvKey, "COMPRESSED", "alphaTauG1", logger );
await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power) , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG1" , logger );
await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G2", 1 , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG2" , logger );
// Write and hash key
const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
toPtauPubKeyRpr(buffKey, 0, curve, key, false);
await fdTo.write(buffKey);
responseHasher.update(buffKey);
const responseHash = responseHasher.digest();
if (logger) logger.info(formatHash(responseHash, "Contribution Response Hash: "));
await fdTo.close();
await fdFrom.close();
}
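// Illustrative CLI usage for contributing to an exported challenge file
// (curve, file names and entropy are examples):
//   snarkjs powersoftau challenge contribute bn128 challenge_0001 response_0001 -e="some random text"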
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
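// A beacon contribution is deterministic: the key is derived from a public random value
// (e.g. a block hash or lottery draw) hashed 2^numIterationsExp times. Illustrative CLI:
//   snarkjs powersoftau beacon pot12_0002.ptau pot12_beacon.ptau 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon"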
async function beacon$1(oldPtauFilename, newPTauFilename, name, beaconHashStr,numIterationsExp, logger) {
const beaconHash = hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length))
{
if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
return false;
}
if (beaconHash.length>=256) {
if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
return false;
}
numIterationsExp = parseInt(numIterationsExp);
if ((numIterationsExp<10)||(numIterationsExp>63)) {
if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
return false;
}
await Blake2b__default["default"].ready();
const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
if (power != ceremonyPower) {
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
return false;
}
if (sections[12]) {
if (logger) logger.warn("Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
}
const contributions = await readContributions(fdOld, curve, sections);
const curContribution = {
name: name,
type: 1, // Beacon
numIterationsExp: numIterationsExp,
beaconHash: beaconHash
};
let lastChallengeHash;
if (contributions.length>0) {
lastChallengeHash = contributions[contributions.length-1].nextChallenge;
} else {
lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
}
curContribution.key = keyFromBeacon(curve, lastChallengeHash, beaconHash, numIterationsExp);
const responseHasher = new Blake2b__default["default"](64);
responseHasher.update(lastChallengeHash);
const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 7);
await writePTauHeader(fdNew, curve, power);
const startSections = [];
let firstPoints;
firstPoints = await processSection(2, "G1", (2 ** power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1", logger );
curContribution.tauG1 = firstPoints[1];
firstPoints = await processSection(3, "G2", (2 ** power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2", logger );
curContribution.tauG2 = firstPoints[1];
firstPoints = await processSection(4, "G1", (2 ** power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1", logger );
curContribution.alphaG1 = firstPoints[0];
firstPoints = await processSection(5, "G1", (2 ** power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1", logger );
curContribution.betaG1 = firstPoints[0];
firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2", logger );
curContribution.betaG2 = firstPoints[0];
curContribution.partialHash = responseHasher.getPartialHash();
const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);
responseHasher.update(new Uint8Array(buffKey));
const hashResponse = responseHasher.digest();
if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));
const nextChallengeHasher = new Blake2b__default["default"](64);
nextChallengeHasher.update(hashResponse);
await hashSection(fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1", logger);
await hashSection(fdNew, "G2", 3, (2 ** power) , "tauG2", logger);
await hashSection(fdNew, "G1", 4, (2 ** power) , "alphaTauG1", logger);
await hashSection(fdNew, "G1", 5, (2 ** power) , "betaTauG1", logger);
await hashSection(fdNew, "G2", 6, 1 , "betaG2", logger);
curContribution.nextChallenge = nextChallengeHasher.digest();
if (logger) logger.info(formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));
contributions.push(curContribution);
await writeContributions(fdNew, curve, contributions);
await fdOld.close();
await fdNew.close();
return hashResponse;
async function processSection(sectionId, groupName, NPoints, first, inc, sectionName, logger) {
const res = [];
fdOld.pos = sections[sectionId][0].p;
await binFileUtils__namespace.startWriteSection(fdNew, sectionId);
startSections[sectionId] = fdNew.pos;
const G = curve[groupName];
const sG = G.F.n8*2;
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first;
for (let i=0 ; i<NPoints ; i += chunkSize) {
if (logger) logger.debug(`applying key ${sectionName}: ${i}/${NPoints}`);
const n = Math.min(NPoints-i, chunkSize);
const buffIn = await fdOld.read(n * sG);
const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
await fdNew.write(buffOutLEM);
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
if (i==0) { // Keep the first points to return them
for (let j=0; j<Math.min(2, NPoints); j++) res.push(G.fromRprLEM(buffOutLEM, j*sG));
}
}
await binFileUtils__namespace.endWriteSection(fdNew);
return res;
}
async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) {
const G = curve[groupName];
const sG = G.F.n8*2;
const nPointsChunk = Math.floor((1<<24)/sG);
const oldPos = fdTo.pos;
fdTo.pos = startSections[sectionId];
for (let i=0; i< nPoints; i += nPointsChunk) {
if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
const n = Math.min(nPoints-i, nPointsChunk);
const buffLEM = await fdTo.read(n * sG);
const buffU = await G.batchLEMtoU(buffLEM);
nextChallengeHasher.update(buffU);
}
fdTo.pos = oldPos;
}
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
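// Illustrative CLI usage for a regular contribution (names and entropy are examples):
//   snarkjs powersoftau contribute pot12_0001.ptau pot12_0002.ptau --name="Second contribution" -v -e="more random text"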
async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logger) {
await Blake2b__default["default"].ready();
const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
if (power != ceremonyPower) {
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
}
if (sections[12]) {
if (logger) logger.warn("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
}
const contributions = await readContributions(fdOld, curve, sections);
const curContribution = {
name: name,
type: 0, // Normal contribution
};
let lastChallengeHash;
const rng = await getRandomRng(entropy);
if (contributions.length>0) {
lastChallengeHash = contributions[contributions.length-1].nextChallenge;
} else {
lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
}
// Generate a random key
curContribution.key = createPTauKey(curve, lastChallengeHash, rng);
const responseHasher = new Blake2b__default["default"](64);
responseHasher.update(lastChallengeHash);
const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 7);
await writePTauHeader(fdNew, curve, power);
const startSections = [];
let firstPoints;
firstPoints = await processSection(2, "G1", (2 ** power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1" );
curContribution.tauG1 = firstPoints[1];
firstPoints = await processSection(3, "G2", (2 ** power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2" );
curContribution.tauG2 = firstPoints[1];
firstPoints = await processSection(4, "G1", (2 ** power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1" );
curContribution.alphaG1 = firstPoints[0];
firstPoints = await processSection(5, "G1", (2 ** power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1" );
curContribution.betaG1 = firstPoints[0];
firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2" );
curContribution.betaG2 = firstPoints[0];
curContribution.partialHash = responseHasher.getPartialHash();
const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);
responseHasher.update(new Uint8Array(buffKey));
const hashResponse = responseHasher.digest();
if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash: "));
const nextChallengeHasher = new Blake2b__default["default"](64);
nextChallengeHasher.update(hashResponse);
await hashSection(fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1");
await hashSection(fdNew, "G2", 3, (2 ** power) , "tauG2");
await hashSection(fdNew, "G1", 4, (2 ** power) , "alphaTauG1");
await hashSection(fdNew, "G1", 5, (2 ** power) , "betaTauG1");
await hashSection(fdNew, "G2", 6, 1 , "betaG2");
curContribution.nextChallenge = nextChallengeHasher.digest();
if (logger) logger.info(formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));
contributions.push(curContribution);
await writeContributions(fdNew, curve, contributions);
await fdOld.close();
await fdNew.close();
return hashResponse;
async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) {
const res = [];
fdOld.pos = sections[sectionId][0].p;
await binFileUtils__namespace.startWriteSection(fdNew, sectionId);
startSections[sectionId] = fdNew.pos;
const G = curve[groupName];
const sG = G.F.n8*2;
const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks
let t = first;
for (let i=0 ; i.
*/
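// preparePhase2: copies the original powers-of-tau sections and, for every power up to the
// ceremony power, writes the Lagrange-basis evaluations of tauG1/tauG2/alphaTauG1/betaTauG1
// into sections 12-15 so that circuit-specific setup can read them directly.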
async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await readPTauHeader(fdOld, sections);
const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 11);
await writePTauHeader(fdNew, curve, power);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 2);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7);
await processSection(2, 12, "G1", "tauG1" );
await processSection(3, 13, "G2", "tauG2" );
await processSection(4, 14, "G1", "alphaTauG1" );
await processSection(5, 15, "G1", "betaTauG1" );
await fdOld.close();
await fdNew.close();
// await fs.promises.unlink(newPTauFilename+ ".tmp");
return;
async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
if (logger) logger.debug("Starting section: "+sectionName);
await binFileUtils__namespace.startWriteSection(fdNew, newSectionId);
for (let p=0; p<=power; p++) {
await processSectionPower(p);
}
if (oldSectionId == 2) {
await processSectionPower(power+1);
}
await binFileUtils__namespace.endWriteSection(fdNew);
async function processSectionPower(p) {
const nPoints = 2 ** p;
const G = curve[Gstr];
curve.Fr;
const sGin = G.F.n8*2;
G.F.n8*3;
let buff;
buff = new ffjavascript.BigBuffer(nPoints*sGin);
await binFileUtils__namespace.startReadUniqueSection(fdOld, sections, oldSectionId);
if ((oldSectionId == 2)&&(p==power+1)) {
await fdOld.readToBuffer(buff, 0,(nPoints-1)*sGin );
buff.set(curve.G1.zeroAffine, (nPoints-1)*sGin );
} else {
await fdOld.readToBuffer(buff, 0,nPoints*sGin );
}
await binFileUtils__namespace.endReadSection(fdOld, true);
buff = await G.lagrangeEvaluations(buff, "affine", "affine", logger, sectionName);
await fdNew.write(buff);
/*
if (p <= curve.Fr.s) {
buff = await G.ifft(buff, "affine", "affine", logger, sectionName);
await fdNew.write(buff);
} else if (p == curve.Fr.s+1) {
const smallM = 1<.
*/
async function truncate(ptauFilename, template, logger) {
const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(ptauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
for (let p=1; p.
*/
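// convert: migrates an already prepared .ptau to the current layout. The monomial sections are
// copied unchanged, section 12 (tauG1 in Lagrange form) is rewritten, and the remaining
// Lagrange sections (13-15) are copied as-is.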
async function convert(oldPtauFilename, newPTauFilename, logger) {
const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await readPTauHeader(fdOld, sections);
const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 11);
await writePTauHeader(fdNew, curve, power);
// const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 2);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7);
await processSection(2, 12, "G1", "tauG1" );
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 13);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 14);
await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 15);
await fdOld.close();
await fdNew.close();
// await fs.promises.unlink(newPTauFilename+ ".tmp");
return;
async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
if (logger) logger.debug("Starting section: "+sectionName);
await binFileUtils__namespace.startWriteSection(fdNew, newSectionId);
const size = sections[newSectionId][0].size;
const chunkSize = fdOld.pageSize;
await binFileUtils__namespace.startReadUniqueSection(fdOld, sections, newSectionId);
for (let p=0; p.
*/
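// exportJson: reads a .ptau file and returns a plain object with the curve prime, the ceremony
// power, the contribution list, the monomial point sections (tauG1, tauG2, alphaTauG1,
// betaTauG1, betaG2) and, when present, the Lagrange-form sections.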
async function exportJson(pTauFilename, verbose) {
const {fd, sections} = await binFileUtils__namespace.readBinFile(pTauFilename, "ptau", 1);
const {curve, power} = await readPTauHeader(fd, sections);
const pTau = {};
pTau.q = curve.q;
pTau.power = power;
pTau.contributions = await readContributions(fd, curve, sections);
pTau.tauG1 = await exportSection(2, "G1", (2 ** power)*2 -1, "tauG1");
pTau.tauG2 = await exportSection(3, "G2", (2 ** power), "tauG2");
pTau.alphaTauG1 = await exportSection(4, "G1", (2 ** power), "alphaTauG1");
pTau.betaTauG1 = await exportSection(5, "G1", (2 ** power), "betaTauG1");
pTau.betaG2 = await exportSection(6, "G2", 1, "betaG2");
pTau.lTauG1 = await exportLagrange(12, "G1", "lTauG1");
pTau.lTauG2 = await exportLagrange(13, "G2", "lTauG2");
pTau.lAlphaTauG1 = await exportLagrange(14, "G1", "lAlphaTauG1");
pTau.lBetaTauG1 = await exportLagrange(15, "G1", "lBetaTauG1");
await fd.close();
return pTau;
async function exportSection(sectionId, groupName, nPoints, sectionName) {
const G = curve[groupName];
const sG = G.F.n8*2;
const res = [];
await binFileUtils__namespace.startReadUniqueSection(fd, sections, sectionId);
for (let i=0; i< nPoints; i++) {
if ((verbose)&&i&&(i%10000 == 0)) console.log(`${sectionName}: ` + i);
const buff = await fd.read(sG);
res.push(G.fromRprLEM(buff, 0));
}
await binFileUtils__namespace.endReadSection(fd);
return res;
}
async function exportLagrange(sectionId, groupName, sectionName) {
const G = curve[groupName];
const sG = G.F.n8*2;
const res = [];
await binFileUtils__namespace.startReadUniqueSection(fd, sections, sectionId);
for (let p=0; p<=power; p++) {
if (verbose) console.log(`${sectionName}: Power: ${p}`);
res[p] = [];
const nPoints = (2 ** p);
for (let i=0; i.
*/
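// BigArray: a Proxy-backed array split into subarrays of SUBARRAY_SIZE entries so that very
// large constraint and coefficient lists do not hit the size limits of a single contiguous JS
// Array. Numeric property accesses are routed to getElement/setElement by BigArrayHandler.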
const SUBARRAY_SIZE = 0x40000;
const BigArrayHandler = {
get: function(obj, prop) {
if (!isNaN(prop)) {
return obj.getElement(prop);
} else return obj[prop];
},
set: function(obj, prop, value) {
if (!isNaN(prop)) {
return obj.setElement(prop, value);
} else {
obj[prop] = value;
return true;
}
}
};
class _BigArray {
constructor (initSize) {
this.length = initSize || 0;
this.arr = new Array(SUBARRAY_SIZE);
for (let i=0; i= this.length) this.length = idx+1;
return true;
}
getKeys() {
const newA = new BigArray();
for (let i=0; i.
*/
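// newZKey: Groth16 circuit-specific setup. It reads the r1cs and a prepared .ptau, checks that
// both use the same curve and that 2**power covers the constraint count, and writes a .zkey
// containing the Groth16 header, the IC, A, B1, B2, C and H point sections, and the initial
// contribution hash (the "circuit hash").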
async function newZKey(r1csName, ptauName, zkeyName, logger) {
const TAU_G1 = 0;
const TAU_G2 = 1;
const ALPHATAU_G1 = 2;
const BETATAU_G1 = 3;
await Blake2b__default["default"].ready();
const csHasher = Blake2b__default["default"](64);
const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1, 1<<22, 1<<24);
const {curve, power} = await readPTauHeader(fdPTau, sectionsPTau);
const {fd: fdR1cs, sections: sectionsR1cs} = await binFileUtils.readBinFile(r1csName, "r1cs", 1, 1<<22, 1<<24);
const r1cs = await r1csfile.readR1csHeader(fdR1cs, sectionsR1cs, false);
const fdZKey = await binFileUtils.createBinFile(zkeyName, "zkey", 1, 10, 1<<22, 1<<24);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
if (r1cs.prime != curve.r) {
if (logger) logger.error("r1cs curve does not match powers of tau ceremony curve");
return -1;
}
const cirPower = log2(r1cs.nConstraints + r1cs.nPubInputs + r1cs.nOutputs +1 -1) +1;
if (cirPower > power) {
if (logger) logger.error(`circuit too big for this power of tau ceremony. ${r1cs.nConstraints}*2 > 2**${power}`);
return -1;
}
if (!sectionsPTau[12]) {
if (logger) logger.error("Powers of tau is not prepared.");
return -1;
}
const nPublic = r1cs.nOutputs + r1cs.nPubInputs;
const domainSize = 2 ** cirPower;
// Write the header
///////////
await binFileUtils.startWriteSection(fdZKey, 1);
await fdZKey.writeULE32(1); // Groth
await binFileUtils.endWriteSection(fdZKey);
// Write the Groth header section
///////////
await binFileUtils.startWriteSection(fdZKey, 2);
const primeQ = curve.q;
const n8q = (Math.floor( (ffjavascript.Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
const primeR = curve.r;
const n8r = (Math.floor( (ffjavascript.Scalar.bitLength(primeR) - 1) / 64) +1)*8;
const Rr = ffjavascript.Scalar.mod(ffjavascript.Scalar.shl(1, n8r*8), primeR);
const R2r = curve.Fr.e(ffjavascript.Scalar.mod(ffjavascript.Scalar.mul(Rr,Rr), primeR));
await fdZKey.writeULE32(n8q);
await binFileUtils.writeBigInt(fdZKey, primeQ, n8q);
await fdZKey.writeULE32(n8r);
await binFileUtils.writeBigInt(fdZKey, primeR, n8r);
await fdZKey.writeULE32(r1cs.nVars); // Total number of vars
await fdZKey.writeULE32(nPublic); // Total number of public vars (not including ONE)
await fdZKey.writeULE32(domainSize); // domainSize
let bAlpha1;
bAlpha1 = await fdPTau.read(sG1, sectionsPTau[4][0].p);
await fdZKey.write(bAlpha1);
bAlpha1 = await curve.G1.batchLEMtoU(bAlpha1);
csHasher.update(bAlpha1);
let bBeta1;
bBeta1 = await fdPTau.read(sG1, sectionsPTau[5][0].p);
await fdZKey.write(bBeta1);
bBeta1 = await curve.G1.batchLEMtoU(bBeta1);
csHasher.update(bBeta1);
let bBeta2;
bBeta2 = await fdPTau.read(sG2, sectionsPTau[6][0].p);
await fdZKey.write(bBeta2);
bBeta2 = await curve.G2.batchLEMtoU(bBeta2);
csHasher.update(bBeta2);
const bg1 = new Uint8Array(sG1);
curve.G1.toRprLEM(bg1, 0, curve.G1.g);
const bg2 = new Uint8Array(sG2);
curve.G2.toRprLEM(bg2, 0, curve.G2.g);
const bg1U = new Uint8Array(sG1);
curve.G1.toRprUncompressed(bg1U, 0, curve.G1.g);
const bg2U = new Uint8Array(sG2);
curve.G2.toRprUncompressed(bg2U, 0, curve.G2.g);
await fdZKey.write(bg2); // gamma2
await fdZKey.write(bg1); // delta1
await fdZKey.write(bg2); // delta2
csHasher.update(bg2U); // gamma2
csHasher.update(bg1U); // delta1
csHasher.update(bg2U); // delta2
await binFileUtils.endWriteSection(fdZKey);
if (logger) logger.info("Reading r1cs");
let sR1cs = await binFileUtils.readSection(fdR1cs, sectionsR1cs, 2);
const A = new BigArray(r1cs.nVars);
const B1 = new BigArray(r1cs.nVars);
const B2 = new BigArray(r1cs.nVars);
const C = new BigArray(r1cs.nVars- nPublic -1);
const IC = new Array(nPublic+1);
if (logger) logger.info("Reading tauG1");
let sTauG1 = await binFileUtils.readSection(fdPTau, sectionsPTau, 12, (domainSize -1)*sG1, domainSize*sG1);
if (logger) logger.info("Reading tauG2");
let sTauG2 = await binFileUtils.readSection(fdPTau, sectionsPTau, 13, (domainSize -1)*sG2, domainSize*sG2);
if (logger) logger.info("Reading alphatauG1");
let sAlphaTauG1 = await binFileUtils.readSection(fdPTau, sectionsPTau, 14, (domainSize -1)*sG1, domainSize*sG1);
if (logger) logger.info("Reading betatauG1");
let sBetaTauG1 = await binFileUtils.readSection(fdPTau, sectionsPTau, 15, (domainSize -1)*sG1, domainSize*sG1);
await processConstraints();
await composeAndWritePoints(3, "G1", IC, "IC");
await writeHs();
await hashHPoints();
await composeAndWritePoints(8, "G1", C, "C");
await composeAndWritePoints(5, "G1", A, "A");
await composeAndWritePoints(6, "G1", B1, "B1");
await composeAndWritePoints(7, "G2", B2, "B2");
const csHash = csHasher.digest();
// Contributions section
await binFileUtils.startWriteSection(fdZKey, 10);
await fdZKey.write(csHash);
await fdZKey.writeULE32(0);
await binFileUtils.endWriteSection(fdZKey);
if (logger) logger.info(formatHash(csHash, "Circuit hash: "));
await fdZKey.close();
await fdR1cs.close();
await fdPTau.close();
return csHash;
async function writeHs() {
await binFileUtils.startWriteSection(fdZKey, 9);
const buffOut = new ffjavascript.BigBuffer(domainSize*sG1);
if (cirPower < curve.Fr.s) {
let sTauG1 = await binFileUtils.readSection(fdPTau, sectionsPTau, 12, (domainSize*2-1)*sG1, domainSize*2*sG1);
for (let i=0; i< domainSize; i++) {
if ((logger)&&(i%10000 == 0)) logger.debug(`splitting buffer: ${i}/${domainSize}`);
const buff = sTauG1.slice( (i*2+1)*sG1, (i*2+1)*sG1 + sG1 );
buffOut.set(buff, i*sG1);
}
} else if (cirPower == curve.Fr.s) {
const o = sectionsPTau[12][0].p + ((2 ** (cirPower+1)) -1)*sG1;
await fdPTau.readToBuffer(buffOut, 0, domainSize*sG1, o + domainSize*sG1);
} else {
if (logger) logger.error("Circuit too big");
throw new Error("Circuit too big for this curve");
}
await fdZKey.write(buffOut);
await binFileUtils.endWriteSection(fdZKey);
}
async function processConstraints() {
const buffCoeff = new Uint8Array(12 + curve.Fr.n8);
const buffCoeffV = new DataView(buffCoeff.buffer);
const bOne = new Uint8Array(curve.Fr.n8);
curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1));
let r1csPos = 0;
function r1cs_readULE32() {
const buff = sR1cs.slice(r1csPos, r1csPos+4);
r1csPos += 4;
const buffV = new DataView(buff.buffer);
return buffV.getUint32(0, true);
}
const coefs = new BigArray();
for (let c=0; c=0) {
n = curve.Fr.fromRprLE(sR1cs.slice(c[3], c[3] + curve.Fr.n8), 0);
} else {
n = curve.Fr.fromRprLE(bOne, 0);
}
const nR2 = curve.Fr.mul(n, R2r);
curve.Fr.toRprLE(buffCoeff, 12, nR2);
buffSection.set(buffCoeff, coefsPos);
coefsPos += buffCoeff.length;
}
}
async function composeAndWritePoints(idSection, groupName, arr, sectionName) {
const CHUNK_SIZE= 1<<15;
const G = curve[groupName];
hashU32(arr.length);
await binFileUtils.startWriteSection(fdZKey, idSection);
let opPromises = [];
let i=0;
while (i {
if (logger) logger.debug(`Writing points end ${sectionName}: ${_i}/${arr.length}`);
return r;
}));
i += n;
t++;
}
const result = await Promise.all(opPromises);
for (let k=0; k 2<<14) {
bBases = new ffjavascript.BigBuffer(acc*sGin);
bScalars = new ffjavascript.BigBuffer(acc*curve.Fr.n8);
} else {
bBases = new Uint8Array(acc*sGin);
bScalars = new Uint8Array(acc*curve.Fr.n8);
}
let pB =0;
let pS =0;
const sBuffs = [
sTauG1,
sTauG2,
sAlphaTauG1,
sBetaTauG1
];
const bOne = new Uint8Array(curve.Fr.n8);
curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1));
let offset = 0;
for (let i=0; i=0) {
bScalars.set(
sR1cs.slice(
arr[i][j][2],
arr[i][j][2] + curve.Fr.n8
),
offset*curve.Fr.n8
);
} else {
bScalars.set(bOne, offset*curve.Fr.n8);
}
offset ++;
}
}
if (arr.length>1) {
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: bBases});
task.push({cmd: "ALLOCSET", var: 1, buff: bScalars});
task.push({cmd: "ALLOC", var: 2, len: arr.length*sGmid});
pB = 0;
pS = 0;
let pD =0;
for (let i=0; i.
*/
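// zkey header helpers: writeHeader/readHeader$1 serialize and parse the protocol id and the
// Groth16 or PLONK header section, while writeG1/writeG2/readG1/readG2 move single curve points
// in the little-endian Montgomery representation used inside .zkey files.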
async function writeHeader(fd, zkey) {
// Write the header
///////////
await binFileUtils__namespace.startWriteSection(fd, 1);
await fd.writeULE32(1); // Groth
await binFileUtils__namespace.endWriteSection(fd);
// Write the Groth header section
///////////
const curve = await getCurveFromQ(zkey.q);
await binFileUtils__namespace.startWriteSection(fd, 2);
const primeQ = curve.q;
const n8q = (Math.floor( (ffjavascript.Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
const primeR = curve.r;
const n8r = (Math.floor( (ffjavascript.Scalar.bitLength(primeR) - 1) / 64) +1)*8;
await fd.writeULE32(n8q);
await binFileUtils__namespace.writeBigInt(fd, primeQ, n8q);
await fd.writeULE32(n8r);
await binFileUtils__namespace.writeBigInt(fd, primeR, n8r);
await fd.writeULE32(zkey.nVars); // Total number of vars
await fd.writeULE32(zkey.nPublic); // Total number of public vars (not including ONE)
await fd.writeULE32(zkey.domainSize); // domainSize
await writeG1(fd, curve, zkey.vk_alpha_1);
await writeG1(fd, curve, zkey.vk_beta_1);
await writeG2(fd, curve, zkey.vk_beta_2);
await writeG2(fd, curve, zkey.vk_gamma_2);
await writeG1(fd, curve, zkey.vk_delta_1);
await writeG2(fd, curve, zkey.vk_delta_2);
await binFileUtils__namespace.endWriteSection(fd);
}
async function writeG1(fd, curve, p) {
const buff = new Uint8Array(curve.G1.F.n8*2);
curve.G1.toRprLEM(buff, 0, p);
await fd.write(buff);
}
async function writeG2(fd, curve, p) {
const buff = new Uint8Array(curve.G2.F.n8*2);
curve.G2.toRprLEM(buff, 0, p);
await fd.write(buff);
}
async function readG1(fd, curve, toObject) {
const buff = await fd.read(curve.G1.F.n8*2);
const res = curve.G1.fromRprLEM(buff, 0);
return toObject ? curve.G1.toObject(res) : res;
}
async function readG2(fd, curve, toObject) {
const buff = await fd.read(curve.G2.F.n8*2);
const res = curve.G2.fromRprLEM(buff, 0);
return toObject ? curve.G2.toObject(res) : res;
}
async function readHeader$1(fd, sections, toObject) {
// Read Header
/////////////////////
await binFileUtils__namespace.startReadUniqueSection(fd, sections, 1);
const protocolId = await fd.readULE32();
await binFileUtils__namespace.endReadSection(fd);
if (protocolId == 1) {
return await readHeaderGroth16(fd, sections, toObject);
} else if (protocolId == 2) {
return await readHeaderPlonk(fd, sections);
} else {
throw new Error("Protocol not supported: ");
}
}
async function readHeaderGroth16(fd, sections, toObject) {
const zkey = {};
zkey.protocol = "groth16";
// Read Groth Header
/////////////////////
await binFileUtils__namespace.startReadUniqueSection(fd, sections, 2);
const n8q = await fd.readULE32();
zkey.n8q = n8q;
zkey.q = await binFileUtils__namespace.readBigInt(fd, n8q);
const n8r = await fd.readULE32();
zkey.n8r = n8r;
zkey.r = await binFileUtils__namespace.readBigInt(fd, n8r);
let curve = await getCurveFromQ(zkey.q);
zkey.nVars = await fd.readULE32();
zkey.nPublic = await fd.readULE32();
zkey.domainSize = await fd.readULE32();
zkey.power = log2(zkey.domainSize);
zkey.vk_alpha_1 = await readG1(fd, curve, toObject);
zkey.vk_beta_1 = await readG1(fd, curve, toObject);
zkey.vk_beta_2 = await readG2(fd, curve, toObject);
zkey.vk_gamma_2 = await readG2(fd, curve, toObject);
zkey.vk_delta_1 = await readG1(fd, curve, toObject);
zkey.vk_delta_2 = await readG2(fd, curve, toObject);
await binFileUtils__namespace.endReadSection(fd);
return zkey;
}
async function readHeaderPlonk(fd, sections, protocol, toObject) {
const zkey = {};
zkey.protocol = "plonk";
// Read Plonk Header
/////////////////////
await binFileUtils__namespace.startReadUniqueSection(fd, sections, 2);
const n8q = await fd.readULE32();
zkey.n8q = n8q;
zkey.q = await binFileUtils__namespace.readBigInt(fd, n8q);
const n8r = await fd.readULE32();
zkey.n8r = n8r;
zkey.r = await binFileUtils__namespace.readBigInt(fd, n8r);
let curve = await getCurveFromQ(zkey.q);
zkey.nVars = await fd.readULE32();
zkey.nPublic = await fd.readULE32();
zkey.domainSize = await fd.readULE32();
zkey.power = log2(zkey.domainSize);
zkey.nAdditions = await fd.readULE32();
zkey.nConstrains = await fd.readULE32();
zkey.k1 = await fd.read(n8r);
zkey.k2 = await fd.read(n8r);
zkey.Qm = await readG1(fd, curve, toObject);
zkey.Ql = await readG1(fd, curve, toObject);
zkey.Qr = await readG1(fd, curve, toObject);
zkey.Qo = await readG1(fd, curve, toObject);
zkey.Qc = await readG1(fd, curve, toObject);
zkey.S1 = await readG1(fd, curve, toObject);
zkey.S2 = await readG1(fd, curve, toObject);
zkey.S3 = await readG1(fd, curve, toObject);
zkey.X_2 = await readG2(fd, curve, toObject);
await binFileUtils__namespace.endReadSection(fd);
return zkey;
}
async function readZKey(fileName, toObject) {
const {fd, sections} = await binFileUtils__namespace.readBinFile(fileName, "zkey", 1);
const zkey = await readHeader$1(fd, sections, "groth16");
const Fr = new ffjavascript.F1Field(zkey.r);
const Rr = ffjavascript.Scalar.mod(ffjavascript.Scalar.shl(1, zkey.n8r*8), zkey.r);
const Rri = Fr.inv(Rr);
const Rri2 = Fr.mul(Rri, Rri);
let curve = await getCurveFromQ(zkey.q);
// Read IC Section
///////////
await binFileUtils__namespace.startReadUniqueSection(fd, sections, 3);
zkey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
const P = await readG1(fd, curve, toObject);
zkey.IC.push(P);
}
await binFileUtils__namespace.endReadSection(fd);
// Read Coefs
///////////
await binFileUtils__namespace.startReadUniqueSection(fd, sections, 4);
const nCCoefs = await fd.readULE32();
zkey.ccoefs = [];
for (let i=0; i0) {
const paramsBuff = new Uint8Array(params);
await fd.writeULE32(paramsBuff.byteLength);
await fd.write(paramsBuff);
} else {
await fd.writeULE32(0);
}
}
async function writeMPCParams(fd, curve, mpcParams) {
await binFileUtils__namespace.startWriteSection(fd, 10);
await fd.write(mpcParams.csHash);
await fd.writeULE32(mpcParams.contributions.length);
for (let i=0; i.
*/
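// phase2importMPCParams: merges contributions from a bellman-style MPC params file into an
// existing Groth16 .zkey. It seeks past the vkey, IC/C, H, A, B1 and B2 point data to reach the
// transcript, checks that the imported file extends the old contribution list, and writes a new
// .zkey carrying the combined contributions.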
async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, name, logger) {
const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await binFileUtils__namespace.readBinFile(zkeyNameOld, "zkey", 2);
const zkeyHeader = await readHeader$1(fdZKeyOld, sectionsZKeyOld, false);
if (zkeyHeader.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
const curve = await getCurveFromQ(zkeyHeader.q);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
const oldMPCParams = await readMPCParams(fdZKeyOld, curve, sectionsZKeyOld);
const newMPCParams = {};
const fdMPCParams = await fastFile__namespace.readExisting(mpcparamsName);
fdMPCParams.pos =
sG1*3 + sG2*3 + // vKey
8 + sG1*zkeyHeader.nVars + // IC + C
4 + sG1*(zkeyHeader.domainSize-1) + // H
4 + sG1*zkeyHeader.nVars + // A
4 + sG1*zkeyHeader.nVars + // B1
4 + sG2*zkeyHeader.nVars; // B2
// csHash
newMPCParams.csHash = await fdMPCParams.read(64);
const nConttributions = await fdMPCParams.readUBE32();
newMPCParams.contributions = [];
for (let i=0; i newMPCParams.contributions.length) {
if (logger) logger.error("The impoerted file does not include new contributions");
return false;
}
for (let i=0; i.
*/
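// phase2verifyFromInit: verifies a Groth16 .zkey against a freshly generated initial zkey. It
// walks the recorded contributions (checking the delta updates with sameRatio pairings),
// compares the delta-dependent sections via sectionHasSameRatio, and logs each contribution
// hash together with the beacon parameters for beacon-type contributions.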
const sameRatio = sameRatio$2;
async function phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, logger) {
let sr;
await Blake2b__default["default"].ready();
const {fd, sections} = await binFileUtils__namespace.readBinFile(zkeyFileName, "zkey", 2);
const zkey = await readHeader$1(fd, sections, false);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
const curve = await getCurveFromQ(zkey.q);
const sG1 = curve.G1.F.n8*2;
const mpcParams = await readMPCParams(fd, curve, sections);
const accumulatedHasher = Blake2b__default["default"](64);
accumulatedHasher.update(mpcParams.csHash);
let curDelta = curve.G1.g;
for (let i=0; i=0; i--) {
const c = mpcParams.contributions[i];
if (logger) logger.info("-------------------------");
if (logger) logger.info(formatHash(c.contributionHash, `contribution #${i+1} ${c.name ? c.name : ""}:`));
if (c.type == 1) {
if (logger) logger.info(`Beacon generator: ${byteArray2hex(c.beaconHash)}`);
if (logger) logger.info(`Beacon iterations Exp: ${c.numIterationsExp}`);
}
}
if (logger) logger.info("-------------------------");
if (logger) logger.info("ZKey Ok!");
return true;
async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
const MAX_CHUNK_SIZE = 1<<20;
const G = curve[groupName];
const sG = G.F.n8*2;
await binFileUtils__namespace.startReadUniqueSection(fd1, sections1, idSection);
await binFileUtils__namespace.startReadUniqueSection(fd2, sections2, idSection);
let R1 = G.zero;
let R2 = G.zero;
const nPoints = sections1[idSection][0].size / sG;
for (let i=0; i.
*/
async function phase2verifyFromR1cs(r1csFileName, pTauFileName, zkeyFileName, logger) {
// const initFileName = "~" + zkeyFileName + ".init";
const initFileName = {type: "bigMem"};
await newZKey(r1csFileName, pTauFileName, initFileName, logger);
return await phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, logger);
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
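// phase2contribute: adds a new phase 2 contribution to a Groth16 .zkey. Randomness comes from
// the supplied entropy, the transcript hash is extended starting from the circuit hash, and the
// updated proving key and contribution list are written to the new file.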
async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, logger) {
await Blake2b__default["default"].ready();
const {fd: fdOld, sections: sections} = await binFileUtils__namespace.readBinFile(zkeyNameOld, "zkey", 2);
const zkey = await readHeader$1(fdOld, sections);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
const curve = await getCurveFromQ(zkey.q);
const mpcParams = await readMPCParams(fdOld, curve, sections);
const fdNew = await binFileUtils__namespace.createBinFile(zkeyNameNew, "zkey", 1, 10);
const rng = await getRandomRng(entropy);
const transcriptHasher = Blake2b__default["default"](64);
transcriptHasher.update(mpcParams.csHash);
for (let i=0; i.
*/
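// beacon (phase 2): like phase2contribute, but the randomness is derived deterministically from
// a public beacon hash iterated 2**numIterationsExp times, so the final contribution can be
// reproduced and audited by anyone.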
async function beacon(zkeyNameOld, zkeyNameNew, name, beaconHashStr, numIterationsExp, logger) {
await Blake2b__default["default"].ready();
const beaconHash = hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length))
{
if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
return false;
}
if (beaconHash.length>=256) {
if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
return false;
}
numIterationsExp = parseInt(numIterationsExp);
if ((numIterationsExp<10)||(numIterationsExp>63)) {
if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
return false;
}
const {fd: fdOld, sections: sections} = await binFileUtils__namespace.readBinFile(zkeyNameOld, "zkey", 2);
const zkey = await readHeader$1(fdOld, sections);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
const curve = await getCurveFromQ(zkey.q);
const mpcParams = await readMPCParams(fdOld, curve, sections);
const fdNew = await binFileUtils__namespace.createBinFile(zkeyNameNew, "zkey", 1, 10);
const rng = await rngFromBeaconParams(beaconHash, numIterationsExp);
const transcriptHasher = Blake2b__default["default"](64);
transcriptHasher.update(mpcParams.csHash);
for (let i=0; i.
*/
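// bellmanContribute: contributes to a bellman-format challenge file. It copies alpha, beta and
// gamma, multiplies both delta points by a random delta, rescales the H and L sections by
// delta^-1, copies A/B1/B2 untouched, and appends the new contribution to the transcript at the
// end of the response file.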
async function bellmanContribute(curve, challengeFilename, responesFileName, entropy, logger) {
await Blake2b__default["default"].ready();
const rng = await getRandomRng(entropy);
const delta = curve.Fr.fromRng(rng);
const invDelta = curve.Fr.inv(delta);
const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2;
const fdFrom = await fastFile__namespace.readExisting(challengeFilename);
const fdTo = await fastFile__namespace.createOverride(responesFileName);
await copy(sG1); // alpha1
await copy(sG1); // beta1
await copy(sG2); // beta2
await copy(sG2); // gamma2
const oldDelta1 = await readG1();
const delta1 = curve.G1.timesFr(oldDelta1, delta);
await writeG1(delta1);
const oldDelta2 = await readG2();
const delta2 = curve.G2.timesFr(oldDelta2, delta);
await writeG2(delta2);
// IC
const nIC = await fdFrom.readUBE32();
await fdTo.writeUBE32(nIC);
await copy(nIC*sG1);
// H
const nH = await fdFrom.readUBE32();
await fdTo.writeUBE32(nH);
await applyKeyToChallengeSection(fdFrom, fdTo, null, curve, "G1", nH, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "H", logger);
// L
const nL = await fdFrom.readUBE32();
await fdTo.writeUBE32(nL);
await applyKeyToChallengeSection(fdFrom, fdTo, null, curve, "G1", nL, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "L", logger);
// A
const nA = await fdFrom.readUBE32();
await fdTo.writeUBE32(nA);
await copy(nA*sG1);
// B1
const nB1 = await fdFrom.readUBE32();
await fdTo.writeUBE32(nB1);
await copy(nB1*sG1);
// B2
const nB2 = await fdFrom.readUBE32();
await fdTo.writeUBE32(nB2);
await copy(nB2*sG2);
//////////
/// Read contributions
//////////
const transcriptHasher = Blake2b__default["default"](64);
const mpcParams = {};
// csHash
mpcParams.csHash = await fdFrom.read(64);
transcriptHasher.update(mpcParams.csHash);
const nConttributions = await fdFrom.readUBE32();
mpcParams.contributions = [];
for (let i=0; i.
*/
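// zkeyExportVerificationKey: extracts the verification key from a .zkey as a JSON-friendly
// object. For Groth16 this includes alpha/beta/gamma/delta, the precomputed alpha-beta pairing
// and the IC points; for PLONK it includes the selector and permutation commitments, k1/k2 and
// the domain generator.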
const {stringifyBigInts: stringifyBigInts$3} = ffjavascript.utils;
async function zkeyExportVerificationKey(zkeyName, /* logger */ ) {
const {fd, sections} = await binFileUtils__namespace.readBinFile(zkeyName, "zkey", 2);
const zkey = await readHeader$1(fd, sections);
let res;
if (zkey.protocol == "groth16") {
res = await groth16Vk(zkey, fd, sections);
} else if (zkey.protocol == "plonk") {
res = await plonkVk(zkey);
} else {
throw new Error("zkey file is not groth16");
}
await fd.close();
return res;
}
async function groth16Vk(zkey, fd, sections) {
const curve = await getCurveFromQ(zkey.q);
const sG1 = curve.G1.F.n8*2;
const alphaBeta = await curve.pairing( zkey.vk_alpha_1 , zkey.vk_beta_2 );
let vKey = {
protocol: zkey.protocol,
curve: curve.name,
nPublic: zkey.nPublic,
vk_alpha_1: curve.G1.toObject(zkey.vk_alpha_1),
vk_beta_2: curve.G2.toObject(zkey.vk_beta_2),
vk_gamma_2: curve.G2.toObject(zkey.vk_gamma_2),
vk_delta_2: curve.G2.toObject(zkey.vk_delta_2),
vk_alphabeta_12: curve.Gt.toObject(alphaBeta)
};
// Read IC Section
///////////
await binFileUtils__namespace.startReadUniqueSection(fd, sections, 3);
vKey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
const buff = await fd.read(sG1);
const P = curve.G1.toObject(buff);
vKey.IC.push(P);
}
await binFileUtils__namespace.endReadSection(fd);
vKey = stringifyBigInts$3(vKey);
return vKey;
}
async function plonkVk(zkey) {
const curve = await getCurveFromQ(zkey.q);
let vKey = {
protocol: zkey.protocol,
curve: curve.name,
nPublic: zkey.nPublic,
power: zkey.power,
k1: curve.Fr.toObject(zkey.k1),
k2: curve.Fr.toObject(zkey.k2),
Qm: curve.G1.toObject(zkey.Qm),
Ql: curve.G1.toObject(zkey.Ql),
Qr: curve.G1.toObject(zkey.Qr),
Qo: curve.G1.toObject(zkey.Qo),
Qc: curve.G1.toObject(zkey.Qc),
S1: curve.G1.toObject(zkey.S1),
S2: curve.G1.toObject(zkey.S2),
S3: curve.G1.toObject(zkey.S3),
X_2: curve.G2.toObject(zkey.X_2),
w: curve.Fr.toObject(curve.Fr.w[zkey.power])
};
vKey = stringifyBigInts$3(vKey);
return vKey;
}
// Not ready yet
// module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;
async function exportSolidityVerifier(zKeyName, templates, logger) {
const verificationKey = await zkeyExportVerificationKey(zKeyName);
let template = templates[verificationKey.protocol];
return ejs__default["default"].render(template , verificationKey);
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
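// write: serializes a witness to a .wtns file. Section 1 holds the field element size, the
// prime and the witness length; section 2 holds the witness values themselves.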
async function write(fd, witness, prime) {
await binFileUtils__namespace.startWriteSection(fd, 1);
const n8 = (Math.floor( (ffjavascript.Scalar.bitLength(prime) - 1) / 64) +1)*8;
await fd.writeULE32(n8);
await binFileUtils__namespace.writeBigInt(fd, prime, n8);
await fd.writeULE32(witness.length);
await binFileUtils__namespace.endWriteSection(fd);
await binFileUtils__namespace.startWriteSection(fd, 2);
for (let i=0; i.
*/
const {stringifyBigInts: stringifyBigInts$2} = ffjavascript.utils;
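// groth16Prove$1: produces a Groth16 proof from a .zkey and a .wtns file. It builds the A/B/C
// evaluations from the coefficient section, shifts them to an odd coset (ifft, batchApplyKey,
// fft) and joins them into the quotient evaluations, runs multiexponentiations over the A, B1,
// B2, C and H point sections, and finally blinds pi_a, pi_b and pi_c with random r and s.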
async function groth16Prove$1(zkeyFileName, witnessFileName, logger) {
const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils__namespace.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23);
const wtns = await readHeader(fdWtns, sectionsWtns);
const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils__namespace.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
const zkey = await readHeader$1(fdZKey, sectionsZKey);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
if (!ffjavascript.Scalar.eq(zkey.r, wtns.q)) {
throw new Error("Curve of the witness does not match the curve of the proving key");
}
if (wtns.nWitness != zkey.nVars) {
throw new Error(`Invalid witness length. Circuit: ${zkey.nVars}, witness: ${wtns.nWitness}`);
}
const curve = await getCurveFromQ(zkey.q);
const Fr = curve.Fr;
const G1 = curve.G1;
const G2 = curve.G2;
const power = log2(zkey.domainSize);
if (logger) logger.debug("Reading Wtns");
const buffWitness = await binFileUtils__namespace.readSection(fdWtns, sectionsWtns, 2);
if (logger) logger.debug("Reading Coeffs");
const buffCoeffs = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 4);
if (logger) logger.debug("Building ABC");
const [buffA_T, buffB_T, buffC_T] = await buldABC1(curve, zkey, buffWitness, buffCoeffs, logger);
const inc = power == Fr.s ? curve.Fr.shift : curve.Fr.w[power+1];
const buffA = await Fr.ifft(buffA_T, "", "", logger, "IFFT_A");
const buffAodd = await Fr.batchApplyKey(buffA, Fr.e(1), inc);
const buffAodd_T = await Fr.fft(buffAodd, "", "", logger, "FFT_A");
const buffB = await Fr.ifft(buffB_T, "", "", logger, "IFFT_B");
const buffBodd = await Fr.batchApplyKey(buffB, Fr.e(1), inc);
const buffBodd_T = await Fr.fft(buffBodd, "", "", logger, "FFT_B");
const buffC = await Fr.ifft(buffC_T, "", "", logger, "IFFT_C");
const buffCodd = await Fr.batchApplyKey(buffC, Fr.e(1), inc);
const buffCodd_T = await Fr.fft(buffCodd, "", "", logger, "FFT_C");
if (logger) logger.debug("Join ABC");
const buffPodd_T = await joinABC(curve, zkey, buffAodd_T, buffBodd_T, buffCodd_T, logger);
let proof = {};
if (logger) logger.debug("Reading A Points");
const buffBasesA = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 5);
proof.pi_a = await curve.G1.multiExpAffine(buffBasesA, buffWitness, logger, "multiexp A");
if (logger) logger.debug("Reading B1 Points");
const buffBasesB1 = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 6);
let pib1 = await curve.G1.multiExpAffine(buffBasesB1, buffWitness, logger, "multiexp B1");
if (logger) logger.debug("Reading B2 Points");
const buffBasesB2 = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 7);
proof.pi_b = await curve.G2.multiExpAffine(buffBasesB2, buffWitness, logger, "multiexp B2");
if (logger) logger.debug("Reading C Points");
const buffBasesC = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 8);
proof.pi_c = await curve.G1.multiExpAffine(buffBasesC, buffWitness.slice((zkey.nPublic+1)*curve.Fr.n8), logger, "multiexp C");
if (logger) logger.debug("Reading H Points");
const buffBasesH = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 9);
const resH = await curve.G1.multiExpAffine(buffBasesH, buffPodd_T, logger, "multiexp H");
const r = curve.Fr.random();
const s = curve.Fr.random();
proof.pi_a = G1.add( proof.pi_a, zkey.vk_alpha_1 );
proof.pi_a = G1.add( proof.pi_a, G1.timesFr( zkey.vk_delta_1, r ));
proof.pi_b = G2.add( proof.pi_b, zkey.vk_beta_2 );
proof.pi_b = G2.add( proof.pi_b, G2.timesFr( zkey.vk_delta_2, s ));
pib1 = G1.add( pib1, zkey.vk_beta_1 );
pib1 = G1.add( pib1, G1.timesFr( zkey.vk_delta_1, s ));
proof.pi_c = G1.add(proof.pi_c, resH);
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( proof.pi_a, s ));
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( pib1, r ));
proof.pi_c = G1.add( proof.pi_c, G1.timesFr( zkey.vk_delta_1, Fr.neg(Fr.mul(r,s) )));
let publicSignals = [];
for (let i=1; i<= zkey.nPublic; i++) {
const b = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8);
publicSignals.push(ffjavascript.Scalar.fromRprLE(b));
}
proof.pi_a = G1.toObject(G1.toAffine(proof.pi_a));
proof.pi_b = G2.toObject(G2.toAffine(proof.pi_b));
proof.pi_c = G1.toObject(G1.toAffine(proof.pi_c));
proof.protocol = "groth16";
proof.curve = curve.name;
await fdZKey.close();
await fdWtns.close();
proof = stringifyBigInts$2(proof);
publicSignals = stringifyBigInts$2(publicSignals);
return {proof, publicSignals};
}
async function buldABC1(curve, zkey, witness, coeffs, logger) {
const n8 = curve.Fr.n8;
const sCoef = 4*3 + zkey.n8r;
const nCoef = (coeffs.byteLength-4) / sCoef;
const outBuffA = new ffjavascript.BigBuffer(zkey.domainSize * n8);
const outBuffB = new ffjavascript.BigBuffer(zkey.domainSize * n8);
const outBuffC = new ffjavascript.BigBuffer(zkey.domainSize * n8);
const outBuf = [ outBuffA, outBuffB ];
for (let i=0; i1) {
const promises2 = [];
for (let i=0; i v) {
n = k - 1;
} else if (va < v) {
m = k + 1;
} else {
n = k;
}
}
return 4 + m*sCoef;
}
}
*/
async function joinABC(curve, zkey, a, b, c, logger) {
const MAX_CHUNK_SIZE = 1 << 22;
const n8 = curve.Fr.n8;
const nElements = Math.floor(a.byteLength / curve.Fr.n8);
const promises = [];
for (let i=0; i.
*/
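// wtnsCalculate$1: runs the circom-generated WASM witness calculator on the given input and
// writes the resulting binary witness to a .wtns file.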
async function wtnsCalculate$1(input, wasmFileName, wtnsFileName, options) {
const fdWasm = await fastFile__namespace.readExisting(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close();
const wc = await circom_runtime.WitnessCalculatorBuilder(wasm);
const w = await wc.calculateBinWitness(input);
const fdWtns = await binFileUtils__namespace.createBinFile(wtnsFileName, "wtns", 2, 2);
await writeBin(fdWtns, w, wc.prime);
await fdWtns.close();
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function groth16FullProve$1(input, wasmFile, zkeyFileName, logger) {
const wtns= {
type: "mem"
};
await wtnsCalculate$1(input, wasmFile, wtns);
return await groth16Prove$1(zkeyFileName, wtns, logger);
}
/*
Copyright 2018 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const {unstringifyBigInts: unstringifyBigInts$2} = ffjavascript.utils;
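// groth16Verify$1: verifies a Groth16 proof. The public-input commitment is accumulated as a G1
// multiexponentiation over the IC points (see the commented-out reference loop below) and then
// checked against the proof with the pairing equation.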
async function groth16Verify$1(vk_verifier, publicSignals, proof, logger) {
/*
let cpub = vk_verifier.IC[0];
for (let s= 0; s< vk_verifier.nPublic; s++) {
cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s]));
}
*/
vk_verifier = unstringifyBigInts$2(vk_verifier);
proof = unstringifyBigInts$2(proof);
publicSignals = unstringifyBigInts$2(publicSignals);
const curve = await getCurveFromName(vk_verifier.curve);
const IC0 = curve.G1.fromObject(vk_verifier.IC[0]);
const IC = new Uint8Array(curve.G1.F.n8*2 * publicSignals.length);
const w = new Uint8Array(curve.Fr.n8 * publicSignals.length);
for (let i=0; i.
*/
function p256$1(n) {
let nstr = n.toString(16);
while (nstr.length < 64) nstr = "0"+nstr;
nstr = `"0x${nstr}"`;
return nstr;
}
async function groth16ExportSolidityCallData(proof, pub) {
let inputs = "";
for (let i=0; i.
*/
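// plonkSetup$1: PLONK circuit-specific setup. R1CS constraints are normalized into PLONK gates
// (long linear combinations are split with extra addition gates recorded in plonkAdditions),
// the wire maps, selectors, sigma and Lagrange sections are written, and the tau powers needed
// by the prover are copied from the prepared .ptau into section 14 of the .zkey.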
async function plonkSetup$1(r1csName, ptauName, zkeyName, logger) {
await Blake2b__default["default"].ready();
const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1, 1<<22, 1<<24);
const {curve, power} = await readPTauHeader(fdPTau, sectionsPTau);
const {fd: fdR1cs, sections: sectionsR1cs} = await binFileUtils.readBinFile(r1csName, "r1cs", 1, 1<<22, 1<<24);
const r1cs = await r1csfile.readR1csHeader(fdR1cs, sectionsR1cs, false);
const sG1 = curve.G1.F.n8*2;
const G1 = curve.G1;
const sG2 = curve.G2.F.n8*2;
const Fr = curve.Fr;
const n8r = curve.Fr.n8;
if (logger) logger.info("Reading r1cs");
let sR1cs = await binFileUtils.readSection(fdR1cs, sectionsR1cs, 2);
const plonkConstraints = new BigArray();
const plonkAdditions = new BigArray();
let plonkNVars = r1cs.nVars;
const nPublic = r1cs.nOutputs + r1cs.nPubInputs;
await processConstraints();
const fdZKey = await binFileUtils.createBinFile(zkeyName, "zkey", 1, 14, 1<<22, 1<<24);
if (r1cs.prime != curve.r) {
if (logger) logger.error("r1cs curve does not match powers of tau ceremony curve");
return -1;
}
let cirPower = log2(plonkConstraints.length -1) +1;
if (cirPower < 3) cirPower = 3; // The t polynomial has degree n+5, so the evaluation domain needs at least 2**3 points
const domainSize = 2 ** cirPower;
if (logger) logger.info("Plonk constraints: " + plonkConstraints.length);
if (cirPower > power) {
if (logger) logger.error(`circuit too big for this power of tau ceremony. ${plonkConstraints.length} > 2**${power}`);
return -1;
}
if (!sectionsPTau[12]) {
if (logger) logger.error("Powers of tau is not prepared.");
return -1;
}
const LPoints = new ffjavascript.BigBuffer(domainSize*sG1);
const o = sectionsPTau[12][0].p + ((2 ** (cirPower)) -1)*sG1;
await fdPTau.readToBuffer(LPoints, 0, domainSize*sG1, o);
const [k1, k2] = getK1K2();
const vk = {};
await writeAdditions(3, "Additions");
await writeWitnessMap(4, 0, "Amap");
await writeWitnessMap(5, 1, "Bmap");
await writeWitnessMap(6, 2, "Cmap");
await writeQMap(7, 3, "Qm");
await writeQMap(8, 4, "Ql");
await writeQMap(9, 5, "Qr");
await writeQMap(10, 6, "Qo");
await writeQMap(11, 7, "Qc");
await writeSigma(12, "sigma");
await writeLs(13, "lagrange polynomials");
// Write PTau points
////////////
await binFileUtils.startWriteSection(fdZKey, 14);
const buffOut = new ffjavascript.BigBuffer((domainSize+6)*sG1);
await fdPTau.readToBuffer(buffOut, 0, (domainSize+6)*sG1, sectionsPTau[2][0].p);
await fdZKey.write(buffOut);
await binFileUtils.endWriteSection(fdZKey);
await writeHeaders();
await fdZKey.close();
await fdR1cs.close();
await fdPTau.close();
if (logger) logger.info("Setup Finished");
return ;
async function processConstraints() {
let r1csPos = 0;
function r1cs_readULE32() {
const buff = sR1cs.slice(r1csPos, r1csPos+4);
r1csPos += 4;
const buffV = new DataView(buff.buffer);
return buffV.getUint32(0, true);
}
function r1cs_readCoef() {
const res = Fr.fromRprLE(sR1cs.slice(r1csPos, r1csPos+curve.Fr.n8));
r1csPos += curve.Fr.n8;
return res;
}
function r1cs_readCoefs() {
const coefs = [];
const res = {
k: curve.Fr.zero
};
const nA = r1cs_readULE32();
for (let i=0; i> 1);
const arr2 = coefs.slice(coefs.length >> 1);
const coef1 = reduceCoef(arr1);
const coef2 = reduceCoef(arr2);
const sl = coef1[0];
const sr = coef2[0];
const so = plonkNVars++;
const qm = curve.Fr.zero;
const ql = Fr.neg(coef1[1]);
const qr = Fr.neg(coef2[1]);
const qo = curve.Fr.one;
const qc = curve.Fr.zero;
plonkConstraints.push([sl, sr, so, qm, ql, qr, qo, qc]);
plonkAdditions.push([sl, sr, coef1[1], coef2[1]]);
return [so, curve.Fr.one];
}
for (let s = 1; s <= nPublic ; s++) {
const sl = s;
const sr = 0;
const so = 0;
const qm = curve.Fr.zero;
const ql = curve.Fr.one;
const qr = curve.Fr.zero;
const qo = curve.Fr.zero;
const qc = curve.Fr.zero;
plonkConstraints.push([sl, sr, so, qm, ql, qr, qo, qc]);
}
for (let c=0; c.
*/
const {stringifyBigInts: stringifyBigInts$1} = ffjavascript.utils;
const { keccak256: keccak256$1 } = jsSha3__default["default"];
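// plonk16Prove: produces a PLONK proof following the five rounds of the protocol: wire
// commitments A/B/C, the permutation polynomial Z, the quotient polynomial split into T1/T2/T3,
// the evaluations at the challenge xi, and the opening proofs Wxi and Wxiw.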
async function plonk16Prove(zkeyFileName, witnessFileName, logger) {
const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils__namespace.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23);
const wtns = await readHeader(fdWtns, sectionsWtns);
const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils__namespace.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
const zkey = await readHeader$1(fdZKey, sectionsZKey);
if (zkey.protocol != "plonk") {
throw new Error("zkey file is not groth16");
}
if (!ffjavascript.Scalar.eq(zkey.r, wtns.q)) {
throw new Error("Curve of the witness does not match the curve of the proving key");
}
if (wtns.nWitness != zkey.nVars -zkey.nAdditions) {
throw new Error(`Invalid witness length. Circuit: ${zkey.nVars}, witness: ${wtns.nWitness}, ${zkey.nAdditions}`);
}
const curve = await getCurveFromQ(zkey.q);
const Fr = curve.Fr;
const G1 = curve.G1;
const n8r = curve.Fr.n8;
if (logger) logger.debug("Reading Wtns");
const buffWitness = await binFileUtils__namespace.readSection(fdWtns, sectionsWtns, 2);
// The first witness element is not used in PLONK and may hold any value (as long as it is always the same).
// We set it to zero so the multiexponentiations run faster.
buffWitness.set(Fr.zero, 0);
const buffInternalWitness = new ffjavascript.BigBuffer(n8r*zkey.nAdditions);
await calculateAdditions();
let A,B,C,Z;
let A4, B4, C4, Z4;
let pol_a,pol_b,pol_c, pol_z, pol_t, pol_r;
let proof = {};
const sigmaBuff = new ffjavascript.BigBuffer(zkey.domainSize*n8r*4*3);
let o = sectionsZKey[12][0].p + zkey.domainSize*n8r;
await fdZKey.readToBuffer(sigmaBuff, 0 , zkey.domainSize*n8r*4, o);
o += zkey.domainSize*n8r*5;
await fdZKey.readToBuffer(sigmaBuff, zkey.domainSize*n8r*4 , zkey.domainSize*n8r*4, o);
o += zkey.domainSize*n8r*5;
await fdZKey.readToBuffer(sigmaBuff, zkey.domainSize*n8r*8 , zkey.domainSize*n8r*4, o);
const pol_s1 = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_s1, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p);
const pol_s2 = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_s2, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p + 5*zkey.domainSize*n8r);
const PTau = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 14);
const ch = {};
await round1();
await round2();
await round3();
await round4();
await round5();
///////////////////////
// Final adjustments //
///////////////////////
proof.protocol = "plonk";
proof.curve = curve.name;
await fdZKey.close();
await fdWtns.close();
let publicSignals = [];
for (let i=1; i<= zkey.nPublic; i++) {
const pub = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8);
publicSignals.push(ffjavascript.Scalar.fromRprLE(pub));
}
proof.A = G1.toObject(proof.A);
proof.B = G1.toObject(proof.B);
proof.C = G1.toObject(proof.C);
proof.Z = G1.toObject(proof.Z);
proof.T1 = G1.toObject(proof.T1);
proof.T2 = G1.toObject(proof.T2);
proof.T3 = G1.toObject(proof.T3);
proof.eval_a = Fr.toObject(proof.eval_a);
proof.eval_b = Fr.toObject(proof.eval_b);
proof.eval_c = Fr.toObject(proof.eval_c);
proof.eval_s1 = Fr.toObject(proof.eval_s1);
proof.eval_s2 = Fr.toObject(proof.eval_s2);
proof.eval_zw = Fr.toObject(proof.eval_zw);
proof.eval_t = Fr.toObject(proof.eval_t);
proof.eval_r = Fr.toObject(proof.eval_r);
proof.Wxi = G1.toObject(proof.Wxi);
proof.Wxiw = G1.toObject(proof.Wxiw);
delete proof.eval_t;
proof = stringifyBigInts$1(proof);
publicSignals = stringifyBigInts$1(publicSignals);
return {proof, publicSignals};
async function calculateAdditions() {
const additionsBuff = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 3);
const sSum = 8+curve.Fr.n8*2;
for (let i=0; i0)&&(Fr.isZero(p.slice(deg*n8r, deg*n8r+n8r)))) deg--;
return deg;
}
function printPol(P) {
const n=(P.byteLength/n8r);
console.log("[");
for (let i=0; i (zkey.domainSize*3 -4) ) {
if (!Fr.isZero(a)) {
throw new Error("T Polynomial is not divisible");
}
}
}
if (logger) logger.debug("ifft Tz");
const tz = await Fr.ifft(Tz);
for (let i=0; i (zkey.domainSize*3 +5) ) {
if (!Fr.isZero(a)) {
throw new Error("Tz Polynomial is not well calculated");
}
} else {
t.set(
Fr.add(
t.slice(i*n8r, (i+1)*n8r),
a
),
i*n8r
);
}
}
pol_t = t.slice(0, (zkey.domainSize*3+6)*n8r);
proof.T1 = await expTau( t.slice(0, zkey.domainSize*n8r) , "multiexp T1");
proof.T2 = await expTau( t.slice(zkey.domainSize*n8r, zkey.domainSize*2*n8r) , "multiexp T2");
proof.T3 = await expTau( t.slice(zkey.domainSize*2*n8r, (zkey.domainSize*3+6)*n8r) , "multiexp T3");
function mul2(a,b, ap, bp, p) {
let r, rz;
const a_b = Fr.mul(a,b);
const a_bp = Fr.mul(a,bp);
const ap_b = Fr.mul(ap,b);
const ap_bp = Fr.mul(ap,bp);
r = a_b;
let a0 = Fr.add(a_bp, ap_b);
let a1 = ap_bp;
rz = a0;
if (p) {
rz = Fr.add(rz, Fr.mul(Z1[p], a1));
}
return [r, rz];
}
function mul4(a,b,c,d, ap, bp, cp, dp, p) {
let r, rz;
const a_b = Fr.mul(a,b);
const a_bp = Fr.mul(a,bp);
const ap_b = Fr.mul(ap,b);
const ap_bp = Fr.mul(ap,bp);
const c_d = Fr.mul(c,d);
const c_dp = Fr.mul(c,dp);
const cp_d = Fr.mul(cp,d);
const cp_dp = Fr.mul(cp,dp);
r = Fr.mul(a_b, c_d);
let a0 = Fr.mul(ap_b, c_d);
a0 = Fr.add(a0, Fr.mul(a_bp, c_d));
a0 = Fr.add(a0, Fr.mul(a_b, cp_d));
a0 = Fr.add(a0, Fr.mul(a_b, c_dp));
let a1 = Fr.mul(ap_bp, c_d);
a1 = Fr.add(a1, Fr.mul(ap_b, cp_d));
a1 = Fr.add(a1, Fr.mul(ap_b, c_dp));
a1 = Fr.add(a1, Fr.mul(a_bp, cp_d));
a1 = Fr.add(a1, Fr.mul(a_bp, c_dp));
a1 = Fr.add(a1, Fr.mul(a_b, cp_dp));
let a2 = Fr.mul(a_bp, cp_dp);
a2 = Fr.add(a2, Fr.mul(ap_b, cp_dp));
a2 = Fr.add(a2, Fr.mul(ap_bp, c_dp));
a2 = Fr.add(a2, Fr.mul(ap_bp, cp_d));
let a3 = Fr.mul(ap_bp, cp_dp);
rz = a0;
if (p) {
rz = Fr.add(rz, Fr.mul(Z1[p], a1));
rz = Fr.add(rz, Fr.mul(Z2[p], a2));
rz = Fr.add(rz, Fr.mul(Z3[p], a3));
}
return [r, rz];
}
}
async function round4() {
const pol_qm = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_qm, 0 , zkey.domainSize*n8r, sectionsZKey[7][0].p);
const pol_ql = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_ql, 0 , zkey.domainSize*n8r, sectionsZKey[8][0].p);
const pol_qr = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_qr, 0 , zkey.domainSize*n8r, sectionsZKey[9][0].p);
const pol_qo = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_qo, 0 , zkey.domainSize*n8r, sectionsZKey[10][0].p);
const pol_qc = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_qc, 0 , zkey.domainSize*n8r, sectionsZKey[11][0].p);
const pol_s3 = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_s3, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p + 10*zkey.domainSize*n8r);
const transcript4 = new Uint8Array(G1.F.n8*2*3);
G1.toRprUncompressed(transcript4, 0, proof.T1);
G1.toRprUncompressed(transcript4, G1.F.n8*2, proof.T2);
G1.toRprUncompressed(transcript4, G1.F.n8*4, proof.T3);
ch.xi = hashToFr(transcript4);
if (logger) logger.debug("xi: " + Fr.toString(ch.xi));
proof.eval_a = evalPol(pol_a, ch.xi);
proof.eval_b = evalPol(pol_b, ch.xi);
proof.eval_c = evalPol(pol_c, ch.xi);
proof.eval_s1 = evalPol(pol_s1, ch.xi);
proof.eval_s2 = evalPol(pol_s2, ch.xi);
proof.eval_t = evalPol(pol_t, ch.xi);
proof.eval_zw = evalPol(pol_z, Fr.mul(ch.xi, Fr.w[zkey.power]));
const coef_ab = Fr.mul(proof.eval_a, proof.eval_b);
let e2a = proof.eval_a;
const betaxi = Fr.mul(ch.beta, ch.xi);
e2a = Fr.add( e2a, betaxi);
e2a = Fr.add( e2a, ch.gamma);
let e2b = proof.eval_b;
e2b = Fr.add( e2b, Fr.mul(betaxi, zkey.k1));
e2b = Fr.add( e2b, ch.gamma);
let e2c = proof.eval_c;
e2c = Fr.add( e2c, Fr.mul(betaxi, zkey.k2));
e2c = Fr.add( e2c, ch.gamma);
const e2 = Fr.mul(Fr.mul(Fr.mul(e2a, e2b), e2c), ch.alpha);
let e3a = proof.eval_a;
e3a = Fr.add( e3a, Fr.mul(ch.beta, proof.eval_s1));
e3a = Fr.add( e3a, ch.gamma);
let e3b = proof.eval_b;
e3b = Fr.add( e3b, Fr.mul(ch.beta, proof.eval_s2));
e3b = Fr.add( e3b, ch.gamma);
let e3 = Fr.mul(e3a, e3b);
e3 = Fr.mul(e3, ch.beta);
e3 = Fr.mul(e3, proof.eval_zw);
e3 = Fr.mul(e3, ch.alpha);
ch.xim= ch.xi;
for (let i=0; i=0; i--) {
res = Fr.add(Fr.mul(res, x), P.slice(i*n8r, (i+1)*n8r));
}
return res;
}
function divPol1(P, d) {
const n = P.byteLength/n8r;
const res = new ffjavascript.BigBuffer(n*n8r);
res.set(Fr.zero, (n-1) *n8r);
res.set(P.slice((n-1)*n8r, n*n8r), (n-2)*n8r);
for (let i=n-3; i>=0; i--) {
res.set(
Fr.add(
P.slice((i+1)*n8r, (i+2)*n8r),
Fr.mul(
d,
res.slice((i+1)*n8r, (i+2)*n8r)
)
),
i*n8r
);
}
if (!Fr.eq(
P.slice(0, n8r),
Fr.mul(
Fr.neg(d),
res.slice(0, n8r)
)
)) {
throw new Error("Polinomial does not divide");
}
return res;
}
async function expTau(b, name) {
const n = b.byteLength/n8r;
const PTauN = PTau.slice(0, n*curve.G1.F.n8*2);
const bm = await curve.Fr.batchFromMontgomery(b);
let res = await curve.G1.multiExpAffine(PTauN, bm, logger, name);
res = curve.G1.toAffine(res);
return res;
}
async function to4T(A, pz) {
pz = pz || [];
let a = await Fr.ifft(A);
const a4 = new ffjavascript.BigBuffer(n8r*zkey.domainSize*4);
a4.set(a, 0);
const a1 = new ffjavascript.BigBuffer(n8r*(zkey.domainSize + pz.length));
a1.set(a, 0);
for (let i= 0; i.
*/
async function plonkFullProve$1(input, wasmFile, zkeyFileName, logger) {
const wtns= {
type: "mem"
};
await wtnsCalculate$1(input, wasmFile, wtns);
return await plonk16Prove(zkeyFileName, wtns, logger);
}
/*
Copyright 2021 0kims association.
This file is part of snarkjs.
snarkjs is a free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your option)
any later version.
snarkjs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const {unstringifyBigInts: unstringifyBigInts$1} = ffjavascript.utils;
const { keccak256 } = jsSha3__default["default"];
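// plonkVerify$1: verifies a PLONK proof. It checks that the proof elements are well formed,
// recomputes the Fiat-Shamir challenges from the transcript, evaluates the Lagrange factors at
// xi, and performs the final pairing check.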
async function plonkVerify$1(vk_verifier, publicSignals, proof, logger) {
vk_verifier = unstringifyBigInts$1(vk_verifier);
proof = unstringifyBigInts$1(proof);
publicSignals = unstringifyBigInts$1(publicSignals);
const curve = await getCurveFromName(vk_verifier.curve);
const Fr = curve.Fr;
const G1 = curve.G1;
proof = fromObjectProof(curve,proof);
vk_verifier = fromObjectVk(curve, vk_verifier);
if (!isWellConstructed(curve, proof)) {
logger.error("Proof is not well constructed");
return false;
}
const challanges = calculateChallanges(curve, proof);
if (logger) {
logger.debug("beta: " + Fr.toString(challanges.beta, 16));
logger.debug("gamma: " + Fr.toString(challanges.gamma, 16));
logger.debug("alpha: " + Fr.toString(challanges.alpha, 16));
logger.debug("xi: " + Fr.toString(challanges.xi, 16));
logger.debug("v1: " + Fr.toString(challanges.v[1], 16));
logger.debug("v6: " + Fr.toString(challanges.v[6], 16));
logger.debug("u: " + Fr.toString(challanges.u, 16));
}
const L = calculateLagrangeEvaluations(curve, challanges, vk_verifier);
if (logger) {
logger.debug("Lagrange Evaluations: ");
for (let i=1; i.
*/
function i2hex(i) {
return ("0" + i.toString(16)).slice(-2);
}
function p256(n) {
let nstr = n.toString(16);
while (nstr.length < 64) nstr = "0"+nstr;
nstr = `"0x${nstr}"`;
return nstr;
}
async function plonkExportSolidityCallData(proof, pub) {
const curve = await getCurveFromName(proof.curve);
const G1 = curve.G1;
const Fr = curve.Fr;
let inputs = "";
for (let i=0; i.
*/
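// wtnsDebug$1: like wtnsCalculate$1 but runs the witness calculator with sanity checks and
// optional logging of signal sets/gets and component start/finish, resolving indices to names
// through the .sym file.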
async function wtnsDebug$1(input, wasmFileName, wtnsFileName, symName, options, logger) {
const fdWasm = await fastFile__namespace.readExisting(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close();
let wcOps = {
sanityCheck: true
};
let sym = await loadSymbols(symName);
if (options.set) {
if (!sym) sym = await loadSymbols(symName);
wcOps.logSetSignal= function(labelIdx, value) {
if (logger) logger.info("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
};
}
if (options.get) {
if (!sym) sym = await loadSymbols(symName);
wcOps.logGetSignal= function(varIdx, value) {
if (logger) logger.info("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
};
}
if (options.trigger) {
if (!sym) sym = await loadSymbols(symName);
wcOps.logStartComponent= function(cIdx) {
if (logger) logger.info("START: " + sym.componentIdx2Name[cIdx]);
};
wcOps.logFinishComponent= function(cIdx) {
if (logger) logger.info("FINISH: " + sym.componentIdx2Name[cIdx]);
};
}
wcOps.sym = sym;
const wc = await circom_runtime.WitnessCalculatorBuilder(wasm, wcOps);
const w = await wc.calculateWitness(input);
const fdWtns = await binFileUtils__namespace.createBinFile(wtnsFileName, "wtns", 2, 2);
await write(fdWtns, w, wc.prime);
await fdWtns.close();
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function wtnsExportJson$1(wtnsFileName) {
const w = await read(wtnsFileName);
return w;
}
/*
Copyright 2018 0KIMS association.
This file is part of snarkJS.
snarkJS is a free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
snarkJS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
License for more details.
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
const {stringifyBigInts, unstringifyBigInts} = ffjavascript.utils;
const logger = Logger__default["default"].create("snarkJS", {showTimestamp:false});
Logger__default["default"].setLogLevel("INFO");
const __dirname$1 = path__default["default"].dirname(new URL((typeof document === 'undefined' ? new (require('u' + 'rl').URL)('file:' + __filename).href : (document.currentScript && document.currentScript.src || new URL('cli.cjs', document.baseURI).href))).pathname);
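// Command table consumed by clProcessor below: each entry declares a command pattern,
// its aliases and accepted options, and the action function (defined later in this file).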
const commands = [
{
cmd: "powersoftau new [powersoftau_0000.ptau]",
description: "Starts a powers of tau ceremony",
alias: ["ptn"],
options: "-verbose|v",
action: powersOfTauNew
},
{
cmd: "powersoftau contribute ",
description: "creates a ptau file with a new contribution",
alias: ["ptc"],
options: "-verbose|v -name|n -entropy|e",
action: powersOfTauContribute
},
{
cmd: "powersoftau export challenge [challenge]",
description: "Creates a challenge",
alias: ["ptec"],
options: "-verbose|v",
action: powersOfTauExportChallenge
},
{
cmd: "powersoftau challenge contribute [response]",
description: "Contribute to a challenge",
alias: ["ptcc"],
options: "-verbose|v -entropy|e",
action: powersOfTauChallengeContribute
},
{
cmd: "powersoftau import response <",
description: "import a response to a ptau file",
alias: ["ptir"],
options: "-verbose|v -nopoints -nocheck -name|n",
action: powersOfTauImport
},
{
cmd: "powersoftau beacon ",
description: "adds a beacon",
alias: ["ptb"],
options: "-verbose|v -name|n",
action: powersOfTauBeacon
},
{
cmd: "powersoftau prepare phase2 ",
description: "Prepares phase 2. ",
longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau",
alias: ["pt2"],
options: "-verbose|v",
action: powersOfTauPreparePhase2
},
{
cmd: "powersoftau convert ",
description: "Convert ptau",
longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau",
alias: ["ptcv"],
options: "-verbose|v",
action: powersOfTauConvert
},
{
cmd: "powersoftau truncate ",
description: "Generate diferent powers of tau with smoller sizes ",
longDescription: " This process generates smaller ptau files from a bigger power ptau",
alias: ["ptt"],
options: "-verbose|v",
action: powersOfTauTruncate
},
{
cmd: "powersoftau verify ",
description: "verifies a powers of tau file",
alias: ["ptv"],
options: "-verbose|v",
action: powersOfTauVerify
},
{
cmd: "powersoftau export json ",
description: "Exports a power of tau file to a JSON",
alias: ["ptej"],
options: "-verbose|v",
action: powersOfTauExportJson
},
{
cmd: "r1cs info [circuit.r1cs]",
description: "Print statistiscs of a circuit",
alias: ["ri", "info -r|r1cs:circuit.r1cs"],
action: r1csInfo
},
{
cmd: "r1cs print [circuit.r1cs] [circuit.sym]",
description: "Print the constraints of a circuit",
alias: ["rp", "print -r|r1cs:circuit.r1cs -s|sym"],
action: r1csPrint
},
{
cmd: "r1cs export json [circuit.r1cs] [circuit.json]",
description: "Export r1cs to JSON file",
alias: ["rej"],
action: r1csExportJSON
},
{
cmd: "wtns calculate [circuit.wasm] [input.json] [witness.wtns]",
description: "Caclculate specific witness of a circuit given an input",
alias: ["wc", "calculatewitness -ws|wasm:circuit.wasm -i|input:input.json -wt|witness:witness.wtns"],
action: wtnsCalculate
},
{
cmd: "wtns debug [circuit.wasm] [input.json] [witness.wtns] [circuit.sym]",
description: "Calculate the witness with debug info.",
longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ",
options: "-get|g -set|s -trigger|t",
alias: ["wd"],
action: wtnsDebug
},
{
cmd: "wtns export json [witness.wtns] [witnes.json]",
description: "Calculate the witness with debug info.",
longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ",
options: "-verbose|v",
alias: ["wej"],
action: wtnsExportJson
},
{
cmd: "zkey contribute ",
description: "creates a zkey file with a new contribution",
alias: ["zkc"],
options: "-verbose|v -entropy|e -name|n",
action: zkeyContribute
},
{
cmd: "zkey export bellman [circuit.mpcparams]",
description: "Export a zKey to a MPCParameters file compatible with kobi/phase2 (Bellman)",
alias: ["zkeb"],
options: "-verbose|v",
action: zkeyExportBellman
},
{
cmd: "zkey bellman contribute ",
description: "contributes to a challenge file in bellman format",
alias: ["zkbc"],
options: "-verbose|v -entropy|e",
action: zkeyBellmanContribute
},
{
cmd: "zkey import bellman ",
description: "Export a zKey to a MPCParameters file compatible with kobi/phase2 (Bellman) ",
alias: ["zkib"],
options: "-verbose|v -name|n",
action: zkeyImportBellman
},
{
cmd: "zkey beacon ",
description: "adds a beacon",
alias: ["zkb"],
options: "-verbose|v -name|n",
action: zkeyBeacon
},
{
cmd: "zkey verify r1cs [circuit.r1cs] [powersoftau.ptau] [circuit_final.zkey]",
description: "Verify zkey file contributions and verify that matches with the original circuit.r1cs and ptau",
alias: ["zkv", "zkvr", "zkey verify"],
options: "-verbose|v",
action: zkeyVerifyFromR1cs
},
{
cmd: "zkey verify init [circuit_0000.zkey] [powersoftau.ptau] [circuit_final.zkey]",
description: "Verify zkey file contributions and verify that matches with the original circuit.r1cs and ptau",
alias: ["zkvi"],
options: "-verbose|v",
action: zkeyVerifyFromInit
},
{
cmd: "zkey export verificationkey [circuit_final.zkey] [verification_key.json]",
description: "Exports a verification key",
alias: ["zkev"],
action: zkeyExportVKey
},
{
cmd: "zkey export json [circuit_final.zkey] [circuit_final.zkey.json]",
description: "Exports a circuit key to a JSON file",
alias: ["zkej"],
options: "-verbose|v",
action: zkeyExportJson
},
{
cmd: "zkey export solidityverifier [circuit_final.zkey] [verifier.sol]",
description: "Creates a verifier in solidity",
alias: ["zkesv", "generateverifier -vk|verificationkey -v|verifier"],
action: zkeyExportSolidityVerifier
},
{
cmd: "zkey export soliditycalldata [public.json] [proof.json]",
description: "Generates call parameters ready to be called.",
alias: ["zkesc", "generatecall -pub|public -p|proof"],
action: zkeyExportSolidityCalldata
},
{
cmd: "groth16 setup [circuit.r1cs] [powersoftau.ptau] [circuit_0000.zkey]",
description: "Creates an initial groth16 pkey file with zero contributions",
alias: ["g16s", "zkn", "zkey new"],
options: "-verbose|v",
action: zkeyNew
},
{
cmd: "groth16 prove [circuit_final.zkey] [witness.wtns] [proof.json] [public.json]",
description: "Generates a zk Proof from witness",
alias: ["g16p", "zpw", "zksnark proof", "proof -pk|provingkey -wt|witness -p|proof -pub|public"],
options: "-verbose|v -protocol",
action: groth16Prove
},
{
cmd: "groth16 fullprove [input.json] [circuit_final.wasm] [circuit_final.zkey] [proof.json] [public.json]",
description: "Generates a zk Proof from input",
alias: ["g16f", "g16i"],
options: "-verbose|v -protocol",
action: groth16FullProve
},
{
cmd: "groth16 verify [verification_key.json] [public.json] [proof.json]",
description: "Verify a zk Proof",
alias: ["g16v", "verify -vk|verificationkey -pub|public -p|proof"],
action: groth16Verify
},
{
cmd: "plonk setup [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]",
description: "Creates an initial PLONK pkey ",
alias: ["pks"],
options: "-verbose|v",
action: plonkSetup
},
{
cmd: "plonk prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]",
description: "Generates a PLONK Proof from witness",
alias: ["pkp"],
options: "-verbose|v -protocol",
action: plonkProve
},
{
cmd: "plonk fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]",
description: "Generates a PLONK Proof from input",
alias: ["pkf"],
options: "-verbose|v -protocol",
action: plonkFullProve
},
{
cmd: "plonk verify [verification_key.json] [public.json] [proof.json]",
description: "Verify a PLONK Proof",
alias: ["pkv"],
options: "-verbose|v",
action: plonkVerify
}
];
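// Dispatch: clProcessor matches process.argv against the command table and runs the
// selected action; the action's numeric return value becomes the process exit code.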
clProcessor(commands).then( (res) => {
process.exit(res);
}, (err) => {
logger.error(err);
process.exit(1);
});
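// Typical invocations (file names are illustrative):
//   snarkjs powersoftau new bn128 12 pot12_0000.ptau
//   snarkjs powersoftau contribute pot12_0000.ptau pot12_0001.ptau --name="First contribution" -v
//   snarkjs powersoftau prepare phase2 pot12_0001.ptau pot12_final.ptau
//   snarkjs groth16 setup circuit.r1cs pot12_final.ptau circuit_0000.zkey
//   snarkjs groth16 prove circuit_final.zkey witness.wtns proof.json public.json
//   snarkjs groth16 verify verification_key.json public.json proof.json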
/*
TODO COMMANDS
=============
{
cmd: "zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]",
description: "Run a simple setup for a circuit generating the proving key.",
alias: ["zs", "setup -r1cs|r -provingkey|pk -verificationkey|vk"],
options: "-verbose|v -protocol",
action: zksnarkSetup
},
{
cmd: "witness verify ",
description: "Verify a witness agains a r1cs",
alias: ["wv"],
action: witnessVerify
},
{
cmd: "powersOfTau export response"
}
*/
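// changeExt: replaces the extension of fileName with newExt; if the name has no dot,
// "." + newExt is appended instead.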
function changeExt(fileName, newExt) {
let S = fileName;
while ((S.length>0) && (S[S.length-1] != ".")) S = S.slice(0, S.length-1);
if (S.length>0) {
return S + newExt;
} else {
return fileName+"."+newExt;
}
}
// r1cs info [circuit.r1cs]
async function r1csInfo(params, options) {
const r1csName = params[0] || "circuit.r1cs";
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
await r1csInfo$1(r1csName, logger);
return 0;
}
// r1cs print [circuit.r1cs] [circuit.sym]
async function r1csPrint(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const symName = params[1] || changeExt(r1csName, "sym");
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const cir = await r1csfile.readR1cs(r1csName, true, true, false);
const sym = await loadSymbols(symName);
await r1csPrint$1(cir, sym, logger);
return 0;
}
// r1cs export json [circuit.r1cs] [circuit.json]
async function r1csExportJSON(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const jsonName = params[1] || changeExt(r1csName, "json");
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const r1csObj = await r1csExportJson(r1csName, logger);
const S = JSON.stringify(r1csObj, null, 1);
await fs__default["default"].promises.writeFile(jsonName, S);
return 0;
}
// wtns calculate
async function wtnsCalculate(params, options) {
const wasmName = params[0] || "circuit.wasm";
const inputName = params[1] || "input.json";
const witnessName = params[2] || "witness.wtns";
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const input = unstringifyBigInts(JSON.parse(await fs__default["default"].promises.readFile(inputName, "utf8")));
await wtnsCalculate$1(input, wasmName, witnessName);
return 0;
}
// wtns debug
// -get|g -set|s -trigger|t
async function wtnsDebug(params, options) {
const wasmName = params[0] || "circuit.wasm";
const inputName = params[1] || "input.json";
const witnessName = params[2] || "witness.wtns";
const symName = params[3] || changeExt(wasmName, "sym");
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const input = unstringifyBigInts(JSON.parse(await fs__default["default"].promises.readFile(inputName, "utf8")));
await wtnsDebug$1(input, wasmName, witnessName, symName, options, logger);
return 0;
}
// wtns export json [witness.wtns] [witness.json]
// -get|g -set|s -trigger|t
async function wtnsExportJson(params, options) {
const wtnsName = params[0] || "witness.wtns";
const jsonName = params[1] || "witness.json";
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const w = await wtnsExportJson$1(wtnsName);
await fs__default["default"].promises.writeFile(jsonName, JSON.stringify(stringifyBigInts(w), null, 1));
return 0;
}
/*
// zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]
async function zksnarkSetup(params, options) {
const r1csName = params[0] || "circuit.r1cs";
const zkeyName = params[1] || changeExt(r1csName, "zkey");
const verificationKeyName = params[2] || "verification_key.json";
const protocol = options.protocol || "groth16";
const cir = await readR1cs(r1csName, true);
if (!zkSnark[protocol]) throw new Error("Invalid protocol");
const setup = zkSnark[protocol].setup(cir, options.verbose);
await zkey.utils.write(zkeyName, setup.vk_proof);
// await fs.promises.writeFile(provingKeyName, JSON.stringify(stringifyBigInts(setup.vk_proof), null, 1), "utf-8");
await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(setup.vk_verifier), null, 1), "utf-8");
return 0;
}
*/
// groth16 prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
async function groth16Prove(params, options) {
const zkeyName = params[0] || "circuit_final.zkey";
const witnessName = params[1] || "witness.wtns";
const proofName = params[2] || "proof.json";
const publicName = params[3] || "public.json";
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const {proof, publicSignals} = await groth16Prove$1(zkeyName, witnessName, logger);
await fs__default["default"].promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
await fs__default["default"].promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
return 0;
}
// groth16 fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]
async function groth16FullProve(params, options) {
const inputName = params[0] || "input.json";
const wasmName = params[1] || "circuit.wasm";
const zkeyName = params[2] || "circuit_final.zkey";
const proofName = params[3] || "proof.json";
const publicName = params[4] || "public.json";
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const input = unstringifyBigInts(JSON.parse(await fs__default["default"].promises.readFile(inputName, "utf8")));
const {proof, publicSignals} = await groth16FullProve$1(input, wasmName, zkeyName, logger);
await fs__default["default"].promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
await fs__default["default"].promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
return 0;
}
// groth16 verify [verification_key.json] [public.json] [proof.json]
async function groth16Verify(params, options) {
const verificationKeyName = params[0] || "verification_key.json";
const publicName = params[1] || "public.json";
const proofName = params[2] || "proof.json";
const verificationKey = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(verificationKeyName, "utf8")));
const pub = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(publicName, "utf8")));
const proof = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(proofName, "utf8")));
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const isValid = await groth16Verify$1(verificationKey, pub, proof, logger);
if (isValid) {
return 0;
} else {
return 1;
}
}
// zkey export vkey [circuit_final.zkey] [verification_key.json]",
async function zkeyExportVKey(params, options) {
const zkeyName = params[0] || "circuit_final.zkey";
const verificationKeyName = params[1] || "verification_key.json";
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const vKey = await zkeyExportVerificationKey(zkeyName);
const S = JSON.stringify(ffjavascript.utils.stringifyBigInts(vKey), null, 1);
await fs__default["default"].promises.writeFile(verificationKeyName, S);
}
// zkey export json [circuit_final.zkey] [circuit.zkey.json]",
async function zkeyExportJson(params, options) {
const zkeyName = params[0] || "circuit_final.zkey";
const zkeyJsonName = params[1] || "circuit_final.zkey.json";
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const zKey = await zkeyExportJson$1(zkeyName);
const S = JSON.stringify(ffjavascript.utils.stringifyBigInts(zKey), null, 1);
await fs__default["default"].promises.writeFile(zkeyJsonName, S);
}
async function fileExists(file) {
return fs__default["default"].promises.access(file, fs__default["default"].constants.F_OK)
.then(() => true)
.catch(() => false);
}
// solidity genverifier [circuit_final.zkey] [verifier.sol]
async function zkeyExportSolidityVerifier(params, options) {
let zkeyName;
let verifierName;
if (params.length < 1) {
zkeyName = "circuit_final.zkey";
} else {
zkeyName = params[0];
}
if (params.length < 2) {
verifierName = "verifier.sol";
} else {
verifierName = params[1];
}
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const templates = {};
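// Look for the EJS verifier templates next to this file first, then one directory up.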
if (await fileExists(path__default["default"].join(__dirname$1, "templates"))) {
templates.groth16 = await fs__default["default"].promises.readFile(path__default["default"].join(__dirname$1, "templates", "verifier_groth16.sol.ejs"), "utf8");
templates.plonk = await fs__default["default"].promises.readFile(path__default["default"].join(__dirname$1, "templates", "verifier_plonk.sol.ejs"), "utf8");
} else {
templates.groth16 = await fs__default["default"].promises.readFile(path__default["default"].join(__dirname$1, "..", "templates", "verifier_groth16.sol.ejs"), "utf8");
templates.plonk = await fs__default["default"].promises.readFile(path__default["default"].join(__dirname$1, "..", "templates", "verifier_plonk.sol.ejs"), "utf8");
}
const verifierCode = await exportSolidityVerifier(zkeyName, templates);
fs__default["default"].writeFileSync(verifierName, verifierCode, "utf-8");
return 0;
}
// solidity gencall
async function zkeyExportSolidityCalldata(params, options) {
let publicName;
let proofName;
if (params.length < 1) {
publicName = "public.json";
} else {
publicName = params[0];
}
if (params.length < 2) {
proofName = "proof.json";
} else {
proofName = params[1];
}
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const pub = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(publicName, "utf8")));
const proof = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(proofName, "utf8")));
let res;
if (proof.protocol == "groth16") {
res = await groth16ExportSolidityCallData(proof, pub);
} else if (proof.protocol == "plonk") {
res = await plonkExportSolidityCallData(proof, pub);
} else {
throw new Error("Invalid Protocol");
}
console.log(res);
return 0;
}
// powersoftau new <curve> <power> [powersoftau_0000.ptau]
async function powersOfTauNew(params, options) {
let curveName;
let power;
let ptauName;
curveName = params[0];
power = parseInt(params[1]);
if ((power<1) || (power>28)) {
throw new Error("Power must be between 1 and 28");
}
if (params.length < 3) {
ptauName = "powersOfTau" + power + "_0000.ptau";
} else {
ptauName = params[2];
}
const curve = await getCurveFromName(curveName);
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return await newAccumulator(curve, power, ptauName, logger);
}
async function powersOfTauExportChallenge(params, options) {
let ptauName;
let challengeName;
ptauName = params[0];
if (params.length < 2) {
challengeName = "challenge";
} else {
challengeName = params[1];
}
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return await exportChallenge(ptauName, challengeName, logger);
}
// powersoftau challenge contribute <curve> <challenge> [response]
async function powersOfTauChallengeContribute(params, options) {
let challengeName;
let responseName;
const curve = await getCurveFromName(params[0]);
challengeName = params[1];
if (params.length < 3) {
responseName = changeExt(challengeName, "response");
} else {
responseName = params[2];
}
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return await challengeContribute(curve, challengeName, responseName, options.entropy, logger);
}
async function powersOfTauImport(params, options) {
let oldPtauName;
let response;
let newPtauName;
let importPoints = true;
let doCheck = true;
oldPtauName = params[0];
response = params[1];
newPtauName = params[2];
if (options.nopoints) importPoints = false;
if (options.nocheck) doCheck = false;
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const res = await importResponse(oldPtauName, response, newPtauName, options.name, importPoints, logger);
if (res) return res;
if (!doCheck) return;
// TODO Verify
}
async function powersOfTauVerify(params, options) {
let ptauName;
ptauName = params[0];
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const res = await verify(ptauName, logger);
if (res === true) {
return 0;
} else {
return 1;
}
}
async function powersOfTauBeacon(params, options) {
let oldPtauName;
let newPtauName;
let beaconHashStr;
let numIterationsExp;
oldPtauName = params[0];
newPtauName = params[1];
beaconHashStr = params[2];
numIterationsExp = params[3];
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return await beacon$1(oldPtauName, newPtauName, options.name ,beaconHashStr, numIterationsExp, logger);
}
async function powersOfTauContribute(params, options) {
let oldPtauName;
let newPtauName;
oldPtauName = params[0];
newPtauName = params[1];
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return await contribute(oldPtauName, newPtauName, options.name , options.entropy, logger);
}
async function powersOfTauPreparePhase2(params, options) {
let oldPtauName;
let newPtauName;
oldPtauName = params[0];
newPtauName = params[1];
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return await preparePhase2(oldPtauName, newPtauName, logger);
}
async function powersOfTauConvert(params, options) {
let oldPtauName;
let newPtauName;
oldPtauName = params[0];
newPtauName = params[1];
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return await convert(oldPtauName, newPtauName, logger);
}
async function powersOfTauTruncate(params, options) {
let ptauName;
ptauName = params[0];
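// Build the output-name prefix: strip the extension from ptauName and append "_".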
let template = ptauName;
while ((template.length>0) && (template[template.length-1] != ".")) template = template.slice(0, template.length-1);
template = template.slice(0, template.length-1);
template = template+"_";
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return await truncate(ptauName, template, logger);
}
// powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>
async function powersOfTauExportJson(params, options) {
let ptauName;
let jsonName;
ptauName = params[0];
jsonName = params[1];
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const pTau = await exportJson(ptauName, logger);
const S = JSON.stringify(stringifyBigInts(pTau), null, 1);
await fs__default["default"].promises.writeFile(jsonName, S);
}
// phase2 new
async function zkeyNew(params, options) {
let r1csName;
let ptauName;
let zkeyName;
if (params.length < 1) {
r1csName = "circuit.r1cs";
} else {
r1csName = params[0];
}
if (params.length < 2) {
ptauName = "powersoftau.ptau";
} else {
ptauName = params[1];
}
if (params.length < 3) {
zkeyName = "circuit_0000.zkey";
} else {
zkeyName = params[2];
}
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return newZKey(r1csName, ptauName, zkeyName, logger);
}
// zkey export bellman [circuit_0000.zkey] [circuit.mpcparams]
async function zkeyExportBellman(params, options) {
let zkeyName;
let mpcparamsName;
zkeyName = params[0];
if (params.length < 2) {
mpcparamsName = "circuit.mpcparams";
} else {
mpcparamsName = params[1];
}
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return phase2exportMPCParams(zkeyName, mpcparamsName, logger);
}
// zkey import bellman
async function zkeyImportBellman(params, options) {
let zkeyNameOld;
let mpcParamsName;
let zkeyNameNew;
zkeyNameOld = params[0];
mpcParamsName = params[1];
zkeyNameNew = params[2];
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return phase2importMPCParams(zkeyNameOld, mpcParamsName, zkeyNameNew, options.name, logger);
}
// phase2 verify r1cs [circuit.r1cs] [powersoftau.ptau] [circuit_final.zkey]
async function zkeyVerifyFromR1cs(params, options) {
let r1csName;
let ptauName;
let zkeyName;
if (params.length < 1) {
r1csName = "circuit.r1cs";
} else {
r1csName = params[0];
}
if (params.length < 2) {
ptauName = "powersoftau.ptau";
} else {
ptauName = params[1];
}
if (params.length < 3) {
zkeyName = "circuit_final.zkey";
} else {
zkeyName = params[2];
}
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const res = await phase2verifyFromR1cs(r1csName, ptauName, zkeyName, logger);
if (res === true) {
return 0;
} else {
return 1;
}
}
// phase2 verify [circuit_0000] [powersoftau.ptau] [circuit_final.zkey]
async function zkeyVerifyFromInit(params, options) {
let initZKeyName;
let ptauName;
let zkeyName;
if (params.length < 1) {
initZKeyName = "circuit_0000.zkey";
} else {
initZKeyName = params[0];
}
if (params.length < 2) {
ptauName = "powersoftau.ptau";
} else {
ptauName = params[1];
}
if (params.length < 3) {
zkeyName = "circuit_final.zkey";
} else {
zkeyName = params[2];
}
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const res = await phase2verifyFromInit(initZKeyName, ptauName, zkeyName, logger);
if (res === true) {
return 0;
} else {
return 1;
}
}
// zkey contribute
async function zkeyContribute(params, options) {
let zkeyOldName;
let zkeyNewName;
zkeyOldName = params[0];
zkeyNewName = params[1];
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return phase2contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, logger);
}
// zkey beacon
async function zkeyBeacon(params, options) {
let zkeyOldName;
let zkeyNewName;
let beaconHashStr;
let numIterationsExp;
zkeyOldName = params[0];
zkeyNewName = params[1];
beaconHashStr = params[2];
numIterationsExp = params[3];
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return await beacon(zkeyOldName, zkeyNewName, options.name ,beaconHashStr, numIterationsExp, logger);
}
// zkey bellman contribute <curve> <challenge> [response]
async function zkeyBellmanContribute(params, options) {
let challengeName;
let responseName;
const curve = await getCurveFromName(params[0]);
challengeName = params[1];
if (params.length < 3) {
responseName = changeExt(challengeName, "response");
} else {
responseName = params[2];
}
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return bellmanContribute(curve, challengeName, responseName, options.entropy, logger);
}
// plonk setup
async function plonkSetup(params, options) {
let r1csName;
let ptauName;
let zkeyName;
if (params.length < 1) {
r1csName = "circuit.r1cs";
} else {
r1csName = params[0];
}
if (params.length < 2) {
ptauName = "powersoftau.ptau";
} else {
ptauName = params[1];
}
if (params.length < 3) {
zkeyName = "circuit.zkey";
} else {
zkeyName = params[2];
}
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
return plonkSetup$1(r1csName, ptauName, zkeyName, logger);
}
// plonk prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
async function plonkProve(params, options) {
const zkeyName = params[0] || "circuit.zkey";
const witnessName = params[1] || "witness.wtns";
const proofName = params[2] || "proof.json";
const publicName = params[3] || "public.json";
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const {proof, publicSignals} = await plonk16Prove(zkeyName, witnessName, logger);
await fs__default["default"].promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
await fs__default["default"].promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
return 0;
}
// plonk fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]
async function plonkFullProve(params, options) {
const inputName = params[0] || "input.json";
const wasmName = params[1] || "circuit.wasm";
const zkeyName = params[2] || "circuit.zkey";
const proofName = params[3] || "proof.json";
const publicName = params[4] || "public.json";
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const input = unstringifyBigInts(JSON.parse(await fs__default["default"].promises.readFile(inputName, "utf8")));
const {proof, publicSignals} = await plonkFullProve$1(input, wasmName, zkeyName, logger);
await fs__default["default"].promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
await fs__default["default"].promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");
return 0;
}
// plonk verify [verification_key.json] [public.json] [proof.json]
async function plonkVerify(params, options) {
const verificationKeyName = params[0] || "verification_key.json";
const publicName = params[1] || "public.json";
const proofName = params[2] || "proof.json";
const verificationKey = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(verificationKeyName, "utf8")));
const pub = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(publicName, "utf8")));
const proof = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(proofName, "utf8")));
if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");
const isValid = await plonkVerify$1(verificationKey, pub, proof, logger);
if (isValid) {
return 0;
} else {
return 1;
}
}