diff --git a/build/main.cjs b/build/main.cjs
index cce426d..e704e45 100644
--- a/build/main.cjs
+++ b/build/main.cjs
@@ -15,14 +15,34 @@ var jsSha3 = require('js-sha3');
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
+function _interopNamespace(e) {
+ if (e && e.__esModule) return e;
+ var n = Object.create(null);
+ if (e) {
+ Object.keys(e).forEach(function (k) {
+ if (k !== 'default') {
+ var d = Object.getOwnPropertyDescriptor(e, k);
+ Object.defineProperty(n, k, d.get ? d : {
+ enumerable: true,
+ get: function () { return e[k]; }
+ });
+ }
+ });
+ }
+ n["default"] = e;
+ return Object.freeze(n);
+}
+
+var binFileUtils__namespace = /*#__PURE__*/_interopNamespace(binFileUtils);
var Blake2b__default = /*#__PURE__*/_interopDefaultLegacy(Blake2b);
var readline__default = /*#__PURE__*/_interopDefaultLegacy(readline);
var crypto__default = /*#__PURE__*/_interopDefaultLegacy(crypto);
+var fastFile__namespace = /*#__PURE__*/_interopNamespace(fastFile);
var ejs__default = /*#__PURE__*/_interopDefaultLegacy(ejs);
var jsSha3__default = /*#__PURE__*/_interopDefaultLegacy(jsSha3);
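Note: the new `_interopNamespace` helper above is standard Rollup interop glue, and `binFileUtils__namespace` / `fastFile__namespace` are its products: a CommonJS `exports` object wrapped as a frozen, ES-module-like namespace whose properties are live getters. A minimal sketch of the behavior, using a hypothetical stand-in for `binFileUtils`:

```js
// Hypothetical CommonJS exports object standing in for binFileUtils.
const cjs = { readBinFile: async () => {}, createBinFile: async () => {} };

const ns = _interopNamespace(cjs);
typeof ns.readBinFile;   // "function" -- forwarded through a getter
ns["default"] === cjs;   // true -- the original module is kept as "default"
Object.isFrozen(ns);     // true -- the namespace cannot be mutated
```

This is why dozens of later hunks mechanically swap `binFileUtils.x(...)` for `binFileUtils__namespace.x(...)`: same functions, accessed through the namespace wrapper.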
-const bls12381r = ffjavascript.Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
-const bn128r = ffjavascript.Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
+ffjavascript.Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
+ffjavascript.Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
const bls12381q = ffjavascript.Scalar.e("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16);
const bn128q = ffjavascript.Scalar.e("21888242871839275222246405745257275088696311157297823662689037894645226208583");
@@ -127,12 +147,12 @@ function hashIsEqual(h1, h2) {
function cloneHasher(h) {
const ph = h.getPartialHash();
- const res = Blake2b__default['default'](64);
+ const res = Blake2b__default["default"](64);
res.setPartialHash(ph);
return res;
}
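The quoting change aside, `cloneHasher` is the partial-hash trick used throughout the ceremony code below: `getPartialHash`/`setPartialHash` snapshot a running Blake2b state so a shared prefix is hashed once and extended in two directions. A minimal sketch, assuming `Blake2b` is the factory behind `Blake2b__default["default"]` and the byte arrays are hypothetical:

```js
await Blake2b.ready();              // wasm initialization, as in the hunks below
const h = Blake2b(64);
h.update(prefixBytes);              // bytes shared by both digests (hypothetical)

const fork = cloneHasher(h);        // independent copy of the internal state
h.update(suffixA);                  // hypothetical continuation A
fork.update(suffixB);               // hypothetical continuation B
// h.digest() now covers prefix+A and fork.digest() covers prefix+B,
// without re-hashing the prefix.
```

The `verify` function further down uses the same mechanism to rebuild `responseHash` from a contribution's stored `partialHash`.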
-async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
+async function sameRatio$2(curve, g1s, g1sx, g2s, g2sx) {
if (curve.G1.isZero(g1s)) return false;
if (curve.G1.isZero(g1sx)) return false;
if (curve.G2.isZero(g2s)) return false;
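Most hunks in this file are this same mechanical rename: Rollup deduplicates colliding top-level names across the bundled modules with `$1`, `$2`, … suffixes, and the suffixes are reassigned whenever the module graph changes. Sketched for the `sameRatio` case (file layout hypothetical):

```js
// misc.js -- defines the shared helper
async function sameRatio(curve, g1s, g1sx, g2s, g2sx) { /* ... */ }

// powersoftau_verify.js -- re-exposes it under a local alias
const sameRatio$1 = sameRatio;
```

When another module claimed the bare name in this rebuild, Rollup re-suffixed the definition to `sameRatio$2` and rewrote every alias (see the `sameRatio$1 = sameRatio$2` and `sameRatio = sameRatio$2` hunks below); the same churn accounts for `readHeader`/`readHeader$1`, `stringifyBigInts$1`–`$3`, `p256`/`p256$1`, `keccak256`/`keccak256$1`, and `beacon`/`beacon$1`.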
@@ -147,7 +167,7 @@ function askEntropy() {
if (process.browser) {
return window.prompt("Enter a random text. (Entropy): ", "");
} else {
- const rl = readline__default['default'].createInterface({
+ const rl = readline__default["default"].createInterface({
input: process.stdin,
output: process.stdout
});
@@ -163,8 +183,8 @@ async function getRandomRng(entropy) {
while (!entropy) {
entropy = await askEntropy();
}
- const hasher = Blake2b__default['default'](64);
- hasher.update(crypto__default['default'].randomBytes(64));
+ const hasher = Blake2b__default["default"](64);
+ hasher.update(crypto__default["default"].randomBytes(64));
const enc = new TextEncoder(); // always utf-8
hasher.update(enc.encode(entropy));
const hash = Buffer.from(hasher.digest());
@@ -191,7 +211,7 @@ function rngFromBeaconParams(beaconHash, numIterationsExp) {
let curHash = beaconHash;
for (let i=0; i<nIterationsOuter; i++) {
@@ ... @@
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
-const {stringifyBigInts} = ffjavascript.utils;
+const {stringifyBigInts: stringifyBigInts$3} = ffjavascript.utils;
async function groth16Prove(zkeyFileName, witnessFileName, logger) {
- const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23);
+ const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils__namespace.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23);
- const wtns = await readHeader$1(fdWtns, sectionsWtns);
+ const wtns = await readHeader(fdWtns, sectionsWtns);
- const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
+ const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils__namespace.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
- const zkey = await readHeader(fdZKey, sectionsZKey);
+ const zkey = await readHeader$1(fdZKey, sectionsZKey);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
@@ -763,9 +783,9 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
const power = log2(zkey.domainSize);
if (logger) logger.debug("Reading Wtns");
- const buffWitness = await binFileUtils.readSection(fdWtns, sectionsWtns, 2);
+ const buffWitness = await binFileUtils__namespace.readSection(fdWtns, sectionsWtns, 2);
if (logger) logger.debug("Reading Coeffs");
- const buffCoeffs = await binFileUtils.readSection(fdZKey, sectionsZKey, 4);
+ const buffCoeffs = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 4);
if (logger) logger.debug("Building ABC");
const [buffA_T, buffB_T, buffC_T] = await buldABC1(curve, zkey, buffWitness, buffCoeffs, logger);
@@ -790,23 +810,23 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
let proof = {};
if (logger) logger.debug("Reading A Points");
- const buffBasesA = await binFileUtils.readSection(fdZKey, sectionsZKey, 5);
+ const buffBasesA = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 5);
proof.pi_a = await curve.G1.multiExpAffine(buffBasesA, buffWitness, logger, "multiexp A");
if (logger) logger.debug("Reading B1 Points");
- const buffBasesB1 = await binFileUtils.readSection(fdZKey, sectionsZKey, 6);
+ const buffBasesB1 = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 6);
let pib1 = await curve.G1.multiExpAffine(buffBasesB1, buffWitness, logger, "multiexp B1");
if (logger) logger.debug("Reading B2 Points");
- const buffBasesB2 = await binFileUtils.readSection(fdZKey, sectionsZKey, 7);
+ const buffBasesB2 = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 7);
proof.pi_b = await curve.G2.multiExpAffine(buffBasesB2, buffWitness, logger, "multiexp B2");
if (logger) logger.debug("Reading C Points");
- const buffBasesC = await binFileUtils.readSection(fdZKey, sectionsZKey, 8);
+ const buffBasesC = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 8);
proof.pi_c = await curve.G1.multiExpAffine(buffBasesC, buffWitness.slice((zkey.nPublic+1)*curve.Fr.n8), logger, "multiexp C");
if (logger) logger.debug("Reading H Points");
- const buffBasesH = await binFileUtils.readSection(fdZKey, sectionsZKey, 9);
+ const buffBasesH = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 9);
const resH = await curve.G1.multiExpAffine(buffBasesH, buffPodd_T, logger, "multiexp H");
const r = curve.Fr.random();
@@ -846,8 +866,8 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
await fdZKey.close();
await fdWtns.close();
- proof = stringifyBigInts(proof);
- publicSignals = stringifyBigInts(publicSignals);
+ proof = stringifyBigInts$3(proof);
+ publicSignals = stringifyBigInts$3(publicSignals);
return {proof, publicSignals};
}
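Externally `groth16Prove` is unchanged by all of this; the hunks above only retarget the interop namespace and the deduplication suffixes. A typical call, with hypothetical file names:

```js
// Hypothetical inputs produced by a prior setup/witness step.
const { proof, publicSignals } = await groth16Prove(
    "circuit_final.zkey",
    "witness.wtns",
    console          // any logger with debug/info/error methods works
);
// proof and publicSignals come back already stringified
// (via stringifyBigInts$3 after this rename).
```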
@@ -1103,18 +1123,26 @@ async function joinABC(curve, zkey, a, b, c, logger) {
async function wtnsCalculate(input, wasmFileName, wtnsFileName, options) {
- const fdWasm = await fastFile.readExisting(wasmFileName);
+ const fdWasm = await fastFile__namespace.readExisting(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close();
const wc = await circom_runtime.WitnessCalculatorBuilder(wasm);
- const w = await wc.calculateBinWitness(input);
+ if (wc.circom_version() == 1) {
+ const w = await wc.calculateBinWitness(input);
- const fdWtns = await binFileUtils.createBinFile(wtnsFileName, "wtns", 2, 2);
+ const fdWtns = await binFileUtils__namespace.createBinFile(wtnsFileName, "wtns", 2, 2);
- await writeBin(fdWtns, w, wc.prime);
- await fdWtns.close();
+ await writeBin(fdWtns, w, wc.prime);
+ await fdWtns.close();
+ } else {
+ const fdWtns = await fastFile__namespace.createOverride(wtnsFileName);
+ const w = await wc.calculateWTNSBin(input);
+
+ await fdWtns.write(w);
+ await fdWtns.close();
+ }
}
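This hunk is the one behavioral change in the diff: `wtnsCalculate` now branches on the calculator's reported circom version. Circom 1 wasm keeps the old path (raw witness values wrapped into a wtns container via `writeBin`); circom 2 wasm returns a complete `.wtns` byte stream from `calculateWTNSBin`, which is written to disk verbatim. Callers are unaffected, as in this hypothetical sketch:

```js
const input = { a: "3", b: "11" };   // hypothetical circuit signals
await wtnsCalculate(input, "circuit.wasm", "witness.wtns");
// Either branch leaves a valid wtns file on disk; downstream code such as
// groth16FullProve never needs to know which circom version built the wasm.
```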
/*
@@ -1162,7 +1190,7 @@ async function groth16FullProve(input, wasmFile, zkeyFileName, logger) {
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
-const {unstringifyBigInts} = ffjavascript.utils;
+const {unstringifyBigInts: unstringifyBigInts$1} = ffjavascript.utils;
async function groth16Verify(vk_verifier, publicSignals, proof, logger) {
/*
@@ -1172,9 +1200,9 @@ async function groth16Verify(vk_verifier, publicSignals, proof, logger) {
}
*/
- vk_verifier = unstringifyBigInts(vk_verifier);
- proof = unstringifyBigInts(proof);
- publicSignals = unstringifyBigInts(publicSignals);
+ vk_verifier = unstringifyBigInts$1(vk_verifier);
+ proof = unstringifyBigInts$1(proof);
+ publicSignals = unstringifyBigInts$1(publicSignals);
const curve = await getCurveFromName(vk_verifier.curve);
@@ -1236,7 +1264,7 @@ async function groth16Verify(vk_verifier, publicSignals, proof, logger) {
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
-function p256(n) {
+function p256$1(n) {
let nstr = n.toString(16);
while (nstr.length < 64) nstr = "0"+nstr;
nstr = `"0x${nstr}"`;
@@ -1248,13 +1276,13 @@ async function groth16ExportSolidityCallData(proof, pub) {
let inputs = "";
for (let i=0; i<pub.length; i++) {
@@ ... @@
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
-const sameRatio$1 = sameRatio;
+const sameRatio$1 = sameRatio$2;
async function verifyContribution(curve, cur, prev, logger) {
let sr;
@@ -2233,9 +2261,9 @@ async function verifyContribution(curve, cur, prev, logger) {
async function verify(tauFilename, logger) {
let sr;
- await Blake2b__default['default'].ready();
+ await Blake2b__default["default"].ready();
- const {fd, sections} = await binFileUtils.readBinFile(tauFilename, "ptau", 1);
+ const {fd, sections} = await binFileUtils__namespace.readBinFile(tauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await readPTauHeader(fd, sections);
const contrs = await readContributions(fd, curve, sections);
@@ -2250,7 +2278,7 @@ async function verify(tauFilename, logger) {
betaG1: curve.G1.g,
betaG2: curve.G2.g,
nextChallenge: calculateFirstChallengeHash(curve, ceremonyPower, logger),
- responseHash: Blake2b__default['default'](64).digest()
+ responseHash: Blake2b__default["default"](64).digest()
};
if (contrs.length == 0) {
@@ -2270,7 +2298,7 @@ async function verify(tauFilename, logger) {
if (!res) return false;
- const nextContributionHasher = Blake2b__default['default'](64);
+ const nextContributionHasher = Blake2b__default["default"](64);
nextContributionHasher.update(curContr.responseHash);
// Verify powers and compute nextChallengeHash
@@ -2404,7 +2432,7 @@ async function verify(tauFilename, logger) {
const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false);
- const responseHasher = Blake2b__default['default'](64);
+ const responseHasher = Blake2b__default["default"](64);
responseHasher.setPartialHash(curContr.partialHash);
responseHasher.update(buffV);
const responseHash = responseHasher.digest();
@@ -2448,7 +2476,7 @@ async function verify(tauFilename, logger) {
const MAX_CHUNK_SIZE = 1<<16;
const G = curve[groupName];
const sG = G.F.n8*2;
- await binFileUtils.startReadUniqueSection(fd, sections, idSection);
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, idSection);
const singularPoints = [];
@@ -2466,12 +2494,12 @@ async function verify(tauFilename, logger) {
nextContributionHasher.update(basesU);
const scalars = new Uint8Array(4*(n-1));
- crypto__default['default'].randomFillSync(scalars);
+ crypto__default["default"].randomFillSync(scalars);
if (i>0) {
const firstBase = G.fromRprLEM(bases, 0);
- const r = crypto__default['default'].randomBytes(4).readUInt32BE(0, true);
+ const r = crypto__default["default"].randomBytes(4).readUInt32BE(0, true);
R1 = G.add(R1, G.timesScalar(lastBase, r));
R2 = G.add(R2, G.timesScalar(firstBase, r));
@@ -2494,7 +2522,7 @@ async function verify(tauFilename, logger) {
}
}
- await binFileUtils.endReadSection(fd);
+ await binFileUtils__namespace.endReadSection(fd);
return {
R1: R1,
@@ -2512,7 +2540,7 @@ async function verify(tauFilename, logger) {
const seed= new Array(8);
for (let i=0; i<8; i++) {
- seed[i] = crypto__default['default'].randomBytes(4).readUInt32BE(0, true);
+ seed[i] = crypto__default["default"].randomBytes(4).readUInt32BE(0, true);
}
for (let p=0; p<= power; p ++) {
@@ -2548,7 +2576,7 @@ async function verify(tauFilename, logger) {
buff_r = new Uint8Array(buff_r.buffer, buff_r.byteOffset, buff_r.byteLength);
if (logger) logger.debug(`reading points Powers${p}...`);
- await binFileUtils.startReadUniqueSection(fd, sections, tauSection);
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, tauSection);
buffG = new ffjavascript.BigBuffer(nPoints*sG);
if (p == power+1) {
await fd.readToBuffer(buffG, 0, (nPoints-1)*sG);
@@ -2556,7 +2584,7 @@ async function verify(tauFilename, logger) {
} else {
await fd.readToBuffer(buffG, 0, nPoints*sG);
}
- await binFileUtils.endReadSection(fd, true);
+ await binFileUtils__namespace.endReadSection(fd, true);
const resTau = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p);
@@ -2583,10 +2611,10 @@ async function verify(tauFilename, logger) {
buff_r = await curve.Fr.batchFromMontgomery(buff_r);
if (logger) logger.debug(`reading points Lagrange${p}...`);
- await binFileUtils.startReadUniqueSection(fd, sections, lagrangeSection);
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, lagrangeSection);
fd.pos += sG*((2 ** p)-1);
await fd.readToBuffer(buffG, 0, nPoints*sG);
- await binFileUtils.endReadSection(fd, true);
+ await binFileUtils__namespace.endReadSection(fd, true);
const resLagrange = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p + "_transformed");
@@ -2632,8 +2660,8 @@ async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, group
const sG = G.F.n8*2;
const nPoints = sections[idSection][0].size / sG;
- await binFileUtils.startReadUniqueSection(fdOld, sections,idSection );
- await binFileUtils.startWriteSection(fdNew, idSection);
+ await binFileUtils__namespace.startReadUniqueSection(fdOld, sections,idSection );
+ await binFileUtils__namespace.startWriteSection(fdNew, idSection);
let t = first;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
@@ ... @@
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
-async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,numIterationsExp, logger) {
+async function beacon$1(oldPtauFilename, newPTauFilename, name, beaconHashStr,numIterationsExp, logger) {
const beaconHash = hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length))
@@ -2807,9 +2835,9 @@ async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,num
}
- await Blake2b__default['default'].ready();
+ await Blake2b__default["default"].ready();
- const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
if (power != ceremonyPower) {
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
@@ -2836,10 +2864,10 @@ async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,num
curContribution.key = keyFromBeacon(curve, lastChallengeHash, beaconHash, numIterationsExp);
- const responseHasher = new Blake2b__default['default'](64);
+ const responseHasher = new Blake2b__default["default"](64);
responseHasher.update(lastChallengeHash);
- const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 7);
+ const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 7);
await writePTauHeader(fdNew, curve, power);
const startSections = [];
@@ -2867,7 +2895,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,num
if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));
- const nextChallengeHasher = new Blake2b__default['default'](64);
+ const nextChallengeHasher = new Blake2b__default["default"](64);
nextChallengeHasher.update(hashResponse);
await hashSection(fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1", logger);
@@ -2893,7 +2921,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,num
const res = [];
fdOld.pos = sections[sectionId][0].p;
- await binFileUtils.startWriteSection(fdNew, sectionId);
+ await binFileUtils__namespace.startWriteSection(fdNew, sectionId);
startSections[sectionId] = fdNew.pos;
@@ -2925,7 +2953,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,num
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
- await binFileUtils.endWriteSection(fdNew);
+ await binFileUtils__namespace.endWriteSection(fdNew);
return res;
}
@@ -2975,9 +3003,9 @@ async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,num
*/
async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logger) {
- await Blake2b__default['default'].ready();
+ await Blake2b__default["default"].ready();
- const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
if (power != ceremonyPower) {
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
@@ -3008,10 +3036,10 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logge
curContribution.key = createPTauKey(curve, lastChallengeHash, rng);
- const responseHasher = new Blake2b__default['default'](64);
+ const responseHasher = new Blake2b__default["default"](64);
responseHasher.update(lastChallengeHash);
- const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 7);
+ const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 7);
await writePTauHeader(fdNew, curve, power);
const startSections = [];
@@ -3039,7 +3067,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logge
if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));
- const nextChallengeHasher = new Blake2b__default['default'](64);
+ const nextChallengeHasher = new Blake2b__default["default"](64);
nextChallengeHasher.update(hashResponse);
await hashSection(fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1");
@@ -3065,7 +3093,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logge
const res = [];
fdOld.pos = sections[sectionId][0].p;
- await binFileUtils.startWriteSection(fdNew, sectionId);
+ await binFileUtils__namespace.startWriteSection(fdNew, sectionId);
startSections[sectionId] = fdNew.pos;
@@ -3097,7 +3125,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logge
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}
- await binFileUtils.endWriteSection(fdNew);
+ await binFileUtils__namespace.endWriteSection(fdNew);
return res;
}
@@ -3150,18 +3178,18 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logge
async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
- const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await readPTauHeader(fdOld, sections);
- const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
+ const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 11);
await writePTauHeader(fdNew, curve, power);
- await binFileUtils.copySection(fdOld, sections, fdNew, 2);
- await binFileUtils.copySection(fdOld, sections, fdNew, 3);
- await binFileUtils.copySection(fdOld, sections, fdNew, 4);
- await binFileUtils.copySection(fdOld, sections, fdNew, 5);
- await binFileUtils.copySection(fdOld, sections, fdNew, 6);
- await binFileUtils.copySection(fdOld, sections, fdNew, 7);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 2);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7);
await processSection(2, 12, "G1", "tauG1" );
await processSection(3, 13, "G2", "tauG2" );
@@ -3178,7 +3206,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
if (logger) logger.debug("Starting section: "+sectionName);
- await binFileUtils.startWriteSection(fdNew, newSectionId);
+ await binFileUtils__namespace.startWriteSection(fdNew, newSectionId);
for (let p=0; p<=power; p++) {
await processSectionPower(p);
@@ -3188,27 +3216,27 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
await processSectionPower(power+1);
}
- await binFileUtils.endWriteSection(fdNew);
+ await binFileUtils__namespace.endWriteSection(fdNew);
async function processSectionPower(p) {
const nPoints = 2 ** p;
const G = curve[Gstr];
- const Fr = curve.Fr;
+ curve.Fr;
const sGin = G.F.n8*2;
- const sGmid = G.F.n8*3;
+ G.F.n8*3;
let buff;
buff = new ffjavascript.BigBuffer(nPoints*sGin);
- await binFileUtils.startReadUniqueSection(fdOld, sections, oldSectionId);
+ await binFileUtils__namespace.startReadUniqueSection(fdOld, sections, oldSectionId);
if ((oldSectionId == 2)&&(p==power+1)) {
await fdOld.readToBuffer(buff, 0,(nPoints-1)*sGin );
buff.set(curve.G1.zeroAffine, (nPoints-1)*sGin );
} else {
await fdOld.readToBuffer(buff, 0,nPoints*sGin );
}
- await binFileUtils.endReadSection(fdOld, true);
+ await binFileUtils__namespace.endReadSection(fdOld, true);
buff = await G.lagrangeEvaluations(buff, "affine", "affine", logger, sectionName);
@@ -3288,7 +3316,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
async function truncate(ptauFilename, template, logger) {
- const {fd: fdOld, sections} = await binFileUtils.readBinFile(ptauFilename, "ptau", 1);
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(ptauFilename, "ptau", 1);
const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
const sG1 = curve.G1.F.n8*2;
@@ -3309,19 +3337,19 @@ async function truncate(ptauFilename, template, logger) {
if (logger) logger.debug("Writing Power: "+sP);
- const fdNew = await binFileUtils.createBinFile(template + sP + ".ptau", "ptau", 1, 11);
+ const fdNew = await binFileUtils__namespace.createBinFile(template + sP + ".ptau", "ptau", 1, 11);
await writePTauHeader(fdNew, curve, p, ceremonyPower);
- await binFileUtils.copySection(fdOld, sections, fdNew, 2, ((2 ** p)*2-1) * sG1 ); // tauG1
- await binFileUtils.copySection(fdOld, sections, fdNew, 3, (2 ** p) * sG2); // tauG2
- await binFileUtils.copySection(fdOld, sections, fdNew, 4, (2 ** p) * sG1); // alfaTauG1
- await binFileUtils.copySection(fdOld, sections, fdNew, 5, (2 ** p) * sG1); // betaTauG1
- await binFileUtils.copySection(fdOld, sections, fdNew, 6, sG2); // betaTauG2
- await binFileUtils.copySection(fdOld, sections, fdNew, 7); // contributions
- await binFileUtils.copySection(fdOld, sections, fdNew, 12, ((2 ** (p+1))*2 -1) * sG1); // L_tauG1
- await binFileUtils.copySection(fdOld, sections, fdNew, 13, ((2 ** p)*2 -1) * sG2); // L_tauG2
- await binFileUtils.copySection(fdOld, sections, fdNew, 14, ((2 ** p)*2 -1) * sG1); // L_alfaTauG1
- await binFileUtils.copySection(fdOld, sections, fdNew, 15, ((2 ** p)*2 -1) * sG1); // L_betaTauG1
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 2, ((2 ** p)*2-1) * sG1 ); // tauG1
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3, (2 ** p) * sG2); // tauG2
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4, (2 ** p) * sG1); // alfaTauG1
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5, (2 ** p) * sG1); // betaTauG1
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6, sG2); // betaTauG2
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7); // contributions
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 12, ((2 ** (p+1))*2 -1) * sG1); // L_tauG1
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 13, ((2 ** p)*2 -1) * sG2); // L_tauG2
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 14, ((2 ** p)*2 -1) * sG1); // L_alfaTauG1
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 15, ((2 ** p)*2 -1) * sG1); // L_betaTauG1
await fdNew.close();
}
@@ -3350,25 +3378,25 @@ async function truncate(ptauFilename, template, logger) {
async function convert(oldPtauFilename, newPTauFilename, logger) {
- const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
+ const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await readPTauHeader(fdOld, sections);
- const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
+ const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 11);
await writePTauHeader(fdNew, curve, power);
// const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
- await binFileUtils.copySection(fdOld, sections, fdNew, 2);
- await binFileUtils.copySection(fdOld, sections, fdNew, 3);
- await binFileUtils.copySection(fdOld, sections, fdNew, 4);
- await binFileUtils.copySection(fdOld, sections, fdNew, 5);
- await binFileUtils.copySection(fdOld, sections, fdNew, 6);
- await binFileUtils.copySection(fdOld, sections, fdNew, 7);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 2);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7);
await processSection(2, 12, "G1", "tauG1" );
- await binFileUtils.copySection(fdOld, sections, fdNew, 13);
- await binFileUtils.copySection(fdOld, sections, fdNew, 14);
- await binFileUtils.copySection(fdOld, sections, fdNew, 15);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 13);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 14);
+ await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 15);
await fdOld.close();
await fdNew.close();
@@ -3380,23 +3408,23 @@ async function convert(oldPtauFilename, newPTauFilename, logger) {
async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
if (logger) logger.debug("Starting section: "+sectionName);
- await binFileUtils.startWriteSection(fdNew, newSectionId);
+ await binFileUtils__namespace.startWriteSection(fdNew, newSectionId);
const size = sections[newSectionId][0].size;
const chunkSize = fdOld.pageSize;
- await binFileUtils.startReadUniqueSection(fdOld, sections, newSectionId);
+ await binFileUtils__namespace.startReadUniqueSection(fdOld, sections, newSectionId);
for (let p=0; p<size; p+=chunkSize) {
@@ ... @@
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
-const bls12381r$1 = ffjavascript.Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
-const bn128r$1 = ffjavascript.Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
+const bls12381r = ffjavascript.Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
+const bn128r = ffjavascript.Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");
async function r1csInfo(r1csName, logger) {
const cir = await r1csfile.readR1cs(r1csName);
- if (ffjavascript.Scalar.eq(cir.prime, bn128r$1)) {
+ if (ffjavascript.Scalar.eq(cir.prime, bn128r)) {
if (logger) logger.info("Curve: bn-128");
- } else if (ffjavascript.Scalar.eq(cir.prime, bls12381r$1)) {
+ } else if (ffjavascript.Scalar.eq(cir.prime, bls12381r)) {
if (logger) logger.info("Curve: bls12-381");
} else {
if (logger) logger.info(`Unknown Curve. Prime: ${ffjavascript.Scalar.toString(cir.prime)}`);
@@ -3701,16 +3729,16 @@ async function r1csInfo(r1csName, logger) {
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
-function stringifyBigInts$1(Fr, o) {
+function stringifyBigInts$2(Fr, o) {
if (o instanceof Uint8Array) {
return Fr.toString(o);
} else if (Array.isArray(o)) {
- return o.map(stringifyBigInts$1.bind(null, Fr));
+ return o.map(stringifyBigInts$2.bind(null, Fr));
} else if (typeof o == "object") {
const res = {};
const keys = Object.keys(o);
keys.forEach( (k) => {
- res[k] = stringifyBigInts$1(Fr, o[k]);
+ res[k] = stringifyBigInts$2(Fr, o[k]);
});
return res;
} else if ((typeof(o) == "bigint") || o.eq !== undefined) {
@@ -3727,7 +3755,7 @@ async function r1csExportJson(r1csFileName, logger) {
const Fr=cir.curve.Fr;
delete cir.curve;
- return stringifyBigInts$1(Fr, cir);
+ return stringifyBigInts$2(Fr, cir);
}
/*
@@ -3781,7 +3809,7 @@ async function loadSymbols(symFileName) {
varIdx2Name: [ "one" ],
componentIdx2Name: []
};
- const fd = await fastFile.readExisting(symFileName);
+ const fd = await fastFile__namespace.readExisting(symFileName);
const buff = await fd.read(fd.totalSize);
const symsStr = new TextDecoder("utf-8").decode(buff);
const lines = symsStr.split("\n");
@@ -3831,7 +3859,7 @@ async function loadSymbols(symFileName) {
async function wtnsDebug(input, wasmFileName, wtnsFileName, symName, options, logger) {
- const fdWasm = await fastFile.readExisting(wasmFileName);
+ const fdWasm = await fastFile__namespace.readExisting(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close();
@@ -3866,7 +3894,7 @@ async function wtnsDebug(input, wasmFileName, wtnsFileName, symName, options, lo
const wc = await circom_runtime.WitnessCalculatorBuilder(wasm, wcOps);
const w = await wc.calculateWitness(input);
- const fdWtns = await binFileUtils.createBinFile(wtnsFileName, "wtns", 2, 2);
+ const fdWtns = await binFileUtils__namespace.createBinFile(wtnsFileName, "wtns", 2, 2);
await write(fdWtns, w, wc.prime);
@@ -4049,8 +4077,8 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
const TAU_G2 = 1;
const ALPHATAU_G1 = 2;
const BETATAU_G1 = 3;
- await Blake2b__default['default'].ready();
- const csHasher = Blake2b__default['default'](64);
+ await Blake2b__default["default"].ready();
+ const csHasher = Blake2b__default["default"](64);
const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1, 1<<22, 1<<24);
const {curve, power} = await readPTauHeader(fdPTau, sectionsPTau);
@@ -4598,8 +4626,8 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
- const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
- const zkey = await readHeader(fdZKey, sectionsZKey);
+ const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils__namespace.readBinFile(zkeyName, "zkey", 2);
+ const zkey = await readHeader$1(fdZKey, sectionsZKey);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
@@ -4610,7 +4638,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
const mpcParams = await readMPCParams(fdZKey, curve, sectionsZKey);
- const fdMPCParams = await fastFile.createOverride(mpcparamsName);
+ const fdMPCParams = await fastFile__namespace.createOverride(mpcparamsName);
/////////////////////
// Verification Key Section
@@ -4624,7 +4652,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
// IC
let buffBasesIC;
- buffBasesIC = await binFileUtils.readSection(fdZKey, sectionsZKey, 3);
+ buffBasesIC = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 3);
buffBasesIC = await curve.G1.batchLEMtoU(buffBasesIC);
await writePointArray("G1", buffBasesIC);
@@ -4632,7 +4660,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
/////////////////////
// h Section
/////////////////////
- const buffBasesH_Lodd = await binFileUtils.readSection(fdZKey, sectionsZKey, 9);
+ const buffBasesH_Lodd = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 9);
let buffBasesH_Tau;
buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", logger);
@@ -4647,7 +4675,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
// L section
/////////////////////
let buffBasesC;
- buffBasesC = await binFileUtils.readSection(fdZKey, sectionsZKey, 8);
+ buffBasesC = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 8);
buffBasesC = await curve.G1.batchLEMtoU(buffBasesC);
await writePointArray("G1", buffBasesC);
@@ -4655,7 +4683,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
// A Section (C section)
/////////////////////
let buffBasesA;
- buffBasesA = await binFileUtils.readSection(fdZKey, sectionsZKey, 5);
+ buffBasesA = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 5);
buffBasesA = await curve.G1.batchLEMtoU(buffBasesA);
await writePointArray("G1", buffBasesA);
@@ -4663,7 +4691,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
// B1 Section
/////////////////////
let buffBasesB1;
- buffBasesB1 = await binFileUtils.readSection(fdZKey, sectionsZKey, 6);
+ buffBasesB1 = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 6);
buffBasesB1 = await curve.G1.batchLEMtoU(buffBasesB1);
await writePointArray("G1", buffBasesB1);
@@ -4671,7 +4699,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
// B2 Section
/////////////////////
let buffBasesB2;
- buffBasesB2 = await binFileUtils.readSection(fdZKey, sectionsZKey, 7);
+ buffBasesB2 = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 7);
buffBasesB2 = await curve.G2.batchLEMtoU(buffBasesB2);
await writePointArray("G2", buffBasesB2);
@@ -4751,8 +4779,8 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, name, logger) {
- const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
- const zkeyHeader = await readHeader(fdZKeyOld, sectionsZKeyOld, false);
+ const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await binFileUtils__namespace.readBinFile(zkeyNameOld, "zkey", 2);
+ const zkeyHeader = await readHeader$1(fdZKeyOld, sectionsZKeyOld, false);
if (zkeyHeader.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
@@ -4764,7 +4792,7 @@ async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, na
const oldMPCParams = await readMPCParams(fdZKeyOld, curve, sectionsZKeyOld);
const newMPCParams = {};
- const fdMPCParams = await fastFile.readExisting(mpcparamsName);
+ const fdMPCParams = await fastFile__namespace.readExisting(mpcparamsName);
fdMPCParams.pos =
sG1*3 + sG2*3 + // vKey
@@ -4824,7 +4852,7 @@ async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, na
}
}
- const fdZKeyNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);
+ const fdZKeyNew = await binFileUtils__namespace.createBinFile(zkeyNameNew, "zkey", 1, 10);
fdMPCParams.pos = 0;
// Header
@@ -4844,10 +4872,10 @@ async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, na
return false;
}
fdMPCParams.pos += sG1*(zkeyHeader.nPublic+1);
- await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 3);
+ await binFileUtils__namespace.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 3);
// Coeffs (Keep original)
- await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 4);
+ await binFileUtils__namespace.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 4);
// H Section
const nH = await fdMPCParams.readUBE32();
@@ -4866,9 +4894,9 @@ async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, na
const wInv = curve.Fr.inv(curve.Fr.w[zkeyHeader.power+1]);
buffH = await curve.G1.batchApplyKey(buffH, n2Inv, wInv, "affine", "jacobian", logger);
buffH = await curve.G1.ifft(buffH, "jacobian", "affine", logger);
- await binFileUtils.startWriteSection(fdZKeyNew, 9);
+ await binFileUtils__namespace.startWriteSection(fdZKeyNew, 9);
await fdZKeyNew.write(buffH);
- await binFileUtils.endWriteSection(fdZKeyNew);
+ await binFileUtils__namespace.endWriteSection(fdZKeyNew);
// C Section (L section)
const nL = await fdMPCParams.readUBE32();
@@ -4880,9 +4908,9 @@ async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, na
let buffL;
buffL = await fdMPCParams.read(sG1*(zkeyHeader.nVars-zkeyHeader.nPublic-1));
buffL = await curve.G1.batchUtoLEM(buffL);
- await binFileUtils.startWriteSection(fdZKeyNew, 8);
+ await binFileUtils__namespace.startWriteSection(fdZKeyNew, 8);
await fdZKeyNew.write(buffL);
- await binFileUtils.endWriteSection(fdZKeyNew);
+ await binFileUtils__namespace.endWriteSection(fdZKeyNew);
// A Section
const nA = await fdMPCParams.readUBE32();
@@ -4892,7 +4920,7 @@ async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, na
return false;
}
fdMPCParams.pos += sG1*(zkeyHeader.nVars);
- await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 5);
+ await binFileUtils__namespace.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 5);
// B1 Section
const nB1 = await fdMPCParams.readUBE32();
@@ -4902,7 +4930,7 @@ async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, na
return false;
}
fdMPCParams.pos += sG1*(zkeyHeader.nVars);
- await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 6);
+ await binFileUtils__namespace.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 6);
// B2 Section
const nB2 = await fdMPCParams.readUBE32();
@@ -4912,7 +4940,7 @@ async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, na
return false;
}
fdMPCParams.pos += sG2*(zkeyHeader.nVars);
- await binFileUtils.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 7);
+ await binFileUtils__namespace.copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 7);
await writeMPCParams(fdZKeyNew, curve, newMPCParams);
@@ -4963,17 +4991,17 @@ async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, na
You should have received a copy of the GNU General Public License
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
-const sameRatio$2 = sameRatio;
+const sameRatio = sameRatio$2;
async function phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, logger) {
let sr;
- await Blake2b__default['default'].ready();
+ await Blake2b__default["default"].ready();
- const {fd, sections} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2);
- const zkey = await readHeader(fd, sections, false);
+ const {fd, sections} = await binFileUtils__namespace.readBinFile(zkeyFileName, "zkey", 2);
+ const zkey = await readHeader$1(fd, sections, false);
if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
}
@@ -4983,7 +5011,7 @@ async function phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, lo
const mpcParams = await readMPCParams(fd, curve, sections);
- const accumulatedHasher = Blake2b__default['default'](64);
+ const accumulatedHasher = Blake2b__default["default"](64);
accumulatedHasher.update(mpcParams.csHash);
let curDelta = curve.G1.g;
for (let i=0; i<mpcParams.contributions.length; i++) {
@@ ... @@
along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
-const {stringifyBigInts: stringifyBigInts$2} = ffjavascript.utils;
+const {stringifyBigInts: stringifyBigInts$1} = ffjavascript.utils;
async function zkeyExportVerificationKey(zkeyName, /* logger */ ) {
- const {fd, sections} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
- const zkey = await readHeader(fd, sections);
+ const {fd, sections} = await binFileUtils__namespace.readBinFile(zkeyName, "zkey", 2);
+ const zkey = await readHeader$1(fd, sections);
let res;
if (zkey.protocol == "groth16") {
@@ -5856,16 +5884,16 @@ async function groth16Vk(zkey, fd, sections) {
// Read IC Section
///////////
- await binFileUtils.startReadUniqueSection(fd, sections, 3);
+ await binFileUtils__namespace.startReadUniqueSection(fd, sections, 3);
vKey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
const buff = await fd.read(sG1);
const P = curve.G1.toObject(buff);
vKey.IC.push(P);
}
- await binFileUtils.endReadSection(fd);
+ await binFileUtils__namespace.endReadSection(fd);
- vKey = stringifyBigInts$2(vKey);
+ vKey = stringifyBigInts$1(vKey);
return vKey;
}
@@ -5897,7 +5925,7 @@ async function plonkVk(zkey) {
w: curve.Fr.toObject(curve.Fr.w[zkey.power])
};
- vKey = stringifyBigInts$2(vKey);
+ vKey = stringifyBigInts$1(vKey);
return vKey;
}
@@ -5913,7 +5941,7 @@ async function exportSolidityVerifier(zKeyName, templates, logger) {
let template = templates[verificationKey.protocol];
- return ejs__default['default'].render(template , verificationKey);
+ return ejs__default["default"].render(template , verificationKey);
}
/*
@@ -5943,7 +5971,7 @@ var zkey = /*#__PURE__*/Object.freeze({
verifyFromR1cs: phase2verifyFromR1cs,
verifyFromInit: phase2verifyFromInit,
contribute: phase2contribute,
- beacon: beacon$1,
+ beacon: beacon,
exportJson: zkeyExportJson,
bellmanContribute: bellmanContribute,
exportVerificationKey: zkeyExportVerificationKey,
@@ -5972,7 +6000,7 @@ var zkey = /*#__PURE__*/Object.freeze({
async function plonkSetup(r1csName, ptauName, zkeyName, logger) {
- await Blake2b__default['default'].ready();
+ await Blake2b__default["default"].ready();
const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1, 1<<22, 1<<24);
const {curve, power} = await readPTauHeader(fdPTau, sectionsPTau);
@@ -6374,17 +6402,17 @@ async function plonkSetup(r1csName, ptauName, zkeyName, logger) {
You should have received a copy of the GNU General Public License along with
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
-const {stringifyBigInts: stringifyBigInts$3} = ffjavascript.utils;
-const { keccak256 } = jsSha3__default['default'];
+const {stringifyBigInts} = ffjavascript.utils;
+const { keccak256: keccak256$1 } = jsSha3__default["default"];
async function plonk16Prove(zkeyFileName, witnessFileName, logger) {
- const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23);
+ const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils__namespace.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23);
- const wtns = await readHeader$1(fdWtns, sectionsWtns);
+ const wtns = await readHeader(fdWtns, sectionsWtns);
- const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
+ const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils__namespace.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
- const zkey = await readHeader(fdZKey, sectionsZKey);
+ const zkey = await readHeader$1(fdZKey, sectionsZKey);
if (zkey.protocol != "plonk") {
throw new Error("zkey file is not groth16");
}
@@ -6403,7 +6431,7 @@ async function plonk16Prove(zkeyFileName, witnessFileName, logger) {
const n8r = curve.Fr.n8;
if (logger) logger.debug("Reading Wtns");
- const buffWitness = await binFileUtils.readSection(fdWtns, sectionsWtns, 2);
+ const buffWitness = await binFileUtils__namespace.readSection(fdWtns, sectionsWtns, 2);
// First element in plonk is not used and can be any value. (But always the same).
// We set it to zero to go faster in the exponentiations.
buffWitness.set(Fr.zero, 0);
@@ -6430,7 +6458,7 @@ async function plonk16Prove(zkeyFileName, witnessFileName, logger) {
const pol_s2 = new ffjavascript.BigBuffer(zkey.domainSize*n8r);
await fdZKey.readToBuffer(pol_s2, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p + 5*zkey.domainSize*n8r);
- const PTau = await binFileUtils.readSection(fdZKey, sectionsZKey, 14);
+ const PTau = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 14);
const ch = {};
@@ -6482,13 +6510,13 @@ async function plonk16Prove(zkeyFileName, witnessFileName, logger) {
delete proof.eval_t;
- proof = stringifyBigInts$3(proof);
- publicSignals = stringifyBigInts$3(publicSignals);
+ proof = stringifyBigInts(proof);
+ publicSignals = stringifyBigInts(publicSignals);
return {proof, publicSignals};
async function calculateAdditions() {
- const additionsBuff = await binFileUtils.readSection(fdZKey, sectionsZKey, 3);
+ const additionsBuff = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 3);
const sSum = 8+curve.Fr.n8*2;
@@ -6514,9 +6542,9 @@ async function plonk16Prove(zkeyFileName, witnessFileName, logger) {
let B = new ffjavascript.BigBuffer(zkey.domainSize * n8r);
let C = new ffjavascript.BigBuffer(zkey.domainSize * n8r);
- const aMap = await binFileUtils.readSection(fdZKey, sectionsZKey, 4);
- const bMap = await binFileUtils.readSection(fdZKey, sectionsZKey, 5);
- const cMap = await binFileUtils.readSection(fdZKey, sectionsZKey, 6);
+ const aMap = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 4);
+ const bMap = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 5);
+ const cMap = await binFileUtils__namespace.readSection(fdZKey, sectionsZKey, 6);
for (let i=0; i<zkey.nConstrains; i++) {
@@ ... @@
snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
-const {unstringifyBigInts: unstringifyBigInts$1} = ffjavascript.utils;
-const { keccak256: keccak256$1 } = jsSha3__default['default'];
+const {unstringifyBigInts} = ffjavascript.utils;
+const { keccak256 } = jsSha3__default["default"];
async function plonkVerify(vk_verifier, publicSignals, proof, logger) {
- vk_verifier = unstringifyBigInts$1(vk_verifier);
- proof = unstringifyBigInts$1(proof);
- publicSignals = unstringifyBigInts$1(publicSignals);
+ vk_verifier = unstringifyBigInts(vk_verifier);
+ proof = unstringifyBigInts(proof);
+ publicSignals = unstringifyBigInts(publicSignals);
const curve = await getCurveFromName(vk_verifier.curve);
@@ -7456,7 +7484,7 @@ function calculateLagrangeEvaluations(curve, challanges, vk) {
}
function hashToFr(curve, transcript) {
- const v = ffjavascript.Scalar.fromRprBE(new Uint8Array(keccak256$1.arrayBuffer(transcript)));
+ const v = ffjavascript.Scalar.fromRprBE(new Uint8Array(keccak256.arrayBuffer(transcript)));
return curve.Fr.e(v);
}
@@ -7642,7 +7670,7 @@ function i2hex(i) {
return ("0" + i.toString(16)).slice(-2);
}
-function p256$1(n) {
+function p256(n) {
let nstr = n.toString(16);
while (nstr.length < 64) nstr = "0"+nstr;
nstr = `"0x${nstr}"`;
@@ -7658,7 +7686,7 @@ async function plonkExportSolidityCallData(proof, pub) {
let inputs = "";
for (let i=0; i<pub.length; i++) {