'use strict'; Object.defineProperty(exports, '__esModule', { value: true }); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var ffjavascript = require('ffjavascript'); var fs = _interopDefault(require('fs')); var Blake2b = _interopDefault(require('blake2b-wasm')); var readline = _interopDefault(require('readline')); var crypto = _interopDefault(require('crypto')); var circomRuntime = _interopDefault(require('circom_runtime')); async function open(fileName, openFlags, cacheSize, pageSize) { cacheSize = cacheSize || 4096*64; if (["w+", "wx+", "r", "ax+", "a+"].indexOf(openFlags) <0) throw new Error("Invalid open option"); const fd =await fs.promises.open(fileName, openFlags); const stats = await fd.stat(); return new FastFile(fd, stats, cacheSize, pageSize, fileName); } class FastFile { constructor(fd, stats, cacheSize, pageSize, fileName) { this.fileName = fileName; this.fd = fd; this.pos = 0; this.pageSize = pageSize || (1 << 8); while (this.pageSize < stats.blksize) { this.pageSize *= 2; } this.totalSize = stats.size; this.totalPages = Math.floor((stats.size -1) / this.pageSize)+1; this.maxPagesLoaded = Math.floor( cacheSize / this.pageSize)+1; this.pages = {}; this.pendingLoads = []; this.writing = false; this.reading = false; this.avBuffs = []; this.history = {}; } _loadPage(p) { const self = this; const P = new Promise((resolve, reject)=> { self.pendingLoads.push({ page: p, resolve: resolve, reject: reject }); }); self.__statusPage("After Load request: ", p); return P; } __statusPage(s, p) { const logEntry = []; const self=this; if (!self.logHistory) return; logEntry.push("==" + s+ " " +p); let S = ""; for (let i=0; i " + self.history[p][i][j]); } } } _triggerLoad() { const self = this; if (self.reading) return; if (self.pendingLoads.length==0) return; const pageIdxs = Object.keys(self.pages); const deletablePages = []; for (let i=0; i0) && ( (typeof self.pages[self.pendingLoads[0].page] != "undefined" ) ||( (freePages>0) ||(deletablePages.length>0)))) { const load = self.pendingLoads.shift(); if (typeof self.pages[load.page] != "undefined") { self.pages[load.page].pendingOps ++; const idx = deletablePages.indexOf(load.page); if (idx>=0) deletablePages.splice(idx, 1); if (self.pages[load.page].loading) { self.pages[load.page].loading.push(load); } else { load.resolve(); } self.__statusPage("After Load (cached): ", load.page); } else { if (freePages) { freePages--; } else { const fp = deletablePages.shift(); self.__statusPage("Before Unload: ", fp); self.avBuffs.unshift(self.pages[fp]); delete self.pages[fp]; self.__statusPage("After Unload: ", fp); } if (load.page>=self.totalPages) { self.pages[load.page] = getNewPage(); load.resolve(); self.__statusPage("After Load (new): ", load.page); } else { self.reading = true; self.pages[load.page] = getNewPage(); self.pages[load.page].loading = [load]; ops.push(self.fd.read(self.pages[load.page].buff, 0, self.pageSize, load.page*self.pageSize).then((res)=> { self.pages[load.page].size = res.bytesRead; const loading = self.pages[load.page].loading; delete self.pages[load.page].loading; for (let i=0; i { load.reject(err); })); self.__statusPage("After Load (loading): ", load.page); } } } // if (ops.length>1) console.log(ops.length); Promise.all(ops).then( () => { self.reading = false; if (self.pendingLoads.length>0) setImmediate(self._triggerLoad.bind(self)); }); function getNewPage() { if (self.avBuffs.length>0) { const p = self.avBuffs.shift(); p.dirty = false; p.pendingOps = 
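// getNewPage recycles a buffer from avBuffs when one is available, resetting its
// dirty flag, size and pendingOps counter; otherwise it allocates a fresh Uint8Array
// of pageSize bytes. pendingOps starts at 1 for the operation that requested the load.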
1; p.size =0; return p; } else { return { dirty: false, buff: new Uint8Array(self.pageSize), pendingOps: 1, size: 0 }; } } } _triggerWrite() { const self = this; if (self.writing) return; const pageIdxs = Object.keys(self.pages); const ops = []; for (let i=0; i { page.writing = false; return; }, (err) => { console.log("ERROR Writing: "+err); self.error = err; self._tryClose(); })); } } if (self.writing) { Promise.all(ops).then( () => { self.writing = false; setImmediate(self._triggerWrite.bind(self)); self._tryClose(); if (self.pendingLoads.length>0) setImmediate(self._triggerLoad.bind(self)); }); } } _getDirtyPage() { for (let p in this.pages) { if (this.pages[p].dirty) return p; } return -1; } async write(buff, pos) { if (buff.byteLength == 0) return; const self = this; /* if (buff.byteLength > self.pageSize*self.maxPagesLoaded*0.8) { const cacheSize = Math.floor(buff.byteLength * 1.1); this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1; } */ if (typeof pos == "undefined") pos = self.pos; self.pos = pos+buff.byteLength; if (self.totalSize < pos + buff.byteLength) self.totalSize = pos + buff.byteLength; if (self.pendingClose) throw new Error("Writing a closing file"); const firstPage = Math.floor(pos / self.pageSize); const lastPage = Math.floor((pos + buff.byteLength -1) / self.pageSize); const pagePromises = []; for (let i=firstPage; i<=lastPage; i++) pagePromises.push(self._loadPage(i)); self._triggerLoad(); let p = firstPage; let o = pos % self.pageSize; let r = buff.byteLength; while (r>0) { await pagePromises[p-firstPage]; const l = (o+r > self.pageSize) ? (self.pageSize -o) : r; const srcView = buff.slice( buff.byteLength - r, buff.byteLength - r + l); const dstView = new Uint8Array(self.pages[p].buff.buffer, o, l); dstView.set(srcView); self.pages[p].dirty = true; self.pages[p].pendingOps --; self.pages[p].size = Math.max(o+l, self.pages[p].size); if (p>=self.totalPages) { self.totalPages = p+1; } r = r-l; p ++; o = 0; if (!self.writing) setImmediate(self._triggerWrite.bind(self)); } } async read(len, pos) { const self = this; let buff = new Uint8Array(len); await self.readToBuffer(buff, 0, len, pos); return buff; } async readToBuffer(buffDst, offset, len, pos) { if (len == 0) { return; } const self = this; if (len > self.pageSize*self.maxPagesLoaded*0.8) { const cacheSize = Math.floor(len * 1.1); this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1; } if (typeof pos == "undefined") pos = self.pos; self.pos = pos+len; if (self.pendingClose) throw new Error("Reading a closing file"); const firstPage = Math.floor(pos / self.pageSize); const lastPage = Math.floor((pos + len -1) / self.pageSize); const pagePromises = []; for (let i=firstPage; i<=lastPage; i++) pagePromises.push(self._loadPage(i)); self._triggerLoad(); let p = firstPage; let o = pos % self.pageSize; // Remaining bytes to read let r = pos + len > self.totalSize ? len - (pos + len - self.totalSize): len; while (r>0) { await pagePromises[p - firstPage]; self.__statusPage("After Await (read): ", p); // bytes to copy from this page const l = (o+r > self.pageSize) ? 
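// l is the number of bytes served from the current cached page: either the rest of
// the page (pageSize - o) or whatever remains of the request (r), whichever is smaller.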
(self.pageSize -o) : r; const srcView = new Uint8Array(self.pages[p].buff.buffer, self.pages[p].buff.byteOffset + o, l); buffDst.set(srcView, offset+len-r); self.pages[p].pendingOps --; self.__statusPage("After Op done: ", p); r = r-l; p ++; o = 0; if (self.pendingLoads.length>0) setImmediate(self._triggerLoad.bind(self)); } this.pos = pos + len; } _tryClose() { const self = this; if (!self.pendingClose) return; if (self.error) { self.pendingCloseReject(self.error); } const p = self._getDirtyPage(); if ((p>=0) || (self.writing) || (self.reading) || (self.pendingLoads.length>0)) return; self.pendingClose(); } close() { const self = this; if (self.pendingClose) throw new Error("Closing the file twice"); return new Promise((resolve, reject) => { self.pendingClose = resolve; self.pendingCloseReject = reject; self._tryClose(); }).then(()=> { self.fd.close(); }, (err) => { self.fd.close(); throw (err); }); } async discard() { const self = this; await self.close(); await fs.promises.unlink(this.fileName); } async writeULE32(v, pos) { const self = this; const tmpBuff32 = new Uint8Array(4); const tmpBuff32v = new DataView(tmpBuff32.buffer); tmpBuff32v.setUint32(0, v, true); await self.write(tmpBuff32, pos); } async writeUBE32(v, pos) { const self = this; const tmpBuff32 = new Uint8Array(4); const tmpBuff32v = new DataView(tmpBuff32.buffer); tmpBuff32v.setUint32(0, v, false); await self.write(tmpBuff32, pos); } async writeULE64(v, pos) { const self = this; const tmpBuff64 = new Uint8Array(8); const tmpBuff64v = new DataView(tmpBuff64.buffer); tmpBuff64v.setUint32(0, v & 0xFFFFFFFF, true); tmpBuff64v.setUint32(4, Math.floor(v / 0x100000000) , true); await self.write(tmpBuff64, pos); } async readULE32(pos) { const self = this; const b = await self.read(4, pos); const view = new Uint32Array(b.buffer); return view[0]; } async readUBE32(pos) { const self = this; const b = await self.read(4, pos); const view = new DataView(b.buffer); return view.getUint32(0, false); } async readULE64(pos) { const self = this; const b = await self.read(8, pos); const view = new Uint32Array(b.buffer); return view[1] * 0x100000000 + view[0]; } } function createNew(o) { const initialSize = o.initialSize || 1<<20; const fd = new MemFile(); fd.o = o; fd.o.data = new Uint8Array(initialSize); fd.allocSize = initialSize; fd.totalSize = 0; fd.readOnly = false; fd.pos = 0; return fd; } function readExisting(o) { const fd = new MemFile(); fd.o = o; fd.allocSize = o.data.byteLength; fd.totalSize = o.data.byteLength; fd.readOnly = true; fd.pos = 0; return fd; } const tmpBuff32 = new Uint8Array(4); const tmpBuff32v = new DataView(tmpBuff32.buffer); const tmpBuff64 = new Uint8Array(8); const tmpBuff64v = new DataView(tmpBuff64.buffer); class MemFile { constructor() { this.pageSize = 1 << 14; // for compatibility } _resizeIfNeeded(newLen) { if (newLen > this.allocSize) { const newAllocSize = Math.max( this.allocSize + (1 << 20), Math.floor(this.allocSize * 1.1), newLen ); const newData = new Uint8Array(newAllocSize); newData.set(this.o.data); this.o.data = newData; this.allocSize = newAllocSize; } } async write(buff, pos) { const self =this; if (typeof pos == "undefined") pos = self.pos; if (this.readOnly) throw new Error("Writing a read only file"); this._resizeIfNeeded(pos + buff.byteLength); this.o.data.set(buff.slice(), pos); if (pos + buff.byteLength > this.totalSize) this.totalSize = pos + buff.byteLength; this.pos = pos + buff.byteLength; } async readToBuffer(buffDest, offset, len, pos) { const self = this; if (typeof pos == 
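// MemFile keeps the whole file in a single Uint8Array (this.o.data); _resizeIfNeeded
// grows the allocation by at least 1 MiB or ~10% (whichever is larger) before any access
// past the current size, and out-of-bounds reads only throw for read-only files.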
"undefined") pos = self.pos; if (this.readOnly) { if (pos + len > this.totalSize) throw new Error("Reading out of bounds"); } this._resizeIfNeeded(pos + len); const buffSrc = new Uint8Array(this.o.data.buffer, this.o.data.byteOffset + pos, len); buffDest.set(buffSrc, offset); this.pos = pos + len; } async read(len, pos) { const self = this; const buff = new Uint8Array(len); await self.readToBuffer(buff, 0, len, pos); return buff; } close() { if (this.o.data.byteLength != this.totalSize) { this.o.data = this.o.data.slice(0, this.totalSize); } } async discard() { } async writeULE32(v, pos) { const self = this; tmpBuff32v.setUint32(0, v, true); await self.write(tmpBuff32, pos); } async writeUBE32(v, pos) { const self = this; tmpBuff32v.setUint32(0, v, false); await self.write(tmpBuff32, pos); } async writeULE64(v, pos) { const self = this; tmpBuff64v.setUint32(0, v & 0xFFFFFFFF, true); tmpBuff64v.setUint32(4, Math.floor(v / 0x100000000) , true); await self.write(tmpBuff64, pos); } async readULE32(pos) { const self = this; const b = await self.read(4, pos); const view = new Uint32Array(b.buffer); return view[0]; } async readUBE32(pos) { const self = this; const b = await self.read(4, pos); const view = new DataView(b.buffer); return view.getUint32(0, false); } async readULE64(pos) { const self = this; const b = await self.read(8, pos); const view = new Uint32Array(b.buffer); return view[1] * 0x100000000 + view[0]; } } const PAGE_SIZE = 1<<22; function createNew$1(o) { const initialSize = o.initialSize || 0; const fd = new BigMemFile(); fd.o = o; const nPages = initialSize ? Math.floor((initialSize - 1) / PAGE_SIZE)+1 : 0; fd.o.data = []; for (let i=0; i0) { const l = (o+r > PAGE_SIZE) ? (PAGE_SIZE -o) : r; const srcView = buff.slice(buff.byteLength - r, buff.byteLength - r + l); const dstView = new Uint8Array(self.o.data[p].buffer, o, l); dstView.set(srcView); r = r-l; p ++; o = 0; } this.pos = pos + buff.byteLength; } async readToBuffer(buffDst, offset, len, pos) { const self = this; if (typeof pos == "undefined") pos = self.pos; if (this.readOnly) { if (pos + len > this.totalSize) throw new Error("Reading out of bounds"); } this._resizeIfNeeded(pos + len); const firstPage = Math.floor(pos / PAGE_SIZE); let p = firstPage; let o = pos % PAGE_SIZE; // Remaining bytes to read let r = len; while (r>0) { // bytes to copy from this page const l = (o+r > PAGE_SIZE) ? 
(PAGE_SIZE -o) : r; const srcView = new Uint8Array(self.o.data[p].buffer, o, l); buffDst.set(srcView, offset+len-r); r = r-l; p ++; o = 0; } this.pos = pos + len; } async read(len, pos) { const self = this; const buff = new Uint8Array(len); await self.readToBuffer(buff, 0, len, pos); return buff; } close() { } async discard() { } async writeULE32(v, pos) { const self = this; tmpBuff32v$1.setUint32(0, v, true); await self.write(tmpBuff32$1, pos); } async writeUBE32(v, pos) { const self = this; tmpBuff32v$1.setUint32(0, v, false); await self.write(tmpBuff32$1, pos); } async writeULE64(v, pos) { const self = this; tmpBuff64v$1.setUint32(0, v & 0xFFFFFFFF, true); tmpBuff64v$1.setUint32(4, Math.floor(v / 0x100000000) , true); await self.write(tmpBuff64$1, pos); } async readULE32(pos) { const self = this; const b = await self.read(4, pos); const view = new Uint32Array(b.buffer); return view[0]; } async readUBE32(pos) { const self = this; const b = await self.read(4, pos); const view = new DataView(b.buffer); return view.getUint32(0, false); } async readULE64(pos) { const self = this; const b = await self.read(8, pos); const view = new Uint32Array(b.buffer); return view[1] * 0x100000000 + view[0]; } } /* global fetch */ const DEFAULT_CACHE_SIZE = (1 << 16); const DEFAULT_PAGE_SIZE = (1 << 13); async function createOverride(o, b, c) { if (typeof o === "string") { o = { type: "file", fileName: o, cacheSize: b || DEFAULT_CACHE_SIZE, pageSize: c || DEFAULT_PAGE_SIZE }; } if (o.type == "file") { return await open(o.fileName, "w+", o.cacheSize, o.pageSize); } else if (o.type == "mem") { return createNew(o); } else if (o.type == "bigMem") { return createNew$1(o); } else { throw new Error("Invalid FastFile type: "+o.type); } } async function readExisting$2(o, b, c) { if (o instanceof Uint8Array) { o = { type: "mem", data: o }; } if (process.browser) { if (typeof o === "string") { const buff = await fetch(o).then( function(res) { return res.arrayBuffer(); }).then(function (ab) { return new Uint8Array(ab); }); o = { type: "mem", data: buff }; } } else { if (typeof o === "string") { o = { type: "file", fileName: o, cacheSize: b || DEFAULT_CACHE_SIZE, pageSize: c || DEFAULT_PAGE_SIZE }; } } if (o.type == "file") { return await open(o.fileName, "r", o.cacheSize, o.pageSize); } else if (o.type == "mem") { return await readExisting(o); } else if (o.type == "bigMem") { return await readExisting$1(o); } else { throw new Error("Invalid FastFile type: "+o.type); } } async function readBinFile(fileName, type, maxVersion, cacheSize, pageSize) { const fd = await readExisting$2(fileName, cacheSize, pageSize); const b = await fd.read(4); let readedType = ""; for (let i=0; i<4; i++) readedType += String.fromCharCode(b[i]); if (readedType != type) throw new Error(fileName + ": Invalid File format"); let v = await fd.readULE32(); if (v>maxVersion) throw new Error("Version not supported"); const nSections = await fd.readULE32(); // Scan sections let sections = []; for (let i=0; i1) throw new Error(fd.fileName +": Section Duplicated " +idSection); fd.pos = sections[idSection][0].p; fd.readingSection = sections[idSection][0]; } async function endReadSection(fd, noCheck) { if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section"); if (!noCheck) { if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading"); } delete fd.readingSection; } async function writeBigInt(fd, n, n8, pos) { const buff = new Uint8Array(n8); ffjavascript.Scalar.toRprLE(buff, 0, n, 
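// Big integers are serialized little-endian into exactly n8 bytes via Scalar.toRprLE
// (readBigInt below is the symmetric fromRprLE read). Illustrative use of the bin-file
// helpers above (hypothetical file name, not executed):
//   const {fd, sections} = await readBinFile("some.ptau", "ptau", 1);
//   await startReadUniqueSection(fd, sections, idSection); ... await endReadSection(fd);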
n8); await fd.write(buff, pos); } async function readBigInt(fd, n8, pos) { const buff = await fd.read(n8, pos); return ffjavascript.Scalar.fromRprLE(buff, 0, n8); } async function copySection(fdFrom, sections, fdTo, sectionId, size) { if (typeof size === "undefined") { size = sections[sectionId][0].size; } const chunkSize = fdFrom.pageSize; await startReadUniqueSection(fdFrom, sections, sectionId); await startWriteSection(fdTo, sectionId); for (let p=0; p sections[idSection][0].size) { throw new Error("Reading out of the range of the section"); } let buff; if (length < (1 << 30) ) { buff = new Uint8Array(length); } else { buff = new ffjavascript.BigBuffer(length); } await fd.readToBuffer(buff, 0, length, sections[idSection][0].p + offset); return buff; } async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) { const MAX_BUFF_SIZE = fd1.pageSize * 16; await startReadUniqueSection(fd1, sections1, idSection); await startReadUniqueSection(fd2, sections2, idSection); if (sections1[idSection][0].size != sections2[idSection][0].size) return false; const totalBytes=sections1[idSection][0].size; for (let i=0; i= 0) { curve = await ffjavascript.buildBn128(); } else if (["BLS12381"].indexOf(normName) >= 0) { curve = await ffjavascript.buildBls12381(); } else { throw new Error(`Curve not supported: ${name}`); } return curve; function normalizeName(n) { return n.toUpperCase().match(/[A-Za-z0-9]+/g).join(""); } } /* global window */ const _revTable = []; for (let i=0; i<256; i++) { _revTable[i] = _revSlow(i, 8); } function _revSlow(idx, bits) { let res =0; let a = idx; for (let i=0; i>=1; } return res; } function log2( V ) { return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) | ( ( V & 0xAAAAAAAA ) !== 0 ) ); } function formatHash(b, title) { const a = new DataView(b.buffer, b.byteOffset, b.byteLength); let S = ""; for (let i=0; i<4; i++) { if (i>0) S += "\n"; S += "\t\t"; for (let j=0; j<4; j++) { if (j>0) S += " "; S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0"); } } if (title) S = title + "\n" + S; return S; } function hashIsEqual(h1, h2) { if (h1.byteLength != h2.byteLength) return false; var dv1 = new Int8Array(h1); var dv2 = new Int8Array(h2); for (var i = 0 ; i != h1.byteLength ; i++) { if (dv1[i] != dv2[i]) return false; } return true; } function cloneHasher(h) { const ph = h.getPartialHash(); const res = Blake2b(64); res.setPartialHash(ph); return res; } async function sameRatio(curve, g1s, g1sx, g2s, g2sx) { if (curve.G1.isZero(g1s)) return false; if (curve.G1.isZero(g1sx)) return false; if (curve.G2.isZero(g2s)) return false; if (curve.G2.isZero(g2sx)) return false; // return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s)); const res = await curve.pairingEq(g1s, g2sx, curve.G1.neg(g1sx), g2s); return res; } function askEntropy() { if (process.browser) { return window.prompt("Enter a random text. (Entropy): ", ""); } else { const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); return new Promise((resolve) => { rl.question("Enter a random text. 
(Entropy): ", (input) => resolve(input) ); }); } } async function getRandomRng(entropy) { // Generate a random Rng while (!entropy) { entropy = await askEntropy(); } const hasher = Blake2b(64); hasher.update(crypto.randomBytes(64)); const enc = new TextEncoder(); // always utf-8 hasher.update(enc.encode(entropy)); const hash = Buffer.from(hasher.digest()); const seed = []; for (let i=0;i<8;i++) { seed[i] = hash.readUInt32BE(i*4); } const rng = new ffjavascript.ChaCha(seed); return rng; } function rngFromBeaconParams(beaconHash, numIterationsExp) { let nIterationsInner; let nIterationsOuter; if (numIterationsExp<32) { nIterationsInner = (1 << numIterationsExp) >>> 0; nIterationsOuter = 1; } else { nIterationsInner = 0x100000000; nIterationsOuter = (1 << (numIterationsExp-32)) >>> 0; } let curHash = beaconHash; for (let i=0; i0) { const paramsBuff = new Uint8Array(params); await fd.writeULE32(paramsBuff.byteLength); await fd.write(paramsBuff); } else { await fd.writeULE32(0); } } async function writeMPCParams(fd, curve, mpcParams) { await startWriteSection(fd, 10); await fd.write(mpcParams.csHash); await fd.writeULE32(mpcParams.contributions.length); for (let i=0; i> 1; const va = coeffsDV.getUint32(4 + k*sCoef + 4, true); if (va > v) { n = k - 1; } else if (va < v) { m = k + 1; } else { n = k; } } return 4 + m*sCoef; } } async function joinABC(curve, zkey, a, b, c) { const concurrency = curve.tm.concurrency; const n8 = curve.Fr.n8; const nElements = Math.floor(a.byteLength / curve.Fr.n8); const elementsPerChunk = Math.floor(nElements/concurrency); const promises = []; for (let i=0; i. */ const {unstringifyBigInts} = ffjavascript.utils; async function groth16Verify(vk_verifier, publicSignals, proof, logger) { /* let cpub = vk_verifier.IC[0]; for (let s= 0; s< vk_verifier.nPublic; s++) { cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s])); } */ vk_verifier = unstringifyBigInts(vk_verifier); proof = unstringifyBigInts(proof); publicSignals = unstringifyBigInts(publicSignals); const curve = await getCurveFromName(vk_verifier.curve); const IC0 = curve.G1.fromObject(vk_verifier.IC[0]); const IC = new Uint8Array(curve.G1.F.n8*2 * publicSignals.length); const w = new Uint8Array(curve.Fr.n8 * publicSignals.length); for (let i=0; i1) throw new Error(fd.fileName +": File has more than one header"); fd.pos = sections[1][0].p; const n8 = await fd.readULE32(); const buff = await fd.read(n8); const q = ffjavascript.Scalar.fromRprLE(buff); const curve = await getCurveFromQ(q); if (curve.F1.n64*8 != n8) throw new Error(fd.fileName +": Invalid size"); const power = await fd.readULE32(); const ceremonyPower = await fd.readULE32(); if (fd.pos-sections[1][0].p != sections[1][0].size) throw new Error("Invalid PTau header size"); return {curve, power, ceremonyPower}; } async function readPtauPubKey(fd, curve, montgomery) { const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3); return fromPtauPubKeyRpr(buff, 0, curve, montgomery); } function fromPtauPubKeyRpr(buff, pos, curve, montgomery) { const key = { tau: {}, alpha: {}, beta: {} }; key.tau.g1_s = readG1(); key.tau.g1_sx = readG1(); key.alpha.g1_s = readG1(); key.alpha.g1_sx = readG1(); key.beta.g1_s = readG1(); key.beta.g1_sx = readG1(); key.tau.g2_spx = readG2(); key.alpha.g2_spx = readG2(); key.beta.g2_spx = readG2(); return key; function readG1() { let p; if (montgomery) { p = curve.G1.fromRprLEM( buff, pos ); } else { p = curve.G1.fromRprUncompressed( buff, pos ); } pos += curve.G1.F.n8*2; return p; } function readG2() { 
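// A ptau public key blob is 6 G1 points (tau.g1_s, tau.g1_sx, alpha.g1_s, alpha.g1_sx,
// beta.g1_s, beta.g1_sx) followed by 3 G2 points (tau/alpha/beta g2_spx), each encoded in
// Montgomery (LEM) or uncompressed form depending on the `montgomery` flag.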
let p; if (montgomery) { p = curve.G2.fromRprLEM( buff, pos ); } else { p = curve.G2.fromRprUncompressed( buff, pos ); } pos += curve.G2.F.n8*2; return p; } } function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) { writeG1(key.tau.g1_s); writeG1(key.tau.g1_sx); writeG1(key.alpha.g1_s); writeG1(key.alpha.g1_sx); writeG1(key.beta.g1_s); writeG1(key.beta.g1_sx); writeG2(key.tau.g2_spx); writeG2(key.alpha.g2_spx); writeG2(key.beta.g2_spx); async function writeG1(p) { if (montgomery) { curve.G1.toRprLEM(buff, pos, p); } else { curve.G1.toRprUncompressed(buff, pos, p); } pos += curve.F1.n8*2; } async function writeG2(p) { if (montgomery) { curve.G2.toRprLEM(buff, pos, p); } else { curve.G2.toRprUncompressed(buff, pos, p); } pos += curve.F2.n8*2; } return buff; } async function writePtauPubKey(fd, curve, key, montgomery) { const buff = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3); toPtauPubKeyRpr(buff, 0, curve, key, montgomery); await fd.write(buff); } async function readContribution$1(fd, curve) { const c = {}; c.tauG1 = await readG1(); c.tauG2 = await readG2(); c.alphaG1 = await readG1(); c.betaG1 = await readG1(); c.betaG2 = await readG2(); c.key = await readPtauPubKey(fd, curve, true); c.partialHash = await fd.read(216); c.nextChallenge = await fd.read(64); c.type = await fd.readULE32(); const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3); toPtauPubKeyRpr(buffV, 0, curve, c.key, false); const responseHasher = Blake2b(64); responseHasher.setPartialHash(c.partialHash); responseHasher.update(buffV); c.responseHash = responseHasher.digest(); const paramLength = await fd.readULE32(); const curPos = fd.pos; let lastType =0; while (fd.pos-curPos < paramLength) { const buffType = await readDV(1); if (buffType[0]<= lastType) throw new Error("Parameters in the contribution must be sorted"); lastType = buffType[0]; if (buffType[0]==1) { // Name const buffLen = await readDV(1); const buffStr = await readDV(buffLen[0]); c.name = new TextDecoder().decode(buffStr); } else if (buffType[0]==2) { const buffExp = await readDV(1); c.numIterationsExp = buffExp[0]; } else if (buffType[0]==3) { const buffLen = await readDV(1); c.beaconHash = await readDV(buffLen[0]); } else { throw new Error("Parameter not recognized"); } } if (fd.pos != curPos + paramLength) { throw new Error("Parametes do not match"); } return c; async function readG1() { const pBuff = await fd.read(curve.G1.F.n8*2); return curve.G1.fromRprLEM( pBuff ); } async function readG2() { const pBuff = await fd.read(curve.G2.F.n8*2); return curve.G2.fromRprLEM( pBuff ); } async function readDV(n) { const b = await fd.read(n); return new Uint8Array(b); } } async function readContributions(fd, curve, sections) { if (!sections[7]) throw new Error(fd.fileName + ": File has no contributions"); if (sections[7][0].length>1) throw new Error(fd.fileName +": File has more than one contributions section"); fd.pos = sections[7][0].p; const nContributions = await fd.readULE32(); const contributions = []; for (let i=0; i0) { const paramsBuff = new Uint8Array(params); await fd.writeULE32(paramsBuff.byteLength); await fd.write(paramsBuff); } else { await fd.writeULE32(0); } async function writeG1(p) { curve.G1.toRprLEM(buffG1, 0, p); await fd.write(buffG1); } async function writeG2(p) { curve.G2.toRprLEM(buffG2, 0, p); await fd.write(buffG2); } } async function writeContributions(fd, curve, contributions) { await fd.writeULE32(7); // Header type const pContributionsSize = fd.pos; await fd.writeULE64(0); // Temporally set to 0 length await 
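// Section 7 (contributions) is written with a 64-bit size placeholder first; once all
// contributions are serialized, the real byte length is computed and patched back in at
// pContributionsSize before the write position is restored.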
fd.writeULE32(contributions.length); for (let i=0; i< contributions.length; i++) { await writeContribution$1(fd, curve, contributions[i]); } const contributionsSize = fd.pos - pContributionsSize - 8; const oldPos = fd.pos; await fd.writeULE64(contributionsSize, pContributionsSize); fd.pos = oldPos; } function calculateFirstChallengeHash(curve, power, logger) { if (logger) logger.debug("Calculating First Challenge Hash"); const hasher = new Blake2b(64); const vG1 = new Uint8Array(curve.G1.F.n8*2); const vG2 = new Uint8Array(curve.G2.F.n8*2); curve.G1.toRprUncompressed(vG1, 0, curve.G1.g); curve.G2.toRprUncompressed(vG2, 0, curve.G2.g); hasher.update(Blake2b(64).digest()); let n; n=(2 ** power)*2 -1; if (logger) logger.debug("Calculate Initial Hash: tauG1"); hashBlock(vG1, n); n= 2 ** power; if (logger) logger.debug("Calculate Initial Hash: tauG2"); hashBlock(vG2, n); if (logger) logger.debug("Calculate Initial Hash: alphaTauG1"); hashBlock(vG1, n); if (logger) logger.debug("Calculate Initial Hash: betaTauG1"); hashBlock(vG1, n); hasher.update(vG2); return hasher.digest(); function hashBlock(buff, n) { const blockSize = 500000; const nBlocks = Math.floor(n / blockSize); const rem = n % blockSize; const bigBuff = new Uint8Array(blockSize * buff.byteLength); for (let i=0; i0) { lastChallengeHash = contributions[contributions.length-1].nextChallenge; } else { lastChallengeHash = calculateFirstChallengeHash(curve, power, logger); } const fdNew = await createBinFile(newPTauFilename, "ptau", 1, importPoints ? 7: 2); await writePTauHeader(fdNew, curve, power); const contributionPreviousHash = await fdResponse.read(64); if (hashIsEqual(noHash,lastChallengeHash)) { lastChallengeHash = contributionPreviousHash; contributions[contributions.length-1].nextChallenge = lastChallengeHash; } if(!hashIsEqual(contributionPreviousHash,lastChallengeHash)) throw new Error("Wrong contribution. 
this contribution is not based on the previus hash"); const hasherResponse = new Blake2b(64); hasherResponse.update(contributionPreviousHash); const startSections = []; let res; res = await processSection(fdResponse, fdNew, "G1", 2, (2 ** power) * 2 -1, [1], "tauG1"); currentContribution.tauG1 = res[0]; res = await processSection(fdResponse, fdNew, "G2", 3, (2 ** power) , [1], "tauG2"); currentContribution.tauG2 = res[0]; res = await processSection(fdResponse, fdNew, "G1", 4, (2 ** power) , [0], "alphaG1"); currentContribution.alphaG1 = res[0]; res = await processSection(fdResponse, fdNew, "G1", 5, (2 ** power) , [0], "betaG1"); currentContribution.betaG1 = res[0]; res = await processSection(fdResponse, fdNew, "G2", 6, 1 , [0], "betaG2"); currentContribution.betaG2 = res[0]; currentContribution.partialHash = hasherResponse.getPartialHash(); const buffKey = await fdResponse.read(curve.F1.n8*2*6+curve.F2.n8*2*3); currentContribution.key = fromPtauPubKeyRpr(buffKey, 0, curve, false); hasherResponse.update(new Uint8Array(buffKey)); const hashResponse = hasherResponse.digest(); if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: ")); if (importPoints) { const nextChallengeHasher = new Blake2b(64); nextChallengeHasher.update(hashResponse); await hashSection(nextChallengeHasher, fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1", logger); await hashSection(nextChallengeHasher, fdNew, "G2", 3, (2 ** power) , "tauG2", logger); await hashSection(nextChallengeHasher, fdNew, "G1", 4, (2 ** power) , "alphaTauG1", logger); await hashSection(nextChallengeHasher, fdNew, "G1", 5, (2 ** power) , "betaTauG1", logger); await hashSection(nextChallengeHasher, fdNew, "G2", 6, 1 , "betaG2", logger); currentContribution.nextChallenge = nextChallengeHasher.digest(); if (logger) logger.info(formatHash(currentContribution.nextChallenge, "Next Challenge Hash: ")); } else { currentContribution.nextChallenge = noHash; } contributions.push(currentContribution); await writeContributions(fdNew, curve, contributions); await fdResponse.close(); await fdNew.close(); await fdOld.close(); return currentContribution.nextChallenge; async function processSection(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) { if (importPoints) { return await processSectionImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName); } else { return await processSectionNoImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName); } } async function processSectionImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) { const G = curve[groupName]; const scG = G.F.n8; const sG = G.F.n8*2; const singularPoints = []; await startWriteSection(fdTo, sectionId); const nPointsChunk = Math.floor((1<<24)/sG); startSections[sectionId] = fdTo.pos; for (let i=0; i< nPoints; i += nPointsChunk) { if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`); const n = Math.min(nPoints-i, nPointsChunk); const buffC = await fdFrom.read(n * scG); hasherResponse.update(buffC); const buffLEM = await G.batchCtoLEM(buffC); await fdTo.write(buffLEM); for (let j=0; j=i) && (sp < i+n)) { const P = G.fromRprLEM(buffLEM, (sp-i)*sG); singularPoints.push(P); } } } await endWriteSection(fdTo); return singularPoints; } async function processSectionNoImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) { const G = curve[groupName]; const scG = G.F.n8; const 
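// When importPoints is false, the response points are only hashed in their compressed
// form and the requested singular points are decompressed on the fly; nothing is copied
// into the new ptau file for this section.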
singularPoints = []; const nPointsChunk = Math.floor((1<<24)/scG); for (let i=0; i< nPoints; i += nPointsChunk) { if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`); const n = Math.min(nPoints-i, nPointsChunk); const buffC = await fdFrom.read(n * scG); hasherResponse.update(buffC); for (let j=0; j=i) && (sp < i+n)) { const P = G.fromRprCompressed(buffC, (sp-i)*scG); singularPoints.push(P); } } } return singularPoints; } async function hashSection(nextChallengeHasher, fdTo, groupName, sectionId, nPoints, sectionName, logger) { const G = curve[groupName]; const sG = G.F.n8*2; const nPointsChunk = Math.floor((1<<24)/sG); const oldPos = fdTo.pos; fdTo.pos = startSections[sectionId]; for (let i=0; i< nPoints; i += nPointsChunk) { if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`); const n = Math.min(nPoints-i, nPointsChunk); const buffLEM = await fdTo.read(n * sG); const buffU = await G.batchLEMtoU(buffLEM); nextChallengeHasher.update(buffU); } fdTo.pos = oldPos; } } const sameRatio$1 = sameRatio; async function verifyContribution(curve, cur, prev, logger) { let sr; if (cur.type == 1) { // Verify the beacon. const beaconKey = keyFromBeacon(curve, prev.nextChallenge, cur.beaconHash, cur.numIterationsExp); if (!curve.G1.eq(cur.key.tau.g1_s, beaconKey.tau.g1_s)) { if (logger) logger.error(`BEACON key (tauG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G1.eq(cur.key.tau.g1_sx, beaconKey.tau.g1_sx)) { if (logger) logger.error(`BEACON key (tauG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G2.eq(cur.key.tau.g2_spx, beaconKey.tau.g2_spx)) { if (logger) logger.error(`BEACON key (tauG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G1.eq(cur.key.alpha.g1_s, beaconKey.alpha.g1_s)) { if (logger) logger.error(`BEACON key (alphaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G1.eq(cur.key.alpha.g1_sx, beaconKey.alpha.g1_sx)) { if (logger) logger.error(`BEACON key (alphaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G2.eq(cur.key.alpha.g2_spx, beaconKey.alpha.g2_spx)) { if (logger) logger.error(`BEACON key (alphaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G1.eq(cur.key.beta.g1_s, beaconKey.beta.g1_s)) { if (logger) logger.error(`BEACON key (betaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G1.eq(cur.key.beta.g1_sx, beaconKey.beta.g1_sx)) { if (logger) logger.error(`BEACON key (betaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G2.eq(cur.key.beta.g2_spx, beaconKey.beta.g2_spx)) { if (logger) logger.error(`BEACON key (betaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } } cur.key.tau.g2_sp = curve.G2.toAffine(getG2sp(curve, 0, prev.nextChallenge, cur.key.tau.g1_s, cur.key.tau.g1_sx)); cur.key.alpha.g2_sp = curve.G2.toAffine(getG2sp(curve, 1, prev.nextChallenge, cur.key.alpha.g1_s, cur.key.alpha.g1_sx)); cur.key.beta.g2_sp = curve.G2.toAffine(getG2sp(curve, 2, prev.nextChallenge, cur.key.beta.g1_s, cur.key.beta.g1_sx)); sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx); if (sr !== true) { if (logger) 
logger.error("INVALID key (tau) in challenge #"+cur.id); return false; } sr = await sameRatio$1(curve, cur.key.alpha.g1_s, cur.key.alpha.g1_sx, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID key (alpha) in challenge #"+cur.id); return false; } sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, cur.key.beta.g2_sp, cur.key.beta.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID key (beta) in challenge #"+cur.id); return false; } sr = await sameRatio$1(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID tau*G1. challenge #"+cur.id+" It does not follow the previous contribution"); return false; } sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2); if (sr !== true) { if (logger) logger.error("INVALID tau*G2. challenge #"+cur.id+" It does not follow the previous contribution"); return false; } sr = await sameRatio$1(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID alpha*G1. challenge #"+cur.id+" It does not follow the previous contribution"); return false; } sr = await sameRatio$1(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID beta*G1. challenge #"+cur.id+" It does not follow the previous contribution"); return false; } sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, prev.betaG2, cur.betaG2); if (sr !== true) { if (logger) logger.error("INVALID beta*G2. challenge #"+cur.id+"It does not follow the previous contribution"); return false; } if (logger) logger.info("Powers Of tau file OK!"); return true; } async function verify(tauFilename, logger) { let sr; await Blake2b.ready(); const {fd, sections} = await readBinFile(tauFilename, "ptau", 1); const {curve, power, ceremonyPower} = await readPTauHeader(fd, sections); const contrs = await readContributions(fd, curve, sections); if (logger) logger.debug("power: 2**" + power); // Verify Last contribution if (logger) logger.debug("Computing initial contribution hash"); const initialContribution = { tauG1: curve.G1.g, tauG2: curve.G2.g, alphaG1: curve.G1.g, betaG1: curve.G1.g, betaG2: curve.G2.g, nextChallenge: calculateFirstChallengeHash(curve, ceremonyPower, logger), responseHash: Blake2b(64).digest() }; if (contrs.length == 0) { if (logger) logger.error("This file has no contribution! It cannot be used in production"); return false; } let prevContr; if (contrs.length>1) { prevContr = contrs[contrs.length-2]; } else { prevContr = initialContribution; } const curContr = contrs[contrs.length-1]; if (logger) logger.debug("Validating contribution #"+contrs[contrs.length-1].id); const res = await verifyContribution(curve, curContr, prevContr, logger); if (!res) return false; const nextContributionHasher = Blake2b(64); nextContributionHasher.update(curContr.responseHash); // Verify powers and compute nextChallengeHash // await test(); // Verify Section tau*G1 if (logger) logger.debug("Verifying powers in tau*G1 section"); const rTau1 = await processSection(2, "G1", "tauG1", (2 ** power)*2-1, [0, 1], logger); sr = await sameRatio$1(curve, rTau1.R1, rTau1.R2, curve.G2.g, curContr.tauG2); if (sr !== true) { if (logger) logger.error("tauG1 section. 
Powers do not match"); return false; } if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) { if (logger) logger.error("First element of tau*G1 section must be the generator"); return false; } if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) { if (logger) logger.error("Second element of tau*G1 section does not match the one in the contribution section"); return false; } // await test(); // Verify Section tau*G2 if (logger) logger.debug("Verifying powers in tau*G2 section"); const rTau2 = await processSection(3, "G2", "tauG2", 2 ** power, [0, 1], logger); sr = await sameRatio$1(curve, curve.G1.g, curContr.tauG1, rTau2.R1, rTau2.R2); if (sr !== true) { if (logger) logger.error("tauG2 section. Powers do not match"); return false; } if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) { if (logger) logger.error("First element of tau*G2 section must be the generator"); return false; } if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) { if (logger) logger.error("Second element of tau*G2 section does not match the one in the contribution section"); return false; } // Verify Section alpha*tau*G1 if (logger) logger.debug("Verifying powers in alpha*tau*G1 section"); const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 2 ** power, [0], logger); sr = await sameRatio$1(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, curve.G2.g, curContr.tauG2); if (sr !== true) { if (logger) logger.error("alphaTauG1 section. Powers do not match"); return false; } if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) { if (logger) logger.error("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section"); return false; } // Verify Section beta*tau*G1 if (logger) logger.debug("Verifying powers in beta*tau*G1 section"); const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 2 ** power, [0], logger); sr = await sameRatio$1(curve, rBetaTauG1.R1, rBetaTauG1.R2, curve.G2.g, curContr.tauG2); if (sr !== true) { if (logger) logger.error("betaTauG1 section. Powers do not match"); return false; } if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) { if (logger) logger.error("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section"); return false; } //Verify Beta G2 const betaG2 = await processSectionBetaG2(logger); if (!curve.G2.eq(curContr.betaG2, betaG2)) { if (logger) logger.error("betaG2 element in betaG2 section does not match the one in the contribution section"); return false; } const nextContributionHash = nextContributionHasher.digest(); // Check the nextChallengeHash if (power == ceremonyPower) { if (!hashIsEqual(nextContributionHash,curContr.nextChallenge)) { if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section"); return false; } } if (logger) logger.info(formatHash(nextContributionHash, "Next challenge hash: ")); // Verify Previous contributions printContribution(curContr, prevContr); for (let i = contrs.length-2; i>=0; i--) { const curContr = contrs[i]; const prevContr = (i>0) ? contrs[i-1] : initialContribution; const res = await verifyContribution(curve, curContr, prevContr, logger); if (!res) return false; printContribution(curContr, prevContr); } if (logger) logger.info("-----------------------------------------------------"); if ((!sections[12]) || (!sections[13]) || (!sections[14]) || (!sections[15])) { if (logger) logger.warn( "this file does not contain phase2 precalculated values. 
Please run: \n" + " snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony." ); } else { let res; res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1", logger); if (!res) return false; res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2", logger); if (!res) return false; res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1", logger); if (!res) return false; res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1", logger); if (!res) return false; } await fd.close(); if (logger) logger.info("Powers of Tau Ok!"); return true; function printContribution(curContr, prevContr) { if (!logger) return; logger.info("-----------------------------------------------------"); logger.info(`Contribution #${curContr.id}: ${curContr.name ||""}`); logger.info(formatHash(curContr.nextChallenge, "Next Challenge: ")); const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3); toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false); const responseHasher = Blake2b(64); responseHasher.setPartialHash(curContr.partialHash); responseHasher.update(buffV); const responseHash = responseHasher.digest(); logger.info(formatHash(responseHash, "Response Hash:")); logger.info(formatHash(prevContr.nextChallenge, "Response Hash:")); if (curContr.type == 1) { logger.info(`Beacon generator: ${byteArray2hex(curContr.beaconHash)}`); logger.info(`Beacon iterations Exp: ${curContr.numIterationsExp}`); } } async function processSectionBetaG2(logger) { const G = curve.G2; const sG = G.F.n8*2; const buffUv = new Uint8Array(sG); if (!sections[6]) { logger.error("File has no BetaG2 section"); throw new Error("File has no BetaG2 section"); } if (sections[6].length>1) { logger.error("File has no BetaG2 section"); throw new Error("File has more than one GetaG2 section"); } fd.pos = sections[6][0].p; const buff = await fd.read(sG); const P = G.fromRprLEM(buff); G.toRprUncompressed(buffUv, 0, P); nextContributionHasher.update(buffUv); return P; } async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes, logger) { const MAX_CHUNK_SIZE = 1<<16; const G = curve[groupName]; const sG = G.F.n8*2; await startReadUniqueSection(fd, sections, idSection); const singularPoints = []; let R1 = G.zero; let R2 = G.zero; let lastBase = G.zero; for (let i=0; i0) { const firstBase = G.fromRprLEM(bases, 0); const r = crypto.randomBytes(4).readUInt32BE(0, true); R1 = G.add(R1, G.timesScalar(lastBase, r)); R2 = G.add(R2, G.timesScalar(firstBase, r)); } const r1 = await G.multiExpAffine(bases.slice(0, (n-1)*sG), scalars); const r2 = await G.multiExpAffine(bases.slice(sG), scalars); R1 = G.add(R1, r1); R2 = G.add(R2, r2); lastBase = G.fromRprLEM( bases, (n-1)*sG); for (let j=0; j=i) && (sp < i+n)) { const P = G.fromRprLEM(bases, (sp-i)*sG); singularPoints.push(P); } } } await endReadSection(fd); return { R1: R1, R2: R2, singularPoints: singularPoints }; } async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName, logger) { if (logger) logger.debug(`Verifying phase2 calculated values ${sectionName}...`); const G = curve[gName]; const sG = G.F.n8*2; const seed= new Array(8); for (let i=0; i<8; i++) { seed[i] = crypto.randomBytes(4).readUInt32BE(0, true); } for (let p=0; p<= power; p ++) { const res = await verifyPower(p); if (!res) return false; } if (tauSection == 2) { const res = await verifyPower(power+1); if (!res) return false; } return true; async function verifyPower(p) { if (logger) logger.debug(`Power ${p}...`); const n8r 
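// verifyPower seeds a ChaCha stream and draws one 32-bit scalar per point (nPoints = 2**p);
// verifyLagrangeEvaluations uses these to check the precomputed phase2 (Lagrange) sections
// against the tau-power sections, presumably as a random-linear-combination comparison.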
= curve.Fr.n8; const nPoints = 2 ** p; let buff_r = new Uint32Array(nPoints); let buffG; let rng = new ffjavascript.ChaCha(seed); if (logger) logger.debug(`Creating random numbers Powers${p}...`); for (let i=0; i1) { e = e /2; power += 1; } if (2 ** power != domainSize) throw new Error("Invalid file size"); if (logger) logger.debug("Power to tau size: "+power); const rng = await getRandomRng(entropy); const fdTo = await createOverride(responesFileName); // Calculate the hash const challengeHasher = Blake2b(64); for (let i=0; i { logger.debug(k + ".g1_s: " + curve.G1.toString(key[k].g1_s, 16)); logger.debug(k + ".g1_sx: " + curve.G1.toString(key[k].g1_sx, 16)); logger.debug(k + ".g2_sp: " + curve.G2.toString(key[k].g2_sp, 16)); logger.debug(k + ".g2_spx: " + curve.G2.toString(key[k].g2_spx, 16)); logger.debug(""); }); } const responseHasher = Blake2b(64); await fdTo.write(challengeHash); responseHasher.update(challengeHash); await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power)*2-1, curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG1" , logger ); await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G2", (2 ** power) , curve.Fr.one , key.tau.prvKey, "COMPRESSED", "tauG2" , logger ); await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power) , key.alpha.prvKey, key.tau.prvKey, "COMPRESSED", "alphaTauG1", logger ); await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power) , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG1" , logger ); await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G2", 1 , key.beta.prvKey , key.tau.prvKey, "COMPRESSED", "betaTauG2" , logger ); // Write and hash key const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3); toPtauPubKeyRpr(buffKey, 0, curve, key, false); await fdTo.write(buffKey); responseHasher.update(buffKey); const responseHash = responseHasher.digest(); if (logger) logger.info(formatHash(responseHash, "Contribution Response Hash: ")); await fdTo.close(); await fdFrom.close(); } async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,numIterationsExp, logger) { const beaconHash = hex2ByteArray(beaconHashStr); if ( (beaconHash.byteLength == 0) || (beaconHash.byteLength*2 !=beaconHashStr.length)) { if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)"); return false; } if (beaconHash.length>=256) { if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes"); return false; } numIterationsExp = parseInt(numIterationsExp); if ((numIterationsExp<10)||(numIterationsExp>63)) { if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)"); return false; } await Blake2b.ready(); const {fd: fdOld, sections} = await readBinFile(oldPtauFilename, "ptau", 1); const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections); if (power != ceremonyPower) { if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file."); return false; } if (sections[12]) { if (logger) logger.warn("Contributing into a file that has phase2 calculated. 
You will have to prepare phase2 again."); } const contributions = await readContributions(fdOld, curve, sections); const curContribution = { name: name, type: 1, // Beacon numIterationsExp: numIterationsExp, beaconHash: beaconHash }; let lastChallengeHash; if (contributions.length>0) { lastChallengeHash = contributions[contributions.length-1].nextChallenge; } else { lastChallengeHash = calculateFirstChallengeHash(curve, power, logger); } curContribution.key = keyFromBeacon(curve, lastChallengeHash, beaconHash, numIterationsExp); const responseHasher = new Blake2b(64); responseHasher.update(lastChallengeHash); const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 7); await writePTauHeader(fdNew, curve, power); const startSections = []; let firstPoints; firstPoints = await processSection(2, "G1", (2 ** power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1", logger ); curContribution.tauG1 = firstPoints[1]; firstPoints = await processSection(3, "G2", (2 ** power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2", logger ); curContribution.tauG2 = firstPoints[1]; firstPoints = await processSection(4, "G1", (2 ** power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1", logger ); curContribution.alphaG1 = firstPoints[0]; firstPoints = await processSection(5, "G1", (2 ** power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1", logger ); curContribution.betaG1 = firstPoints[0]; firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2", logger ); curContribution.betaG2 = firstPoints[0]; curContribution.partialHash = responseHasher.getPartialHash(); const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3); toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false); responseHasher.update(new Uint8Array(buffKey)); const hashResponse = responseHasher.digest(); if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: ")); const nextChallengeHasher = new Blake2b(64); nextChallengeHasher.update(hashResponse); await hashSection(fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1", logger); await hashSection(fdNew, "G2", 3, (2 ** power) , "tauG2", logger); await hashSection(fdNew, "G1", 4, (2 ** power) , "alphaTauG1", logger); await hashSection(fdNew, "G1", 5, (2 ** power) , "betaTauG1", logger); await hashSection(fdNew, "G2", 6, 1 , "betaG2", logger); curContribution.nextChallenge = nextChallengeHasher.digest(); if (logger) logger.info(formatHash(curContribution.nextChallenge, "Next Challenge Hash: ")); contributions.push(curContribution); await writeContributions(fdNew, curve, contributions); await fdOld.close(); await fdNew.close(); return hashResponse; async function processSection(sectionId, groupName, NPoints, first, inc, sectionName, logger) { const res = []; fdOld.pos = sections[sectionId][0].p; await startWriteSection(fdNew, sectionId); startSections[sectionId] = fdNew.pos; const G = curve[groupName]; const sG = G.F.n8*2; const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks let t = first; for (let i=0 ; i0) { lastChallengeHash = contributions[contributions.length-1].nextChallenge; } else { lastChallengeHash = calculateFirstChallengeHash(curve, power, logger); } // Generate a random key curContribution.key = createPTauKey(curve, lastChallengeHash, rng); const responseHasher = new Blake2b(64); responseHasher.update(lastChallengeHash); const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 7); await 
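// The random-key contribution path (presumably contribute()) mirrors beacon() above: each
// powers-of-tau section is rewritten by processSection with a starting scalar `first` and a
// per-point multiplier `inc` taken from the freshly generated private key, the response is
// hashed, and a new challenge hash is derived over the rewritten sections before the
// contribution record is appended.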
writePTauHeader(fdNew, curve, power); const startSections = []; let firstPoints; firstPoints = await processSection(2, "G1", (2 ** power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1" ); curContribution.tauG1 = firstPoints[1]; firstPoints = await processSection(3, "G2", (2 ** power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2" ); curContribution.tauG2 = firstPoints[1]; firstPoints = await processSection(4, "G1", (2 ** power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1" ); curContribution.alphaG1 = firstPoints[0]; firstPoints = await processSection(5, "G1", (2 ** power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1" ); curContribution.betaG1 = firstPoints[0]; firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2" ); curContribution.betaG2 = firstPoints[0]; curContribution.partialHash = responseHasher.getPartialHash(); const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3); toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false); responseHasher.update(new Uint8Array(buffKey)); const hashResponse = responseHasher.digest(); if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: ")); const nextChallengeHasher = new Blake2b(64); nextChallengeHasher.update(hashResponse); await hashSection(fdNew, "G1", 2, (2 ** power) * 2 -1, "tauG1"); await hashSection(fdNew, "G2", 3, (2 ** power) , "tauG2"); await hashSection(fdNew, "G1", 4, (2 ** power) , "alphaTauG1"); await hashSection(fdNew, "G1", 5, (2 ** power) , "betaTauG1"); await hashSection(fdNew, "G2", 6, 1 , "betaG2"); curContribution.nextChallenge = nextChallengeHasher.digest(); if (logger) logger.info(formatHash(curContribution.nextChallenge, "Next Challenge Hash: ")); contributions.push(curContribution); await writeContributions(fdNew, curve, contributions); await fdOld.close(); await fdNew.close(); return hashResponse; async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) { const res = []; fdOld.pos = sections[sectionId][0].p; await startWriteSection(fdNew, sectionId); startSections[sectionId] = fdNew.pos; const G = curve[groupName]; const sG = G.F.n8*2; const chunkSize = Math.floor((1<<20) / sG); // 128Mb chunks let t = first; for (let i=0 ; i { let S = ""; const keys = Object.keys(lc); keys.forEach( (k) => { let name = syms.varIdx2Name[k]; if (name == "one") name = ""; let vs = r1cs.curve.Fr.toString(lc[k]); if (vs == "1") vs = ""; // Do not show ones if (vs == "-1") vs = "-"; // Do not show ones if ((S!="")&&(vs[0]!="-")) vs = "+"+vs; if (S!="") vs = " "+vs; S= S + vs + name; }); return S; }; const S = `[ ${lc2str(c[0])} ] * [ ${lc2str(c[1])} ] - [ ${lc2str(c[2])} ] = 0`; if (logger) logger.info(S); } } const SUBARRAY_SIZE = 0x40000; const BigArrayHandler = { get: function(obj, prop) { if (!isNaN(prop)) { return obj.getElement(prop); } else return obj[prop]; }, set: function(obj, prop, value) { if (!isNaN(prop)) { return obj.setElement(prop, value); } else { obj[prop] = value; return true; } } }; class _BigArray { constructor (initSize) { this.length = initSize || 0; this.arr = new Array(SUBARRAY_SIZE); for (let i=0; i= this.length) this.length = idx+1; return true; } getKeys() { const newA = new BigArray(); for (let i=0; i1<<20) { constraints = new BigArray(); } else { constraints = []; } for (let i=0; i1<<20) { map = new BigArray(); } else { map = []; } for (let i=0; i { res[k] = 
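// stringifyBigInts$1 recurses through the fields of an object and turns bigints (and
// values exposing an eq method) into base-10 strings so that r1csExportJson can emit
// plain JSON.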
stringifyBigInts$1(Fr, o[k]); }); return res; } else if ((typeof(o) == "bigint") || o.eq !== undefined) { return o.toString(10); } else { return o; } } async function r1csExportJson(r1csFileName, logger) { const cir = await readR1cs(r1csFileName, true, true, true, logger); const Fr=cir.curve.Fr; delete cir.curve; return stringifyBigInts$1(Fr, cir); } var r1cs = /*#__PURE__*/Object.freeze({ __proto__: null, print: r1csPrint, info: r1csInfo, exportJson: r1csExportJson }); async function loadSymbols(symFileName) { const sym = { labelIdx2Name: [ "one" ], varIdx2Name: [ "one" ], componentIdx2Name: [] }; const fd = await readExisting$2(symFileName); const buff = await fd.read(fd.totalSize); const symsStr = new TextDecoder("utf-8").decode(buff); const lines = symsStr.split("\n"); for (let i=0; i " + value.toString()); }; } if (options.trigger) { if (!sym) sym = await loadSymbols(symName); wcOps.logStartComponent= function(cIdx) { if (logger) logger.info("START: " + sym.componentIdx2Name[cIdx]); }; wcOps.logFinishComponent= function(cIdx) { if (logger) logger.info("FINISH: " + sym.componentIdx2Name[cIdx]); }; } wcOps.sym = sym; const wc = await WitnessCalculatorBuilder$1(wasm, wcOps); const w = await wc.calculateWitness(input); const fdWtns = await createBinFile(wtnsFileName, "wtns", 2, 2); await write(fdWtns, w, wc.prime); await fdWtns.close(); } async function wtnsExportJson(wtnsFileName) { const w = await read(wtnsFileName); return w; } var wtns = /*#__PURE__*/Object.freeze({ __proto__: null, calculate: wtnsCalculate, debug: wtnsDebug, exportJson: wtnsExportJson }); const SUBARRAY_SIZE$1 = 0x40000; const BigArrayHandler$1 = { get: function(obj, prop) { if (!isNaN(prop)) { return obj.getElement(prop); } else return obj[prop]; }, set: function(obj, prop, value) { if (!isNaN(prop)) { return obj.setElement(prop, value); } else { obj[prop] = value; return true; } } }; class _BigArray$1 { constructor (initSize) { this.length = initSize || 0; this.arr = new Array(SUBARRAY_SIZE$1); for (let i=0; i= this.length) this.length = idx+1; return true; } getKeys() { const newA = new BigArray$1(); for (let i=0; i power) { if (logger) logger.error(`circuit too big for this power of tau ceremony. 
${r1cs.nConstraints}*2 > 2**${power}`); return -1; } if (!sectionsPTau[12]) { if (logger) logger.error("Powers of tau is not prepared."); return -1; } const nPublic = r1cs.nOutputs + r1cs.nPubInputs; const domainSize = 2 ** cirPower; // Write the header /////////// await startWriteSection(fdZKey, 1); await fdZKey.writeULE32(1); // Groth await endWriteSection(fdZKey); // Write the Groth header section /////////// await startWriteSection(fdZKey, 2); const primeQ = curve.q; const n8q = (Math.floor( (ffjavascript.Scalar.bitLength(primeQ) - 1) / 64) +1)*8; const primeR = curve.r; const n8r = (Math.floor( (ffjavascript.Scalar.bitLength(primeR) - 1) / 64) +1)*8; const Rr = ffjavascript.Scalar.mod(ffjavascript.Scalar.shl(1, n8r*8), primeR); const R2r = curve.Fr.e(ffjavascript.Scalar.mod(ffjavascript.Scalar.mul(Rr,Rr), primeR)); await fdZKey.writeULE32(n8q); await writeBigInt(fdZKey, primeQ, n8q); await fdZKey.writeULE32(n8r); await writeBigInt(fdZKey, primeR, n8r); await fdZKey.writeULE32(r1cs.nVars); // Total number of bars await fdZKey.writeULE32(nPublic); // Total number of public vars (not including ONE) await fdZKey.writeULE32(domainSize); // domainSize let bAlpha1; bAlpha1 = await fdPTau.read(sG1, sectionsPTau[4][0].p); await fdZKey.write(bAlpha1); bAlpha1 = await curve.G1.batchLEMtoU(bAlpha1); csHasher.update(bAlpha1); let bBeta1; bBeta1 = await fdPTau.read(sG1, sectionsPTau[5][0].p); await fdZKey.write(bBeta1); bBeta1 = await curve.G1.batchLEMtoU(bBeta1); csHasher.update(bBeta1); let bBeta2; bBeta2 = await fdPTau.read(sG2, sectionsPTau[6][0].p); await fdZKey.write(bBeta2); bBeta2 = await curve.G2.batchLEMtoU(bBeta2); csHasher.update(bBeta2); const bg1 = new Uint8Array(sG1); curve.G1.toRprLEM(bg1, 0, curve.G1.g); const bg2 = new Uint8Array(sG2); curve.G2.toRprLEM(bg2, 0, curve.G2.g); const bg1U = new Uint8Array(sG1); curve.G1.toRprUncompressed(bg1U, 0, curve.G1.g); const bg2U = new Uint8Array(sG2); curve.G2.toRprUncompressed(bg2U, 0, curve.G2.g); await fdZKey.write(bg2); // gamma2 await fdZKey.write(bg1); // delta1 await fdZKey.write(bg2); // delta2 csHasher.update(bg2U); // gamma2 csHasher.update(bg1U); // delta1 csHasher.update(bg2U); // delta2 await endWriteSection(fdZKey); if (logger) logger.info("Reading r1cs"); let sR1cs = await readSection(fdR1cs, sectionsR1cs, 2); const A = new BigArray$1(r1cs.nVars); const B1 = new BigArray$1(r1cs.nVars); const B2 = new BigArray$1(r1cs.nVars); const C = new BigArray$1(r1cs.nVars- nPublic -1); const IC = new Array(nPublic+1); if (logger) logger.info("Reading tauG1"); let sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize -1)*sG1, domainSize*sG1); if (logger) logger.info("Reading tauG2"); let sTauG2 = await readSection(fdPTau, sectionsPTau, 13, (domainSize -1)*sG2, domainSize*sG2); if (logger) logger.info("Reading alphatauG1"); let sAlphaTauG1 = await readSection(fdPTau, sectionsPTau, 14, (domainSize -1)*sG1, domainSize*sG1); if (logger) logger.info("Reading betatauG1"); let sBetaTauG1 = await readSection(fdPTau, sectionsPTau, 15, (domainSize -1)*sG1, domainSize*sG1); await processConstraints(); await composeAndWritePoints(3, "G1", IC, "IC"); await writeHs(); await hashHPoints(); await composeAndWritePoints(8, "G1", C, "C"); await composeAndWritePoints(5, "G1", A, "A"); await composeAndWritePoints(6, "G1", B1, "B1"); await composeAndWritePoints(7, "G2", B2, "B2"); const csHash = csHasher.digest(); // Contributions section await startWriteSection(fdZKey, 10); await fdZKey.write(csHash); await fdZKey.writeULE32(0); await 
endWriteSection(fdZKey); if (logger) logger.info(formatHash(csHash, "Circuit hash: ")); await fdZKey.close(); await fdR1cs.close(); await fdPTau.close(); return csHash; async function writeHs() { await startWriteSection(fdZKey, 9); const buffOut = new ffjavascript.BigBuffer(domainSize*sG1); if (cirPower < curve.Fr.s) { let sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize*2-1)*sG1, domainSize*2*sG1); for (let i=0; i< domainSize; i++) { if ((logger)&&(i%10000 == 0)) logger.debug(`spliting buffer: ${i}/${domainSize}`); const buff = sTauG1.slice( (i*2+1)*sG1, (i*2+1)*sG1 + sG1 ); buffOut.set(buff, i*sG1); } } else if (cirPower == curve.Fr.s) { const o = sectionsPTau[12][0].p + ((2 ** (cirPower+1)) -1)*sG1; await fdPTau.readToBuffer(buffOut, 0, domainSize*sG1, o + domainSize*sG1); } else { if (logger) logger.error("Circuit too big"); throw new Error("Circuit too big for this curve"); } await fdZKey.write(buffOut); await endWriteSection(fdZKey); } async function processConstraints() { const buffCoeff = new Uint8Array(12 + curve.Fr.n8); const buffCoeffV = new DataView(buffCoeff.buffer); const bOne = new Uint8Array(curve.Fr.n8); curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1)); let r1csPos = 0; function r1cs_readULE32() { const buff = sR1cs.slice(r1csPos, r1csPos+4); r1csPos += 4; const buffV = new DataView(buff.buffer); return buffV.getUint32(0, true); } const coefs = new BigArray$1(); for (let c=0; c=0) { n = curve.Fr.fromRprLE(sR1cs.slice(c[3], c[3] + curve.Fr.n8), 0); } else { n = curve.Fr.fromRprLE(bOne, 0); } const nR2 = curve.Fr.mul(n, R2r); curve.Fr.toRprLE(buffCoeff, 12, nR2); buffSection.set(buffCoeff, coefsPos); coefsPos += buffCoeff.length; } } async function composeAndWritePoints(idSection, groupName, arr, sectionName) { const CHUNK_SIZE= 1<<13; const G = curve[groupName]; hashU32(arr.length); await startWriteSection(fdZKey, idSection); let opPromises = []; let i=0; while (i { if (logger) logger.debug(`Writing points end ${sectionName}: ${_i}/${arr.length}`); return r; })); i += n; t++; } const result = await Promise.all(opPromises); for (let k=0; k 2<<14) { bBases = new ffjavascript.BigBuffer(acc*sGin); bScalars = new ffjavascript.BigBuffer(acc*curve.Fr.n8); } else { bBases = new Uint8Array(acc*sGin); bScalars = new Uint8Array(acc*curve.Fr.n8); } let pB =0; let pS =0; const sBuffs = [ sTauG1, sTauG2, sAlphaTauG1, sBetaTauG1 ]; const bOne = new Uint8Array(curve.Fr.n8); curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1)); let offset = 0; for (let i=0; i=0) { bScalars.set( sR1cs.slice( arr[i][j][2], arr[i][j][2] + curve.Fr.n8 ), offset*curve.Fr.n8 ); } else { bScalars.set(bOne, offset*curve.Fr.n8); } offset ++; } } if (arr.length>1) { const task = []; task.push({cmd: "ALLOCSET", var: 0, buff: bBases}); task.push({cmd: "ALLOCSET", var: 1, buff: bScalars}); task.push({cmd: "ALLOC", var: 2, len: arr.length*sGmid}); pB = 0; pS = 0; let pD =0; for (let i=0; i newMPCParams.contributions.length) { if (logger) logger.error("The impoerted file does not include new contributions"); return false; } for (let i=0; i=0; i--) { const c = mpcParams.contributions[i]; if (logger) logger.info("-------------------------"); if (logger) logger.info(formatHash(c.contributionHash, `contribution #${i+1} ${c.name ? 
c.name : ""}:`)); if (c.type == 1) { if (logger) logger.info(`Beacon generator: ${byteArray2hex(c.beaconHash)}`); if (logger) logger.info(`Beacon iterations Exp: ${c.numIterationsExp}`); } } if (logger) logger.info("-------------------------"); if (logger) logger.info("ZKey Ok!"); return true; async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) { const MAX_CHUNK_SIZE = 1<<20; const G = curve[groupName]; const sG = G.F.n8*2; await startReadUniqueSection(fd1, sections1, idSection); await startReadUniqueSection(fd2, sections2, idSection); let R1 = G.zero; let R2 = G.zero; const nPoints = sections1[idSection][0].size / sG; for (let i=0; i=256) { if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes"); return false; } numIterationsExp = parseInt(numIterationsExp); if ((numIterationsExp<10)||(numIterationsExp>63)) { if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)"); return false; } const {fd: fdOld, sections: sections} = await readBinFile(zkeyNameOld, "zkey", 2); const zkey = await readHeader(fdOld, sections, "groth16"); const curve = await getCurveFromQ(zkey.q); const mpcParams = await readMPCParams(fdOld, curve, sections); const fdNew = await createBinFile(zkeyNameNew, "zkey", 1, 10); const rng = await rngFromBeaconParams(beaconHash, numIterationsExp); const transcriptHasher = Blake2b(64); transcriptHasher.update(mpcParams.csHash); for (let i=0; i", vkalpha1_str); const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+ `${verificationKey.vk_beta_2[0][0].toString()}], `+ `[${verificationKey.vk_beta_2[1][1].toString()},` + `${verificationKey.vk_beta_2[1][0].toString()}]`; template = template.replace("<%vk_beta2%>", vkbeta2_str); const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+ `${verificationKey.vk_gamma_2[0][0].toString()}], `+ `[${verificationKey.vk_gamma_2[1][1].toString()},` + `${verificationKey.vk_gamma_2[1][0].toString()}]`; template = template.replace("<%vk_gamma2%>", vkgamma2_str); const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+ `${verificationKey.vk_delta_2[0][0].toString()}], `+ `[${verificationKey.vk_delta_2[1][1].toString()},` + `${verificationKey.vk_delta_2[1][0].toString()}]`; template = template.replace("<%vk_delta2%>", vkdelta2_str); // The points template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString()); template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString()); let vi = ""; for (let i=0; i", vi); return template; } var zkey = /*#__PURE__*/Object.freeze({ __proto__: null, newZKey: newZKey, exportBellman: phase2exportMPCParams, importBellman: phase2importMPCParams, verify: phase2verify, contribute: phase2contribute, beacon: beacon$1, exportJson: zkeyExportJson, bellmanContribute: bellmanContribute, exportVerificationKey: zkeyExportVerificationKey, exportSolidityVerifier: exportSolidityVerifier }); exports.groth16 = groth16; exports.powersOfTau = powersoftau; exports.r1cs = r1cs; exports.wtns = wtns; exports.zKey = zkey;
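
/*
 * Minimal usage sketch for the namespaces exported above (groth16,
 * powersOfTau, r1cs, wtns, zKey).  It is illustrative only and not part of
 * the build: the require path and the file names ("circuit.r1cs",
 * "circuit_final.zkey", and the two output .json files) are placeholders,
 * and the argument lists are assumed from the definitions visible in this
 * bundle (r1csExportJson takes an .r1cs file name plus an optional logger;
 * zkeyExportVerificationKey is assumed to take a .zkey file name).  Check the
 * package documentation before relying on these signatures.
 *
 *   const lib = require("./main.cjs"); // path to this bundle (assumption)
 *   const fs = require("fs");
 *
 *   (async () => {
 *       // Dump the constraint system as JSON; bigints come back as base-10 strings.
 *       const circuit = await lib.r1cs.exportJson("circuit.r1cs", console);
 *       fs.writeFileSync("circuit.r1cs.json", JSON.stringify(circuit, null, 1));
 *
 *       // Extract the verification key from a finalized proving key.
 *       const vKey = await lib.zKey.exportVerificationKey("circuit_final.zkey");
 *       fs.writeFileSync("verification_key.json", JSON.stringify(vKey, null, 1));
 *   })();
 */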