#! /usr/bin/env node 'use strict'; function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var fs = _interopDefault(require('fs')); var ffjavascript = require('ffjavascript'); var Blake2b = _interopDefault(require('blake2b-wasm')); var readline = _interopDefault(require('readline')); var crypto = _interopDefault(require('crypto')); var circomRuntime = _interopDefault(require('circom_runtime')); var path = _interopDefault(require('path')); var Logger = _interopDefault(require('logplease')); async function open(fileName, openFlags, cacheSize) { cacheSize = cacheSize || 4096*64; if (["w+", "wx+", "r", "ax+", "a+"].indexOf(openFlags) <0) throw new Error("Invalid open option"); const fd =await fs.promises.open(fileName, openFlags); const stats = await fd.stat(); return new FastFile(fd, stats, cacheSize, fileName); } class FastFile { constructor(fd, stats, cacheSize, fileName) { this.fileName = fileName; this.fd = fd; this.pos = 0; this.pageBits = 8; this.pageSize = (1 << this.pageBits); while (this.pageSize < stats.blksize*4) { this.pageBits ++; this.pageSize *= 2; } this.totalSize = stats.size; this.totalPages = Math.floor((stats.size -1) / this.pageSize)+1; this.maxPagesLoaded = Math.floor( cacheSize / this.pageSize)+1; this.pages = {}; this.pendingLoads = []; this.writing = false; this.reading = false; } _loadPage(p) { const self = this; return new Promise((resolve, reject)=> { self.pendingLoads.push({ page: p, resolve: resolve, reject: reject }); setImmediate(self._triggerLoad.bind(self)); }); } _triggerLoad() { const self = this; processPendingLoads(); if (self.pendingLoads.length == 0) return; if (Object.keys(self.pages).length >= self.maxPagesLoaded) { const dp = getDeletablePage(); if (dp<0) { // // No sizes available // setTimeout(self._triggerLoad.bind(self), 10000); return; } delete self.pages[dp]; } const load = self.pendingLoads.shift(); if (load.page>=self.totalPages) { self.pages[load.page] = { dirty: false, buff: new Uint8Array(self.pageSize), pendingOps: 1, size: 0 }; load.resolve(); setImmediate(self._triggerLoad.bind(self)); return; } if (self.reading) { self.pendingLoads.unshift(load); return; // Only one read at a time. 
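/* Page-cache note (descriptive comment, not part of the original bundle):
   FastFile keeps at most `maxPagesLoaded` pages of `pageSize` bytes in `this.pages`;
   `_triggerLoad` serves `pendingLoads` with a single disk read in flight, so a load
   that arrives while `self.reading` is set is pushed back onto the queue and retried
   on the next setImmediate tick.
   A minimal usage sketch, assuming a hypothetical file "example.bin" exists:
     const fd = await open("example.bin", "r");   // read-only FastFile
     const word = await fd.readULE32(0);          // little-endian u32 at offset 0
     await fd.close();
*/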
} self.reading = true; const page = { dirty: false, buff: new Uint8Array(self.pageSize), pendingOps: 1, size: 0 }; self.fd.read(page.buff, 0, self.pageSize, load.page*self.pageSize).then((res)=> { page.size = res.bytesRead; self.pages[load.page] = page; self.reading = false; load.resolve(); setImmediate(self._triggerLoad.bind(self)); }, (err) => { load.reject(err); }); function processPendingLoads() { const newPendingLoads = []; for (let i=0; i { self.writing = false; setImmediate(self._triggerWrite.bind(self)); setImmediate(self._triggerLoad.bind(self)); }, (err) => { console.log("ERROR Writing: "+err); self.error = err; self._tryClose(); }); } _getDirtyPage() { for (let p in this.pages) { if (this.pages[p].dirty) return p; } return -1; } async write(buff, pos) { if (buff.byteLength == 0) return; const self = this; if (buff.byteLength > self.pageSize*self.maxPagesLoaded*0.8) { const cacheSize = Math.floor(buff.byteLength * 1.1); this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1; } if (typeof pos == "undefined") pos = self.pos; self.pos = pos+buff.byteLength; if (self.totalSize < pos + buff.byteLength) self.totalSize = pos + buff.byteLength; if (self.pendingClose) throw new Error("Writing a closing file"); const firstPage = Math.floor(pos / self.pageSize); const lastPage = Math.floor((pos+buff.byteLength-1) / self.pageSize); for (let i=firstPage; i<=lastPage; i++) await self._loadPage(i); let p = firstPage; let o = pos % self.pageSize; let r = buff.byteLength; while (r>0) { const l = (o+r > self.pageSize) ? (self.pageSize -o) : r; const srcView = new Uint8Array(buff.buffer, buff.byteLength - r, l); const dstView = new Uint8Array(self.pages[p].buff.buffer, o, l); dstView.set(srcView); self.pages[p].dirty = true; self.pages[p].pendingOps --; self.pages[p].size = Math.max(o+l, self.pages[p].size); if (p>=self.totalPages) { self.totalPages = p+1; } r = r-l; p ++; o = 0; } setImmediate(self._triggerWrite.bind(self)); } async read(len, pos) { if (len == 0) { return new Uint8Array(0); } const self = this; if (len > self.pageSize*self.maxPagesLoaded*0.8) { const cacheSize = Math.floor(len * 1.1); this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1; } if (typeof pos == "undefined") pos = self.pos; self.pos = pos+len; if (self.pendingClose) throw new Error("Reading a closing file"); const firstPage = Math.floor(pos / self.pageSize); const lastPage = Math.floor((pos+len-1) / self.pageSize); for (let i=firstPage; i<=lastPage; i++) await self._loadPage(i); let buff = new Uint8Array(len); let dstView = new Uint8Array(buff); let p = firstPage; let o = pos % self.pageSize; // Remaining bytes to read let r = pos + len > self.totalSize ? len - (pos + len - self.totalSize): len; while (r>0) { // bytes to copy from this page const l = (o+r > self.pageSize) ? 
(self.pageSize -o) : r; const srcView = new Uint8Array(self.pages[p].buff.buffer, o, l); buff.set(srcView, dstView.byteLength-r); self.pages[p].pendingOps --; r = r-l; p ++; o = 0; } setImmediate(self._triggerLoad.bind(self)); return buff; } _tryClose() { const self = this; if (!self.pendingClose) return; if (self.error) { self.pendingCloseReject(self.error); } const p = self._getDirtyPage(); if ((p>=0) || (self.writing) || (self.reading) || (self.pendingLoads.length>0)) return; self.pendingClose(); } close() { const self = this; if (self.pendingClose) throw new Error("Closing the file twice"); return new Promise((resolve, reject) => { self.pendingClose = resolve; self.pendingCloseReject = reject; self._tryClose(); }).then(()=> { self.fd.close(); }, (err) => { self.fd.close(); throw (err); }); } async discard() { const self = this; await self.close(); await fs.promises.unlink(this.fileName); } async writeULE32(v, pos) { const self = this; const b = Uint32Array.of(v); await self.write(new Uint8Array(b.buffer), pos); } async writeUBE32(v, pos) { const self = this; const buff = new Uint8Array(4); const buffV = new DataView(buff.buffer); buffV.setUint32(0, v, false); await self.write(buff, pos); } async writeULE64(v, pos) { const self = this; const b = Uint32Array.of(v & 0xFFFFFFFF, Math.floor(v / 0x100000000)); await self.write(new Uint8Array(b.buffer), pos); } async readULE32(pos) { const self = this; const b = await self.read(4, pos); const view = new Uint32Array(b.buffer); return view[0]; } async readUBE32(pos) { const self = this; const b = await self.read(4, pos); const view = new DataView(b.buffer); return view.getUint32(0, false); } async readULE64(pos) { const self = this; const b = await self.read(8, pos); const view = new Uint32Array(b.buffer); return view[1] * 0x100000000 + view[0]; } } function readExisting(o) { const fd = new MemFile(); fd.o = o; fd.allocSize = o.data.byteLength; fd.totalSize = o.data.byteLength; fd.readOnly = true; fd.pos = 0; return fd; } class MemFile { constructor() { this.pageSize = 1 << 14; // for compatibility } _resizeIfNeeded(newLen) { if (newLen > this.allocSize) { const newAllocSize = Math.max( this.allocSize + (1 << 20), Math.floor(this.allocSize * 1.1), newLen ); const newData = new Uint8Array(newAllocSize); newData.set(this.o.data); this.o.data = newData; this.allocSize = newAllocSize; } } async write(buff, pos) { const self =this; if (typeof pos == "undefined") pos = self.pos; if (this.readOnly) throw new Error("Writing a read only file"); this._resizeIfNeeded(pos + buff.byteLength); this.o.data.set(buff, pos); if (pos + buff.byteLength > this.totalSize) this.totalSize = pos + buff.byteLength; this.pos = pos + buff.byteLength; } async read(len, pos) { const self = this; if (typeof pos == "undefined") pos = self.pos; if (this.readOnly) { if (pos + len > this.totalSize) throw new Error("Reading out of bounds"); } this._resizeIfNeeded(pos + len); const buff = this.o.data.slice(pos, pos+len); this.pos = pos + len; return buff; } close() { if (this.o.data.byteLength != this.totalSize) { this.o.data = this.o.data.slice(0, this.totalSize); } } async discard() { } async writeULE32(v, pos) { const self = this; const b = Uint32Array.of(v); await self.write(new Uint8Array(b.buffer), pos); } async writeUBE32(v, pos) { const self = this; const buff = new Uint8Array(4); const buffV = new DataView(buff.buffer); buffV.setUint32(0, v, false); await self.write(buff, pos); } async writeULE64(v, pos) { const self = this; const b = Uint32Array.of(v & 0xFFFFFFFF, 
Math.floor(v / 0x100000000)); await self.write(new Uint8Array(b.buffer), pos); } async readULE32(pos) { const self = this; const b = await self.read(4, pos); const view = new Uint32Array(b.buffer); return view[0]; } async readUBE32(pos) { const self = this; const b = await self.read(4, pos); const view = new DataView(b.buffer); return view.getUint32(0, false); } async readULE64(pos) { const self = this; const b = await self.read(8, pos); const view = new Uint32Array(b.buffer); return view[1] * 0x100000000 + view[0]; } } /* global fetch */ async function readExisting$1(o, b) { if (o instanceof Uint8Array) { o = { type: "mem", data: o }; } if (process.browser) { if (typeof o === "string") { const buff = await fetch(o).then( function(res) { return res.arrayBuffer(); }).then(function (ab) { return new Uint8Array(ab); }); o = { type: "mem", data: buff }; } } else { if (typeof o === "string") { o = { type: "file", fileName: o, cacheSize: b }; } } if (o.type == "file") { return await open(o.fileName, "r", o.cacheSize); } else if (o.type == "mem") { return await readExisting(o); } else { throw new Error("Invalid FastFile type: "+o.type); } } async function readBinFile(fileName, type, maxVersion) { const fd = await readExisting$1(fileName); const b = await fd.read(4); let readedType = ""; for (let i=0; i<4; i++) readedType += String.fromCharCode(b[i]); if (readedType != type) throw new Error(fileName + ": Invalid File format"); let v = await fd.readULE32(); if (v>maxVersion) throw new Error("Version not supported"); const nSections = await fd.readULE32(); // Scan sections let sections = []; for (let i=0; i1) throw new Error(fd.fileName +": Section Duplicated " +idSection); fd.pos = sections[idSection][0].p; fd.readingSection = sections[idSection][0]; } async function endReadSection(fd, noCheck) { if (typeof fd.readingSection == "undefined") throw new Error("Not reading a section"); if (!noCheck) { if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size"); } delete fd.readingSection; } async function readBigInt(fd, n8, pos) { const buff = await fd.read(n8, pos); return ffjavascript.Scalar.fromRprLE(buff, 0, n8); } async function loadHeader(fd,sections) { const res = {}; await startReadUniqueSection(fd, sections, 1); // Read Header res.n8 = await fd.readULE32(); res.prime = await readBigInt(fd, res.n8); res.Fr = new ffjavascript.ZqField(res.prime); res.nVars = await fd.readULE32(); res.nOutputs = await fd.readULE32(); res.nPubInputs = await fd.readULE32(); res.nPrvInputs = await fd.readULE32(); res.nLabels = await fd.readULE64(); res.nConstraints = await fd.readULE32(); await endReadSection(fd); return res; } async function load(fileName, loadConstraints, loadMap) { const {fd, sections} = await readBinFile(fileName, "r1cs", 1); const res = await loadHeader(fd, sections); if (loadConstraints) { await startReadUniqueSection(fd, sections, 2); res.constraints = []; for (let i=0; i { self.pendingLoads.push({ page: p, resolve: resolve, reject: reject }); setImmediate(self._triggerLoad.bind(self)); }); } _triggerLoad() { const self = this; processPendingLoads(); if (self.pendingLoads.length == 0) return; if (Object.keys(self.pages).length >= self.maxPagesLoaded) { const dp = getDeletablePage(); if (dp<0) { // // No sizes available // setTimeout(self._triggerLoad.bind(self), 10000); return; } delete self.pages[dp]; } const load = self.pendingLoads.shift(); if (load.page>=self.totalPages) { self.pages[load.page] = { dirty: false, buff: new Uint8Array(self.pageSize), 
pendingOps: 1, size: 0 }; load.resolve(); setImmediate(self._triggerLoad.bind(self)); return; } if (self.reading) { self.pendingLoads.unshift(load); return; // Only one read at a time. } self.reading = true; const page = { dirty: false, buff: new Uint8Array(self.pageSize), pendingOps: 1, size: 0 }; self.fd.read(page.buff, 0, self.pageSize, load.page*self.pageSize).then((res)=> { page.size = res.bytesRead; self.pages[load.page] = page; self.reading = false; load.resolve(); setImmediate(self._triggerLoad.bind(self)); }, (err) => { load.reject(err); }); function processPendingLoads() { const newPendingLoads = []; for (let i=0; i { self.writing = false; setImmediate(self._triggerWrite.bind(self)); setImmediate(self._triggerLoad.bind(self)); }, (err) => { console.log("ERROR Writing: "+err); self.error = err; self._tryClose(); }); } _getDirtyPage() { for (let p in this.pages) { if (this.pages[p].dirty) return p; } return -1; } async write(buff, pos) { if (buff.byteLength == 0) return; const self = this; /* if (buff.byteLength > self.pageSize*self.maxPagesLoaded*0.8) { const cacheSize = Math.floor(buff.byteLength * 1.1); this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1; } */ if (typeof pos == "undefined") pos = self.pos; self.pos = pos+buff.byteLength; if (self.totalSize < pos + buff.byteLength) self.totalSize = pos + buff.byteLength; if (self.pendingClose) throw new Error("Writing a closing file"); const firstPage = Math.floor(pos / self.pageSize); const lastPage = Math.floor((pos+buff.byteLength-1) / self.pageSize); // for (let i=firstPage; i<=lastPage; i++) await self._loadPage(i); let p = firstPage; let o = pos % self.pageSize; let r = buff.byteLength; while (r>0) { await self._loadPage(p); const l = (o+r > self.pageSize) ? (self.pageSize -o) : r; const srcView = new Uint8Array(buff.buffer, buff.byteLength - r, l); const dstView = new Uint8Array(self.pages[p].buff.buffer, o, l); dstView.set(srcView); self.pages[p].dirty = true; self.pages[p].pendingOps --; self.pages[p].size = Math.max(o+l, self.pages[p].size); if (p>=self.totalPages) { self.totalPages = p+1; } r = r-l; p ++; o = 0; } setImmediate(self._triggerWrite.bind(self)); } async read(len, pos) { if (len == 0) { return new Uint8Array(0); } const self = this; if (len > self.pageSize*self.maxPagesLoaded*0.8) { const cacheSize = Math.floor(len * 1.1); this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1; } if (typeof pos == "undefined") pos = self.pos; self.pos = pos+len; if (self.pendingClose) throw new Error("Reading a closing file"); const firstPage = Math.floor(pos / self.pageSize); const lastPage = Math.floor((pos+len-1) / self.pageSize); for (let i=firstPage; i<=lastPage; i++) await self._loadPage(i); let buff = new Uint8Array(len); let dstView = new Uint8Array(buff); let p = firstPage; let o = pos % self.pageSize; // Remaining bytes to read let r = pos + len > self.totalSize ? len - (pos + len - self.totalSize): len; while (r>0) { // bytes to copy from this page const l = (o+r > self.pageSize) ? 
(self.pageSize -o) : r; const srcView = new Uint8Array(self.pages[p].buff.buffer, o, l); buff.set(srcView, dstView.byteLength-r); self.pages[p].pendingOps --; r = r-l; p ++; o = 0; } setImmediate(self._triggerLoad.bind(self)); return buff; } _tryClose() { const self = this; if (!self.pendingClose) return; if (self.error) { self.pendingCloseReject(self.error); } const p = self._getDirtyPage(); if ((p>=0) || (self.writing) || (self.reading) || (self.pendingLoads.length>0)) return; self.pendingClose(); } close() { const self = this; if (self.pendingClose) throw new Error("Closing the file twice"); return new Promise((resolve, reject) => { self.pendingClose = resolve; self.pendingCloseReject = reject; self._tryClose(); }).then(()=> { self.fd.close(); }, (err) => { self.fd.close(); throw (err); }); } async discard() { const self = this; await self.close(); await fs.promises.unlink(this.fileName); } async writeULE32(v, pos) { const self = this; tmpBuff32v.setUint32(0, v, true); await self.write(tmpBuff32, pos); } async writeUBE32(v, pos) { const self = this; tmpBuff32v.setUint32(0, v, true); await self.write(tmpBuff32, pos); } async writeULE64(v, pos) { const self = this; tmpBuff64v.setUint32(0, v & 0xFFFFFFFF, true); tmpBuff64v.setUint32(4, Math.floor(v / 0x100000000) , true); await self.write(tmpBuff64, pos); } async readULE32(pos) { const self = this; const b = await self.read(4, pos); const view = new Uint32Array(b.buffer); return view[0]; } async readUBE32(pos) { const self = this; const b = await self.read(4, pos); const view = new DataView(b.buffer); return view.getUint32(0, false); } async readULE64(pos) { const self = this; const b = await self.read(8, pos); const view = new Uint32Array(b.buffer); return view[1] * 0x100000000 + view[0]; } } function createNew(o) { const initialSize = o.initialSize || 1<<20; const fd = new MemFile$1(); fd.o = o; fd.o.data = new Uint8Array(initialSize); fd.allocSize = initialSize; fd.totalSize = 0; fd.readOnly = false; fd.pos = 0; return fd; } function readExisting$2(o) { const fd = new MemFile$1(); fd.o = o; fd.allocSize = o.data.byteLength; fd.totalSize = o.data.byteLength; fd.readOnly = true; fd.pos = 0; return fd; } class MemFile$1 { constructor() { this.pageSize = 1 << 14; // for compatibility } _resizeIfNeeded(newLen) { if (newLen > this.allocSize) { const newAllocSize = Math.max( this.allocSize + (1 << 20), Math.floor(this.allocSize * 1.1), newLen ); const newData = new Uint8Array(newAllocSize); newData.set(this.o.data); this.o.data = newData; this.allocSize = newAllocSize; } } async write(buff, pos) { const self =this; if (typeof pos == "undefined") pos = self.pos; if (this.readOnly) throw new Error("Writing a read only file"); this._resizeIfNeeded(pos + buff.byteLength); this.o.data.set(buff, pos); if (pos + buff.byteLength > this.totalSize) this.totalSize = pos + buff.byteLength; this.pos = pos + buff.byteLength; } async read(len, pos) { const self = this; if (typeof pos == "undefined") pos = self.pos; if (this.readOnly) { if (pos + len > this.totalSize) throw new Error("Reading out of bounds"); } this._resizeIfNeeded(pos + len); const buff = this.o.data.slice(pos, pos+len); this.pos = pos + len; return buff; } close() { if (this.o.data.byteLength != this.totalSize) { this.o.data = this.o.data.slice(0, this.totalSize); } } async discard() { } async writeULE32(v, pos) { const self = this; const b = Uint32Array.of(v); await self.write(new Uint8Array(b.buffer), pos); } async writeUBE32(v, pos) { const self = this; const buff = new Uint8Array(4); 
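/* Encoding note (descriptive comment, not part of the original bundle):
   the *ULE32/*ULE64 helpers store integers little-endian (Uint32Array views or
   DataView with littleEndian = true), while writeUBE32/readUBE32 are intended to be
   big-endian (DataView with littleEndian = false). A 64-bit value is split into two
   32-bit words: the low word at offset 0 and Math.floor(v / 2**32) at offset 4.
   Standalone sketch of the ULE64 layout (illustrative only):
     const b = new Uint8Array(8);
     const dv = new DataView(b.buffer);
     const v = 5 * 0x100000000 + 7;                        // example value
     dv.setUint32(0, v & 0xFFFFFFFF, true);                // low 32 bits, LE
     dv.setUint32(4, Math.floor(v / 0x100000000), true);   // high 32 bits, LE
     // readULE64 reverses this: view[1] * 0x100000000 + view[0]
*/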
const buffV = new DataView(buff.buffer); buffV.setUint32(0, v, false); await self.write(buff, pos); } async writeULE64(v, pos) { const self = this; const b = Uint32Array.of(v & 0xFFFFFFFF, Math.floor(v / 0x100000000)); await self.write(new Uint8Array(b.buffer), pos); } async readULE32(pos) { const self = this; const b = await self.read(4, pos); const view = new Uint32Array(b.buffer); return view[0]; } async readUBE32(pos) { const self = this; const b = await self.read(4, pos); const view = new DataView(b.buffer); return view.getUint32(0, false); } async readULE64(pos) { const self = this; const b = await self.read(8, pos); const view = new Uint32Array(b.buffer); return view[1] * 0x100000000 + view[0]; } } /* global fetch */ async function createOverride(o, b) { if (typeof o === "string") { o = { type: "file", fileName: o, cacheSize: b }; } if (o.type == "file") { return await open$1(o.fileName, "w+", o.cacheSize); } else if (o.type == "mem") { return createNew(o); } else { throw new Error("Invalid FastFile type: "+o.type); } } async function readExisting$3(o, b) { if (o instanceof Uint8Array) { o = { type: "mem", data: o }; } if (process.browser) { if (typeof o === "string") { const buff = await fetch(o).then( function(res) { return res.arrayBuffer(); }).then(function (ab) { return new Uint8Array(ab); }); o = { type: "mem", data: buff }; } } else { if (typeof o === "string") { o = { type: "file", fileName: o, cacheSize: b }; } } if (o.type == "file") { return await open$1(o.fileName, "r", o.cacheSize); } else if (o.type == "mem") { return await readExisting$2(o); } else { throw new Error("Invalid FastFile type: "+o.type); } } async function loadSymbols(symFileName) { const sym = { labelIdx2Name: [ "one" ], varIdx2Name: [ "one" ], componentIdx2Name: [] }; const fd = await readExisting$3(symFileName); const buff = await fd.read(fd.totalSize); const symsStr = new TextDecoder("utf-8").decode(buff); const lines = symsStr.split("\n"); for (let i=0; i { let S = ""; const keys = Object.keys(lc); keys.forEach( (k) => { let name = syms.varIdx2Name[k]; if (name == "one") name = ""; let vs = r1cs.Fr.toString(lc[k]); if (vs == "1") vs = ""; // Do not show ones if (vs == "-1") vs = "-"; // Do not show ones if ((S!="")&&(vs[0]!="-")) vs = "+"+vs; if (S!="") vs = " "+vs; S= S + vs + name; }); return S; }; const S = `[ ${lc2str(c[0])} ] * [ ${lc2str(c[1])} ] - [ ${lc2str(c[2])} ] = 0`; if (logger) logger.info(S); } } const bls12381r = ffjavascript.Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16); const bn128r = ffjavascript.Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617"); async function r1csInfo(r1csName, logger) { const cir = await load(r1csName); if (ffjavascript.Scalar.eq(cir.prime, bn128r)) { if (logger) logger.info("Curve: bn-128"); } else if (ffjavascript.Scalar.eq(cir.prime, bls12381r)) { if (logger) logger.info("Curve: bls12-381"); } else { if (logger) logger.info(`Unknown Curve. 
Prime: ${ffjavascript.Scalar.toString(cir.prime)}`); } if (logger) logger.info(`# of Wires: ${cir.nVars}`); if (logger) logger.info(`# of Constraints: ${cir.nConstraints}`); if (logger) logger.info(`# of Private Inputs: ${cir.nPrvInputs}`); if (logger) logger.info(`# of Public Inputs: ${cir.nPubInputs}`); if (logger) logger.info(`# of Outputs: ${cir.nOutputs}`); return cir; } async function r1csExportJson(r1csFileName, logger) { const cir = await load(r1csFileName, true, true); return cir; } var name = "snarkjs"; var type = "module"; var version = "0.3.7"; var description = "zkSNARKs implementation in JavaScript"; var main = "./build/main.cjs"; var module$1 = "./main.js"; var exports$1 = { "import": "./main.js", require: "./build/main.cjs" }; var scripts = { test: "mocha", build: "rollup -c config/rollup.cjs.config.js", buildcli: "rollup -c config/rollup.cli.config.js", buildiife: "BROWSER=true rollup -c config/rollup.iife.config.js", buildiifemin: "BROWSER=true rollup -c config/rollup.iife_min.config.js" }; var bin = { snarkjs: "build/cli.cjs" }; var directories = { templates: "templates" }; var keywords = [ "zksnark", "zcash", "ethereum", "zero", "knowlage", "cryptography", "circuit" ]; var author = "Jordi Baylina"; var license = "GPL-3.0"; var repository = { type: "git", url: "https://github.com/iden3/snarkjs.git" }; var dependencies = { "blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git", circom_runtime: "0.0.9", fastfile: "0.0.9", ffjavascript: "0.2.4", keccak: "^3.0.0", logplease: "^1.2.15", r1csfile: "0.0.12", yargs: "^12.0.5" }; var devDependencies = { chai: "^4.2.0", eslint: "^6.8.0", lodash: "^4.17.15", mocha: "^7.1.1", rollup: "^2.20.0", "rollup-plugin-commonjs": "^10.1.0", "rollup-plugin-ignore": "^1.0.6", "rollup-plugin-json": "^4.0.0", "rollup-plugin-node-resolve": "^5.2.0", "rollup-plugin-replace": "^2.2.0", "rollup-plugin-terser": "^6.1.0" }; var pkg = { name: name, type: type, version: version, description: description, main: main, module: module$1, exports: exports$1, scripts: scripts, bin: bin, directories: directories, keywords: keywords, author: author, license: license, repository: repository, dependencies: dependencies, devDependencies: devDependencies }; const version$1 = pkg.version; let selectedCommand = null; async function clProcessor(commands) { const cl = []; const argv = {}; for (let i=2; i 1) { argv[arr[0]] = arr.slice(1).join("="); } else { argv[arr[0]] = true; } } else { cl.push(process.argv[i]); } } for (let i=0; i0) console.log("Invalid number of parameters"); helpCmd(cmd); } return; } } if (cl.length>0) console.log("Invalid command"); helpAll(); function calculateMatch(cmd, cl) { const alias = []; alias.push(parseLine(cmd.cmd)); if (cmd.alias) { if (Array.isArray(cmd.alias)) { for (let i=0; i1) ? arr1[1] : null }; } function areParamsValid(cmd, params) { const pl = parseLine(cmd); if (params.length > pl.params.length) return false; let minParams = pl.params.length; while ((minParams>0)&&(pl.params[minParams-1][0] == "[")) minParams --; if (params.length < minParams) return false; for (let i=0; (i< pl.params.length)&&(pl.params[i][0]=="<"); i++) { if (typeof params[i] == "undefined") return false; } return true; } function getOptions(options) { const res = {}; const opts = options.match(/(\S+)/g); for (let i=0; i ... "); console.log(" or snarkjs ... 
"); console.log(""); console.log("Type snarkjs --help to get more information for that command"); console.log(""); console.log("Full Command Description"); console.log("============ ================="); for (let i=0; i>=1; } return res; } function bitReverse(idx, bits) { return ( _revTable[idx >>> 24] | (_revTable[(idx >>> 16) & 0xFF] << 8) | (_revTable[(idx >>> 8) & 0xFF] << 16) | (_revTable[idx & 0xFF] << 24) ) >>> (32-bits); } function log2( V ) { return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) | ( ( V & 0xAAAAAAAA ) !== 0 ) ); } function formatHash(b, title) { const a = new DataView(b.buffer, b.byteOffset, b.byteLength); let S = ""; for (let i=0; i<4; i++) { if (i>0) S += "\n"; S += "\t\t"; for (let j=0; j<4; j++) { if (j>0) S += " "; S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0"); } } if (title) S = title + "\n" + S; return S; } function hashIsEqual(h1, h2) { if (h1.byteLength != h2.byteLength) return false; var dv1 = new Int8Array(h1); var dv2 = new Int8Array(h2); for (var i = 0 ; i != h1.byteLength ; i++) { if (dv1[i] != dv2[i]) return false; } return true; } function cloneHasher(h) { const ph = h.getPartialHash(); const res = Blake2b(64); res.setPartialHash(ph); return res; } async function sameRatio(curve, g1s, g1sx, g2s, g2sx) { if (curve.G1.isZero(g1s)) return false; if (curve.G1.isZero(g1sx)) return false; if (curve.G2.isZero(g2s)) return false; if (curve.G2.isZero(g2sx)) return false; // return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s)); const res = await curve.pairingEq(g1s, g2sx, curve.G1.neg(g1sx), g2s); return res; } function askEntropy() { if (process.browser) { return window.prompt("Enter a random text. (Entropy): ", ""); } else { const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); return new Promise((resolve) => { rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) ); }); } } async function getRandomRng(entropy) { // Generate a random Rng while (!entropy) { entropy = await askEntropy(); } const hasher = Blake2b(64); hasher.update(crypto.randomBytes(64)); const enc = new TextEncoder(); // always utf-8 hasher.update(enc.encode(entropy)); const hash = Buffer.from(hasher.digest()); const seed = []; for (let i=0;i<8;i++) { seed[i] = hash.readUInt32BE(i*4); } const rng = new ffjavascript.ChaCha(seed); return rng; } function rngFromBeaconParams(beaconHash, numIterationsExp) { let nIterationsInner; let nIterationsOuter; if (numIterationsExp<32) { nIterationsInner = (1 << numIterationsExp) >>> 0; nIterationsOuter = 1; } else { nIterationsInner = 0x100000000; nIterationsOuter = (1 << (numIterationsExp-32)) >>> 0; } let curHash = beaconHash; for (let i=0; i= 0) { curve = await ffjavascript.buildBn128(); } else if (["BLS12381"].indexOf(normName) >= 0) { curve = await ffjavascript.buildBls12381(); } else { throw new Error(`Curve not supported: ${name}`); } return curve; function normalizeName(n) { return n.toUpperCase().match(/[A-Za-z0-9]+/g).join(""); } } async function writePTauHeader(fd, curve, power, ceremonyPower) { // Write the header /////////// if (! 
ceremonyPower) ceremonyPower = power; await fd.writeULE32(1); // Header type const pHeaderSize = fd.pos; await fd.writeULE64(0); // Temporally set to 0 length await fd.writeULE32(curve.F1.n64*8); const buff = new Uint8Array(curve.F1.n8); ffjavascript.Scalar.toRprLE(buff, 0, curve.q, curve.F1.n8); await fd.write(buff); await fd.writeULE32(power); // power await fd.writeULE32(ceremonyPower); // power const headerSize = fd.pos - pHeaderSize - 8; const oldPos = fd.pos; fd.writeULE64(headerSize, pHeaderSize); fd.pos = oldPos; } async function readPTauHeader(fd, sections) { if (!sections[1]) throw new Error(fd.fileName + ": File has no header"); if (sections[1].length>1) throw new Error(fd.fileName +": File has more than one header"); fd.pos = sections[1][0].p; const n8 = await fd.readULE32(); const buff = await fd.read(n8); const q = ffjavascript.Scalar.fromRprLE(buff); const curve = await getCurveFromQ(q); if (curve.F1.n64*8 != n8) throw new Error(fd.fileName +": Invalid size"); const power = await fd.readULE32(); const ceremonyPower = await fd.readULE32(); if (fd.pos-sections[1][0].p != sections[1][0].size) throw new Error("Invalid PTau header size"); return {curve, power, ceremonyPower}; } async function readPtauPubKey(fd, curve, montgomery) { const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3); return fromPtauPubKeyRpr(buff, 0, curve, montgomery); } function fromPtauPubKeyRpr(buff, pos, curve, montgomery) { const key = { tau: {}, alpha: {}, beta: {} }; key.tau.g1_s = readG1(); key.tau.g1_sx = readG1(); key.alpha.g1_s = readG1(); key.alpha.g1_sx = readG1(); key.beta.g1_s = readG1(); key.beta.g1_sx = readG1(); key.tau.g2_spx = readG2(); key.alpha.g2_spx = readG2(); key.beta.g2_spx = readG2(); return key; function readG1() { let p; if (montgomery) { p = curve.G1.fromRprLEM( buff, pos ); } else { p = curve.G1.fromRprUncompressed( buff, pos ); } pos += curve.G1.F.n8*2; return p; } function readG2() { let p; if (montgomery) { p = curve.G2.fromRprLEM( buff, pos ); } else { p = curve.G2.fromRprUncompressed( buff, pos ); } pos += curve.G2.F.n8*2; return p; } } function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) { writeG1(key.tau.g1_s); writeG1(key.tau.g1_sx); writeG1(key.alpha.g1_s); writeG1(key.alpha.g1_sx); writeG1(key.beta.g1_s); writeG1(key.beta.g1_sx); writeG2(key.tau.g2_spx); writeG2(key.alpha.g2_spx); writeG2(key.beta.g2_spx); async function writeG1(p) { if (montgomery) { curve.G1.toRprLEM(buff, pos, p); } else { curve.G1.toRprUncompressed(buff, pos, p); } pos += curve.F1.n8*2; } async function writeG2(p) { if (montgomery) { curve.G2.toRprLEM(buff, pos, p); } else { curve.G2.toRprUncompressed(buff, pos, p); } pos += curve.F2.n8*2; } return buff; } async function writePtauPubKey(fd, curve, key, montgomery) { const buff = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3); toPtauPubKeyRpr(buff, 0, curve, key, montgomery); await fd.write(buff); } async function readContribution(fd, curve) { const c = {}; c.tauG1 = await readG1(); c.tauG2 = await readG2(); c.alphaG1 = await readG1(); c.betaG1 = await readG1(); c.betaG2 = await readG2(); c.key = await readPtauPubKey(fd, curve, true); c.partialHash = await fd.read(216); c.nextChallenge = await fd.read(64); c.type = await fd.readULE32(); const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3); toPtauPubKeyRpr(buffV, 0, curve, c.key, false); const responseHasher = Blake2b(64); responseHasher.setPartialHash(c.partialHash); responseHasher.update(buffV); c.responseHash = responseHasher.digest(); const paramLength = await 
fd.readULE32(); const curPos = fd.pos; let lastType =0; while (fd.pos-curPos < paramLength) { const buffType = await readDV(1); if (buffType[0]<= lastType) throw new Error("Parameters in the contribution must be sorted"); lastType = buffType[0]; if (buffType[0]==1) { // Name const buffLen = await readDV(1); const buffStr = await readDV(buffLen[0]); c.name = new TextDecoder().decode(buffStr); } else if (buffType[0]==2) { const buffExp = await readDV(1); c.numIterationsExp = buffExp[0]; } else if (buffType[0]==3) { const buffLen = await readDV(1); c.beaconHash = await readDV(buffLen[0]); } else { throw new Error("Parameter not recognized"); } } if (fd.pos != curPos + paramLength) { throw new Error("Parametes do not match"); } return c; async function readG1() { const pBuff = await fd.read(curve.G1.F.n8*2); return curve.G1.fromRprLEM( pBuff ); } async function readG2() { const pBuff = await fd.read(curve.G2.F.n8*2); return curve.G2.fromRprLEM( pBuff ); } async function readDV(n) { const b = await fd.read(n); return new Uint8Array(b); } } async function readContributions(fd, curve, sections) { if (!sections[7]) throw new Error(fd.fileName + ": File has no contributions"); if (sections[7][0].length>1) throw new Error(fd.fileName +": File has more than one contributions section"); fd.pos = sections[7][0].p; const nContributions = await fd.readULE32(); const contributions = []; for (let i=0; i0) { const paramsBuff = new Uint8Array(params); await fd.writeULE32(paramsBuff.byteLength); await fd.write(paramsBuff); } else { await fd.writeULE32(0); } async function writeG1(p) { curve.G1.toRprLEM(buffG1, 0, p); await fd.write(buffG1); } async function writeG2(p) { curve.G2.toRprLEM(buffG2, 0, p); await fd.write(buffG2); } } async function writeContributions(fd, curve, contributions) { await fd.writeULE32(7); // Header type const pContributionsSize = fd.pos; await fd.writeULE64(0); // Temporally set to 0 length await fd.writeULE32(contributions.length); for (let i=0; i< contributions.length; i++) { await writeContribution(fd, curve, contributions[i]); } const contributionsSize = fd.pos - pContributionsSize - 8; const oldPos = fd.pos; fd.writeULE64(contributionsSize, pContributionsSize); fd.pos = oldPos; } function calculateFirstChallengeHash(curve, power, logger) { if (logger) logger.debug("Calculating First Challenge Hash"); const hasher = new Blake2b(64); const vG1 = new Uint8Array(curve.G1.F.n8*2); const vG2 = new Uint8Array(curve.G2.F.n8*2); curve.G1.toRprUncompressed(vG1, 0, curve.G1.g); curve.G2.toRprUncompressed(vG2, 0, curve.G2.g); hasher.update(Blake2b(64).digest()); let n; n=(1 << power)*2 -1; if (logger) logger.debug("Calculate Initial Hash: tauG1"); hashBlock(vG1, n); n= 1 << power; if (logger) logger.debug("Calculate Initial Hash: tauG2"); hashBlock(vG2, n); if (logger) logger.debug("Calculate Initial Hash: alphaTauG1"); hashBlock(vG1, n); if (logger) logger.debug("Calculate Initial Hash: betaTauG1"); hashBlock(vG1, n); hasher.update(vG2); return hasher.digest(); function hashBlock(buff, n) { const blockSize = 500000; const nBlocks = Math.floor(n / blockSize); const rem = n % blockSize; const bigBuff = new Uint8Array(blockSize * buff.byteLength); for (let i=0; imaxVersion) throw new Error("Version not supported"); const nSections = await fd.readULE32(); // Scan sections let sections = []; for (let i=0; i1) throw new Error(fd.fileName +": Section Duplicated " +idSection); fd.pos = sections[idSection][0].p; fd.readingSection = sections[idSection][0]; } async function endReadSection$1(fd, 
noCheck) { if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section"); if (!noCheck) { if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading"); } delete fd.readingSection; } async function writeBigInt(fd, n, n8, pos) { const buff = new Uint8Array(n8); ffjavascript.Scalar.toRprLE(buff, 0, n, n8); await fd.write(buff, pos); } async function readBigInt$1(fd, n8, pos) { const buff = await fd.read(n8, pos); return ffjavascript.Scalar.fromRprLE(buff, 0, n8); } async function copySection(fdFrom, sections, fdTo, sectionId) { const chunkSize = fdFrom.pageSize; await startReadUniqueSection$1(fdFrom, sections, sectionId); await startWriteSection(fdTo, sectionId); for (let p=0; p0) { lastChallengeHash = contributions[contributions.length-1].nextChallenge; } else { lastChallengeHash = calculateFirstChallengeHash(curve, power, logger); } const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 7); await writePTauHeader(fdNew, curve, power); const contributionPreviousHash = await fdResponse.read(64); if(!hashIsEqual(contributionPreviousHash,lastChallengeHash)) throw new Error("Wrong contribution. this contribution is not based on the previus hash"); const hasherResponse = new Blake2b(64); hasherResponse.update(contributionPreviousHash); const startSections = []; let res; res = await processSection(fdResponse, fdNew, "G1", 2, (1 << power) * 2 -1, [1], "tauG1"); currentContribution.tauG1 = res[0]; res = await processSection(fdResponse, fdNew, "G2", 3, (1 << power) , [1], "tauG2"); currentContribution.tauG2 = res[0]; res = await processSection(fdResponse, fdNew, "G1", 4, (1 << power) , [0], "alphaG1"); currentContribution.alphaG1 = res[0]; res = await processSection(fdResponse, fdNew, "G1", 5, (1 << power) , [0], "betaG1"); currentContribution.betaG1 = res[0]; res = await processSection(fdResponse, fdNew, "G2", 6, 1 , [0], "betaG2"); currentContribution.betaG2 = res[0]; currentContribution.partialHash = hasherResponse.getPartialHash(); const buffKey = await fdResponse.read(curve.F1.n8*2*6+curve.F2.n8*2*3); currentContribution.key = fromPtauPubKeyRpr(buffKey, 0, curve, false); hasherResponse.update(new Uint8Array(buffKey)); const hashResponse = hasherResponse.digest(); if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: ")); const nextChallengeHasher = new Blake2b(64); nextChallengeHasher.update(hashResponse); await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1", logger); await hashSection(fdNew, "G2", 3, (1 << power) , "tauG2", logger); await hashSection(fdNew, "G1", 4, (1 << power) , "alphaTauG1", logger); await hashSection(fdNew, "G1", 5, (1 << power) , "betaTauG1", logger); await hashSection(fdNew, "G2", 6, 1 , "betaG2", logger); currentContribution.nextChallenge = nextChallengeHasher.digest(); if (logger) logger.info(formatHash(currentContribution.nextChallenge, "Next Challenge Hash: ")); contributions.push(currentContribution); await writeContributions(fdNew, curve, contributions); await fdResponse.close(); await fdNew.close(); await fdOld.close(); return currentContribution.nextChallenge; async function processSection(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) { const G = curve[groupName]; const scG = G.F.n8; const sG = G.F.n8*2; const singularPoints = []; await startWriteSection(fdTo, sectionId); const nPointsChunk = Math.floor((1<<24)/sG); startSections[sectionId] = fdTo.pos; for (let i=0; i< nPoints; i += nPointsChunk) { if (logger) 
logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`); const n = Math.min(nPoints-i, nPointsChunk); const buffC = await fdFrom.read(n * scG); hasherResponse.update(buffC); const buffLEM = await G.batchCtoLEM(buffC); await fdTo.write(buffLEM); for (let j=0; j=i) && (sp < i+n)) { const P = G.fromRprLEM(buffLEM, (sp-i)*sG); singularPoints.push(P); } } } await endWriteSection(fdTo); return singularPoints; } async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) { const G = curve[groupName]; const sG = G.F.n8*2; const nPointsChunk = Math.floor((1<<24)/sG); const oldPos = fdTo.pos; fdTo.pos = startSections[sectionId]; for (let i=0; i< nPoints; i += nPointsChunk) { if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`); const n = Math.min(nPoints-i, nPointsChunk); const buffLEM = await fdTo.read(n * sG); const buffU = await G.batchLEMtoU(buffLEM); nextChallengeHasher.update(buffU); } fdTo.pos = oldPos; } } const sameRatio$1 = sameRatio; async function verifyContribution(curve, cur, prev, logger) { let sr; if (cur.type == 1) { // Verify the beacon. const beaconKey = keyFromBeacon(curve, prev.nextChallenge, cur.beaconHash, cur.numIterationsExp); if (!curve.G1.eq(cur.key.tau.g1_s, beaconKey.tau.g1_s)) { if (logger) logger.error(`BEACON key (tauG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G1.eq(cur.key.tau.g1_sx, beaconKey.tau.g1_sx)) { if (logger) logger.error(`BEACON key (tauG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G2.eq(cur.key.tau.g2_spx, beaconKey.tau.g2_spx)) { if (logger) logger.error(`BEACON key (tauG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G1.eq(cur.key.alpha.g1_s, beaconKey.alpha.g1_s)) { if (logger) logger.error(`BEACON key (alphaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G1.eq(cur.key.alpha.g1_sx, beaconKey.alpha.g1_sx)) { if (logger) logger.error(`BEACON key (alphaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G2.eq(cur.key.alpha.g2_spx, beaconKey.alpha.g2_spx)) { if (logger) logger.error(`BEACON key (alphaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G1.eq(cur.key.beta.g1_s, beaconKey.beta.g1_s)) { if (logger) logger.error(`BEACON key (betaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G1.eq(cur.key.beta.g1_sx, beaconKey.beta.g1_sx)) { if (logger) logger.error(`BEACON key (betaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } if (!curve.G2.eq(cur.key.beta.g2_spx, beaconKey.beta.g2_spx)) { if (logger) logger.error(`BEACON key (betaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}` ); return false; } } cur.key.tau.g2_sp = curve.G2.toAffine(getG2sp(curve, 0, prev.nextChallenge, cur.key.tau.g1_s, cur.key.tau.g1_sx)); cur.key.alpha.g2_sp = curve.G2.toAffine(getG2sp(curve, 1, prev.nextChallenge, cur.key.alpha.g1_s, cur.key.alpha.g1_sx)); cur.key.beta.g2_sp = curve.G2.toAffine(getG2sp(curve, 2, prev.nextChallenge, cur.key.beta.g1_s, cur.key.beta.g1_sx)); sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID key (tau) in challenge #"+cur.id); 
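/* Verification note (descriptive comment, not part of the original bundle):
   sameRatio(curve, g1_s, g1_sx, g2_sp, g2_spx) accepts only when both pairs hide the
   same exponent: it checks the pairing equation e(g1_s, g2_spx) == e(g1_sx, g2_sp)
   via curve.pairingEq(g1_s, g2_spx, -g1_sx, g2_sp). It is used here for the tau part
   of the contributor key and, just below, for the alpha and beta parts, so a key not
   derived from a single secret per part is rejected. */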
return false; } sr = await sameRatio$1(curve, cur.key.alpha.g1_s, cur.key.alpha.g1_sx, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID key (alpha) in challenge #"+cur.id); return false; } sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, cur.key.beta.g2_sp, cur.key.beta.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID key (beta) in challenge #"+cur.id); return false; } sr = await sameRatio$1(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID tau*G1. challenge #"+cur.id+" It does not follow the previous contribution"); return false; } sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2); if (sr !== true) { if (logger) logger.error("INVALID tau*G2. challenge #"+cur.id+" It does not follow the previous contribution"); return false; } sr = await sameRatio$1(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID alpha*G1. challenge #"+cur.id+" It does not follow the previous contribution"); return false; } sr = await sameRatio$1(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx); if (sr !== true) { if (logger) logger.error("INVALID beta*G1. challenge #"+cur.id+" It does not follow the previous contribution"); return false; } sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, prev.betaG2, cur.betaG2); if (sr !== true) { if (logger) logger.error("INVALID beta*G2. challenge #"+cur.id+"It does not follow the previous contribution"); return false; } if (logger) logger.info("Powers Of tau file OK!"); return true; } async function verify(tauFilename, logger) { let sr; await Blake2b.ready(); const {fd, sections} = await readBinFile$1(tauFilename, "ptau", 1); const {curve, power, ceremonyPower} = await readPTauHeader(fd, sections); const contrs = await readContributions(fd, curve, sections); if (logger) logger.debug("power: 2**" + power); // Verify Last contribution if (logger) logger.debug("Computing initial contribution hash"); const initialContribution = { tauG1: curve.G1.g, tauG2: curve.G2.g, alphaG1: curve.G1.g, betaG1: curve.G1.g, betaG2: curve.G2.g, nextChallenge: calculateFirstChallengeHash(curve, ceremonyPower, logger), responseHash: Blake2b(64).digest() }; if (contrs.length == 0) { if (logger) logger.error("This file has no contribution! It cannot be used in production"); return false; } let prevContr; if (contrs.length>1) { prevContr = contrs[contrs.length-2]; } else { prevContr = initialContribution; } const curContr = contrs[contrs.length-1]; if (logger) logger.debug("Validating contribution #"+contrs[contrs.length-1].id); const res = await verifyContribution(curve, curContr, prevContr, logger); if (!res) return false; const nextContributionHasher = Blake2b(64); nextContributionHasher.update(curContr.responseHash); // Verify powers and compute nextChallengeHash // await test(); // Verify Section tau*G1 if (logger) logger.debug("Verifying powers in tau*G1 section"); const rTau1 = await processSection(2, "G1", "tauG1", (1 << power)*2-1, [0, 1], logger); sr = await sameRatio$1(curve, rTau1.R1, rTau1.R2, curve.G2.g, curContr.tauG2); if (sr !== true) { if (logger) logger.error("tauG1 section. 
Powers do not match"); return false; } if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) { if (logger) logger.error("First element of tau*G1 section must be the generator"); return false; } if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) { if (logger) logger.error("Second element of tau*G1 section does not match the one in the contribution section"); return false; } // await test(); // Verify Section tau*G2 if (logger) logger.debug("Verifying powers in tau*G2 section"); const rTau2 = await processSection(3, "G2", "tauG2", 1 << power, [0, 1], logger); sr = await sameRatio$1(curve, curve.G1.g, curContr.tauG1, rTau2.R1, rTau2.R2); if (sr !== true) { if (logger) logger.error("tauG2 section. Powers do not match"); return false; } if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) { if (logger) logger.error("First element of tau*G2 section must be the generator"); return false; } if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) { if (logger) logger.error("Second element of tau*G2 section does not match the one in the contribution section"); return false; } // Verify Section alpha*tau*G1 if (logger) logger.debug("Verifying powers in alpha*tau*G1 section"); const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 1 << power, [0], logger); sr = await sameRatio$1(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, curve.G2.g, curContr.tauG2); if (sr !== true) { if (logger) logger.error("alphaTauG1 section. Powers do not match"); return false; } if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) { if (logger) logger.error("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section"); return false; } // Verify Section beta*tau*G1 if (logger) logger.debug("Verifying powers in beta*tau*G1 section"); const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 1 << power, [0], logger); sr = await sameRatio$1(curve, rBetaTauG1.R1, rBetaTauG1.R2, curve.G2.g, curContr.tauG2); if (sr !== true) { if (logger) logger.error("betaTauG1 section. Powers do not match"); return false; } if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) { if (logger) logger.error("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section"); return false; } //Verify Beta G2 const betaG2 = await processSectionBetaG2(logger); if (!curve.G2.eq(curContr.betaG2, betaG2)) { if (logger) logger.error("betaG2 element in betaG2 section does not match the one in the contribution section"); return false; } const nextContributionHash = nextContributionHasher.digest(); // Check the nextChallengeHash if (!hashIsEqual(nextContributionHash,curContr.nextChallenge)) { if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section"); return false; } if (logger) logger.info(formatHash(nextContributionHash, "Next challenge hash: ")); // Verify Previous contributions printContribution(curContr, prevContr); for (let i = contrs.length-2; i>=0; i--) { const curContr = contrs[i]; const prevContr = (i>0) ? contrs[i-1] : initialContribution; const res = await verifyContribution(curve, curContr, prevContr, logger); if (!res) return false; printContribution(curContr, prevContr); } if (logger) logger.info("-----------------------------------------------------"); if ((!sections[12]) || (!sections[13]) || (!sections[14]) || (!sections[15])) { if (logger) logger.warn( "this file does not contain phase2 precalculated values. 
Please run: \n" + " snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony." ); } else { let res; res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1", logger); if (!res) return false; res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2", logger); if (!res) return false; res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1", logger); if (!res) return false; res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1", logger); if (!res) return false; } await fd.close(); return true; function printContribution(curContr, prevContr) { if (!logger) return; logger.info("-----------------------------------------------------"); logger.info(`Contribution #${curContr.id}: ${curContr.name ||""}`); logger.info(formatHash(curContr.nextChallenge, "Next Challenge: ")); const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3); toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false); const responseHasher = Blake2b(64); responseHasher.setPartialHash(curContr.partialHash); responseHasher.update(buffV); const responseHash = responseHasher.digest(); logger.info(formatHash(responseHash, "Response Hash:")); logger.info(formatHash(prevContr.nextChallenge, "Response Hash:")); if (curContr.type == 1) { logger.info(`Beacon generator: ${byteArray2hex(curContr.beaconHash)}`); logger.info(`Beacon iterations Exp: ${curContr.numIterationsExp}`); } } async function processSectionBetaG2(logger) { const G = curve.G2; const sG = G.F.n8*2; const buffUv = new Uint8Array(sG); if (!sections[6]) { logger.error("File has no BetaG2 section"); throw new Error("File has no BetaG2 section"); } if (sections[6].length>1) { logger.error("File has no BetaG2 section"); throw new Error("File has more than one GetaG2 section"); } fd.pos = sections[6][0].p; const buff = await fd.read(sG); const P = G.fromRprLEM(buff); G.toRprUncompressed(buffUv, 0, P); nextContributionHasher.update(buffUv); return P; } async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes, logger) { const MAX_CHUNK_SIZE = 1<<16; const G = curve[groupName]; const sG = G.F.n8*2; await startReadUniqueSection$1(fd, sections, idSection); const singularPoints = []; let R1 = G.zero; let R2 = G.zero; let lastBase = G.zero; for (let i=0; i0) { const firstBase = G.fromRprLEM(bases, 0); const r = crypto.randomBytes(4).readUInt32BE(0, true); R1 = G.add(R1, G.timesScalar(lastBase, r)); R2 = G.add(R2, G.timesScalar(firstBase, r)); } const r1 = await G.multiExpAffine(bases.slice(0, (n-1)*sG), scalars); const r2 = await G.multiExpAffine(bases.slice(sG), scalars); R1 = G.add(R1, r1); R2 = G.add(R2, r2); lastBase = G.fromRprLEM( bases, (n-1)*sG); for (let j=0; j=i) && (sp < i+n)) { const P = G.fromRprLEM(bases, (sp-i)*sG); singularPoints.push(P); } } } await endReadSection$1(fd); return { R1: R1, R2: R2, singularPoints: singularPoints }; } async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName, logger) { if (logger) logger.debug(`Verifying phase2 calculated values ${sectionName}...`); const G = curve[gName]; const sG = G.F.n8*2; const seed= new Array(8); for (let i=0; i<8; i++) { seed[i] = crypto.randomBytes(4).readUInt32BE(0, true); } const rng = new ffjavascript.ChaCha(seed); for (let p=0; p<= power; p ++) { const res = await verifyPower(p); if (!res) return false; } return true; async function verifyPower(p) { if (logger) logger.debug(`Power ${p}...`); const n8r = curve.Fr.n8; const nPoints = 1<1) { e = e /2; power += 1; } if (1< { logger.debug(k + 
".g1_s: " + curve.G1.toString(key[k].g1_s, 16)); logger.debug(k + ".g1_sx: " + curve.G1.toString(key[k].g1_sx, 16)); logger.debug(k + ".g2_sp: " + curve.G2.toString(key[k].g2_sp, 16)); logger.debug(k + ".g2_spx: " + curve.G2.toString(key[k].g2_spx, 16)); logger.debug(""); }); } const responseHasher = Blake2b(64); await fdTo.write(challengeHash); responseHasher.update(challengeHash); await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (1<=256) { if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes"); return false; } numIterationsExp = parseInt(numIterationsExp); if ((numIterationsExp<10)||(numIterationsExp>63)) { if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)"); return false; } await Blake2b.ready(); const {fd: fdOld, sections} = await readBinFile$1(oldPtauFilename, "ptau", 1); const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections); if (power != ceremonyPower) { if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file."); return false; } if (sections[12]) { if (logger) logger.warn("Contributing into a file that has phase2 calculated. You will have to prepare phase2 again."); } const contributions = await readContributions(fdOld, curve, sections); const curContribution = { name: name, type: 1, // Beacon numIterationsExp: numIterationsExp, beaconHash: beaconHash }; let lastChallengeHash; if (contributions.length>0) { lastChallengeHash = contributions[contributions.length-1].nextChallenge; } else { lastChallengeHash = calculateFirstChallengeHash(curve, power, logger); } curContribution.key = keyFromBeacon(curve, lastChallengeHash, beaconHash, numIterationsExp); const responseHasher = new Blake2b(64); responseHasher.update(lastChallengeHash); const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 7); await writePTauHeader(fdNew, curve, power); const startSections = []; let firstPoints; firstPoints = await processSection(2, "G1", (1<0) { lastChallengeHash = contributions[contributions.length-1].nextChallenge; } else { lastChallengeHash = calculateFirstChallengeHash(curve, power, logger); } // Generate a random key curContribution.key = createPTauKey(curve, lastChallengeHash, rng); const responseHasher = new Blake2b(64); responseHasher.update(lastChallengeHash); const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 7); await writePTauHeader(fdNew, curve, power); const startSections = []; let firstPoints; firstPoints = await processSection(2, "G1", (1< CHUNKPOW ? CHUNKPOW : p; const pointsPerChunk = 1< CHUNKPOW ? CHUNKPOW : p; const pointsPerChunk = 1< power) { if (logger) logger.error(`circuit too big for this power of tau ceremony. 
${r1cs.nConstraints} > 2**${power}`); return -1; } if (!sectionsPTau[12]) { if (logger) logger.error("Powers of tau is not prepared."); return -1; } const nPublic = r1cs.nOutputs + r1cs.nPubInputs; const domainSize = 1 << cirPower; // Write the header /////////// await startWriteSection(fdZKey, 1); await fdZKey.writeULE32(1); // Groth await endWriteSection(fdZKey); // Write the Groth header section /////////// await startWriteSection(fdZKey, 2); const primeQ = curve.q; const n8q = (Math.floor( (ffjavascript.Scalar.bitLength(primeQ) - 1) / 64) +1)*8; const primeR = curve.r; const n8r = (Math.floor( (ffjavascript.Scalar.bitLength(primeR) - 1) / 64) +1)*8; const Rr = ffjavascript.Scalar.mod(ffjavascript.Scalar.shl(1, n8r*8), primeR); const R2r = curve.Fr.e(ffjavascript.Scalar.mod(ffjavascript.Scalar.mul(Rr,Rr), primeR)); await fdZKey.writeULE32(n8q); await writeBigInt(fdZKey, primeQ, n8q); await fdZKey.writeULE32(n8r); await writeBigInt(fdZKey, primeR, n8r); await fdZKey.writeULE32(r1cs.nVars); // Total number of bars await fdZKey.writeULE32(nPublic); // Total number of public vars (not including ONE) await fdZKey.writeULE32(domainSize); // domainSize let bAlpha1; bAlpha1 = await fdPTau.read(sG1, sectionsPTau[4][0].p); await fdZKey.write(bAlpha1); bAlpha1 = await curve.G1.batchLEMtoU(bAlpha1); csHasher.update(bAlpha1); let bBeta1; bBeta1 = await fdPTau.read(sG1, sectionsPTau[5][0].p); await fdZKey.write(bBeta1); bBeta1 = await curve.G1.batchLEMtoU(bBeta1); csHasher.update(bBeta1); let bBeta2; bBeta2 = await fdPTau.read(sG2, sectionsPTau[6][0].p); await fdZKey.write(bBeta2); bBeta2 = await curve.G2.batchLEMtoU(bBeta2); csHasher.update(bBeta2); const bg1 = new Uint8Array(sG1); curve.G1.toRprLEM(bg1, 0, curve.G1.g); const bg2 = new Uint8Array(sG2); curve.G2.toRprLEM(bg2, 0, curve.G2.g); const bg1U = new Uint8Array(sG1); curve.G1.toRprUncompressed(bg1U, 0, curve.G1.g); const bg2U = new Uint8Array(sG2); curve.G2.toRprUncompressed(bg2U, 0, curve.G2.g); await fdZKey.write(bg2); // gamma2 await fdZKey.write(bg1); // delta1 await fdZKey.write(bg2); // delta2 csHasher.update(bg2U); // gamma2 csHasher.update(bg1U); // delta1 csHasher.update(bg2U); // delta2 await endWriteSection(fdZKey); const A = new Array(r1cs.nVars); const B1 = new Array(r1cs.nVars); const B2 = new Array(r1cs.nVars); const C = new Array(r1cs.nVars- nPublic -1); const IC = new Array(nPublic+1); const lTauG1 = sectionsPTau[12][0].p + ((1 << cirPower) -1)*sG1; const lTauG2 = sectionsPTau[13][0].p + ((1 << cirPower) -1)*sG2; const lAlphaTauG1 = sectionsPTau[14][0].p + ((1 << cirPower) -1)*sG1; const lBetaTauG1 = sectionsPTau[15][0].p + ((1 << cirPower) -1)*sG1; await startWriteSection(fdZKey, 4); await startReadUniqueSection$1(fdR1cs, sectionsR1cs, 2); const pNCoefs = fdZKey.pos; let nCoefs = 0; fdZKey.pos += 4; for (let c=0; c0) { const paramsBuff = new Uint8Array(params); await fd.writeULE32(paramsBuff.byteLength); await fd.write(paramsBuff); } else { await fd.writeULE32(0); } } async function writeMPCParams(fd, curve, mpcParams) { await startWriteSection(fd, 10); await fd.write(mpcParams.csHash); await fd.writeULE32(mpcParams.contributions.length); for (let i=0; i newMPCParams.contributions.length) { if (logger) logger.error("The impoerted file does not include new contributions"); return false; } for (let i=0; i=0; i--) { const c = mpcParams.contributions[i]; if (logger) logger.info("-------------------------"); if (logger) logger.info(formatHash(c.contributionHash, `contribution #${i+1} ${c.name ? 
c.name : ""}:`)); if (c.type == 1) { if (logger) logger.info(`Beacon generator: ${byteArray2hex(c.beaconHash)}`); if (logger) logger.info(`Beacon iterations Exp: ${c.numIterationsExp}`); } } if (logger) logger.info("-------------------------"); if (logger) logger.info("ZKey Ok!"); return true; async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) { const MAX_CHUNK_SIZE = 1<<20; const G = curve[groupName]; const sG = G.F.n8*2; await startReadUniqueSection$1(fd1, sections1, idSection); await startReadUniqueSection$1(fd2, sections2, idSection); let R1 = G.zero; let R2 = G.zero; const nPoints = sections1[idSection][0].size / sG; for (let i=0; i=256) { if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes"); return false; } numIterationsExp = parseInt(numIterationsExp); if ((numIterationsExp<10)||(numIterationsExp>63)) { if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)"); return false; } const {fd: fdOld, sections: sections} = await readBinFile$1(zkeyNameOld, "zkey", 2); const zkey = await readHeader(fdOld, sections, "groth16"); const curve = await getCurveFromQ(zkey.q); const mpcParams = await readMPCParams(fdOld, curve, sections); const fdNew = await createBinFile(zkeyNameNew, "zkey", 1, 10); const rng = await rngFromBeaconParams(beaconHash, numIterationsExp); const transcriptHasher = Blake2b(64); transcriptHasher.update(mpcParams.csHash); for (let i=0; i", vkalpha1_str); const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+ `${verificationKey.vk_beta_2[0][0].toString()}], `+ `[${verificationKey.vk_beta_2[1][1].toString()},` + `${verificationKey.vk_beta_2[1][0].toString()}]`; template = template.replace("<%vk_beta2%>", vkbeta2_str); const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+ `${verificationKey.vk_gamma_2[0][0].toString()}], `+ `[${verificationKey.vk_gamma_2[1][1].toString()},` + `${verificationKey.vk_gamma_2[1][0].toString()}]`; template = template.replace("<%vk_gamma2%>", vkgamma2_str); const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+ `${verificationKey.vk_delta_2[0][0].toString()}], `+ `[${verificationKey.vk_delta_2[1][1].toString()},` + `${verificationKey.vk_delta_2[1][0].toString()}]`; template = template.replace("<%vk_delta2%>", vkdelta2_str); // The points template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString()); template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString()); let vi = ""; for (let i=0; i", vi); return template; } async function write(fd, witness, prime) { await startWriteSection(fd, 1); const n8 = (Math.floor( (ffjavascript.Scalar.bitLength(prime) - 1) / 64) +1)*8; await fd.writeULE32(n8); await writeBigInt(fd, prime, n8); await fd.writeULE32(witness.length); await endWriteSection(fd); await startWriteSection(fd, 2); for (let i=0; i> 1; const va = coeffsDV.getUint32(4 + k*sCoef + 4, true); if (va > v) { n = k - 1; } else if (va < v) { m = k + 1; } else { n = k; } } return 4 + m*sCoef; } } async function joinABC(curve, zkey, a, b, c) { const concurrency = curve.tm.concurrency; const n8 = curve.Fr.n8; const nElements = Math.floor(a.byteLength / curve.Fr.n8); const elementsPerChunk = Math.floor(nElements/concurrency); const promises = []; for (let i=0; i. 
*/ const {unstringifyBigInts} = ffjavascript.utils; async function groth16Verify(vk_verifier, publicSignals, proof, logger) { /* let cpub = vk_verifier.IC[0]; for (let s= 0; s< vk_verifier.nPublic; s++) { cpub = G1.add( cpub, G1.timesScalar( vk_verifier.IC[s+1], publicSignals[s])); } */ vk_verifier = unstringifyBigInts(vk_verifier); proof = unstringifyBigInts(proof); publicSignals = unstringifyBigInts(publicSignals); const curve = await getCurveFromName(vk_verifier.curve); const IC0 = curve.G1.fromObject(vk_verifier.IC[0]); const IC = new Uint8Array(curve.G1.F.n8*2 * publicSignals.length); const w = new Uint8Array(curve.Fr.n8 * publicSignals.length); for (let i=0; i " + value.toString()); }; } if (options.trigger) { if (!sym) sym = await loadSymbols(symName); wcOps.logStartComponent= function(cIdx) { if (logger) logger.info("START: " + sym.componentIdx2Name[cIdx]); }; wcOps.logFinishComponent= function(cIdx) { if (logger) logger.info("FINISH: " + sym.componentIdx2Name[cIdx]); }; } const wc = await WitnessCalculatorBuilder$1(wasm, wcOps); const w = await wc.calculateWitness(input); const fdWtns = await createBinFile(wtnsFileName, "wtns", 2, 2); await write(fdWtns, w, wc.prime); await fdWtns.close(); } async function wtnsExportJson(wtnsFileName) { const w = await read(wtnsFileName); return w; } /* Copyright 2018 0KIMS association. This file is part of jaz (Zero Knowledge Circuit Compiler). jaz is a free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. jaz is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with jaz. If not, see . */ const {stringifyBigInts: stringifyBigInts$2, unstringifyBigInts: unstringifyBigInts$1} = ffjavascript.utils; const logger = Logger.create("snarkJS", {showTimestamp:false}); Logger.setLogLevel("INFO"); const commands = [ { cmd: "powersoftau new [powersoftau_0000.ptau]", description: "Starts a powers of tau ceremony", alias: ["ptn"], options: "-verbose|v", action: powersOfTawNew }, { cmd: "powersoftau contribute ", description: "creates a ptau file with a new contribution", alias: ["ptc"], options: "-verbose|v -name|n -entropy|e", action: powersOfTawContribute }, { cmd: "powersoftau export challenge [challenge]", description: "Creates a challenge", alias: ["ptec"], options: "-verbose|v", action: powersOfTawExportChallenge }, { cmd: "powersoftau challenge contribute [response]", description: "Contribute to a challenge", alias: ["ptcc"], options: "-verbose|v -entropy|e", action: powersOfTawChallengeContribute }, { cmd: "powersoftau import response <", description: "import a response to a ptau file", alias: ["ptir"], options: "-verbose|v -nopoints -nocheck -name|n", action: powersOfTawImport }, { cmd: "powersoftau beacon ", description: "adds a beacon", alias: ["ptb"], options: "-verbose|v -name|n", action: powersOfTawBeacon }, { cmd: "powersoftau prepare phase2 ", description: "Prepares phase 2. 
", longDescription: " This process calculates the evaluation of the Lagrange polinomials at tau for alpha*tau and beta tau", alias: ["pt2"], options: "-verbose|v", action: powersOfTawPreparePhase2 }, { cmd: "powersoftau verify ", description: "verifies a powers of tau file", alias: ["ptv"], options: "-verbose|v", action: powersOfTawVerify }, { cmd: "powersoftau export json ", description: "Exports a power of tau file to a JSON", alias: ["ptej"], options: "-verbose|v", action: powersOfTawExportJson }, { cmd: "r1cs info [circuit.r1cs]", description: "Print statistiscs of a circuit", alias: ["ri", "info -r|r1cs:circuit.r1cs"], action: r1csInfo$1 }, { cmd: "r1cs print [circuit.r1cs] [circuit.sym]", description: "Print the constraints of a circuit", alias: ["rp", "print -r|r1cs:circuit.r1cs -s|sym"], action: r1csPrint$1 }, { cmd: "r1cs export json [circuit.r1cs] [circuit.json]", description: "Export r1cs to JSON file", alias: ["rej"], action: r1csExportJSON }, { cmd: "wtns calculate [circuit.wasm] [input.json] [witness.wtns]", description: "Caclculate specific witness of a circuit given an input", alias: ["wc", "calculatewitness -ws|wasm:circuit.wasm -i|input:input.json -wt|witness:witness.wtns"], action: wtnsCalculate$1 }, { cmd: "wtns debug [circuit.wasm] [input.json] [witness.wtns] [circuit.sym]", description: "Calculate the witness with debug info.", longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ", options: "-get|g -set|s -trigger|t", alias: ["wd"], action: wtnsDebug$1 }, { cmd: "wtns export json [witness.wtns] [witnes.json]", description: "Calculate the witness with debug info.", longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ", options: "-verbose|v", alias: ["wej"], action: wtnsExportJson$1 }, { cmd: "zkey new [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]", description: "Creates an initial pkey file with zero contributions ", alias: ["zkn"], options: "-verbose|v", action: zkeyNew }, { cmd: "zkey contribute ", description: "creates a zkey file with a new contribution", alias: ["zkc"], options: "-verbose|v -entropy|e -name|n", action: zkeyContribute }, { cmd: "zkey export bellman [circuit.zkey] [circuit.mpcparams]", description: "Export a zKey to a MPCParameters file compatible with kobi/phase2 (Bellman)", alias: ["zkeb"], options: "-verbose|v", action: zkeyExportBellman }, { cmd: "zkey bellman contribute ", description: "contributes to a llallange file in bellman format", alias: ["zkbc"], options: "-verbose|v -entropy|e", action: zkeyBellmanContribute }, { cmd: "zkey import bellman ", description: "Export a zKey to a MPCParameters file compatible with kobi/phase2 (Bellman) ", alias: ["zkib"], options: "-verbose|v -name|n", action: zkeyImportBellman }, { cmd: "zkey beacon ", description: "adds a beacon", alias: ["zkb"], options: "-verbose|v -name|n", action: zkeyBeacon }, { cmd: "zkey verify [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]", description: "Verify zkey file contributions and verify that matches with the original circuit.r1cs and ptau", alias: ["zkv"], options: "-verbose|v", action: zkeyVerify }, { cmd: "zkey export verificationkey [circuit.zkey] [verification_key.json]", description: "Exports a verification key", alias: ["zkev"], action: zkeyExportVKey }, { cmd: "zkey export json [circuit.zkey] [circuit.zkey.json]", description: "Exports a circuit key to 
a JSON file", alias: ["zkej"], options: "-verbose|v", action: zkeyExportJson$1 }, { cmd: "zkey export solidityverifier [circuit.zkey] [verifier.sol]", description: "Creates a verifier in solidity", alias: ["zkesv", "generateverifier -vk|verificationkey -v|verifier"], action: zkeyExportSolidityVerifier }, { cmd: "zkey export soliditycalldata ", description: "Generates call parameters ready to be called.", alias: ["zkesc", "generatecall -pub|public -p|proof"], action: zkeyExportSolidityCalldata }, { cmd: "groth16 prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]", description: "Generates a zk Proof from witness", alias: ["g16p", "zpw", "zksnark proof", "proof -pk|provingkey -wt|witness -p|proof -pub|public"], options: "-verbose|v -protocol", action: groth16Prove$1 }, { cmd: "groth16 fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]", description: "Generates a zk Proof from input", alias: ["g16f", "g16i"], options: "-verbose|v -protocol", action: groth16FullProve$1 }, { cmd: "groth16 verify [verification_key.json] [public.json] [proof.json]", description: "Verify a zk Proof", alias: ["g16v", "verify -vk|verificationkey -pub|public -p|proof"], action: groth16Verify$1 }, ]; clProcessor(commands).then( (res) => { process.exit(res); }, (err) => { logger.error(err); process.exit(1); }); /* TODO COMMANDS ============= { cmd: "zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]", description: "Run a simple setup for a circuit generating the proving key.", alias: ["zs", "setup -r1cs|r -provingkey|pk -verificationkey|vk"], options: "-verbose|v -protocol", action: zksnarkSetup }, { cmd: "witness verify ", description: "Verify a witness agains a r1cs", alias: ["wv"], action: witnessVerify }, { cmd: "powersOfTau export response" } */ function p256(n) { let nstr = n.toString(16); while (nstr.length < 64) nstr = "0"+nstr; nstr = `"0x${nstr}"`; return nstr; } function changeExt(fileName, newExt) { let S = fileName; while ((S.length>0) && (S[S.length-1] != ".")) S = S.slice(0, S.length-1); if (S.length>0) { return S + newExt; } else { return fileName+"."+newExt; } } // r1cs export circomJSON [circuit.r1cs] [circuit.json] async function r1csInfo$1(params, options) { const r1csName = params[0] || "circuit.r1cs"; if (options.verbose) Logger.setLogLevel("DEBUG"); await r1csInfo(r1csName, logger); return 0; } // r1cs print [circuit.r1cs] [circuit.sym] async function r1csPrint$1(params, options) { const r1csName = params[0] || "circuit.r1cs"; const symName = params[1] || changeExt(r1csName, "sym"); if (options.verbose) Logger.setLogLevel("DEBUG"); const cir = await load(r1csName, true, true); const sym = await loadSymbols(symName); await r1csPrint(cir, sym, logger); return 0; } // r1cs export json [circuit.r1cs] [circuit.json] async function r1csExportJSON(params, options) { const r1csName = params[0] || "circuit.r1cs"; const jsonName = params[1] || changeExt(r1csName, "json"); if (options.verbose) Logger.setLogLevel("DEBUG"); const r1csObj = await r1csExportJson(r1csName); const S = JSON.stringify(ffjavascript.utils.stringifyBigInts(r1csObj), null, 1); await fs.promises.writeFile(jsonName, S); return 0; } // wtns calculate async function wtnsCalculate$1(params, options) { const wasmName = params[0] || "circuit.wasm"; const inputName = params[1] || "input.json"; const witnessName = params[2] || "witness.wtns"; if (options.verbose) Logger.setLogLevel("DEBUG"); const input = unstringifyBigInts$1(JSON.parse(await fs.promises.readFile(inputName, "utf8"))); 
await wtnsCalculate(input, wasmName, witnessName); return 0; } // wtns debug // -get|g -set|s -trigger|t async function wtnsDebug$1(params, options) { const wasmName = params[0] || "circuit.wasm"; const inputName = params[1] || "input.json"; const witnessName = params[2] || "witness.wtns"; const symName = params[3] || changeExt(wasmName, "sym"); if (options.verbose) Logger.setLogLevel("DEBUG"); const input = unstringifyBigInts$1(JSON.parse(await fs.promises.readFile(inputName, "utf8"))); await wtnsDebug(input, wasmName, witnessName, symName, options, logger); return 0; } // wtns export json [witness.wtns] [witness.json] // -get|g -set|s -trigger|t async function wtnsExportJson$1(params, options) { const wtnsName = params[0] || "witness.wtns"; const jsonName = params[1] || "witness.json"; if (options.verbose) Logger.setLogLevel("DEBUG"); const w = await wtnsExportJson(wtnsName); await fs.promises.writeFile(jsonName, JSON.stringify(stringifyBigInts$2(w), null, 1)); return 0; } /* // zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json] async function zksnarkSetup(params, options) { const r1csName = params[0] || "circuit.r1cs"; const zkeyName = params[1] || changeExt(r1csName, "zkey"); const verificationKeyName = params[2] || "verification_key.json"; const protocol = options.protocol || "groth16"; const cir = await loadR1cs(r1csName, true); if (!zkSnark[protocol]) throw new Error("Invalid protocol"); const setup = zkSnark[protocol].setup(cir, options.verbose); await zkey.utils.write(zkeyName, setup.vk_proof); // await fs.promises.writeFile(provingKeyName, JSON.stringify(stringifyBigInts(setup.vk_proof), null, 1), "utf-8"); await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(setup.vk_verifier), null, 1), "utf-8"); return 0; } */ // groth16 prove [circuit.zkey] [witness.wtns] [proof.json] [public.json] async function groth16Prove$1(params, options) { const zkeyName = params[0] || "circuit.zkey"; const witnessName = params[1] || "witness.wtns"; const proofName = params[2] || "proof.json"; const publicName = params[3] || "public.json"; if (options.verbose) Logger.setLogLevel("DEBUG"); const {proof, publicSignals} = await groth16Prove(zkeyName, witnessName); await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts$2(proof), null, 1), "utf-8"); await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts$2(publicSignals), null, 1), "utf-8"); return 0; } // groth16 fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json] async function groth16FullProve$1(params, options) { const inputName = params[0] || "input.json"; const wasmName = params[1] || "circuit.wasm"; const zkeyName = params[2] || "circuit.zkey"; const proofName = params[3] || "proof.json"; const publicName = params[4] || "public.json"; if (options.verbose) Logger.setLogLevel("DEBUG"); const input = unstringifyBigInts$1(JSON.parse(await fs.promises.readFile(inputName, "utf8"))); const {proof, publicSignals} = await groth16FullProve(input, wasmName, zkeyName); await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts$2(proof), null, 1), "utf-8"); await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts$2(publicSignals), null, 1), "utf-8"); return 0; } // groth16 verify [verification_key.json] [public.json] [proof.json] async function groth16Verify$1(params, options) { const verificationKeyName = params[0] || "verification_key.json"; const publicName = params[1] || "public.json"; const proofName = params[2] || "proof.json"; 
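// The verification key, public signals and proof are read from JSON, converted back to bigints, and checked with groth16Verify; the CLI exits with code 0 for a valid proof and 1 otherwise.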
const verificationKey = unstringifyBigInts$1(JSON.parse(fs.readFileSync(verificationKeyName, "utf8"))); const pub = unstringifyBigInts$1(JSON.parse(fs.readFileSync(publicName, "utf8"))); const proof = unstringifyBigInts$1(JSON.parse(fs.readFileSync(proofName, "utf8"))); if (options.verbose) Logger.setLogLevel("DEBUG"); const isValid = await groth16Verify(verificationKey, pub, proof, logger); if (isValid) { return 0; } else { return 1; } } // zkey export vkey [circuit.zkey] [verification_key.json]", async function zkeyExportVKey(params, options) { const zkeyName = params[0] || "circuit.zkey"; const verificationKeyName = params[1] || "verification_key.json"; if (options.verbose) Logger.setLogLevel("DEBUG"); const vKey = await zkeyExportVerificationKey(zkeyName); const S = JSON.stringify(ffjavascript.utils.stringifyBigInts(vKey), null, 1); await fs.promises.writeFile(verificationKeyName, S); } // zkey export json [circuit.zkey] [circuit.zkey.json]", async function zkeyExportJson$1(params, options) { const zkeyName = params[0] || "circuit.zkey"; const zkeyJsonName = params[1] || "circuit.zkey.json"; if (options.verbose) Logger.setLogLevel("DEBUG"); const zKey = await zkeyExportJson(zkeyName); const S = JSON.stringify(ffjavascript.utils.stringifyBigInts(zKey), null, 1); await fs.promises.writeFile(zkeyJsonName, S); } // solidity genverifier [circuit.zkey] [verifier.sol] async function zkeyExportSolidityVerifier(params, options) { let zkeyName; let verifierName; if (params.length < 1) { zkeyName = "circuit.zkey"; } else { zkeyName = params[0]; } if (params.length < 2) { verifierName = "verifier.sol"; } else { verifierName = params[1]; } if (options.verbose) Logger.setLogLevel("DEBUG"); let templateName; try { templateName = path.join( __dirname, "templates", "verifier_groth16.sol"); await fs.promises.stat(templateName); } catch (err) { templateName = path.join( __dirname, "..", "templates", "verifier_groth16.sol"); } const verifierCode = await exportSolidityVerifier(zkeyName, templateName); fs.writeFileSync(verifierName, verifierCode, "utf-8"); return 0; } // solidity gencall async function zkeyExportSolidityCalldata(params, options) { let publicName; let proofName; if (params.length < 1) { publicName = "public.json"; } else { publicName = params[0]; } if (params.length < 2) { proofName = "proof.json"; } else { proofName = params[1]; } if (options.verbose) Logger.setLogLevel("DEBUG"); const pub = unstringifyBigInts$1(JSON.parse(fs.readFileSync(publicName, "utf8"))); const proof = unstringifyBigInts$1(JSON.parse(fs.readFileSync(proofName, "utf8"))); let inputs = ""; for (let i=0; i [powersoftau_0000.ptau]", async function powersOfTawNew(params, options) { let curveName; let power; let ptauName; curveName = params[0]; power = parseInt(params[1]); if ((power<1) || (power>28)) { throw new Error("Power must be between 1 and 28"); } if (params.length < 3) { ptauName = "powersOfTaw" + power + "_0000.ptau"; } else { ptauName = params[2]; } const curve = await getCurveFromName(curveName); if (options.verbose) Logger.setLogLevel("DEBUG"); return await newAccumulator(curve, power, ptauName, logger); } async function powersOfTawExportChallenge(params, options) { let ptauName; let challengeName; ptauName = params[0]; if (params.length < 2) { challengeName = "challenge"; } else { challengeName = params[1]; } if (options.verbose) Logger.setLogLevel("DEBUG"); return await exportChallenge(ptauName, challengeName, logger); } // powersoftau challenge contribute [response] async function 
powersOfTawChallengeContribute(params, options) { let challengeName; let responseName; const curve = await getCurveFromName(params[0]); challengeName = params[1]; if (params.length < 3) { responseName = changeExt(challengeName, "response"); } else { responseName = params[2]; } if (options.verbose) Logger.setLogLevel("DEBUG"); return await challengeContribute(curve, challengeName, responseName, options.entropy, logger); } async function powersOfTawImport(params, options) { let oldPtauName; let response; let newPtauName; let importPoints = true; let doCheck = true; oldPtauName = params[0]; response = params[1]; newPtauName = params[2]; if (options.nopoints) importPoints = false; if (options.nocheck) doCheck = false; if (options.verbose) Logger.setLogLevel("DEBUG"); const res = await importResponse(oldPtauName, response, newPtauName, options.name, importPoints, logger); if (res) return res; if (!doCheck) return; // TODO Verify } async function powersOfTawVerify(params, options) { let ptauName; ptauName = params[0]; if (options.verbose) Logger.setLogLevel("DEBUG"); const res = await verify(ptauName, logger); if (res === true) { return 0; } else { return 1; } } async function powersOfTawBeacon(params, options) { let oldPtauName; let newPtauName; let beaconHashStr; let numIterationsExp; oldPtauName = params[0]; newPtauName = params[1]; beaconHashStr = params[2]; numIterationsExp = params[3]; if (options.verbose) Logger.setLogLevel("DEBUG"); return await beacon(oldPtauName, newPtauName, options.name ,beaconHashStr, numIterationsExp, logger); } async function powersOfTawContribute(params, options) { let oldPtauName; let newPtauName; oldPtauName = params[0]; newPtauName = params[1]; if (options.verbose) Logger.setLogLevel("DEBUG"); return await contribute(oldPtauName, newPtauName, options.name , options.entropy, logger); } async function powersOfTawPreparePhase2(params, options) { let oldPtauName; let newPtauName; oldPtauName = params[0]; newPtauName = params[1]; if (options.verbose) Logger.setLogLevel("DEBUG"); return await preparePhase2(oldPtauName, newPtauName, logger); } // powersoftau export json ", async function powersOfTawExportJson(params, options) { let ptauName; let jsonName; ptauName = params[0]; jsonName = params[1]; if (options.verbose) Logger.setLogLevel("DEBUG"); const pTau = await exportJson(ptauName, logger); const S = JSON.stringify(stringifyBigInts$2(pTau), null, 1); await fs.promises.writeFile(jsonName, S); } // phase2 new async function zkeyNew(params, options) { let r1csName; let ptauName; let zkeyName; if (params.length < 1) { r1csName = "circuit.r1cs"; } else { r1csName = params[0]; } if (params.length < 2) { ptauName = "powersoftau.ptau"; } else { ptauName = params[1]; } if (params.length < 3) { zkeyName = "circuit.zkey"; } else { zkeyName = params[2]; } if (options.verbose) Logger.setLogLevel("DEBUG"); return newZKey(r1csName, ptauName, zkeyName, logger); } // zkey export bellman [circuit.zkey] [circuit.mpcparams] async function zkeyExportBellman(params, options) { let zkeyName; let mpcparamsName; if (params.length < 1) { zkeyName = "circuit.zkey"; } else { zkeyName = params[0]; } if (params.length < 2) { mpcparamsName = "circuit.mpcparams"; } else { mpcparamsName = params[1]; } if (options.verbose) Logger.setLogLevel("DEBUG"); return phase2exportMPCParams(zkeyName, mpcparamsName, logger); } // zkey import bellman async function zkeyImportBellman(params, options) { let zkeyNameOld; let mpcParamsName; let zkeyNameNew; zkeyNameOld = params[0]; mpcParamsName = params[1]; 
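// zkey import bellman positional params: [0] original zkey, [1] MPCParameters file (Bellman format), [2] new zkey to write with the imported contributions.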
zkeyNameNew = params[2]; if (options.verbose) Logger.setLogLevel("DEBUG"); return phase2importMPCParams(zkeyNameOld, mpcParamsName, zkeyNameNew, options.name, logger); } // phase2 verify [circuit.r1cs] [powersoftau.ptau] [circuit.zkey] async function zkeyVerify(params, options) { let r1csName; let ptauName; let zkeyName; if (params.length < 1) { r1csName = "circuit.r1cs"; } else { r1csName = params[0]; } if (params.length < 2) { ptauName = "powersoftau.ptau"; } else { ptauName = params[1]; } if (params.length < 3) { zkeyName = "circuit.zkey"; } else { zkeyName = params[2]; } if (options.verbose) Logger.setLogLevel("DEBUG"); const res = await phase2verify(r1csName, ptauName, zkeyName, logger); if (res === true) { return 0; } else { return 1; } } // zkey contribute async function zkeyContribute(params, options) { let zkeyOldName; let zkeyNewName; zkeyOldName = params[0]; zkeyNewName = params[1]; if (options.verbose) Logger.setLogLevel("DEBUG"); return phase2contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, logger); } // zkey beacon async function zkeyBeacon(params, options) { let zkeyOldName; let zkeyNewName; let beaconHashStr; let numIterationsExp; zkeyOldName = params[0]; zkeyNewName = params[1]; beaconHashStr = params[2]; numIterationsExp = params[3]; if (options.verbose) Logger.setLogLevel("DEBUG"); return await beacon$1(zkeyOldName, zkeyNewName, options.name ,beaconHashStr, numIterationsExp, logger); } // zkey challenge contribute [response]", async function zkeyBellmanContribute(params, options) { let challengeName; let responseName; const curve = await getCurveFromName(params[0]); challengeName = params[1]; if (params.length < 3) { responseName = changeExt(challengeName, "response"); } else { responseName = params[2]; } if (options.verbose) Logger.setLogLevel("DEBUG"); return bellmanContribute(curve, challengeName, responseName, options.entropy, logger); }
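/*
  Illustrative end-to-end workflow (a sketch only): the commands defined above are
  normally chained from a shell roughly as follows, assuming this bundle is installed
  as the `snarkjs` binary. File names, the power (12) and the curve name (bn128) are
  example assumptions, not values required by the tool.

    snarkjs powersoftau new bn128 12 pot12_0000.ptau
    snarkjs powersoftau contribute pot12_0000.ptau pot12_0001.ptau
    snarkjs powersoftau prepare phase2 pot12_0001.ptau pot12_final.ptau
    snarkjs zkey new circuit.r1cs pot12_final.ptau circuit_0000.zkey
    snarkjs zkey contribute circuit_0000.zkey circuit_0001.zkey
    snarkjs zkey export verificationkey circuit_0001.zkey verification_key.json
    snarkjs wtns calculate circuit.wasm input.json witness.wtns
    snarkjs groth16 prove circuit_0001.zkey witness.wtns proof.json public.json
    snarkjs groth16 verify verification_key.json public.json proof.json
*/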