diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..95b6967 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,70 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "type": "pwa-node", + "request": "launch", + "name": "plonk setup", + "skipFiles": [ + "/**" + ], + "program": "cli.js", + "args": [ + "pks", + "test/plonk_circuit/circuit.r1cs", + "test/plonk_circuit/powersOfTau15_final.ptau", + "test/plonk_circuit/circuit.zkey" + ] + }, + { + "type": "pwa-node", + "request": "launch", + "name": "plonk prove", + "skipFiles": [ + "/**" + ], + "program": "cli.js", + "args": [ + "pkp", + "test/plonk_circuit/circuit.zkey", + "test/plonk_circuit/witness.wtns", + "test/plonk_circuit/proof.json", + "test/plonk_circuit/public.json", + "-v" + ] + }, + { + "type": "pwa-node", + "request": "launch", + "name": "plonk export vk", + "skipFiles": [ + "/**" + ], + "program": "cli.js", + "args": [ + "zkev", + "test/plonk_circuit/circuit.zkey", + "test/plonk_circuit/verification_key.json", + ] + }, + { + "type": "pwa-node", + "request": "launch", + "name": "plonk verify", + "skipFiles": [ + "/**" + ], + "program": "cli.js", + "args": [ + "pkv", + "test/plonk_circuit/verification_key.json", + "test/plonk_circuit/public.json", + "test/plonk_circuit/proof.json", + "-v" + ] + } + ] +} \ No newline at end of file diff --git a/cli.js b/cli.js index 8f560b2..84a0cb7 100755 --- a/cli.js +++ b/cli.js @@ -35,6 +35,7 @@ const {stringifyBigInts, unstringifyBigInts} = utils; import * as zkey from "./src/zkey.js"; import * as groth16 from "./src/groth16.js"; +import * as plonk from "./src/plonk.js"; import * as wtns from "./src/wtns.js"; import * as curves from "./src/curves.js"; import path from "path"; @@ -267,7 +268,34 @@ const commands = [ alias: ["g16v", 
"verify -vk|verificationkey -pub|public -p|proof"], action: groth16Verify }, - + { + cmd: "plonk setup [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]", + description: "Creates an initial PLONK pkey ", + alias: ["pks"], + options: "-verbose|v", + action: plonkSetup + }, + { + cmd: "plonk prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]", + description: "Generates a PLONK Proof from witness", + alias: ["pkp"], + options: "-verbose|v -protocol", + action: plonkProve + }, + { + cmd: "plonk fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]", + description: "Generates a PLONK Proof from input", + alias: ["pkf"], + options: "-verbose|v -protocol", + action: plonkFullProve + }, + { + cmd: "plonk verify [verification_key.json] [public.json] [proof.json]", + description: "Verify a PLONK Proof", + alias: ["pkv"], + options: "-verbose|v", + action: plonkVerify + } ]; @@ -986,3 +1014,96 @@ async function zkeyBellmanContribute(params, options) { return zkey.bellmanContribute(curve, challengeName, responseName, options.entropy, logger); } + +// plonk setup +async function plonkSetup(params, options) { + let r1csName; + let ptauName; + let zkeyName; + + if (params.length < 1) { + r1csName = "circuit.r1cs"; + } else { + r1csName = params[0]; + } + + if (params.length < 2) { + ptauName = "powersoftau.ptau"; + } else { + ptauName = params[1]; + } + + if (params.length < 3) { + zkeyName = "circuit.zkey"; + } else { + zkeyName = params[2]; + } + + if (options.verbose) Logger.setLogLevel("DEBUG"); + + return plonk.setup(r1csName, ptauName, zkeyName, logger); +} + + +// plonk prove [circuit.zkey] [witness.wtns] [proof.json] [public.json] +async function plonkProve(params, options) { + + const zkeyName = params[0] || "circuit.zkey"; + const witnessName = params[1] || "witness.wtns"; + const proofName = params[2] || "proof.json"; + const publicName = params[3] || "public.json"; + + if (options.verbose) Logger.setLogLevel("DEBUG"); + + const 
{proof, publicSignals} = await plonk.prove(zkeyName, witnessName, logger); + + await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8"); + await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8"); + + return 0; +} + + +// plonk fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json] +async function plonkFullProve(params, options) { + + const inputName = params[0] || "input.json"; + const wasmName = params[1] || "circuit.wasm"; + const zkeyName = params[2] || "circuit.zkey"; + const proofName = params[3] || "proof.json"; + const publicName = params[4] || "public.json"; + + if (options.verbose) Logger.setLogLevel("DEBUG"); + + const input = unstringifyBigInts(JSON.parse(await fs.promises.readFile(inputName, "utf8"))); + + const {proof, publicSignals} = await plonk.fullProve(input, wasmName, zkeyName, logger); + + await fs.promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8"); + await fs.promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8"); + + return 0; +} + + +// plonk verify [verification_key.json] [public.json] [proof.json] +async function plonkVerify(params, options) { + + const verificationKeyName = params[0] || "verification_key.json"; + const publicName = params[1] || "public.json"; + const proofName = params[2] || "proof.json"; + + const verificationKey = unstringifyBigInts(JSON.parse(fs.readFileSync(verificationKeyName, "utf8"))); + const pub = unstringifyBigInts(JSON.parse(fs.readFileSync(publicName, "utf8"))); + const proof = unstringifyBigInts(JSON.parse(fs.readFileSync(proofName, "utf8"))); + + if (options.verbose) Logger.setLogLevel("DEBUG"); + + const isValid = await plonk.verify(verificationKey, pub, proof, logger); + + if (isValid) { + return 0; + } else { + return 1; + } +} diff --git a/package-lock.json b/package-lock.json index 685bf1e..059f655 100644 --- 
a/package-lock.json +++ b/package-lock.json @@ -13,6 +13,7 @@ "circom_runtime": "0.1.13", "fastfile": "0.0.19", "ffjavascript": "0.2.35", + "js-sha3": "^0.8.0", "logplease": "^1.2.15", "r1csfile": "0.0.32", "readline": "^1.3.0" @@ -1670,6 +1671,11 @@ "node": ">= 10.13.0" } }, + "node_modules/js-sha3": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz", + "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==" + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -4601,6 +4607,11 @@ "supports-color": "^7.0.0" } }, + "js-sha3": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz", + "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==" + }, "js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", diff --git a/package.json b/package.json index 552df8d..f061a9c 100644 --- a/package.json +++ b/package.json @@ -43,6 +43,7 @@ "circom_runtime": "0.1.13", "fastfile": "0.0.19", "ffjavascript": "0.2.35", + "js-sha3": "^0.8.0", "logplease": "^1.2.15", "r1csfile": "0.0.32", "readline": "^1.3.0" diff --git a/src/groth16_prove.js b/src/groth16_prove.js index dd135ad..8a07f12 100644 --- a/src/groth16_prove.js +++ b/src/groth16_prove.js @@ -13,7 +13,11 @@ export default async function groth16Prove(zkeyFileName, witnessFileName, logger const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23); - const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey, "groth16"); + const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey); + + if (zkey.protocol != "groth16") { + throw new Error("zkey file is not groth16"); + } if (!Scalar.eq(zkey.r, wtns.q)) { throw new Error("Curve of the witness does not match 
the curve of the proving key"); @@ -162,7 +166,7 @@ async function buldABC1(curve, zkey, witness, coeffs, logger) { } - +/* async function buldABC(curve, zkey, witness, coeffs, logger) { const concurrency = curve.tm.concurrency; const sCoef = 4*3 + zkey.n8r; @@ -291,7 +295,7 @@ async function buldABC(curve, zkey, witness, coeffs, logger) { return 4 + m*sCoef; } } - +*/ async function joinABC(curve, zkey, a, b, c, logger) { const MAX_CHUNK_SIZE = 1 << 22; diff --git a/src/groth16_verify.js b/src/groth16_verify.js index 3d48044..ad63024 100644 --- a/src/groth16_verify.js +++ b/src/groth16_verify.js @@ -74,4 +74,4 @@ export default async function groth16Verify(vk_verifier, publicSignals, proof, l if (logger) logger.info("OK!"); return true; -}; +} diff --git a/src/plonk.js b/src/plonk.js new file mode 100644 index 0000000..fff7c50 --- /dev/null +++ b/src/plonk.js @@ -0,0 +1,4 @@ +export {default as setup} from "./plonk_setup.js"; +export {default as fullProve} from "./plonk_fullprove.js"; +export {default as prove} from "./plonk_prove.js"; +export {default as verify} from "./plonk_verify.js"; diff --git a/src/plonk_fullprove.js b/src/plonk_fullprove.js new file mode 100644 index 0000000..0e33a2e --- /dev/null +++ b/src/plonk_fullprove.js @@ -0,0 +1,10 @@ +import plonk_prove from "./plonk_prove.js"; +import wtns_calculate from "./wtns_calculate.js"; + +export default async function plonkFullProve(input, wasmFile, zkeyFileName, logger) { + const wtns= { + type: "mem" + }; + await wtns_calculate(input, wasmFile, wtns); + return await plonk_prove(zkeyFileName, wtns, logger); +} diff --git a/src/plonk_prove.js b/src/plonk_prove.js new file mode 100644 index 0000000..81fa170 --- /dev/null +++ b/src/plonk_prove.js @@ -0,0 +1,717 @@ +/* + Copyright 2021 0kims association. + + This file is part of snarkjs. 
 + + snarkjs is a free software: you can redistribute it and/or + modify it under the terms of the GNU General Public License as published by the + Free Software Foundation, either version 3 of the License, or (at your option) + any later version. + + snarkjs is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + more details. + + You should have received a copy of the GNU General Public License along with + snarkjs. If not, see . +*/ + +/* Implementation of this paper: https://eprint.iacr.org/2019/953.pdf section 8.4 */ + +import * as binFileUtils from "@iden3/binfileutils"; +import * as zkeyUtils from "./zkey_utils.js"; +import * as wtnsUtils from "./wtns_utils.js"; +import { getCurveFromQ as getCurve } from "./curves.js"; +import { Scalar, utils, BigBuffer } from "ffjavascript"; +const {stringifyBigInts} = utils; +import jsSha3 from "js-sha3"; +const { keccak256 } = jsSha3; + +export default async function plonk16Prove(zkeyFileName, witnessFileName, logger) { + const {fd: fdWtns, sections: sectionsWtns} = await binFileUtils.readBinFile(witnessFileName, "wtns", 2, 1<<25, 1<<23); + + const wtns = await wtnsUtils.readHeader(fdWtns, sectionsWtns); + + const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23); + + const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey); + if (zkey.protocol != "plonk") { + throw new Error("zkey file is not plonk"); + } + + if (!Scalar.eq(zkey.r, wtns.q)) { + throw new Error("Curve of the witness does not match the curve of the proving key"); + } + + if (wtns.nWitness != zkey.nVars -zkey.nAdditions) { + throw new Error(`Invalid witness length. 
Circuit: ${zkey.nVars}, witness: ${wtns.nWitness}, ${zkey.nAdditions}`); + } + + const curve = await getCurve(zkey.q); + const Fr = curve.Fr; + const G1 = curve.G1; + const n8r = curve.Fr.n8; + + if (logger) logger.debug("Reading Wtns"); + const buffWitness = await binFileUtils.readSection(fdWtns, sectionsWtns, 2); + // First element in plonk is not used and can be any value. (But always the same). + // We set it to zero to go faster in the exponentiations. + buffWitness.set(Fr.zero, 0); + const buffInternalWitness = new BigBuffer(n8r*zkey.nAdditions); + + await calculateAdditions(); + + let A,B,C,Z; + let pol_a,pol_b,pol_c, pol_z, pol_t, pol_r; + let proof = {}; + if (logger) logger.debug("Reading L Points"); + const lagrangeBases = await binFileUtils.readSection(fdZKey, sectionsZKey, 14); + + const sigmaBuff = new BigBuffer(zkey.domainSize*n8r*4*3); + let o = sectionsZKey[12][0].p + zkey.domainSize*n8r; + await fdZKey.readToBuffer(sigmaBuff, 0 , zkey.domainSize*n8r*4, o); + o += zkey.domainSize*n8r*5; + await fdZKey.readToBuffer(sigmaBuff, zkey.domainSize*n8r*4 , zkey.domainSize*n8r*4, o); + o += zkey.domainSize*n8r*5; + await fdZKey.readToBuffer(sigmaBuff, zkey.domainSize*n8r*8 , zkey.domainSize*n8r*4, o); + + const pol_s1 = new BigBuffer(zkey.domainSize*n8r); + await fdZKey.readToBuffer(pol_s1, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p); + + const pol_s2 = new BigBuffer(zkey.domainSize*n8r); + await fdZKey.readToBuffer(pol_s2, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p + 5*zkey.domainSize*n8r); + + const PTau = await binFileUtils.readSection(fdZKey, sectionsZKey, 15); + + + let alpha, beta, gamma, xi; + let xim; + const b=[]; + + await round1(); + await round2(); + await round3(); + await round4(); + await round5(); + + + /////////////////////// + // Final adjustments // + /////////////////////// + + proof.protocol = "plonk"; + + await fdZKey.close(); + await fdWtns.close(); + + let publicSignals = []; + + for (let i=1; i<= zkey.nPublic; i++) { + const 
pub = buffWitness.slice(i*Fr.n8, i*Fr.n8+Fr.n8); + publicSignals.push(Scalar.fromRprLE(pub)); + } + + proof.A = G1.toObject(G1.toAffine(proof.A)); + proof.B = G1.toObject(G1.toAffine(proof.B)); + proof.C = G1.toObject(G1.toAffine(proof.C)); + proof.Z = G1.toObject(G1.toAffine(proof.Z)); + + proof.T1 = G1.toObject(G1.toAffine(proof.T1)); + proof.T2 = G1.toObject(G1.toAffine(proof.T2)); + proof.T3 = G1.toObject(G1.toAffine(proof.T3)); + + proof.eval_a = Fr.toObject(proof.eval_a); + proof.eval_b = Fr.toObject(proof.eval_b); + proof.eval_c = Fr.toObject(proof.eval_c); + proof.eval_s1 = Fr.toObject(proof.eval_s1); + proof.eval_s2 = Fr.toObject(proof.eval_s2); + proof.eval_zw = Fr.toObject(proof.eval_zw); + proof.eval_t = Fr.toObject(proof.eval_t); + proof.eval_r = Fr.toObject(proof.eval_r); + + proof.Wxi = G1.toObject(G1.toAffine(proof.Wxi)); + proof.Wxiw = G1.toObject(G1.toAffine(proof.Wxiw)); + + proof = stringifyBigInts(proof); + publicSignals = stringifyBigInts(publicSignals); + + return {proof, publicSignals}; + + async function calculateAdditions() { + const additionsBuff = await binFileUtils.readSection(fdZKey, sectionsZKey, 3); + + const sSum = 8+curve.Fr.n8*2; + + for (let i=0; i0)&&(Fr.isZero(p.slice(deg*n8r, deg*n8r+n8r)))) deg--; + return deg; + } + + function printPol(P) { + const n=(P.byteLength/n8r); + console.log("["); + for (let i=0; i (zkey.domainSize*3 -4) ) { + if (!Fr.isZero(a)) { + throw new Error("T Polynomial is not divisible"); + } + } + } + + pol_t = t.slice(0, (zkey.domainSize*3)*n8r); + + t = await Fr.batchFromMontgomery(t); + + proof.T1 = await curve.G1.multiExpAffine(PTau, t.slice(0, zkey.domainSize*n8r), logger, "multiexp T1"); + proof.T2 = await curve.G1.multiExpAffine(PTau, t.slice(zkey.domainSize*n8r, zkey.domainSize*2*n8r), logger, "multiexp T2"); + proof.T3 = await curve.G1.multiExpAffine(PTau, t.slice(zkey.domainSize*2*n8r, (zkey.domainSize*3)*n8r), logger, "multiexp T3"); + + + async function to4T(A) { + const a = await Fr.ifft(A); 
+ const a4 = new BigBuffer(n8r*zkey.domainSize*4); + a4.set(a, 0); + const A4 = await Fr.fft(a4); + return [a, A4]; + } + } + + async function round4() { + const pol_qm = new BigBuffer(zkey.domainSize*n8r); + await fdZKey.readToBuffer(pol_qm, 0 , zkey.domainSize*n8r, sectionsZKey[7][0].p); + + const pol_ql = new BigBuffer(zkey.domainSize*n8r); + await fdZKey.readToBuffer(pol_ql, 0 , zkey.domainSize*n8r, sectionsZKey[8][0].p); + + const pol_qr = new BigBuffer(zkey.domainSize*n8r); + await fdZKey.readToBuffer(pol_qr, 0 , zkey.domainSize*n8r, sectionsZKey[9][0].p); + + const pol_qo = new BigBuffer(zkey.domainSize*n8r); + await fdZKey.readToBuffer(pol_qo, 0 , zkey.domainSize*n8r, sectionsZKey[10][0].p); + + const pol_qc = new BigBuffer(zkey.domainSize*n8r); + await fdZKey.readToBuffer(pol_qc, 0 , zkey.domainSize*n8r, sectionsZKey[11][0].p); + + const pol_s3 = new BigBuffer(zkey.domainSize*n8r); + await fdZKey.readToBuffer(pol_s3, 0 , zkey.domainSize*n8r, sectionsZKey[12][0].p + 10*zkey.domainSize*n8r); + + const transcript4 = new Uint8Array(G1.F.n8*2*3); + G1.toRprUncompressed(transcript4, 0, proof.T1); + G1.toRprUncompressed(transcript4, G1.F.n8*2, proof.T2); + G1.toRprUncompressed(transcript4, G1.F.n8*4, proof.T3); + xi = hashToFr(transcript4); + + if (logger) logger.debug("xi: " + Fr.toString(xi)); + + proof.eval_a = evalPol(pol_a, xi); + proof.eval_b = evalPol(pol_b, xi); + proof.eval_c = evalPol(pol_c, xi); + proof.eval_s1 = evalPol(pol_s1, xi); + proof.eval_s2 = evalPol(pol_s2, xi); + proof.eval_t = evalPol(pol_t, xi); + proof.eval_zw = evalPol(pol_z, Fr.mul(xi, Fr.w[zkey.power])); + + const coef_ab = Fr.mul(proof.eval_a, proof.eval_b); + + let e2a = proof.eval_a; + const betaxi = Fr.mul(beta, xi); + e2a = Fr.add( e2a, betaxi); + e2a = Fr.add( e2a, gamma); + + let e2b = proof.eval_b; + e2b = Fr.add( e2b, Fr.mul(betaxi, zkey.k1)); + e2b = Fr.add( e2b, gamma); + + let e2c = proof.eval_c; + e2c = Fr.add( e2c, Fr.mul(betaxi, zkey.k2)); + e2c = Fr.add( e2c, gamma); + 
+ const e2 = Fr.mul(Fr.mul(Fr.mul(e2a, e2b), e2c), alpha); + + let e3a = proof.eval_a; + e3a = Fr.add( e3a, Fr.mul(beta, proof.eval_s1)); + e3a = Fr.add( e3a, gamma); + + let e3b = proof.eval_b; + e3b = Fr.add( e3b, Fr.mul(beta, proof.eval_s2)); + e3b = Fr.add( e3b, gamma); + + let e3 = Fr.mul(e3a, e3b); + e3 = Fr.mul(e3, beta); + e3 = Fr.mul(e3, proof.eval_zw); + e3 = Fr.mul(e3, alpha); + + xim= xi; + for (let i=0; i=0; i--) { + res = Fr.add(Fr.mul(res, x), P.slice(i*n8r, (i+1)*n8r)); + } + return res; + } + + function divPol1(P, d) { + const n = P.byteLength/n8r; + const res = new BigBuffer(n*n8r); + res.set(Fr.zero, (n-1) *n8r); + res.set(P.slice((n-1)*n8r, n*n8r), (n-2)*n8r); + for (let i=n-3; i>=0; i--) { + res.set( + Fr.add( + P.slice((i+1)*n8r, (i+2)*n8r), + Fr.mul( + d, + res.slice((i+1)*n8r, (i+2)*n8r) + ) + ), + i*n8r + ); + } + if (!Fr.eq( + P.slice(0, n8r), + Fr.mul( + Fr.neg(d), + res.slice(0, n8r) + ) + )) { + throw new Error("Polinomial does not divide"); + } + return res; + } +} + + + + diff --git a/src/plonk_setup.js b/src/plonk_setup.js new file mode 100644 index 0000000..a27baa1 --- /dev/null +++ b/src/plonk_setup.js @@ -0,0 +1,448 @@ +/* + Copyright 2021 0kims association. + + This file is part of snarkjs. + + snarkjs is a free software: you can redistribute it and/or + modify it under the terms of the GNU General Public License as published by the + Free Software Foundation, either version 3 of the License, or (at your option) + any later version. + + snarkjs is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY + or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for + more details. + + You should have received a copy of the GNU General Public License along with + snarkjs. If not, see . 
+*/ + +/* Implementation of this paper: https://eprint.iacr.org/2019/953.pdf */ + +import {readR1csHeader} from "r1csfile"; +import * as utils from "./powersoftau_utils.js"; +import { + readBinFile, + createBinFile, + readSection, + writeBigInt, + startWriteSection, + endWriteSection, +} from "@iden3/binfileutils"; +import { log2 } from "./misc.js"; +import { Scalar, BigBuffer } from "ffjavascript"; +import Blake2b from "blake2b-wasm"; +import BigArray from "./bigarray.js"; + + +export default async function plonkSetup(r1csName, ptauName, zkeyName, logger) { + + await Blake2b.ready(); + + const {fd: fdPTau, sections: sectionsPTau} = await readBinFile(ptauName, "ptau", 1, 1<<22, 1<<24); + const {curve, power} = await utils.readPTauHeader(fdPTau, sectionsPTau); + const {fd: fdR1cs, sections: sectionsR1cs} = await readBinFile(r1csName, "r1cs", 1, 1<<22, 1<<24); + const r1cs = await readR1csHeader(fdR1cs, sectionsR1cs, false); + + const sG1 = curve.G1.F.n8*2; + const G1 = curve.G1; + const sG2 = curve.G2.F.n8*2; + const Fr = curve.Fr; + const n8r = curve.Fr.n8; + + if (logger) logger.info("Reading r1cs"); + let sR1cs = await readSection(fdR1cs, sectionsR1cs, 2); + + const plonkConstraints = new BigArray(); + const plonkAdditions = new BigArray(); + let plonkNVars = r1cs.nVars; + + const nPublic = r1cs.nOutputs + r1cs.nPubInputs; + + await processConstraints(); + + const fdZKey = await createBinFile(zkeyName, "zkey", 1, 15, 1<<22, 1<<24); + + + if (r1cs.prime != curve.r) { + if (logger) logger.error("r1cs curve does not match powers of tau ceremony curve"); + return -1; + } + + const cirPower = log2(plonkConstraints.length -1) +1; + const domainSize = 2 ** cirPower; + + if (cirPower > power) { + if (logger) logger.error(`circuit too big for this power of tau ceremony. 
${r1cs.nConstraints}*2 > 2**${power}`); + return -1; + } + + if (!sectionsPTau[12]) { + if (logger) logger.error("Powers of tau is not prepared."); + return -1; + } + + + const LPoints = new BigBuffer(domainSize*sG1); + const o = sectionsPTau[12][0].p + ((2 ** (cirPower)) -1)*sG1; + await fdPTau.readToBuffer(LPoints, 0, domainSize*sG1, o); + + const [k1, k2] = getK1K2(); + + const vk = {}; + + + await writeAdditions(3, "Additions"); + await writeWitnessMap(4, 0, "Amap"); + await writeWitnessMap(5, 1, "Bmap"); + await writeWitnessMap(6, 2, "Cmap"); + await writeQMap(7, 3, "Qm"); + await writeQMap(8, 4, "Ql"); + await writeQMap(9, 5, "Qr"); + await writeQMap(10, 6, "Qo"); + await writeQMap(11, 7, "Qc"); + await writeSigma(12, "sigma"); + await writeLs(13, "lagrange polynomials"); + + // Write Lagrange Points Section + /////////// + await startWriteSection(fdZKey, 14); + await fdZKey.write(LPoints); + await endWriteSection(fdZKey); + + // Write PTau points + //////////// + + await startWriteSection(fdZKey, 15); + const buffOut = new BigBuffer(domainSize*sG1); + await fdPTau.readToBuffer(buffOut, 0, domainSize*sG1, sectionsPTau[2][0].p); + await fdZKey.write(buffOut); + await endWriteSection(fdZKey); + + + await writeHeaders(); + + await fdZKey.close(); + await fdR1cs.close(); + await fdPTau.close(); + + if (logger) logger.info("Setup Finished"); + + return ; + + async function processConstraints() { + + let r1csPos = 0; + + function r1cs_readULE32() { + const buff = sR1cs.slice(r1csPos, r1csPos+4); + r1csPos += 4; + const buffV = new DataView(buff.buffer); + return buffV.getUint32(0, true); + } + + function r1cs_readCoef() { + const res = Fr.fromRprLE(sR1cs.slice(r1csPos, r1csPos+curve.Fr.n8)); + r1csPos += curve.Fr.n8; + return res; + } + + function r1cs_readCoefs() { + const coefs = []; + const res = { + k: curve.Fr.zero + }; + const nA = r1cs_readULE32(); + for (let i=0; i> 1); + const arr2 = coefs.slice(coefs.length >> 1); + const coef1 = reduceCoef(arr1); + const 
coef2 = reduceCoef(arr2); + + const sl = coef1[0]; + const sr = coef2[0]; + const so = plonkNVars++; + const qm = curve.Fr.zero; + const ql = Fr.neg(coef1[1]); + const qr = Fr.neg(coef2[1]); + const qo = curve.Fr.one; + const qc = curve.Fr.zero; + + plonkConstraints.push([sl, sr, so, qm, ql, qr, qo, qc]); + + plonkAdditions.push([sl, sr, coef1[1], coef2[1]]); + + return [so, curve.Fr.one]; + } + + for (let s = 1; s <= nPublic ; s++) { + const sl = s; + const sr = 0; + const so = 0; + const qm = curve.Fr.zero; + const ql = curve.Fr.one; + const qr = curve.Fr.zero; + const qo = curve.Fr.zero; + const qc = curve.Fr.zero; + + plonkConstraints.push([sl, sr, so, qm, ql, qr, qo, qc]); + } + + for (let c=0; c. +*/ + +/* Implementation of this paper: https://eprint.iacr.org/2019/953.pdf */ +import { Scalar } from "ffjavascript"; +import * as curves from "./curves.js"; +import { utils } from "ffjavascript"; +const {unstringifyBigInts} = utils; +import jsSha3 from "js-sha3"; +const { keccak256 } = jsSha3; + + +export default async function plonkVerify(vk_verifier, publicSignals, proof, logger) { + vk_verifier = unstringifyBigInts(vk_verifier); + proof = unstringifyBigInts(proof); + publicSignals = unstringifyBigInts(publicSignals); + + const curve = await curves.getCurveFromName(vk_verifier.curve); + + const Fr = curve.Fr; + const G1 = curve.G1; + + proof = fromObjectProof(curve,proof); + vk_verifier = fromObjectVk(curve, vk_verifier); + if (!isWellConstructed(curve, proof)) { + logger.error("Proof is not well constructed"); + return false; + } + const challanges = calculateChallanges(curve, proof); + if (logger) { + logger.debug("beta: " + Fr.toString(challanges.beta)); + logger.debug("gamma: " + Fr.toString(challanges.gamma)); + logger.debug("alpha: " + Fr.toString(challanges.alpha)); + logger.debug("xi: " + Fr.toString(challanges.xi)); + logger.debug("v: " + Fr.toString(challanges.v)); + logger.debug("u: " + Fr.toString(challanges.u)); + } + const L = 
calculateLagrangeEvaluations(curve, challanges, vk_verifier); + if (logger) { + logger.debug("Lagrange Evaluations: "); + for (let i=1; i