add setup for .r1cs files
Parent: cb7a749135 · Commit: 762fbe19a4

cli.js (12 changed lines)
@@ -45,6 +45,8 @@ setup command

         Filename of the compiled circuit file generated by circom.
+
+        The filename may have the extension .json or .r1cs.

         Default: circuit.json

     --pk or --provingkey <provingKeyFile>
@@ -291,8 +293,14 @@ try {
         cir.printConstraints();

     } else if (argv._[0].toUpperCase() == "SETUP") {
-        const cirDef = JSON.parse(fs.readFileSync(circuitName, "utf8"));
-        const cir = new zkSnark.Circuit(cirDef);
+        const cirExtension = circuitName.split(".").pop();
+
+        let cir;
+        if (cirExtension == "json") {
+            const cirDefJSON = JSON.parse(fs.readFileSync(circuitName, "utf8"));
+            cir = new zkSnark.Circuit(cirDefJSON);
+        } else if (cirExtension == "r1cs")
+            cir = zkSnark.parseR1csSync(circuitName);

         if (!zkSnark[protocol]) throw new Error("Invalid protocol");
         const setup = zkSnark[protocol].setup(cir);
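Note on the new dispatch: for any other extension, cir stays undefined and the failure only surfaces later inside setup(cir). A minimal hardening sketch (not part of this commit; it reuses the names from the hunk above):

    const cirExtension = circuitName.split(".").pop().toLowerCase();

    let cir;
    if (cirExtension == "json") {
        // circom JSON output
        const cirDefJSON = JSON.parse(fs.readFileSync(circuitName, "utf8"));
        cir = new zkSnark.Circuit(cirDefJSON);
    } else if (cirExtension == "r1cs") {
        // binary R1CS output, parsed synchronously
        cir = zkSnark.parseR1csSync(circuitName);
    } else {
        // fail fast instead of passing undefined to setup()
        throw new Error("Unsupported circuit file extension: " + cirExtension);
    }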
index.js (3 changed lines)
@@ -42,3 +42,6 @@ exports.unstringifyBigInts = require("./src/stringifybigint.js").unstringifyBigInts;

 const Bn128 = require("./src/bn128.js");
 exports.bn128 = new Bn128();
+
+exports.parseR1cs = require("./src/r1cs_parser.js").loadR1cs;
+exports.parseR1csSync = require("./src/r1cs_parser.js").loadR1csSynch;
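For reference, a short usage sketch of the two new exports; the .r1cs path is a placeholder and the require path assumes a script sitting next to index.js:

    const zkSnark = require("./index.js");

    // asynchronous variant: loadR1cs returns a promise resolving to the circuit definition
    zkSnark.parseR1cs("circuit.r1cs").then((cirDef) => {
        console.log(cirDef.nVars, cirDef.nConstraints);
    });

    // synchronous variant, as used by the CLI setup command
    const cirDefSync = zkSnark.parseR1csSync("circuit.r1cs");
    console.log(cirDefSync.prime, cirDefSync.constraints.length);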
package.json

@@ -37,6 +37,7 @@
   },
   "devDependencies": {
     "eslint-plugin-mocha": "^5.3.0",
+    "lodash": "^4.17.15",
     "mocha": "^5.2.0"
   }
 }
src/r1cs_parser.js (new file, 289 lines)
@@ -0,0 +1,289 @@
const fs = require("fs");
const assert = require("assert");
const bigInt = require("big-integer");

module.exports.loadR1cs = loadR1cs;
module.exports.loadR1csSynch = loadR1csSync;

async function loadR1cs(fileName) {
    const res = {};
    const fd = await fs.promises.open(fileName, "r");

    const b = Buffer.allocUnsafe(4);
    await fd.read(b, 0, 4, 0);
    if (b.toString() != "r1cs") assert(false, "Invalid File format");

    let p=4;

    let v = await readU32();

    if (v>1) assert(false, "Version not supported");

    const nSections = await readU32();

    let pHeader;
    let pConstraints;
    let headerSize;
    let constraintsSize;
    let pMap;
    let mapSize;
    for (let i=0; i<nSections; i++) {
        let ht = await readU32();
        let hl = await readDouble64();
        if (ht == 1) {
            if (typeof pHeader != "undefined") assert(false, "File has two header sections");
            pHeader = p;
            headerSize = hl;
        } else if (ht==2) {
            if (typeof pConstraints != "undefined") assert(false, "File has two constraints sections");
            pConstraints = p;
            constraintsSize = hl;
        } else if (ht==3) {
            pMap = p;
            mapSize = hl;
        }
        p += hl;
    }

    if (typeof pHeader == "undefined") assert(false, "File has no header section");

    // Read Header
    p = pHeader;
    const fieldDefSize = await readU32();
    const pFieldDef = p;

    const defType = await readU32();
    if (defType != 1) assert(false, "Field type not supported");

    res.prime = await readBigInt();

    if (p != pFieldDef + fieldDefSize) assert(false, "Invalid fieldDef size");

    const bigIntFormat = await readU32();
    if (bigIntFormat != 0) assert(false, "BigInt format not supported");

    const idSize = await readU32();
    if (idSize != 4) assert(false, "idSize not supported. Must be 4");

    res.nVars = await readU32();
    res.nOutputs = await readU32();
    res.nPubInputs = await readU32();
    res.nPrvIns = await readU32();
    res.nLabels = await readU32();
    res.nConstraints = await readU32();

    if (p != pHeader + headerSize) assert(false, "Invalid header section size");

    // Read Constraints
    p = pConstraints;

    res.constraints = [];
    for (let i=0; i<res.nConstraints; i++) {
        const c = await readConstraint();
        res.constraints.push(c);
    }
    if (p != pConstraints + constraintsSize) assert(false, "Invalid constraints size");

    await fd.close();

    return res;

    async function readU32() {
        const b = Buffer.allocUnsafe(4);
        await fd.read(b, 0, 4, p);
        p+=4;

        return b.readInt32LE(0);
    }

    async function readDouble64() {
        const b = Buffer.allocUnsafe(8);
        await fd.read(b, 0, 8, p);

        p+=8;

        return b.readDoubleLE(0);
    }

    async function readBigInt() {
        const bl = Buffer.allocUnsafe(1);
        await fd.read(bl, 0, 1, p);
        p++;

        const l = bl[0];
        const b = Buffer.allocUnsafe(l);
        await fd.read(b, 0, l, p);
        p += l;

        const arr = Uint8Array.from(b);

        const arrr = new Array(arr.length);
        for (let i=0; i<arr.length; i++) {
            arrr[i] = arr[arr.length-1-i];
        }

        const n = bigInt.fromArray(arrr, 256);

        return n.toString();
    }

    async function readConstraint() {
        const c = [];
        c.push(await readLC());
        c.push(await readLC());
        c.push(await readLC());
        return c;
    }

    async function readLC() {
        const lc = {};
        const nIdx = await readU32();
        for (let i=0; i<nIdx; i++) {
            const idx = await readU32();
            const val = await readBigInt();
            lc[idx] = val;
        }
        return lc;
    }
}

function loadR1csSync(fileName) {
    const res = {};
    const fd = fs.openSync(fileName, "r");

    const b = Buffer.allocUnsafe(4);
    fs.readSync(fd, b, 0, 4, 0);
    if (b.toString() != "r1cs") assert(false, "Invalid File format");

    let p=4;

    let v = readU32();

    if (v>1) assert(false, "Version not supported");

    const nSections = readU32();

    let pHeader;
    let pConstraints;
    let headerSize;
    let constraintsSize;
    let pMap;
    let mapSize;
    for (let i=0; i<nSections; i++) {
        let ht = readU32();
        let hl = readDouble64();
        if (ht == 1) {
            if (typeof pHeader != "undefined") assert(false, "File has two header sections");
            pHeader = p;
            headerSize = hl;
        } else if (ht==2) {
            if (typeof pConstraints != "undefined") assert(false, "File has two constraints sections");
            pConstraints = p;
            constraintsSize = hl;
        } else if (ht==3) {
            pMap = p;
            mapSize = hl;
        }
        p += hl;
    }

    if (typeof pHeader == "undefined") assert(false, "File has no header section");

    // Read Header
    p = pHeader;
    const fieldDefSize = readU32();
    const pFieldDef = p;

    const defType = readU32();
    if (defType != 1) assert(false, "Field type not supported");

    res.prime = readBigInt();

    if (p != pFieldDef + fieldDefSize) assert(false, "Invalid fieldDef size");

    const bigIntFormat = readU32();
    if (bigIntFormat != 0) assert(false, "BigInt format not supported");

    const idSize = readU32();
    if (idSize != 4) assert(false, "idSize not supported. Must be 4");

    res.nVars = readU32();
    res.nOutputs = readU32();
    res.nPubInputs = readU32();
    res.nPrvIns = readU32();
    res.nLabels = readU32();
    res.nConstraints = readU32();

    if (p != pHeader + headerSize) assert(false, "Invalid header section size");

    // Read Constraints
    p = pConstraints;

    res.constraints = [];
    for (let i=0; i<res.nConstraints; i++) {
        const c = readConstraint();
        res.constraints.push(c);
    }
    if (p != pConstraints + constraintsSize) assert(false, "Invalid constraints size");

    fs.closeSync(fd);

    return res;

    function readU32() {
        const b = Buffer.allocUnsafe(4);
        fs.readSync(fd, b, 0, 4, p);
        p+=4;

        return b.readInt32LE(0);
    }

    function readDouble64() {
        const b = Buffer.allocUnsafe(8);
        fs.readSync(fd, b, 0, 8, p);
        p+=8;

        return b.readDoubleLE(0);
    }

    function readBigInt() {
        const bl = Buffer.allocUnsafe(1);
        fs.readSync(fd, bl, 0, 1, p);
        p++;

        const l = bl[0];
        const b = Buffer.allocUnsafe(l);
        fs.readSync(fd, b, 0, l, p);
        p += l;

        const arr = Uint8Array.from(b);

        const arrr = new Array(arr.length);
        for (let i=0; i<arr.length; i++) {
            arrr[i] = arr[arr.length-1-i];
        }

        const n = bigInt.fromArray(arrr, 256);

        return n.toString();
    }

    function readConstraint() {
        const c = [];
        c.push(readLC());
        c.push(readLC());
        c.push(readLC());
        return c;
    }

    function readLC() {
        const lc = {};
        const nIdx = readU32();
        for (let i=0; i<nIdx; i++) {
            const idx = readU32();
            const val = readBigInt();
            lc[idx] = val;
        }
        return lc;
    }
}
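The binary layout the parser expects can be read off the code above: a 4-byte "r1cs" magic, a uint32 version, a uint32 section count, then per section a uint32 type and a float64 byte length (the length excludes those 12 bytes); big integers are stored little-endian behind a 1-byte length prefix. As an illustration only (not part of this commit), a sketch that writes a minimal, constraint-free file which loadR1csSync accepts; the bn128 field prime is assumed from test/r1cs/circuit.json:

    // write-minimal-r1cs.js (illustrative fixture generator, assumptions noted above)
    const fs = require("fs");

    function u32(v) { const b = Buffer.alloc(4); b.writeUInt32LE(v, 0); return b; }
    function f64(v) { const b = Buffer.alloc(8); b.writeDoubleLE(v, 0); return b; }

    // field prime as little-endian bytes with a 1-byte length prefix
    let x = BigInt("21888242871839275222246405745257275088548364400416034343698204186575808495617");
    const primeBytes = [];
    while (x > 0n) { primeBytes.push(Number(x & 0xffn)); x >>= 8n; }
    const prime = Buffer.concat([Buffer.from([primeBytes.length]), Buffer.from(primeBytes)]);

    const fieldDef = Buffer.concat([u32(1), prime]);           // defType = 1, then the prime
    const header = Buffer.concat([
        u32(fieldDef.length), fieldDef,                        // fieldDefSize + field definition
        u32(0),                                                // bigIntFormat
        u32(4),                                                // idSize
        u32(1), u32(0), u32(0), u32(0), u32(1), u32(0)         // nVars, nOutputs, nPubInputs, nPrvIns, nLabels, nConstraints
    ]);

    const file = Buffer.concat([
        Buffer.from("r1cs"), u32(1), u32(2),                   // magic, version, nSections
        u32(1), f64(header.length), header,                    // section 1: header
        u32(2), f64(0)                                         // section 2: constraints (empty)
    ]);
    fs.writeFileSync("empty.r1cs", file);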
test/r1cs/circuit.json (new file, 126 lines)
@@ -0,0 +1,126 @@
{
    "mainCode": "{\n}\n",
    "signalName2Idx": {
        "one": 0, "main.a": 3, "main.b": 4, "main.c": 5, "main.d": 6, "main.e": 1, "main.f": 2
    },
    "components": [
        { "name": "main", "params": {}, "template": "Multiplier", "inputSignals": 4 }
    ],
    "componentName2Idx": { "main": 0 },
    "signals": [
        { "names": ["one"],    "triggerComponents": [] },
        { "names": ["main.e"], "triggerComponents": [] },
        { "names": ["main.f"], "triggerComponents": [] },
        { "names": ["main.a"], "triggerComponents": [0] },
        { "names": ["main.b"], "triggerComponents": [0] },
        { "names": ["main.c"], "triggerComponents": [0] },
        { "names": ["main.d"], "triggerComponents": [0] }
    ],
    "constraints": [
        [ {}, {}, { "0": "21888242871839275222246405745257275088548364400416034343698204186575808495616", "3": "1" } ],
        [ {}, {}, { "0": "21888242871839275222246405745257275088548364400416034343698204186575808495616", "4": "1" } ],
        [ { "3": "21888242871839275222246405745257275088548364400416034343698204186575808495616" },
          { "4": "1" },
          { "1": "21888242871839275222246405745257275088548364400416034343698204186575808495616" } ],
        [ { "5": "21888242871839275222246405745257275088548364400416034343698204186575808495616" },
          { "6": "1" },
          { "2": "21888242871839275222246405745257275088548364400416034343698204186575808495616" } ]
    ],
    "templates": {
        "Multiplier": "function(ctx) {\n ctx.assert(\"1\", ctx.getSignal(\"a\", []), \"/home/ixnay/iden3Dev/github/test/circom-test/circuit.circom:9:4\");\n ctx.assert(\"1\", ctx.getSignal(\"b\", []), \"/home/ixnay/iden3Dev/github/test/circom-test/circuit.circom:10:4\");\n ctx.setSignal(\"e\", [], bigInt(ctx.getSignal(\"a\", [])).mul(bigInt(ctx.getSignal(\"b\", []))).mod(__P__));\n ctx.setSignal(\"f\", [], bigInt(ctx.getSignal(\"c\", [])).mul(bigInt(ctx.getSignal(\"d\", []))).mod(__P__));\n}\n"
    },
    "functions": {},
    "nPrvInputs": 4,
    "nPubInputs": 0,
    "nInputs": 4,
    "nOutputs": 2,
    "nVars": 7,
    "nConstants": 0,
    "nSignals": 7
}
test/r1cs/circuit.r1cs (new binary file; binary file not shown)
test/setup_r1cs.js (new file, 55 lines)
@@ -0,0 +1,55 @@
const chai = require("chai");
const fs = require("fs");
const path = require("path");
const lodash = require("lodash");

const zkSnark = require("../index.js");

const assert = chai.assert;

describe("R1CS", () => {

    it("parser", () => {
        // Load circuit with .json file
        const cirDefJSON = JSON.parse(fs.readFileSync(path.join(__dirname, "r1cs", "circuit.json"), "utf8"));
        const cirJSON = new zkSnark.Circuit(cirDefJSON);
        // Load circuit with .r1cs file (async); return the promise so mocha waits for the assertions
        return zkSnark.parseR1cs(path.join(__dirname, "r1cs", "circuit.r1cs"))
            .then( cirDefR1cs => {
                assert(cirJSON.nVars == cirDefR1cs.nVars);
                assert(cirJSON.nPubInputs == cirDefR1cs.nPubInputs);
                assert(cirJSON.nOutputs == cirDefR1cs.nOutputs);
                assert(cirJSON.constraints.length == cirDefR1cs.nConstraints);

                for (let i = 0; i < cirDefR1cs.nConstraints; i++){
                    const constraintJSON = cirJSON.constraints[i];
                    const constraintR1CS = cirDefR1cs.constraints[i];
                    assert.equal(constraintJSON.length, constraintR1CS.length);
                    for (let j = 0; j < constraintJSON.length; j++)
                        assert(lodash.isEqual(constraintJSON[j], constraintR1CS[j]));
                }
            });
    });

    it("check setup", () => {
        // load JSON circuit
        const cirDef = JSON.parse(fs.readFileSync(path.join(__dirname, "r1cs", "circuit.json"), "utf8"));
        const cir = new zkSnark.Circuit(cirDef);

        // load .r1cs circuit (sync)
        const cirDefR1cs = zkSnark.parseR1csSync(path.join(__dirname, "r1cs", "circuit.r1cs"));

        // calculate proving and verification keys from the R1CS circuit
        const setupR1cs = zkSnark["groth"].setup(cirDefR1cs);

        // calculate witness from the regular circuit
        const witness = cir.calculateWitness({"a": "1", "b": "1", "c": "5", "d": "5", "e": "1", "f": "25"});

        // generate proof
        const { proof, publicSignals } = zkSnark["groth"].genProof(setupR1cs.vk_proof, witness);

        // check proof
        const isValid = zkSnark["groth"].isValid(setupR1cs.vk_verifier, proof, publicSignals);
        assert(isValid);
    });
});