improve zkey new speed
This commit is contained in:
parent 1abf7b6ed3
commit 8d3d179bd1
@@ -2114,6 +2114,26 @@ async function readFullSection(fd, sections, idSection) {
     return res;
 }
 
+async function readSection(fd, sections, idSection, offset, length) {
+
+    offset = (typeof offset === "undefined") ? 0 : offset;
+    length = (typeof length === "undefined") ? sections[idSection][0].size - offset : length;
+
+    if (offset + length > sections[idSection][0].size) {
+        throw new Error("Reading out of the range of the section");
+    }
+
+    let buff;
+    if (length < (1 << 30) ) {
+        buff = new Uint8Array(length);
+    } else {
+        buff = new ffjavascript.BigBuffer(length);
+    }
+
+    await fd.readToBuffer(buff, 0, length, sections[idSection][0].p + offset);
+    return buff;
+}
+
 async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
     const MAX_BUFF_SIZE = fd1.pageSize * 16;
     await startReadUniqueSection$1(fd1, sections1, idSection);
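
Note: this first block is a bundled build output (the `$1` suffix on startReadUniqueSection$1 and the `ffjavascript.` namespace are rollup artifacts). The new readSection helper reads an arbitrary slice of a bin-file section into memory with a single positioned readToBuffer call, falling back to ffjavascript's BigBuffer when the slice would exceed the 1 << 30 byte limit of a plain Uint8Array. A minimal usage sketch, assuming a file opened with the package's readBinFile; the file name, type tag, and section id are illustrative, not from the commit:

    import { readBinFile, readSection } from "@iden3/binfileutils";

    async function loadSecondHalfOfSection(fileName) {
        const { fd, sections } = await readBinFile(fileName, "ptau", 1);
        const size = sections[12][0].size;
        // One sequential read of the second half of section 12.
        const buff = await readSection(fd, sections, 12, size / 2, size / 2);
        await fd.close();
        return buff; // Uint8Array, or BigBuffer when length >= 1 << 30
    }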
@@ -3999,10 +4019,10 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
     const buffCoeff = new Uint8Array(12 + curve.Fr.n8);
     const buffCoeffV = new DataView(buffCoeff.buffer);
 
-    const lTauG1 = sectionsPTau[12][0].p + ((2 ** cirPower) -1)*sG1;
-    const lTauG2 = sectionsPTau[13][0].p + ((2 ** cirPower) -1)*sG2;
-    const lAlphaTauG1 = sectionsPTau[14][0].p + ((2 ** cirPower) -1)*sG1;
-    const lBetaTauG1 = sectionsPTau[15][0].p + ((2 ** cirPower) -1)*sG1;
+    const sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize -1)*sG1, domainSize*sG1);
+    const sTauG2 = await readSection(fdPTau, sectionsPTau, 13, (domainSize -1)*sG2, domainSize*sG2);
+    const sAlphaTauG1 = await readSection(fdPTau, sectionsPTau, 14, (domainSize -1)*sG1, domainSize*sG1);
+    const sBetaTauG1 = await readSection(fdPTau, sectionsPTau, 15, (domainSize -1)*sG1, domainSize*sG1);
 
     await startWriteSection(fdZKey, 4);
     await startReadUniqueSection$1(fdR1cs, sectionsR1cs, 2);
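
This hunk is where the speedup lands: instead of keeping four absolute file offsets (lTau*) into the .ptau file and issuing one small read per R1CS coefficient later, the code pulls the needed window of each points section into memory up front (sTau*). Since domainSize equals 2 ** cirPower in this function, the new section-relative offset addresses the same bytes the old pointer did:

    // Equivalence of the old pointer and the new in-memory window (commentary only):
    //   old: lTauG1 = sectionsPTau[12][0].p + (domainSize - 1)*sG1   -> absolute file offset
    //   new: sTauG1 = section 12 bytes [(domainSize - 1)*sG1, (2*domainSize - 1)*sG1)
    // so the point formerly read from lTauG1 + sG1*c is now sTauG1.slice(sG1*c, sG1*c + sG1).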
@@ -4017,8 +4037,8 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
         const s = await fdR1cs.readULE32();
         const coef = await fdR1cs.read(r1cs.n8);
 
-        const l1 = lTauG1 + sG1*c;
-        const l2 = lBetaTauG1 + sG1*c;
+        const l1 = sTauG1.slice(sG1*c, sG1*c + sG1);
+        const l2 = sBetaTauG1.slice(sG1*c, sG1*c + sG1);
         if (typeof A[s] === "undefined") A[s] = [];
         A[s].push([l1, coef]);
 
@@ -4038,9 +4058,9 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
         const s = await fdR1cs.readULE32();
         const coef = await fdR1cs.read(r1cs.n8);
 
-        const l1 = lTauG1 + sG1*c;
-        const l2 = lTauG2 + sG2*c;
-        const l3 = lAlphaTauG1 + sG1*c;
+        const l1 = sTauG1.slice(sG1*c, sG1*c + sG1);
+        const l2 = sTauG2.slice(sG2*c, sG2*c + sG2);
+        const l3 = sAlphaTauG1.slice(sG1*c, sG1*c + sG1);
         if (typeof B1[s] === "undefined") B1[s] = [];
         B1[s].push([l1, coef]);
         if (typeof B2[s] === "undefined") B2[s] = [];
@@ -4063,7 +4083,7 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
         const s = await fdR1cs.readULE32();
         const coef = await fdR1cs.read(r1cs.n8);
 
-        const l1 = lTauG1 + sG1*c;
+        const l1 = sTauG1.slice(sG1*c, sG1*c + sG1);
         if (s <= nPublic) {
             if (typeof IC[s] === "undefined") IC[s] = [];
             IC[s].push([l1, coef]);
@@ -4077,8 +4097,8 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
     const bOne = new Uint8Array(curve.Fr.n8);
     curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1));
     for (let s = 0; s <= nPublic ; s++) {
-        const l1 = lTauG1 + sG1*(r1cs.nConstraints + s);
-        const l2 = lBetaTauG1 + sG1*(r1cs.nConstraints + s);
+        const l1 = sTauG1.slice(sG1*(r1cs.nConstraints + s), sG1*(r1cs.nConstraints + s) + sG1);
+        const l2 = sBetaTauG1.slice(sG1*(r1cs.nConstraints + s), sG1*(r1cs.nConstraints + s) + sG1);
         if (typeof A[s] === "undefined") A[s] = [];
         A[s].push([l1, bOne]);
         if (typeof IC[s] === "undefined") IC[s] = [];
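
The net effect of the four hunks above is a change of representation in the A/B1/B2/C/IC work lists: each entry used to carry a file offset that still had to be resolved with an async fdPTau.read; it now carries the point bytes themselves, copied out of the preloaded buffers:

    // Entry shape before and after (commentary, names from the diff):
    //   before: A[s].push([lTauG1 + sG1*c, coef]);        // [file offset, coefficient]
    //   after:  A[s].push([sTauG1.slice(...), coef]);     // [point bytes, coefficient]
    // Uint8Array.prototype.slice copies, so each entry owns its sG1/sG2 bytes.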
@@ -4233,34 +4253,16 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
         let pB =0;
         let pS =0;
 
-        let readOps = [];
-        let scalars = [];
         let offset = 0;
         for (let i=0; i<arr.length; i++) {
             if (!arr[i]) continue;
             for (let j=0; j<arr[i].length; j++) {
-                if (readOps.length > 2<<14) {
-                    logger.debug(`${sectionName}: Long MExp Load ${j}/${arr[i].length}`);
-
-                    const points = await Promise.all(readOps);
-                    for (let k=0; k<points.length; k++) {
-                        bBases.set(points[k], (offset+k)*sGin);
-                        bScalars.set(scalars[k], (offset+k)*curve.Fr.n8);
-                    }
-                    offset += readOps.length;
-                    readOps = [];
-                    scalars = [];
-                }
-                scalars.push(arr[i][j][1]);
-                readOps.push(fdPTau.read(sGin, arr[i][j][0]));
+                bBases.set(arr[i][j][0], offset*sGin);
+                bScalars.set(arr[i][j][1], offset*curve.Fr.n8);
+                offset ++;
             }
         }
 
-        const points = await Promise.all(readOps);
-        for (let i=0; i<points.length; i++) {
-            bBases.set(points[i], (offset+i)*sGin);
-            bScalars.set(scalars[i], (offset+i)*curve.Fr.n8);
-        }
-
         if (arr.length>1) {
             const task = [];
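
Because the entries now hold the point bytes, the multi-exponentiation loader loses its whole async-batching apparatus: no readOps queue capped at 2<<14 pending fdPTau.read promises and no periodic Promise.all flush, just straight copies into the contiguous bBases/bScalars buffers that feed the MSM:

    // The loader's inner loop, before vs after (commentary):
    //   before: readOps.push(fdPTau.read(sGin, arr[i][j][0]));  // async, point fetched later
    //   after:  bBases.set(arr[i][j][0], offset*sGin);          // sync, point already in RAM

The hunks that follow apply the identical changes to a second bundled build output.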
@@ -944,6 +944,26 @@ async function readFullSection(fd, sections, idSection) {
     return res;
 }
 
+async function readSection(fd, sections, idSection, offset, length) {
+
+    offset = (typeof offset === "undefined") ? 0 : offset;
+    length = (typeof length === "undefined") ? sections[idSection][0].size - offset : length;
+
+    if (offset + length > sections[idSection][0].size) {
+        throw new Error("Reading out of the range of the section");
+    }
+
+    let buff;
+    if (length < (1 << 30) ) {
+        buff = new Uint8Array(length);
+    } else {
+        buff = new ffjavascript.BigBuffer(length);
+    }
+
+    await fd.readToBuffer(buff, 0, length, sections[idSection][0].p + offset);
+    return buff;
+}
+
 async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
     const MAX_BUFF_SIZE = fd1.pageSize * 16;
     await startReadUniqueSection(fd1, sections1, idSection);
@@ -4565,10 +4585,10 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
     const buffCoeff = new Uint8Array(12 + curve.Fr.n8);
     const buffCoeffV = new DataView(buffCoeff.buffer);
 
-    const lTauG1 = sectionsPTau[12][0].p + ((2 ** cirPower) -1)*sG1;
-    const lTauG2 = sectionsPTau[13][0].p + ((2 ** cirPower) -1)*sG2;
-    const lAlphaTauG1 = sectionsPTau[14][0].p + ((2 ** cirPower) -1)*sG1;
-    const lBetaTauG1 = sectionsPTau[15][0].p + ((2 ** cirPower) -1)*sG1;
+    const sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize -1)*sG1, domainSize*sG1);
+    const sTauG2 = await readSection(fdPTau, sectionsPTau, 13, (domainSize -1)*sG2, domainSize*sG2);
+    const sAlphaTauG1 = await readSection(fdPTau, sectionsPTau, 14, (domainSize -1)*sG1, domainSize*sG1);
+    const sBetaTauG1 = await readSection(fdPTau, sectionsPTau, 15, (domainSize -1)*sG1, domainSize*sG1);
 
     await startWriteSection(fdZKey, 4);
     await startReadUniqueSection(fdR1cs, sectionsR1cs, 2);
@@ -4583,8 +4603,8 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
         const s = await fdR1cs.readULE32();
         const coef = await fdR1cs.read(r1cs.n8);
 
-        const l1 = lTauG1 + sG1*c;
-        const l2 = lBetaTauG1 + sG1*c;
+        const l1 = sTauG1.slice(sG1*c, sG1*c + sG1);
+        const l2 = sBetaTauG1.slice(sG1*c, sG1*c + sG1);
         if (typeof A[s] === "undefined") A[s] = [];
         A[s].push([l1, coef]);
 
@@ -4604,9 +4624,9 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
         const s = await fdR1cs.readULE32();
         const coef = await fdR1cs.read(r1cs.n8);
 
-        const l1 = lTauG1 + sG1*c;
-        const l2 = lTauG2 + sG2*c;
-        const l3 = lAlphaTauG1 + sG1*c;
+        const l1 = sTauG1.slice(sG1*c, sG1*c + sG1);
+        const l2 = sTauG2.slice(sG2*c, sG2*c + sG2);
+        const l3 = sAlphaTauG1.slice(sG1*c, sG1*c + sG1);
         if (typeof B1[s] === "undefined") B1[s] = [];
         B1[s].push([l1, coef]);
         if (typeof B2[s] === "undefined") B2[s] = [];
@@ -4629,7 +4649,7 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
         const s = await fdR1cs.readULE32();
         const coef = await fdR1cs.read(r1cs.n8);
 
-        const l1 = lTauG1 + sG1*c;
+        const l1 = sTauG1.slice(sG1*c, sG1*c + sG1);
         if (s <= nPublic) {
             if (typeof IC[s] === "undefined") IC[s] = [];
             IC[s].push([l1, coef]);
@@ -4643,8 +4663,8 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
     const bOne = new Uint8Array(curve.Fr.n8);
     curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1));
     for (let s = 0; s <= nPublic ; s++) {
-        const l1 = lTauG1 + sG1*(r1cs.nConstraints + s);
-        const l2 = lBetaTauG1 + sG1*(r1cs.nConstraints + s);
+        const l1 = sTauG1.slice(sG1*(r1cs.nConstraints + s), sG1*(r1cs.nConstraints + s) + sG1);
+        const l2 = sBetaTauG1.slice(sG1*(r1cs.nConstraints + s), sG1*(r1cs.nConstraints + s) + sG1);
         if (typeof A[s] === "undefined") A[s] = [];
         A[s].push([l1, bOne]);
         if (typeof IC[s] === "undefined") IC[s] = [];
@@ -4799,34 +4819,16 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
         let pB =0;
         let pS =0;
 
-        let readOps = [];
-        let scalars = [];
         let offset = 0;
        for (let i=0; i<arr.length; i++) {
             if (!arr[i]) continue;
             for (let j=0; j<arr[i].length; j++) {
-                if (readOps.length > 2<<14) {
-                    logger.debug(`${sectionName}: Long MExp Load ${j}/${arr[i].length}`);
-
-                    const points = await Promise.all(readOps);
-                    for (let k=0; k<points.length; k++) {
-                        bBases.set(points[k], (offset+k)*sGin);
-                        bScalars.set(scalars[k], (offset+k)*curve.Fr.n8);
-                    }
-                    offset += readOps.length;
-                    readOps = [];
-                    scalars = [];
-                }
-                scalars.push(arr[i][j][1]);
-                readOps.push(fdPTau.read(sGin, arr[i][j][0]));
+                bBases.set(arr[i][j][0], offset*sGin);
+                bScalars.set(arr[i][j][1], offset*curve.Fr.n8);
+                offset ++;
             }
         }
 
-        const points = await Promise.all(readOps);
-        for (let i=0; i<points.length; i++) {
-            bBases.set(points[i], (offset+i)*sGin);
-            bScalars.set(scalars[i], (offset+i)*curve.Fr.n8);
-        }
-
         if (arr.length>1) {
             const task = [];
build/snarkjs.min.js (vendored, 2 lines changed): file diff suppressed because one or more lines are too long
package-lock.json (generated, 17 lines changed):
@@ -36,9 +36,9 @@
       "integrity": "sha512-Xzdyxqm1bOFF6pdIsiHLLl3HkSLjbhqJHVyqaTxXt3RqXBEnmsUmEW47H7VOi/ak7TdkRpNkxjyK5Zbkm+y52g=="
     },
     "@iden3/binfileutils": {
-      "version": "0.0.2",
-      "resolved": "https://registry.npmjs.org/@iden3/binfileutils/-/binfileutils-0.0.2.tgz",
-      "integrity": "sha512-hrIV3d3SfoQC0HT2oRILxVsxwXwrRFMIZsOW1Ag+pqESNUYYPs651sHPzveM9BN7PQOzMMBWpkk813pqbzFJ9A==",
+      "version": "0.0.3",
+      "resolved": "https://registry.npmjs.org/@iden3/binfileutils/-/binfileutils-0.0.3.tgz",
+      "integrity": "sha512-TSKsxU0RKkzRQFUEIQ5zLGF+yzfi60y+eIpY4U71BX5qWan/p+gcOTeO+iyZ6jwBarAKM/+tQpzH5LupYXRYDA==",
       "requires": {
         "fastfile": "0.0.18",
         "ffjavascript": "^0.2.23"
@@ -1536,6 +1536,17 @@
       "@iden3/binfileutils": "0.0.2",
       "fastfile": "0.0.18",
       "ffjavascript": "0.2.24"
     },
+    "dependencies": {
+      "@iden3/binfileutils": {
+        "version": "0.0.2",
+        "resolved": "https://registry.npmjs.org/@iden3/binfileutils/-/binfileutils-0.0.2.tgz",
+        "integrity": "sha512-hrIV3d3SfoQC0HT2oRILxVsxwXwrRFMIZsOW1Ag+pqESNUYYPs651sHPzveM9BN7PQOzMMBWpkk813pqbzFJ9A==",
+        "requires": {
+          "fastfile": "0.0.18",
+          "ffjavascript": "^0.2.23"
+        }
+      }
+    }
   },
   "randombytes": {
package.json:

@@ -38,6 +38,7 @@
     "url": "https://github.com/iden3/snarkjs.git"
   },
   "dependencies": {
+    "@iden3/binfileutils": "0.0.3",
     "blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git",
     "circom_runtime": "0.1.5",
     "fastfile": "0.0.18",
src/binfileutils.js (deleted):

@@ -1,139 +0,0 @@
-
-import { Scalar } from "ffjavascript";
-import * as fastFile from "fastfile";
-
-export async function readBinFile(fileName, type, maxVersion, cacheSize, pageSize) {
-
-    const fd = await fastFile.readExisting(fileName, cacheSize, pageSize);
-
-    const b = await fd.read(4);
-    let readedType = "";
-    for (let i=0; i<4; i++) readedType += String.fromCharCode(b[i]);
-
-    if (readedType != type) throw new Error(fileName + ": Invalid File format");
-
-    let v = await fd.readULE32();
-
-    if (v>maxVersion) throw new Error("Version not supported");
-
-    const nSections = await fd.readULE32();
-
-    // Scan sections
-    let sections = [];
-    for (let i=0; i<nSections; i++) {
-        let ht = await fd.readULE32();
-        let hl = await fd.readULE64();
-        if (typeof sections[ht] == "undefined") sections[ht] = [];
-        sections[ht].push({
-            p: fd.pos,
-            size: hl
-        });
-        fd.pos += hl;
-    }
-
-    return {fd, sections};
-}
-
-export async function createBinFile(fileName, type, version, nSections, cacheSize, pageSize) {
-
-    const fd = await fastFile.createOverride(fileName, cacheSize, pageSize);
-
-    const buff = new Uint8Array(4);
-    for (let i=0; i<4; i++) buff[i] = type.charCodeAt(i);
-    await fd.write(buff, 0); // Magic "r1cs"
-
-    await fd.writeULE32(version); // Version
-    await fd.writeULE32(nSections); // Number of Sections
-
-    return fd;
-}
-
-export async function startWriteSection(fd, idSection) {
-    if (typeof fd.writingSection !== "undefined") throw new Error("Already writing a section");
-    await fd.writeULE32(idSection); // Header type
-    fd.writingSection = {
-        pSectionSize: fd.pos
-    };
-    await fd.writeULE64(0); // Temporally set to 0 length
-}
-
-export async function endWriteSection(fd) {
-    if (typeof fd.writingSection === "undefined") throw new Error("Not writing a section");
-
-    const sectionSize = fd.pos - fd.writingSection.pSectionSize - 8;
-    const oldPos = fd.pos;
-    fd.pos = fd.writingSection.pSectionSize;
-    await fd.writeULE64(sectionSize);
-    fd.pos = oldPos;
-    delete fd.writingSection;
-}
-
-export async function startReadUniqueSection(fd, sections, idSection) {
-    if (typeof fd.readingSection !== "undefined") throw new Error("Already reading a section");
-    if (!sections[idSection]) throw new Error(fd.fileName + ": Missing section "+ idSection );
-    if (sections[idSection].length>1) throw new Error(fd.fileName +": Section Duplicated " +idSection);
-
-    fd.pos = sections[idSection][0].p;
-
-    fd.readingSection = sections[idSection][0];
-}
-
-export async function endReadSection(fd, noCheck) {
-    if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
-    if (!noCheck) {
-        if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading");
-    }
-    delete fd.readingSection;
-}
-
-export async function writeBigInt(fd, n, n8, pos) {
-    const buff = new Uint8Array(n8);
-    Scalar.toRprLE(buff, 0, n, n8);
-    await fd.write(buff, pos);
-}
-
-export async function readBigInt(fd, n8, pos) {
-    const buff = await fd.read(n8, pos);
-    return Scalar.fromRprLE(buff, 0, n8);
-}
-
-export async function copySection(fdFrom, sections, fdTo, sectionId, size) {
-    if (typeof size === "undefined") {
-        size = sections[sectionId][0].size;
-    }
-    const chunkSize = fdFrom.pageSize;
-    await startReadUniqueSection(fdFrom, sections, sectionId);
-    await startWriteSection(fdTo, sectionId);
-    for (let p=0; p<size; p+=chunkSize) {
-        const l = Math.min(size -p, chunkSize);
-        const buff = await fdFrom.read(l);
-        await fdTo.write(buff);
-    }
-    await endWriteSection(fdTo);
-    await endReadSection(fdFrom, size != sections[sectionId][0].size);
-
-}
-
-export async function readFullSection(fd, sections, idSection) {
-    await startReadUniqueSection(fd, sections, idSection);
-    const res = await fd.read(fd.readingSection.size);
-    await endReadSection(fd);
-    return res;
-}
-
-export async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
-    const MAX_BUFF_SIZE = fd1.pageSize * 16;
-    await startReadUniqueSection(fd1, sections1, idSection);
-    await startReadUniqueSection(fd2, sections2, idSection);
-    if (sections1[idSection][0].size != sections2[idSection][0].size) return false;
-    const totalBytes=sections1[idSection][0].size;
-    for (let i=0; i<totalBytes; i+= MAX_BUFF_SIZE) {
-        const n = Math.min(totalBytes-i, MAX_BUFF_SIZE);
-        const buff1 = await fd1.read(n);
-        const buff2 = await fd2.read(n);
-        for (let j=0; j<n; j++) if (buff1[j] != buff2[j]) return false;
-    }
-    await endReadSection(fd1);
-    await endReadSection(fd2);
-    return true;
-}
@@ -1,4 +1,4 @@
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as zkeyUtils from "./zkey_utils.js";
 import * as wtnsUtils from "./wtns_utils.js";
 import { getCurveFromQ as getCurve } from "./curves.js";

@@ -1,5 +1,5 @@
 
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 
 /*
     This function creates a new section in the fdTo file with id idSection.

@@ -1,7 +1,7 @@
 import Blake2b from "blake2b-wasm";
 import * as utils from "./powersoftau_utils.js";
 import * as misc from "./misc.js";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 
 
 

@@ -8,7 +8,7 @@
 import Blake2b from "blake2b-wasm";
 import * as utils from "./powersoftau_utils.js";
 import * as keyPair from "./keypair.js";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as misc from "./misc.js";
 
 export default async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logger) {

@@ -1,4 +1,4 @@
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as utils from "./powersoftau_utils.js";
 import * as fastFile from "fastfile";
 import {BigBuffer} from "ffjavascript";

@@ -9,7 +9,7 @@
 import * as fastFile from "fastfile";
 import Blake2b from "blake2b-wasm";
 import * as utils from "./powersoftau_utils.js";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as misc from "./misc.js";
 
 export default async function exportChallenge(pTauFilename, challengeFilename, logger) {

@@ -1,5 +1,5 @@
 import * as utils from "./powersoftau_utils.js";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 
 export default async function exportJson(pTauFilename, verbose) {
     const {fd, sections} = await binFileUtils.readBinFile(pTauFilename, "ptau", 1);

@@ -1,7 +1,7 @@
 import * as fastFile from "fastfile";
 import Blake2b from "blake2b-wasm";
 import * as utils from "./powersoftau_utils.js";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as misc from "./misc.js";
 
 export default async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, logger) {

@@ -47,7 +47,7 @@ contributions(7)
 */
 
 import * as ptauUtils from "./powersoftau_utils.js";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import Blake2b from "blake2b-wasm";
 import * as misc from "./misc.js";
 

@@ -1,4 +1,4 @@
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as utils from "./powersoftau_utils.js";
 import {BigBuffer} from "ffjavascript";
 

@@ -1,5 +1,5 @@
 
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as utils from "./powersoftau_utils.js";
 
 export default async function truncate(ptauFilename, template, logger) {

@@ -2,7 +2,7 @@ import Blake2b from "blake2b-wasm";
 import * as utils from "./powersoftau_utils.js";
 import * as keyPair from "./keypair.js";
 import crypto from "crypto";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import { ChaCha, BigBuffer } from "ffjavascript";
 import * as misc from "./misc.js";
 const sameRatio = misc.sameRatio;

@@ -1,7 +1,7 @@
 import * as fastFile from "fastfile";
 import circomRuntime from "circom_runtime";
 import * as wtnsUtils from "./wtns_utils.js";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 
 const { WitnessCalculatorBuilder } = circomRuntime;
 

@@ -1,7 +1,7 @@
 import * as fastFile from "fastfile";
 import circomRuntime from "circom_runtime";
 import * as wtnsUtils from "./wtns_utils.js";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import loadSyms from "./loadsyms.js";
 
 const { WitnessCalculatorBuilder } = circomRuntime;

@@ -1,6 +1,6 @@
 import { Scalar } from "ffjavascript";
 
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 
 
 export async function write(fd, witness, prime) {

@@ -1,4 +1,4 @@
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as zkeyUtils from "./zkey_utils.js";
 import { getCurveFromQ as getCurve } from "./curves.js";
 import * as misc from "./misc.js";

@@ -1,5 +1,5 @@
 
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as zkeyUtils from "./zkey_utils.js";
 import { getCurveFromQ as getCurve } from "./curves.js";
 import * as misc from "./misc.js";

@@ -1,5 +1,5 @@
 
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as zkeyUtils from "./zkey_utils.js";
 import * as fastFile from "fastfile";
 import { getCurveFromQ as getCurve } from "./curves.js";

@@ -1,4 +1,4 @@
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as zkeyUtils from "./zkey_utils.js";
 import { getCurveFromQ as getCurve } from "./curves.js";
 import { utils } from "ffjavascript";

@@ -1,5 +1,5 @@
 import * as zkeyUtils from "./zkey_utils.js";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as fastFile from "fastfile";
 import { getCurveFromQ as getCurve } from "./curves.js";
 import * as misc from "./misc.js";
@@ -1,7 +1,16 @@
 
 import {readR1csHeader} from "r1csfile";
 import * as utils from "./powersoftau_utils.js";
-import * as binFileUtils from "./binfileutils.js";
+import {
+    readBinFile,
+    createBinFile,
+    readSection,
+    writeBigInt,
+    startWriteSection,
+    endWriteSection,
+    startReadUniqueSection,
+    endReadSection
+} from "@iden3/binfileutils";
 import { log2, formatHash } from "./misc.js";
 import { Scalar, BigBuffer } from "ffjavascript";
 import Blake2b from "blake2b-wasm";
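
src/zkey_new.js alone swaps its namespace import for named imports, picking up the new readSection, so every helper call in the body hunks below loses the binFileUtils. prefix:

    // before: await binFileUtils.startWriteSection(fdZKey, 1);
    // after:  await startWriteSection(fdZKey, 1);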
@@ -12,13 +21,13 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
     await Blake2b.ready();
     const csHasher = Blake2b(64);
 
-    const {fd: fdR1cs, sections: sectionsR1cs} = await binFileUtils.readBinFile(r1csName, "r1cs", 1, 1<<22, 1<<24);
+    const {fd: fdR1cs, sections: sectionsR1cs} = await readBinFile(r1csName, "r1cs", 1, 1<<22, 1<<24);
     const r1cs = await readR1csHeader(fdR1cs, sectionsR1cs, false);
 
-    const {fd: fdPTau, sections: sectionsPTau} = await binFileUtils.readBinFile(ptauName, "ptau", 1);
+    const {fd: fdPTau, sections: sectionsPTau} = await readBinFile(ptauName, "ptau", 1);
     const {curve, power} = await utils.readPTauHeader(fdPTau, sectionsPTau);
 
-    const fdZKey = await binFileUtils.createBinFile(zkeyName, "zkey", 1, 10, 1<<22, 1<<24);
+    const fdZKey = await createBinFile(zkeyName, "zkey", 1, 10, 1<<22, 1<<24);
 
     const sG1 = curve.G1.F.n8*2;
     const sG2 = curve.G2.F.n8*2;

@@ -45,14 +54,14 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
 
     // Write the header
     ///////////
-    await binFileUtils.startWriteSection(fdZKey, 1);
+    await startWriteSection(fdZKey, 1);
     await fdZKey.writeULE32(1); // Groth
-    await binFileUtils.endWriteSection(fdZKey);
+    await endWriteSection(fdZKey);
 
     // Write the Groth header section
     ///////////
 
-    await binFileUtils.startWriteSection(fdZKey, 2);
+    await startWriteSection(fdZKey, 2);
     const primeQ = curve.q;
     const n8q = (Math.floor( (Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
 

@@ -62,9 +71,9 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
     const R2r = curve.Fr.e(Scalar.mod(Scalar.mul(Rr,Rr), primeR));
 
     await fdZKey.writeULE32(n8q);
-    await binFileUtils.writeBigInt(fdZKey, primeQ, n8q);
+    await writeBigInt(fdZKey, primeQ, n8q);
     await fdZKey.writeULE32(n8r);
-    await binFileUtils.writeBigInt(fdZKey, primeR, n8r);
+    await writeBigInt(fdZKey, primeR, n8r);
     await fdZKey.writeULE32(r1cs.nVars); // Total number of bars
     await fdZKey.writeULE32(nPublic); // Total number of public vars (not including ONE)
     await fdZKey.writeULE32(domainSize); // domainSize

@@ -102,7 +111,7 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
     csHasher.update(bg2U); // gamma2
     csHasher.update(bg1U); // delta1
     csHasher.update(bg2U); // delta2
-    await binFileUtils.endWriteSection(fdZKey);
+    await endWriteSection(fdZKey);
 
 
     const A = new BigArray(r1cs.nVars);
@@ -114,13 +123,13 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
     const buffCoeff = new Uint8Array(12 + curve.Fr.n8);
     const buffCoeffV = new DataView(buffCoeff.buffer);
 
-    const lTauG1 = sectionsPTau[12][0].p + ((2 ** cirPower) -1)*sG1;
-    const lTauG2 = sectionsPTau[13][0].p + ((2 ** cirPower) -1)*sG2;
-    const lAlphaTauG1 = sectionsPTau[14][0].p + ((2 ** cirPower) -1)*sG1;
-    const lBetaTauG1 = sectionsPTau[15][0].p + ((2 ** cirPower) -1)*sG1;
+    const sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize -1)*sG1, domainSize*sG1);
+    const sTauG2 = await readSection(fdPTau, sectionsPTau, 13, (domainSize -1)*sG2, domainSize*sG2);
+    const sAlphaTauG1 = await readSection(fdPTau, sectionsPTau, 14, (domainSize -1)*sG1, domainSize*sG1);
+    const sBetaTauG1 = await readSection(fdPTau, sectionsPTau, 15, (domainSize -1)*sG1, domainSize*sG1);
 
-    await binFileUtils.startWriteSection(fdZKey, 4);
-    await binFileUtils.startReadUniqueSection(fdR1cs, sectionsR1cs, 2);
+    await startWriteSection(fdZKey, 4);
+    await startReadUniqueSection(fdR1cs, sectionsR1cs, 2);
 
     const pNCoefs = fdZKey.pos;
     let nCoefs = 0;
@@ -132,8 +141,8 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
         const s = await fdR1cs.readULE32();
         const coef = await fdR1cs.read(r1cs.n8);
 
-        const l1 = lTauG1 + sG1*c;
-        const l2 = lBetaTauG1 + sG1*c;
+        const l1 = sTauG1.slice(sG1*c, sG1*c + sG1);
+        const l2 = sBetaTauG1.slice(sG1*c, sG1*c + sG1);
         if (typeof A[s] === "undefined") A[s] = [];
         A[s].push([l1, coef]);
 

@@ -153,9 +162,9 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
         const s = await fdR1cs.readULE32();
         const coef = await fdR1cs.read(r1cs.n8);
 
-        const l1 = lTauG1 + sG1*c;
-        const l2 = lTauG2 + sG2*c;
-        const l3 = lAlphaTauG1 + sG1*c;
+        const l1 = sTauG1.slice(sG1*c, sG1*c + sG1);
+        const l2 = sTauG2.slice(sG2*c, sG2*c + sG2);
+        const l3 = sAlphaTauG1.slice(sG1*c, sG1*c + sG1);
         if (typeof B1[s] === "undefined") B1[s] = [];
         B1[s].push([l1, coef]);
         if (typeof B2[s] === "undefined") B2[s] = [];

@@ -178,7 +187,7 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
         const s = await fdR1cs.readULE32();
         const coef = await fdR1cs.read(r1cs.n8);
 
-        const l1 = lTauG1 + sG1*c;
+        const l1 = sTauG1.slice(sG1*c, sG1*c + sG1);
         if (s <= nPublic) {
             if (typeof IC[s] === "undefined") IC[s] = [];
             IC[s].push([l1, coef]);

@@ -192,8 +201,8 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
     const bOne = new Uint8Array(curve.Fr.n8);
     curve.Fr.toRprLE(bOne, 0, curve.Fr.e(1));
     for (let s = 0; s <= nPublic ; s++) {
-        const l1 = lTauG1 + sG1*(r1cs.nConstraints + s);
-        const l2 = lBetaTauG1 + sG1*(r1cs.nConstraints + s);
+        const l1 = sTauG1.slice(sG1*(r1cs.nConstraints + s), sG1*(r1cs.nConstraints + s) + sG1);
+        const l2 = sBetaTauG1.slice(sG1*(r1cs.nConstraints + s), sG1*(r1cs.nConstraints + s) + sG1);
         if (typeof A[s] === "undefined") A[s] = [];
         A[s].push([l1, bOne]);
         if (typeof IC[s] === "undefined") IC[s] = [];

@@ -206,8 +215,8 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
     await fdZKey.writeULE32(nCoefs, pNCoefs);
     fdZKey.pos = oldPos;
 
-    await binFileUtils.endWriteSection(fdZKey);
-    await binFileUtils.endReadSection(fdR1cs);
+    await endWriteSection(fdZKey);
+    await endReadSection(fdR1cs);
 
     /*
     zKey.hExps = new Array(zKey.domainSize-1);

@@ -221,7 +230,7 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
     await composeAndWritePoints(3, "G1", IC, "IC");
 
     // Write Hs
-    await binFileUtils.startWriteSection(fdZKey, 9);
+    await startWriteSection(fdZKey, 9);
     const o = sectionsPTau[12][0].p + ((2 ** (cirPower+1)) -1)*sG1;
 
     if (cirPower < curve.Fr.s) {

@@ -237,7 +246,7 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
         if (logger) logger.error("Circuit too big");
         throw new Error("Circuit too big for this curve");
     }
-    await binFileUtils.endWriteSection(fdZKey);
+    await endWriteSection(fdZKey);
     await hashHPoints();
 
     await composeAndWritePoints(8, "G1", C, "C");

@@ -247,10 +256,10 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
 
     const csHash = csHasher.digest();
     // Contributions section
-    await binFileUtils.startWriteSection(fdZKey, 10);
+    await startWriteSection(fdZKey, 10);
     await fdZKey.write(csHash);
     await fdZKey.writeULE32(0);
-    await binFileUtils.endWriteSection(fdZKey);
+    await endWriteSection(fdZKey);
 
     if (logger) logger.info(formatHash(csHash, "Circuit hash: "));
 

@@ -276,7 +285,7 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
         const G = curve[groupName];
 
         hashU32(arr.length);
-        await binFileUtils.startWriteSection(fdZKey, idSection);
+        await startWriteSection(fdZKey, idSection);
 
         let opPromises = [];
 

@@ -312,7 +321,7 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
             opPromises = [];
 
         }
-        await binFileUtils.endWriteSection(fdZKey);
+        await endWriteSection(fdZKey);
 
     }
 
@@ -348,34 +357,16 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
         let pB =0;
         let pS =0;
 
-        let readOps = [];
-        let scalars = [];
         let offset = 0;
         for (let i=0; i<arr.length; i++) {
             if (!arr[i]) continue;
             for (let j=0; j<arr[i].length; j++) {
-                if (readOps.length > 2<<14) {
-                    logger.debug(`${sectionName}: Long MExp Load ${j}/${arr[i].length}`);
-
-                    const points = await Promise.all(readOps);
-                    for (let k=0; k<points.length; k++) {
-                        bBases.set(points[k], (offset+k)*sGin);
-                        bScalars.set(scalars[k], (offset+k)*curve.Fr.n8);
-                    }
-                    offset += readOps.length;
-                    readOps = [];
-                    scalars = [];
-                }
-                scalars.push(arr[i][j][1]);
-                readOps.push(fdPTau.read(sGin, arr[i][j][0]));
+                bBases.set(arr[i][j][0], offset*sGin);
+                bScalars.set(arr[i][j][1], offset*curve.Fr.n8);
+                offset ++;
            }
         }
 
-        const points = await Promise.all(readOps);
-        for (let i=0; i<points.length; i++) {
-            bBases.set(points[i], (offset+i)*sGin);
-            bScalars.set(scalars[i], (offset+i)*curve.Fr.n8);
-        }
-
         if (arr.length>1) {
             const task = [];
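
Summing up the source-side change: zkey new used to issue one small positioned read against the .ptau file for every coefficient of every constraint, and again when composing the multi-exponentiation buffers; it now performs four large sequential readSection calls and serves everything else from memory, which is where the speedup in the commit title comes from.

    // Rough I/O profile of the change (illustrative, not measured in the commit):
    //   before: O(total R1CS coefficients) reads of sG1/sG2 bytes each, scattered positions
    //   after:  4 sequential reads of domainSize*sG1 (or sG2) bytes, then in-memory slices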
@@ -27,7 +27,7 @@
 // Contributions(10)
 
 import { Scalar, F1Field } from "ffjavascript";
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 
 import { getCurveFromQ as getCurve } from "./curves.js";
 import { log2 } from "./misc.js";
@@ -1,4 +1,4 @@
-import * as binFileUtils from "./binfileutils.js";
+import * as binFileUtils from "@iden3/binfileutils";
 import * as zkeyUtils from "./zkey_utils.js";
 import { getCurveFromQ as getCurve } from "./curves.js";
 import Blake2b from "blake2b-wasm";