deps

parent b199b284a4
commit b09d30fe0c

This commit bumps @iden3/binfileutils from 0.0.6 to 0.0.7 in package.json, regenerates the bundled build artifacts (where the inlined binfileutils helpers now carry a $1 suffix), and switches the readFullSection call sites over to readSection.

build/cli.cjs (428 changed lines)
@ -938,40 +938,6 @@ async function readBinFile(fileName, type, maxVersion, cacheSize, pageSize) {
|
||||
return {fd, sections};
|
||||
}
|
||||
|
||||
async function createBinFile(fileName, type, version, nSections, cacheSize, pageSize) {
|
||||
|
||||
const fd = await createOverride(fileName, cacheSize, pageSize);
|
||||
|
||||
const buff = new Uint8Array(4);
|
||||
for (let i=0; i<4; i++) buff[i] = type.charCodeAt(i);
|
||||
await fd.write(buff, 0); // Magic "r1cs"
|
||||
|
||||
await fd.writeULE32(version); // Version
|
||||
await fd.writeULE32(nSections); // Number of Sections
|
||||
|
||||
return fd;
|
||||
}
|
||||
|
||||
async function startWriteSection(fd, idSection) {
|
||||
if (typeof fd.writingSection !== "undefined") throw new Error("Already writing a section");
|
||||
await fd.writeULE32(idSection); // Header type
|
||||
fd.writingSection = {
|
||||
pSectionSize: fd.pos
|
||||
};
|
||||
await fd.writeULE64(0); // Temporally set to 0 length
|
||||
}
|
||||
|
||||
async function endWriteSection(fd) {
|
||||
if (typeof fd.writingSection === "undefined") throw new Error("Not writing a section");
|
||||
|
||||
const sectionSize = fd.pos - fd.writingSection.pSectionSize - 8;
|
||||
const oldPos = fd.pos;
|
||||
fd.pos = fd.writingSection.pSectionSize;
|
||||
await fd.writeULE64(sectionSize);
|
||||
fd.pos = oldPos;
|
||||
delete fd.writingSection;
|
||||
}
|
||||
|
||||
async function startReadUniqueSection(fd, sections, idSection) {
|
||||
if (typeof fd.readingSection !== "undefined") throw new Error("Already reading a section");
|
||||
if (!sections[idSection]) throw new Error(fd.fileName + ": Missing section "+ idSection );
|
||||
@ -990,41 +956,11 @@ async function endReadSection(fd, noCheck) {
|
||||
delete fd.readingSection;
|
||||
}
|
||||
|
||||
async function writeBigInt(fd, n, n8, pos) {
|
||||
const buff = new Uint8Array(n8);
|
||||
ffjavascript.Scalar.toRprLE(buff, 0, n, n8);
|
||||
await fd.write(buff, pos);
|
||||
}
|
||||
|
||||
async function readBigInt(fd, n8, pos) {
|
||||
const buff = await fd.read(n8, pos);
|
||||
return ffjavascript.Scalar.fromRprLE(buff, 0, n8);
|
||||
}
|
||||
|
||||
async function copySection(fdFrom, sections, fdTo, sectionId, size) {
|
||||
if (typeof size === "undefined") {
|
||||
size = sections[sectionId][0].size;
|
||||
}
|
||||
const chunkSize = fdFrom.pageSize;
|
||||
await startReadUniqueSection(fdFrom, sections, sectionId);
|
||||
await startWriteSection(fdTo, sectionId);
|
||||
for (let p=0; p<size; p+=chunkSize) {
|
||||
const l = Math.min(size -p, chunkSize);
|
||||
const buff = await fdFrom.read(l);
|
||||
await fdTo.write(buff);
|
||||
}
|
||||
await endWriteSection(fdTo);
|
||||
await endReadSection(fdFrom, size != sections[sectionId][0].size);
|
||||
|
||||
}
|
||||
|
||||
async function readFullSection(fd, sections, idSection) {
|
||||
await startReadUniqueSection(fd, sections, idSection);
|
||||
const res = await fd.read(fd.readingSection.size);
|
||||
await endReadSection(fd);
|
||||
return res;
|
||||
}
|
||||
|
||||
async function readSection(fd, sections, idSection, offset, length) {
|
||||
|
||||
offset = (typeof offset === "undefined") ? 0 : offset;
|
||||
@ -1045,23 +981,6 @@ async function readSection(fd, sections, idSection, offset, length) {
|
||||
return buff;
|
||||
}
|
||||
|
||||
async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
|
||||
const MAX_BUFF_SIZE = fd1.pageSize * 16;
|
||||
await startReadUniqueSection(fd1, sections1, idSection);
|
||||
await startReadUniqueSection(fd2, sections2, idSection);
|
||||
if (sections1[idSection][0].size != sections2[idSection][0].size) return false;
|
||||
const totalBytes=sections1[idSection][0].size;
|
||||
for (let i=0; i<totalBytes; i+= MAX_BUFF_SIZE) {
|
||||
const n = Math.min(totalBytes-i, MAX_BUFF_SIZE);
|
||||
const buff1 = await fd1.read(n);
|
||||
const buff2 = await fd2.read(n);
|
||||
for (let j=0; j<n; j++) if (buff1[j] != buff2[j]) return false;
|
||||
}
|
||||
await endReadSection(fd1);
|
||||
await endReadSection(fd2);
|
||||
return true;
|
||||
}
|
||||
|
||||
async function readR1csHeader(fd,sections,singleThread) {
|
||||
|
||||
|
||||
@ -2129,6 +2048,155 @@ function keyFromBeacon(curve, challengeHash, beaconHash, numIterationsExp) {
|
||||
return key;
|
||||
}
|
||||
|
||||
async function readBinFile$1(fileName, type, maxVersion, cacheSize, pageSize) {
|
||||
|
||||
const fd = await readExisting$2(fileName, cacheSize, pageSize);
|
||||
|
||||
const b = await fd.read(4);
|
||||
let readedType = "";
|
||||
for (let i=0; i<4; i++) readedType += String.fromCharCode(b[i]);
|
||||
|
||||
if (readedType != type) throw new Error(fileName + ": Invalid File format");
|
||||
|
||||
let v = await fd.readULE32();
|
||||
|
||||
if (v>maxVersion) throw new Error("Version not supported");
|
||||
|
||||
const nSections = await fd.readULE32();
|
||||
|
||||
// Scan sections
|
||||
let sections = [];
|
||||
for (let i=0; i<nSections; i++) {
|
||||
let ht = await fd.readULE32();
|
||||
let hl = await fd.readULE64();
|
||||
if (typeof sections[ht] == "undefined") sections[ht] = [];
|
||||
sections[ht].push({
|
||||
p: fd.pos,
|
||||
size: hl
|
||||
});
|
||||
fd.pos += hl;
|
||||
}
|
||||
|
||||
return {fd, sections};
|
||||
}
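
A note on the layout readBinFile$1 parses: a 4-byte ASCII magic matching the requested type, a little-endian 32-bit version, a 32-bit section count, and then one record per section made of a 32-bit type, a 64-bit length and the payload. A minimal sketch of the same scan using plain Node.js fs instead of the fastfile descriptor used above; the file name and the direct fs usage are illustrative only.

// Sketch only: decode the container header with plain Node.js buffers.
const fs = require("fs");

function scanBinFile(fileName) {                          // fileName is hypothetical
    const buf = fs.readFileSync(fileName);
    const type = buf.toString("ascii", 0, 4);             // magic, e.g. "ptau"
    const version = buf.readUInt32LE(4);                  // ULE32 version
    const nSections = buf.readUInt32LE(8);                // ULE32 section count
    const sections = [];
    let pos = 12;
    for (let i = 0; i < nSections; i++) {
        const ht = buf.readUInt32LE(pos);                 // section type
        const hl = buf.readBigUInt64LE(pos + 4);          // section length (ULE64)
        sections.push({ type: ht, offset: pos + 12, size: hl });
        pos += 12 + Number(hl);                           // skip the payload
    }
    return { type, version, sections };
}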
|
||||
|
||||
async function createBinFile(fileName, type, version, nSections, cacheSize, pageSize) {
|
||||
|
||||
const fd = await createOverride(fileName, cacheSize, pageSize);
|
||||
|
||||
const buff = new Uint8Array(4);
|
||||
for (let i=0; i<4; i++) buff[i] = type.charCodeAt(i);
|
||||
await fd.write(buff, 0); // Magic "r1cs"
|
||||
|
||||
await fd.writeULE32(version); // Version
|
||||
await fd.writeULE32(nSections); // Number of Sections
|
||||
|
||||
return fd;
|
||||
}
|
||||
|
||||
async function startWriteSection(fd, idSection) {
|
||||
if (typeof fd.writingSection !== "undefined") throw new Error("Already writing a section");
|
||||
await fd.writeULE32(idSection); // Header type
|
||||
fd.writingSection = {
|
||||
pSectionSize: fd.pos
|
||||
};
|
||||
await fd.writeULE64(0); // Temporally set to 0 length
|
||||
}
|
||||
|
||||
async function endWriteSection(fd) {
|
||||
if (typeof fd.writingSection === "undefined") throw new Error("Not writing a section");
|
||||
|
||||
const sectionSize = fd.pos - fd.writingSection.pSectionSize - 8;
|
||||
const oldPos = fd.pos;
|
||||
fd.pos = fd.writingSection.pSectionSize;
|
||||
await fd.writeULE64(sectionSize);
|
||||
fd.pos = oldPos;
|
||||
delete fd.writingSection;
|
||||
}
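
The writer helpers above cooperate through an 8-byte placeholder: startWriteSection records where it wrote a zero length, and endWriteSection seeks back and patches in the real size (fd.pos - pSectionSize - 8). A minimal usage sketch, assuming these helpers are called on the descriptor returned by createBinFile; the file name, section id and payload are made up for illustration.

// Sketch only: write one section into a fresh container.
async function writeExample() {
    const fd = await createBinFile("out.ptau", "ptau", 1, 1);  // version 1, 1 section
    await startWriteSection(fd, 1);       // writes the section id and a zero ULE64 placeholder
    await fd.writeULE32(0xdeadbeef);      // 4-byte payload
    await endWriteSection(fd);            // back-patches the placeholder with the real length (4)
    await fd.close();
}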
|
||||
|
||||
async function startReadUniqueSection$1(fd, sections, idSection) {
|
||||
if (typeof fd.readingSection !== "undefined") throw new Error("Already reading a section");
|
||||
if (!sections[idSection]) throw new Error(fd.fileName + ": Missing section "+ idSection );
|
||||
if (sections[idSection].length>1) throw new Error(fd.fileName +": Section Duplicated " +idSection);
|
||||
|
||||
fd.pos = sections[idSection][0].p;
|
||||
|
||||
fd.readingSection = sections[idSection][0];
|
||||
}
|
||||
|
||||
async function endReadSection$1(fd, noCheck) {
|
||||
if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
|
||||
if (!noCheck) {
|
||||
if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading");
|
||||
}
|
||||
delete fd.readingSection;
|
||||
}
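
The reader-side counterpart: startReadUniqueSection$1 positions fd.pos at the section payload and endReadSection$1 (without noCheck) verifies that exactly the declared number of bytes was consumed. A sketch matching the writer example above; the file name and section id are illustrative, and the bundled $1 names are used only because that is how they appear in this file.

// Sketch only: read back the section written in the previous example.
async function readExample() {
    const { fd, sections } = await readBinFile$1("out.ptau", "ptau", 1);
    await startReadUniqueSection$1(fd, sections, 1);   // fd.pos now points at the payload
    const value = await fd.readULE32();
    await endReadSection$1(fd);                        // throws if the section was not fully read
    await fd.close();
    return value;
}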
|
||||
|
||||
async function writeBigInt(fd, n, n8, pos) {
|
||||
const buff = new Uint8Array(n8);
|
||||
ffjavascript.Scalar.toRprLE(buff, 0, n, n8);
|
||||
await fd.write(buff, pos);
|
||||
}
|
||||
|
||||
async function readBigInt$1(fd, n8, pos) {
|
||||
const buff = await fd.read(n8, pos);
|
||||
return ffjavascript.Scalar.fromRprLE(buff, 0, n8);
|
||||
}
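
writeBigInt and readBigInt$1 store field elements as fixed-width, n8-byte little-endian integers via ffjavascript's Scalar.toRprLE / fromRprLE. The same encoding expressed with native BigInt values, purely for reference (this is not the ffjavascript code path):

// Sketch only: fixed-width little-endian encoding of a BigInt n into n8 bytes.
function toLE(n, n8) {                                   // n is a BigInt
    const buff = new Uint8Array(n8);
    for (let i = 0; i < n8; i++) {
        buff[i] = Number(n & 0xffn);                      // least significant byte first
        n >>= 8n;
    }
    return buff;
}

function fromLE(buff) {
    let n = 0n;
    for (let i = buff.length - 1; i >= 0; i--) n = (n << 8n) | BigInt(buff[i]);
    return n;
}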
|
||||
|
||||
async function copySection(fdFrom, sections, fdTo, sectionId, size) {
|
||||
if (typeof size === "undefined") {
|
||||
size = sections[sectionId][0].size;
|
||||
}
|
||||
const chunkSize = fdFrom.pageSize;
|
||||
await startReadUniqueSection$1(fdFrom, sections, sectionId);
|
||||
await startWriteSection(fdTo, sectionId);
|
||||
for (let p=0; p<size; p+=chunkSize) {
|
||||
const l = Math.min(size -p, chunkSize);
|
||||
const buff = await fdFrom.read(l);
|
||||
await fdTo.write(buff);
|
||||
}
|
||||
await endWriteSection(fdTo);
|
||||
await endReadSection$1(fdFrom, size != sections[sectionId][0].size);
|
||||
|
||||
}
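
copySection streams a section between containers in pageSize chunks, so it never materialises the whole payload at once. A usage sketch with illustrative file names and section id:

// Sketch only: copy section 7 of one container into a new one.
async function copyOneSection() {
    const { fd: fdFrom, sections } = await readBinFile$1("old.ptau", "ptau", 1);
    const fdTo = await createBinFile("new.ptau", "ptau", 1, 1);
    await copySection(fdFrom, sections, fdTo, 7);       // chunked copy; section 7 assumed present
    await fdFrom.close();
    await fdTo.close();
}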
|
||||
|
||||
async function readSection$1(fd, sections, idSection, offset, length) {
|
||||
|
||||
offset = (typeof offset === "undefined") ? 0 : offset;
|
||||
length = (typeof length === "undefined") ? sections[idSection][0].size - offset : length;
|
||||
|
||||
if (offset + length > sections[idSection][0].size) {
|
||||
throw new Error("Reading out of the range of the section");
|
||||
}
|
||||
|
||||
let buff;
|
||||
if (length < (1 << 30) ) {
|
||||
buff = new Uint8Array(length);
|
||||
} else {
|
||||
buff = new ffjavascript.BigBuffer(length);
|
||||
}
|
||||
|
||||
await fd.readToBuffer(buff, 0, length, sections[idSection][0].p + offset);
|
||||
return buff;
|
||||
}
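
readSection$1 defaults offset to 0 and length to the remaining section size, so calling it with only (fd, sections, id) returns the whole section; buffers below 1 GiB (1&lt;&lt;30 bytes) come back as a Uint8Array and larger ones as an ffjavascript.BigBuffer. This is why the separate readFullSection helper disappears in this commit and its call sites switch to readSection / readSection$1. A sketch with an illustrative zkey file and section id:

// Sketch only: whole-section and partial reads (section 9 assumed to hold at least 64 bytes).
async function readWholeAndSlice() {
    const { fd, sections } = await readBinFile$1("circuit_final.zkey", "zkey", 2);
    const whole = await readSection$1(fd, sections, 9);           // entire section 9
    const head = await readSection$1(fd, sections, 9, 0, 64);     // first 64 bytes only
    await fd.close();
    return { whole, head };
}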
|
||||
|
||||
async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
|
||||
const MAX_BUFF_SIZE = fd1.pageSize * 16;
|
||||
await startReadUniqueSection$1(fd1, sections1, idSection);
|
||||
await startReadUniqueSection$1(fd2, sections2, idSection);
|
||||
if (sections1[idSection][0].size != sections2[idSection][0].size) return false;
|
||||
const totalBytes=sections1[idSection][0].size;
|
||||
for (let i=0; i<totalBytes; i+= MAX_BUFF_SIZE) {
|
||||
const n = Math.min(totalBytes-i, MAX_BUFF_SIZE);
|
||||
const buff1 = await fd1.read(n);
|
||||
const buff2 = await fd2.read(n);
|
||||
for (let j=0; j<n; j++) if (buff1[j] != buff2[j]) return false;
|
||||
}
|
||||
await endReadSection$1(fd1);
|
||||
await endReadSection$1(fd2);
|
||||
return true;
|
||||
}
|
||||
|
||||
/*
|
||||
Header(1)
|
||||
n8
|
||||
@ -2256,7 +2324,7 @@ async function newAccumulator(curve, power, fileName, logger) {
|
||||
|
||||
async function exportChallenge(pTauFilename, challengeFilename, logger) {
|
||||
await Blake2b.ready();
|
||||
const {fd: fdFrom, sections} = await readBinFile(pTauFilename, "ptau", 1);
|
||||
const {fd: fdFrom, sections} = await readBinFile$1(pTauFilename, "ptau", 1);
|
||||
|
||||
const {curve, power} = await readPTauHeader(fdFrom, sections);
|
||||
|
||||
@ -2306,7 +2374,7 @@ async function exportChallenge(pTauFilename, challengeFilename, logger) {
|
||||
const sG = G.F.n8*2;
|
||||
const nPointsChunk = Math.floor((1<<24)/sG);
|
||||
|
||||
await startReadUniqueSection(fdFrom, sections, sectionId);
|
||||
await startReadUniqueSection$1(fdFrom, sections, sectionId);
|
||||
for (let i=0; i< nPoints; i+= nPointsChunk) {
|
||||
if (logger) logger.debug(`Exporting ${sectionName}: ${i}/${nPoints}`);
|
||||
const n = Math.min(nPoints-i, nPointsChunk);
|
||||
@ -2316,7 +2384,7 @@ async function exportChallenge(pTauFilename, challengeFilename, logger) {
|
||||
await fdTo.write(buff);
|
||||
toHash.update(buff);
|
||||
}
|
||||
await endReadSection(fdFrom);
|
||||
await endReadSection$1(fdFrom);
|
||||
}
|
||||
|
||||
|
||||
@ -2329,7 +2397,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
|
||||
const noHash = new Uint8Array(64);
|
||||
for (let i=0; i<64; i++) noHash[i] = 0xFF;
|
||||
|
||||
const {fd: fdOld, sections} = await readBinFile(oldPtauFilename, "ptau", 1);
|
||||
const {fd: fdOld, sections} = await readBinFile$1(oldPtauFilename, "ptau", 1);
|
||||
const {curve, power} = await readPTauHeader(fdOld, sections);
|
||||
const contributions = await readContributions(fdOld, curve, sections);
|
||||
const currentContribution = {};
|
||||
@ -2636,7 +2704,7 @@ async function verify(tauFilename, logger) {
|
||||
let sr;
|
||||
await Blake2b.ready();
|
||||
|
||||
const {fd, sections} = await readBinFile(tauFilename, "ptau", 1);
|
||||
const {fd, sections} = await readBinFile$1(tauFilename, "ptau", 1);
|
||||
const {curve, power, ceremonyPower} = await readPTauHeader(fd, sections);
|
||||
const contrs = await readContributions(fd, curve, sections);
|
||||
|
||||
@ -2849,7 +2917,7 @@ async function verify(tauFilename, logger) {
|
||||
const MAX_CHUNK_SIZE = 1<<16;
|
||||
const G = curve[groupName];
|
||||
const sG = G.F.n8*2;
|
||||
await startReadUniqueSection(fd, sections, idSection);
|
||||
await startReadUniqueSection$1(fd, sections, idSection);
|
||||
|
||||
const singularPoints = [];
|
||||
|
||||
@ -2895,7 +2963,7 @@ async function verify(tauFilename, logger) {
|
||||
}
|
||||
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
return {
|
||||
R1: R1,
|
||||
@ -2949,7 +3017,7 @@ async function verify(tauFilename, logger) {
|
||||
buff_r = new Uint8Array(buff_r.buffer, buff_r.byteOffset, buff_r.byteLength);
|
||||
|
||||
if (logger) logger.debug(`reading points Powers${p}...`);
|
||||
await startReadUniqueSection(fd, sections, tauSection);
|
||||
await startReadUniqueSection$1(fd, sections, tauSection);
|
||||
buffG = new ffjavascript.BigBuffer(nPoints*sG);
|
||||
if (p == power+1) {
|
||||
await fd.readToBuffer(buffG, 0, (nPoints-1)*sG);
|
||||
@ -2957,7 +3025,7 @@ async function verify(tauFilename, logger) {
|
||||
} else {
|
||||
await fd.readToBuffer(buffG, 0, nPoints*sG);
|
||||
}
|
||||
await endReadSection(fd, true);
|
||||
await endReadSection$1(fd, true);
|
||||
|
||||
const resTau = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p);
|
||||
|
||||
@ -2984,10 +3052,10 @@ async function verify(tauFilename, logger) {
|
||||
buff_r = await curve.Fr.batchFromMontgomery(buff_r);
|
||||
|
||||
if (logger) logger.debug(`reading points Lagrange${p}...`);
|
||||
await startReadUniqueSection(fd, sections, lagrangeSection);
|
||||
await startReadUniqueSection$1(fd, sections, lagrangeSection);
|
||||
fd.pos += sG*((2 ** p)-1);
|
||||
await fd.readToBuffer(buffG, 0, nPoints*sG);
|
||||
await endReadSection(fd, true);
|
||||
await endReadSection$1(fd, true);
|
||||
|
||||
const resLagrange = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p + "_transformed");
|
||||
|
||||
@ -3014,7 +3082,7 @@ async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, group
|
||||
const sG = G.F.n8*2;
|
||||
const nPoints = sections[idSection][0].size / sG;
|
||||
|
||||
await startReadUniqueSection(fdOld, sections,idSection );
|
||||
await startReadUniqueSection$1(fdOld, sections,idSection );
|
||||
await startWriteSection(fdNew, idSection);
|
||||
|
||||
let t = first;
|
||||
@ -3029,7 +3097,7 @@ async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, group
|
||||
}
|
||||
|
||||
await endWriteSection(fdNew);
|
||||
await endReadSection(fdOld);
|
||||
await endReadSection$1(fdOld);
|
||||
}
|
||||
|
||||
|
||||
@ -3155,7 +3223,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,num
|
||||
|
||||
await Blake2b.ready();
|
||||
|
||||
const {fd: fdOld, sections} = await readBinFile(oldPtauFilename, "ptau", 1);
|
||||
const {fd: fdOld, sections} = await readBinFile$1(oldPtauFilename, "ptau", 1);
|
||||
const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
|
||||
if (power != ceremonyPower) {
|
||||
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
|
||||
@ -3306,7 +3374,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr,num
|
||||
async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logger) {
|
||||
await Blake2b.ready();
|
||||
|
||||
const {fd: fdOld, sections} = await readBinFile(oldPtauFilename, "ptau", 1);
|
||||
const {fd: fdOld, sections} = await readBinFile$1(oldPtauFilename, "ptau", 1);
|
||||
const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
|
||||
if (power != ceremonyPower) {
|
||||
if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
|
||||
@ -3460,7 +3528,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logge
|
||||
|
||||
async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
|
||||
|
||||
const {fd: fdOld, sections} = await readBinFile(oldPtauFilename, "ptau", 1);
|
||||
const {fd: fdOld, sections} = await readBinFile$1(oldPtauFilename, "ptau", 1);
|
||||
const {curve, power} = await readPTauHeader(fdOld, sections);
|
||||
|
||||
const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 11);
|
||||
@ -3511,14 +3579,14 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
|
||||
let buff;
|
||||
buff = new ffjavascript.BigBuffer(nPoints*sGin);
|
||||
|
||||
await startReadUniqueSection(fdOld, sections, oldSectionId);
|
||||
await startReadUniqueSection$1(fdOld, sections, oldSectionId);
|
||||
if ((oldSectionId == 2)&&(p==power+1)) {
|
||||
await fdOld.readToBuffer(buff, 0,(nPoints-1)*sGin );
|
||||
buff.set(curve.G1.zeroAffine, (nPoints-1)*sGin );
|
||||
} else {
|
||||
await fdOld.readToBuffer(buff, 0,nPoints*sGin );
|
||||
}
|
||||
await endReadSection(fdOld, true);
|
||||
await endReadSection$1(fdOld, true);
|
||||
|
||||
|
||||
buff = await G.lagrangeEvaluations(buff, "affine", "affine", logger, sectionName);
|
||||
@ -3579,7 +3647,7 @@ async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
|
||||
|
||||
async function truncate(ptauFilename, template, logger) {
|
||||
|
||||
const {fd: fdOld, sections} = await readBinFile(ptauFilename, "ptau", 1);
|
||||
const {fd: fdOld, sections} = await readBinFile$1(ptauFilename, "ptau", 1);
|
||||
const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
|
||||
|
||||
const sG1 = curve.G1.F.n8*2;
|
||||
@ -3622,7 +3690,7 @@ async function truncate(ptauFilename, template, logger) {
|
||||
|
||||
async function convert(oldPtauFilename, newPTauFilename, logger) {
|
||||
|
||||
const {fd: fdOld, sections} = await readBinFile(oldPtauFilename, "ptau", 1);
|
||||
const {fd: fdOld, sections} = await readBinFile$1(oldPtauFilename, "ptau", 1);
|
||||
const {curve, power} = await readPTauHeader(fdOld, sections);
|
||||
|
||||
const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 11);
|
||||
@ -3656,13 +3724,13 @@ async function convert(oldPtauFilename, newPTauFilename, logger) {
|
||||
|
||||
const size = sections[newSectionId][0].size;
|
||||
const chunkSize = fdOld.pageSize;
|
||||
await startReadUniqueSection(fdOld, sections, newSectionId);
|
||||
await startReadUniqueSection$1(fdOld, sections, newSectionId);
|
||||
for (let p=0; p<size; p+=chunkSize) {
|
||||
const l = Math.min(size -p, chunkSize);
|
||||
const buff = await fdOld.read(l);
|
||||
await fdNew.write(buff);
|
||||
}
|
||||
await endReadSection(fdOld);
|
||||
await endReadSection$1(fdOld);
|
||||
|
||||
if (oldSectionId == 2) {
|
||||
await processSectionPower(power+1);
|
||||
@ -3678,14 +3746,14 @@ async function convert(oldPtauFilename, newPTauFilename, logger) {
|
||||
let buff;
|
||||
buff = new ffjavascript.BigBuffer(nPoints*sGin);
|
||||
|
||||
await startReadUniqueSection(fdOld, sections, oldSectionId);
|
||||
await startReadUniqueSection$1(fdOld, sections, oldSectionId);
|
||||
if ((oldSectionId == 2)&&(p==power+1)) {
|
||||
await fdOld.readToBuffer(buff, 0,(nPoints-1)*sGin );
|
||||
buff.set(curve.G1.zeroAffine, (nPoints-1)*sGin );
|
||||
} else {
|
||||
await fdOld.readToBuffer(buff, 0,nPoints*sGin );
|
||||
}
|
||||
await endReadSection(fdOld, true);
|
||||
await endReadSection$1(fdOld, true);
|
||||
|
||||
buff = await G.lagrangeEvaluations(buff, "affine", "affine", logger, sectionName);
|
||||
await fdNew.write(buff);
|
||||
@ -3747,7 +3815,7 @@ async function convert(oldPtauFilename, newPTauFilename, logger) {
|
||||
}
|
||||
|
||||
async function exportJson(pTauFilename, verbose) {
|
||||
const {fd, sections} = await readBinFile(pTauFilename, "ptau", 1);
|
||||
const {fd, sections} = await readBinFile$1(pTauFilename, "ptau", 1);
|
||||
|
||||
const {curve, power} = await readPTauHeader(fd, sections);
|
||||
|
||||
@ -3778,13 +3846,13 @@ async function exportJson(pTauFilename, verbose) {
|
||||
const sG = G.F.n8*2;
|
||||
|
||||
const res = [];
|
||||
await startReadUniqueSection(fd, sections, sectionId);
|
||||
await startReadUniqueSection$1(fd, sections, sectionId);
|
||||
for (let i=0; i< nPoints; i++) {
|
||||
if ((verbose)&&i&&(i%10000 == 0)) console.log(`${sectionName}: ` + i);
|
||||
const buff = await fd.read(sG);
|
||||
res.push(G.fromRprLEM(buff, 0));
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
return res;
|
||||
}
|
||||
@ -3794,7 +3862,7 @@ async function exportJson(pTauFilename, verbose) {
|
||||
const sG = G.F.n8*2;
|
||||
|
||||
const res = [];
|
||||
await startReadUniqueSection(fd, sections, sectionId);
|
||||
await startReadUniqueSection$1(fd, sections, sectionId);
|
||||
for (let p=0; p<=power; p++) {
|
||||
if (verbose) console.log(`${sectionName}: Power: ${p}`);
|
||||
res[p] = [];
|
||||
@ -3805,7 +3873,7 @@ async function exportJson(pTauFilename, verbose) {
|
||||
res[p].push(G.fromRprLEM(buff, 0));
|
||||
}
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
return res;
|
||||
}
|
||||
|
||||
@ -3900,9 +3968,9 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
|
||||
await Blake2b.ready();
|
||||
const csHasher = Blake2b(64);
|
||||
|
||||
const {fd: fdPTau, sections: sectionsPTau} = await readBinFile(ptauName, "ptau", 1, 1<<22, 1<<24);
|
||||
const {fd: fdPTau, sections: sectionsPTau} = await readBinFile$1(ptauName, "ptau", 1, 1<<22, 1<<24);
|
||||
const {curve, power} = await readPTauHeader(fdPTau, sectionsPTau);
|
||||
const {fd: fdR1cs, sections: sectionsR1cs} = await readBinFile(r1csName, "r1cs", 1, 1<<22, 1<<24);
|
||||
const {fd: fdR1cs, sections: sectionsR1cs} = await readBinFile$1(r1csName, "r1cs", 1, 1<<22, 1<<24);
|
||||
const r1cs = await readR1csHeader(fdR1cs, sectionsR1cs, false);
|
||||
|
||||
const fdZKey = await createBinFile(zkeyName, "zkey", 1, 10, 1<<22, 1<<24);
|
||||
@ -3992,7 +4060,7 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
|
||||
await endWriteSection(fdZKey);
|
||||
|
||||
if (logger) logger.info("Reading r1cs");
|
||||
let sR1cs = await readSection(fdR1cs, sectionsR1cs, 2);
|
||||
let sR1cs = await readSection$1(fdR1cs, sectionsR1cs, 2);
|
||||
|
||||
const A = new BigArray$1(r1cs.nVars);
|
||||
const B1 = new BigArray$1(r1cs.nVars);
|
||||
@ -4001,13 +4069,13 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
|
||||
const IC = new Array(nPublic+1);
|
||||
|
||||
if (logger) logger.info("Reading tauG1");
|
||||
let sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize -1)*sG1, domainSize*sG1);
|
||||
let sTauG1 = await readSection$1(fdPTau, sectionsPTau, 12, (domainSize -1)*sG1, domainSize*sG1);
|
||||
if (logger) logger.info("Reading tauG2");
|
||||
let sTauG2 = await readSection(fdPTau, sectionsPTau, 13, (domainSize -1)*sG2, domainSize*sG2);
|
||||
let sTauG2 = await readSection$1(fdPTau, sectionsPTau, 13, (domainSize -1)*sG2, domainSize*sG2);
|
||||
if (logger) logger.info("Reading alphatauG1");
|
||||
let sAlphaTauG1 = await readSection(fdPTau, sectionsPTau, 14, (domainSize -1)*sG1, domainSize*sG1);
|
||||
let sAlphaTauG1 = await readSection$1(fdPTau, sectionsPTau, 14, (domainSize -1)*sG1, domainSize*sG1);
|
||||
if (logger) logger.info("Reading betatauG1");
|
||||
let sBetaTauG1 = await readSection(fdPTau, sectionsPTau, 15, (domainSize -1)*sG1, domainSize*sG1);
|
||||
let sBetaTauG1 = await readSection$1(fdPTau, sectionsPTau, 15, (domainSize -1)*sG1, domainSize*sG1);
|
||||
|
||||
await processConstraints();
|
||||
|
||||
@ -4042,7 +4110,7 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
|
||||
await startWriteSection(fdZKey, 9);
|
||||
const buffOut = new ffjavascript.BigBuffer(domainSize*sG1);
|
||||
if (cirPower < curve.Fr.s) {
|
||||
let sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize*2-1)*sG1, domainSize*2*sG1);
|
||||
let sTauG1 = await readSection$1(fdPTau, sectionsPTau, 12, (domainSize*2-1)*sG1, domainSize*2*sG1);
|
||||
for (let i=0; i< domainSize; i++) {
|
||||
if ((logger)&&(i%10000 == 0)) logger.debug(`spliting buffer: ${i}/${domainSize}`);
|
||||
const buff = sTauG1.slice( (i*2+1)*sG1, (i*2+1)*sG1 + sG1 );
|
||||
@ -4514,22 +4582,22 @@ async function readHeader(fd, sections, protocol) {
|
||||
|
||||
// Read Header
|
||||
/////////////////////
|
||||
await startReadUniqueSection(fd, sections, 1);
|
||||
await startReadUniqueSection$1(fd, sections, 1);
|
||||
const protocolId = await fd.readULE32();
|
||||
if (protocolId != 1) throw new Error("File is not groth");
|
||||
zkey.protocol = "groth16";
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
// Read Groth Header
|
||||
/////////////////////
|
||||
await startReadUniqueSection(fd, sections, 2);
|
||||
await startReadUniqueSection$1(fd, sections, 2);
|
||||
const n8q = await fd.readULE32();
|
||||
zkey.n8q = n8q;
|
||||
zkey.q = await readBigInt(fd, n8q);
|
||||
zkey.q = await readBigInt$1(fd, n8q);
|
||||
|
||||
const n8r = await fd.readULE32();
|
||||
zkey.n8r = n8r;
|
||||
zkey.r = await readBigInt(fd, n8r);
|
||||
zkey.r = await readBigInt$1(fd, n8r);
|
||||
|
||||
let curve = await getCurveFromQ(zkey.q);
|
||||
|
||||
@ -4543,14 +4611,14 @@ async function readHeader(fd, sections, protocol) {
|
||||
zkey.vk_gamma_2 = await readG2(fd, curve);
|
||||
zkey.vk_delta_1 = await readG1(fd, curve);
|
||||
zkey.vk_delta_2 = await readG2(fd, curve);
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
return zkey;
|
||||
|
||||
}
|
||||
|
||||
async function readZKey(fileName) {
|
||||
const {fd, sections} = await readBinFile(fileName, "zkey", 1);
|
||||
const {fd, sections} = await readBinFile$1(fileName, "zkey", 1);
|
||||
|
||||
const zkey = await readHeader(fd, sections, "groth16");
|
||||
|
||||
@ -4563,18 +4631,18 @@ async function readZKey(fileName) {
|
||||
|
||||
// Read IC Section
|
||||
///////////
|
||||
await startReadUniqueSection(fd, sections, 3);
|
||||
await startReadUniqueSection$1(fd, sections, 3);
|
||||
zkey.IC = [];
|
||||
for (let i=0; i<= zkey.nPublic; i++) {
|
||||
const P = await readG1(fd, curve);
|
||||
zkey.IC.push(P);
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
|
||||
// Read Coefs
|
||||
///////////
|
||||
await startReadUniqueSection(fd, sections, 4);
|
||||
await startReadUniqueSection$1(fd, sections, 4);
|
||||
const nCCoefs = await fd.readULE32();
|
||||
zkey.ccoefs = [];
|
||||
for (let i=0; i<nCCoefs; i++) {
|
||||
@ -4589,70 +4657,70 @@ async function readZKey(fileName) {
|
||||
value: v
|
||||
});
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
// Read A points
|
||||
///////////
|
||||
await startReadUniqueSection(fd, sections, 5);
|
||||
await startReadUniqueSection$1(fd, sections, 5);
|
||||
zkey.A = [];
|
||||
for (let i=0; i<zkey.nVars; i++) {
|
||||
const A = await readG1(fd, curve);
|
||||
zkey.A[i] = A;
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
|
||||
// Read B1
|
||||
///////////
|
||||
await startReadUniqueSection(fd, sections, 6);
|
||||
await startReadUniqueSection$1(fd, sections, 6);
|
||||
zkey.B1 = [];
|
||||
for (let i=0; i<zkey.nVars; i++) {
|
||||
const B1 = await readG1(fd, curve);
|
||||
|
||||
zkey.B1[i] = B1;
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
|
||||
// Read B2 points
|
||||
///////////
|
||||
await startReadUniqueSection(fd, sections, 7);
|
||||
await startReadUniqueSection$1(fd, sections, 7);
|
||||
zkey.B2 = [];
|
||||
for (let i=0; i<zkey.nVars; i++) {
|
||||
const B2 = await readG2(fd, curve);
|
||||
zkey.B2[i] = B2;
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
|
||||
// Read C points
|
||||
///////////
|
||||
await startReadUniqueSection(fd, sections, 8);
|
||||
await startReadUniqueSection$1(fd, sections, 8);
|
||||
zkey.C = [];
|
||||
for (let i=zkey.nPublic+1; i<zkey.nVars; i++) {
|
||||
const C = await readG1(fd, curve);
|
||||
|
||||
zkey.C[i] = C;
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
|
||||
// Read H points
|
||||
///////////
|
||||
await startReadUniqueSection(fd, sections, 9);
|
||||
await startReadUniqueSection$1(fd, sections, 9);
|
||||
zkey.hExps = [];
|
||||
for (let i=0; i<zkey.domainSize; i++) {
|
||||
const H = await readG1(fd, curve);
|
||||
zkey.hExps.push(H);
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
await fd.close();
|
||||
|
||||
return zkey;
|
||||
|
||||
async function readFr2() {
|
||||
const n = await readBigInt(fd, zkey.n8r);
|
||||
const n = await readBigInt$1(fd, zkey.n8r);
|
||||
return Fr.mul(n, Rri2);
|
||||
}
|
||||
|
||||
@ -4698,7 +4766,7 @@ async function readContribution$1(fd, curve) {
|
||||
|
||||
|
||||
async function readMPCParams(fd, curve, sections) {
|
||||
await startReadUniqueSection(fd, sections, 10);
|
||||
await startReadUniqueSection$1(fd, sections, 10);
|
||||
const res = { contributions: []};
|
||||
res.csHash = await fd.read(64);
|
||||
const n = await fd.readULE32();
|
||||
@ -4706,7 +4774,7 @@ async function readMPCParams(fd, curve, sections) {
|
||||
const c = await readContribution$1(fd, curve);
|
||||
res.contributions.push(c);
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
return res;
|
||||
}
|
||||
@ -4776,7 +4844,7 @@ function hashPubKey(hasher, curve, c) {
|
||||
|
||||
async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
|
||||
const {fd: fdZKey, sections: sectionsZKey} = await readBinFile(zkeyName, "zkey", 2);
|
||||
const {fd: fdZKey, sections: sectionsZKey} = await readBinFile$1(zkeyName, "zkey", 2);
|
||||
const zkey = await readHeader(fdZKey, sectionsZKey, "groth16");
|
||||
|
||||
const curve = await getCurveFromQ(zkey.q);
|
||||
@ -4799,7 +4867,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
|
||||
// IC
|
||||
let buffBasesIC;
|
||||
buffBasesIC = await readFullSection(fdZKey, sectionsZKey, 3);
|
||||
buffBasesIC = await readSection$1(fdZKey, sectionsZKey, 3);
|
||||
buffBasesIC = await curve.G1.batchLEMtoU(buffBasesIC);
|
||||
|
||||
await writePointArray("G1", buffBasesIC);
|
||||
@ -4807,7 +4875,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
/////////////////////
|
||||
// h Section
|
||||
/////////////////////
|
||||
const buffBasesH_Lodd = await readFullSection(fdZKey, sectionsZKey, 9);
|
||||
const buffBasesH_Lodd = await readSection$1(fdZKey, sectionsZKey, 9);
|
||||
|
||||
let buffBasesH_Tau;
|
||||
buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", logger);
|
||||
@ -4822,7 +4890,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
// L section
|
||||
/////////////////////
|
||||
let buffBasesC;
|
||||
buffBasesC = await readFullSection(fdZKey, sectionsZKey, 8);
|
||||
buffBasesC = await readSection$1(fdZKey, sectionsZKey, 8);
|
||||
buffBasesC = await curve.G1.batchLEMtoU(buffBasesC);
|
||||
await writePointArray("G1", buffBasesC);
|
||||
|
||||
@ -4830,7 +4898,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
// A Section (C section)
|
||||
/////////////////////
|
||||
let buffBasesA;
|
||||
buffBasesA = await readFullSection(fdZKey, sectionsZKey, 5);
|
||||
buffBasesA = await readSection$1(fdZKey, sectionsZKey, 5);
|
||||
buffBasesA = await curve.G1.batchLEMtoU(buffBasesA);
|
||||
await writePointArray("G1", buffBasesA);
|
||||
|
||||
@ -4838,7 +4906,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
// B1 Section
|
||||
/////////////////////
|
||||
let buffBasesB1;
|
||||
buffBasesB1 = await readFullSection(fdZKey, sectionsZKey, 6);
|
||||
buffBasesB1 = await readSection$1(fdZKey, sectionsZKey, 6);
|
||||
buffBasesB1 = await curve.G1.batchLEMtoU(buffBasesB1);
|
||||
await writePointArray("G1", buffBasesB1);
|
||||
|
||||
@ -4846,7 +4914,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
// B2 Section
|
||||
/////////////////////
|
||||
let buffBasesB2;
|
||||
buffBasesB2 = await readFullSection(fdZKey, sectionsZKey, 7);
|
||||
buffBasesB2 = await readSection$1(fdZKey, sectionsZKey, 7);
|
||||
buffBasesB2 = await curve.G2.batchLEMtoU(buffBasesB2);
|
||||
await writePointArray("G2", buffBasesB2);
|
||||
|
||||
@ -4907,7 +4975,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
|
||||
async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, name, logger) {
|
||||
|
||||
const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await readBinFile(zkeyNameOld, "zkey", 2);
|
||||
const {fd: fdZKeyOld, sections: sectionsZKeyOld} = await readBinFile$1(zkeyNameOld, "zkey", 2);
|
||||
const zkeyHeader = await readHeader(fdZKeyOld, sectionsZKeyOld, "groth16");
|
||||
|
||||
const curve = await getCurveFromQ(zkeyHeader.q);
|
||||
@ -5107,7 +5175,7 @@ async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, logger) {
|
||||
let sr;
|
||||
await Blake2b.ready();
|
||||
|
||||
const {fd, sections} = await readBinFile(zkeyFileName, "zkey", 2);
|
||||
const {fd, sections} = await readBinFile$1(zkeyFileName, "zkey", 2);
|
||||
const zkey = await readHeader(fd, sections, "groth16");
|
||||
|
||||
const curve = await getCurveFromQ(zkey.q);
|
||||
@ -5176,7 +5244,7 @@ async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, logger) {
|
||||
const initFileName = {type: "mem"};
|
||||
await newZKey(r1csFileName, pTauFileName, initFileName);
|
||||
|
||||
const {fd: fdInit, sections: sectionsInit} = await readBinFile(initFileName, "zkey", 2);
|
||||
const {fd: fdInit, sections: sectionsInit} = await readBinFile$1(initFileName, "zkey", 2);
|
||||
const zkeyInit = await readHeader(fdInit, sectionsInit, "groth16");
|
||||
|
||||
if ( (!ffjavascript.Scalar.eq(zkeyInit.q, zkey.q))
|
||||
@ -5309,8 +5377,8 @@ async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, logger) {
|
||||
const MAX_CHUNK_SIZE = 1<<20;
|
||||
const G = curve[groupName];
|
||||
const sG = G.F.n8*2;
|
||||
await startReadUniqueSection(fd1, sections1, idSection);
|
||||
await startReadUniqueSection(fd2, sections2, idSection);
|
||||
await startReadUniqueSection$1(fd1, sections1, idSection);
|
||||
await startReadUniqueSection$1(fd2, sections2, idSection);
|
||||
|
||||
let R1 = G.zero;
|
||||
let R2 = G.zero;
|
||||
@ -5333,8 +5401,8 @@ async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, logger) {
|
||||
R1 = G.add(R1, r1);
|
||||
R2 = G.add(R2, r2);
|
||||
}
|
||||
await endReadSection(fd1);
|
||||
await endReadSection(fd2);
|
||||
await endReadSection$1(fd1);
|
||||
await endReadSection$1(fd2);
|
||||
|
||||
if (nPoints == 0) return true;
|
||||
|
||||
@ -5350,7 +5418,7 @@ async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, logger) {
|
||||
const Fr = curve.Fr;
|
||||
const sG = G.F.n8*2;
|
||||
|
||||
const {fd: fdPTau, sections: sectionsPTau} = await readBinFile(pTauFileName, "ptau", 1);
|
||||
const {fd: fdPTau, sections: sectionsPTau} = await readBinFile$1(pTauFileName, "ptau", 1);
|
||||
|
||||
let buff_r = new Uint8Array(zkey.domainSize * zkey.n8r);
|
||||
|
||||
@ -5403,7 +5471,7 @@ async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, logger) {
|
||||
buff_r = await Fr.fft(buff_r);
|
||||
buff_r = await Fr.batchFromMontgomery(buff_r);
|
||||
|
||||
await startReadUniqueSection(fd, sections, 9);
|
||||
await startReadUniqueSection$1(fd, sections, 9);
|
||||
let R2 = G.zero;
|
||||
for (let i=0; i<zkey.domainSize; i += MAX_CHUNK_SIZE) {
|
||||
if (logger) logger.debug(`H Verificaition(lagrange): ${i}/${zkey.domainSize}`);
|
||||
@ -5415,7 +5483,7 @@ async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, logger) {
|
||||
|
||||
R2 = G.add(R2, r);
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
sr = await sameRatio$2(curve, R1, R2, zkey.vk_delta_2, zkeyInit.vk_delta_2);
|
||||
if (sr !== true) return false;
|
||||
@ -5495,7 +5563,7 @@ async function phase2verify(r1csFileName, pTauFileName, zkeyFileName, logger) {
|
||||
async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, logger) {
|
||||
await Blake2b.ready();
|
||||
|
||||
const {fd: fdOld, sections: sections} = await readBinFile(zkeyNameOld, "zkey", 2);
|
||||
const {fd: fdOld, sections: sections} = await readBinFile$1(zkeyNameOld, "zkey", 2);
|
||||
const zkey = await readHeader(fdOld, sections, "groth16");
|
||||
|
||||
const curve = await getCurveFromQ(zkey.q);
|
||||
@ -5592,7 +5660,7 @@ async function beacon$1(zkeyNameOld, zkeyNameNew, name, beaconHashStr, numIterat
|
||||
}
|
||||
|
||||
|
||||
const {fd: fdOld, sections: sections} = await readBinFile(zkeyNameOld, "zkey", 2);
|
||||
const {fd: fdOld, sections: sections} = await readBinFile$1(zkeyNameOld, "zkey", 2);
|
||||
const zkey = await readHeader(fdOld, sections, "groth16");
|
||||
|
||||
const curve = await getCurveFromQ(zkey.q);
|
||||
@ -5840,7 +5908,7 @@ const {stringifyBigInts: stringifyBigInts$1} = ffjavascript.utils;
|
||||
|
||||
async function zkeyExportVerificationKey(zkeyName, logger) {
|
||||
|
||||
const {fd, sections} = await readBinFile(zkeyName, "zkey", 2);
|
||||
const {fd, sections} = await readBinFile$1(zkeyName, "zkey", 2);
|
||||
const zkey = await readHeader(fd, sections, "groth16");
|
||||
|
||||
const curve = await getCurveFromQ(zkey.q);
|
||||
@ -5864,14 +5932,14 @@ async function zkeyExportVerificationKey(zkeyName, logger) {
|
||||
|
||||
// Read IC Section
|
||||
///////////
|
||||
await startReadUniqueSection(fd, sections, 3);
|
||||
await startReadUniqueSection$1(fd, sections, 3);
|
||||
vKey.IC = [];
|
||||
for (let i=0; i<= zkey.nPublic; i++) {
|
||||
const buff = await fd.read(sG1);
|
||||
const P = curve.G1.toObject(buff);
|
||||
vKey.IC.push(P);
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
vKey = stringifyBigInts$1(vKey);
|
||||
|
||||
@ -5969,11 +6037,11 @@ async function writeBin(fd, witnessBin, prime) {
|
||||
|
||||
async function readHeader$1(fd, sections) {
|
||||
|
||||
await startReadUniqueSection(fd, sections, 1);
|
||||
await startReadUniqueSection$1(fd, sections, 1);
|
||||
const n8 = await fd.readULE32();
|
||||
const q = await readBigInt(fd, n8);
|
||||
const q = await readBigInt$1(fd, n8);
|
||||
const nWitness = await fd.readULE32();
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
return {n8, q, nWitness};
|
||||
|
||||
@ -5981,17 +6049,17 @@ async function readHeader$1(fd, sections) {
|
||||
|
||||
async function read(fileName) {
|
||||
|
||||
const {fd, sections} = await readBinFile(fileName, "wtns", 2);
|
||||
const {fd, sections} = await readBinFile$1(fileName, "wtns", 2);
|
||||
|
||||
const {n8, nWitness} = await readHeader$1(fd, sections);
|
||||
|
||||
await startReadUniqueSection(fd, sections, 2);
|
||||
await startReadUniqueSection$1(fd, sections, 2);
|
||||
const res = [];
|
||||
for (let i=0; i<nWitness; i++) {
|
||||
const v = await readBigInt(fd, n8);
|
||||
const v = await readBigInt$1(fd, n8);
|
||||
res.push(v);
|
||||
}
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
await fd.close();
|
||||
|
||||
@ -6001,11 +6069,11 @@ async function read(fileName) {
|
||||
const {stringifyBigInts: stringifyBigInts$2} = ffjavascript.utils;
|
||||
|
||||
async function groth16Prove(zkeyFileName, witnessFileName, logger) {
|
||||
const {fd: fdWtns, sections: sectionsWtns} = await readBinFile(witnessFileName, "wtns", 2);
|
||||
const {fd: fdWtns, sections: sectionsWtns} = await readBinFile$1(witnessFileName, "wtns", 2);
|
||||
|
||||
const wtns = await readHeader$1(fdWtns, sectionsWtns);
|
||||
|
||||
const {fd: fdZKey, sections: sectionsZKey} = await readBinFile(zkeyFileName, "zkey", 2);
|
||||
const {fd: fdZKey, sections: sectionsZKey} = await readBinFile$1(zkeyFileName, "zkey", 2);
|
||||
|
||||
const zkey = await readHeader(fdZKey, sectionsZKey, "groth16");
|
||||
|
||||
@ -6024,13 +6092,13 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
|
||||
|
||||
const power = log2(zkey.domainSize);
|
||||
|
||||
const buffWitness = await readFullSection(fdWtns, sectionsWtns, 2);
|
||||
const buffCoeffs = await readFullSection(fdZKey, sectionsZKey, 4);
|
||||
const buffBasesA = await readFullSection(fdZKey, sectionsZKey, 5);
|
||||
const buffBasesB1 = await readFullSection(fdZKey, sectionsZKey, 6);
|
||||
const buffBasesB2 = await readFullSection(fdZKey, sectionsZKey, 7);
|
||||
const buffBasesC = await readFullSection(fdZKey, sectionsZKey, 8);
|
||||
const buffBasesH = await readFullSection(fdZKey, sectionsZKey, 9);
|
||||
const buffWitness = await readSection$1(fdWtns, sectionsWtns, 2);
|
||||
const buffCoeffs = await readSection$1(fdZKey, sectionsZKey, 4);
|
||||
const buffBasesA = await readSection$1(fdZKey, sectionsZKey, 5);
|
||||
const buffBasesB1 = await readSection$1(fdZKey, sectionsZKey, 6);
|
||||
const buffBasesB2 = await readSection$1(fdZKey, sectionsZKey, 7);
|
||||
const buffBasesC = await readSection$1(fdZKey, sectionsZKey, 8);
|
||||
const buffBasesH = await readSection$1(fdZKey, sectionsZKey, 9);
|
||||
|
||||
const [buffA_T, buffB_T, buffC_T] = await buldABC(curve, zkey, buffWitness, buffCoeffs);
|
||||
|
||||
|
build/main.cjs (120 changed lines)
@ -937,13 +937,6 @@ async function copySection(fdFrom, sections, fdTo, sectionId, size) {
|
||||
|
||||
}
|
||||
|
||||
async function readFullSection(fd, sections, idSection) {
|
||||
await startReadUniqueSection(fd, sections, idSection);
|
||||
const res = await fd.read(fd.readingSection.size);
|
||||
await endReadSection(fd);
|
||||
return res;
|
||||
}
|
||||
|
||||
async function readSection(fd, sections, idSection, offset, length) {
|
||||
|
||||
offset = (typeof offset === "undefined") ? 0 : offset;
|
||||
@ -1587,13 +1580,13 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
|
||||
|
||||
const power = log2(zkey.domainSize);
|
||||
|
||||
const buffWitness = await readFullSection(fdWtns, sectionsWtns, 2);
|
||||
const buffCoeffs = await readFullSection(fdZKey, sectionsZKey, 4);
|
||||
const buffBasesA = await readFullSection(fdZKey, sectionsZKey, 5);
|
||||
const buffBasesB1 = await readFullSection(fdZKey, sectionsZKey, 6);
|
||||
const buffBasesB2 = await readFullSection(fdZKey, sectionsZKey, 7);
|
||||
const buffBasesC = await readFullSection(fdZKey, sectionsZKey, 8);
|
||||
const buffBasesH = await readFullSection(fdZKey, sectionsZKey, 9);
|
||||
const buffWitness = await readSection(fdWtns, sectionsWtns, 2);
|
||||
const buffCoeffs = await readSection(fdZKey, sectionsZKey, 4);
|
||||
const buffBasesA = await readSection(fdZKey, sectionsZKey, 5);
|
||||
const buffBasesB1 = await readSection(fdZKey, sectionsZKey, 6);
|
||||
const buffBasesB2 = await readSection(fdZKey, sectionsZKey, 7);
|
||||
const buffBasesC = await readSection(fdZKey, sectionsZKey, 8);
|
||||
const buffBasesH = await readSection(fdZKey, sectionsZKey, 9);
|
||||
|
||||
const [buffA_T, buffB_T, buffC_T] = await buldABC(curve, zkey, buffWitness, buffCoeffs);
|
||||
|
||||
@ -4105,14 +4098,89 @@ class BigArray {
|
||||
}
|
||||
}
|
||||
|
||||
async function readBinFile$1(fileName, type, maxVersion, cacheSize, pageSize) {
|
||||
|
||||
const fd = await readExisting$2(fileName, cacheSize, pageSize);
|
||||
|
||||
const b = await fd.read(4);
|
||||
let readedType = "";
|
||||
for (let i=0; i<4; i++) readedType += String.fromCharCode(b[i]);
|
||||
|
||||
if (readedType != type) throw new Error(fileName + ": Invalid File format");
|
||||
|
||||
let v = await fd.readULE32();
|
||||
|
||||
if (v>maxVersion) throw new Error("Version not supported");
|
||||
|
||||
const nSections = await fd.readULE32();
|
||||
|
||||
// Scan sections
|
||||
let sections = [];
|
||||
for (let i=0; i<nSections; i++) {
|
||||
let ht = await fd.readULE32();
|
||||
let hl = await fd.readULE64();
|
||||
if (typeof sections[ht] == "undefined") sections[ht] = [];
|
||||
sections[ht].push({
|
||||
p: fd.pos,
|
||||
size: hl
|
||||
});
|
||||
fd.pos += hl;
|
||||
}
|
||||
|
||||
return {fd, sections};
|
||||
}
|
||||
|
||||
async function startReadUniqueSection$1(fd, sections, idSection) {
|
||||
if (typeof fd.readingSection !== "undefined") throw new Error("Already reading a section");
|
||||
if (!sections[idSection]) throw new Error(fd.fileName + ": Missing section "+ idSection );
|
||||
if (sections[idSection].length>1) throw new Error(fd.fileName +": Section Duplicated " +idSection);
|
||||
|
||||
fd.pos = sections[idSection][0].p;
|
||||
|
||||
fd.readingSection = sections[idSection][0];
|
||||
}
|
||||
|
||||
async function endReadSection$1(fd, noCheck) {
|
||||
if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
|
||||
if (!noCheck) {
|
||||
if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading");
|
||||
}
|
||||
delete fd.readingSection;
|
||||
}
|
||||
|
||||
async function readBigInt$1(fd, n8, pos) {
|
||||
const buff = await fd.read(n8, pos);
|
||||
return ffjavascript.Scalar.fromRprLE(buff, 0, n8);
|
||||
}
|
||||
|
||||
async function readSection$1(fd, sections, idSection, offset, length) {
|
||||
|
||||
offset = (typeof offset === "undefined") ? 0 : offset;
|
||||
length = (typeof length === "undefined") ? sections[idSection][0].size - offset : length;
|
||||
|
||||
if (offset + length > sections[idSection][0].size) {
|
||||
throw new Error("Reading out of the range of the section");
|
||||
}
|
||||
|
||||
let buff;
|
||||
if (length < (1 << 30) ) {
|
||||
buff = new Uint8Array(length);
|
||||
} else {
|
||||
buff = new ffjavascript.BigBuffer(length);
|
||||
}
|
||||
|
||||
await fd.readToBuffer(buff, 0, length, sections[idSection][0].p + offset);
|
||||
return buff;
|
||||
}
|
||||
|
||||
async function readR1csHeader(fd,sections,singleThread) {
|
||||
|
||||
|
||||
const res = {};
|
||||
await startReadUniqueSection(fd, sections, 1);
|
||||
await startReadUniqueSection$1(fd, sections, 1);
|
||||
// Read Header
|
||||
res.n8 = await fd.readULE32();
|
||||
res.prime = await readBigInt(fd, res.n8);
|
||||
res.prime = await readBigInt$1(fd, res.n8);
|
||||
|
||||
res.curve = await ffjavascript.getCurveFromR(res.prime, singleThread);
|
||||
|
||||
@ -4122,13 +4190,13 @@ async function readR1csHeader(fd,sections,singleThread) {
|
||||
res.nPrvInputs = await fd.readULE32();
|
||||
res.nLabels = await fd.readULE64();
|
||||
res.nConstraints = await fd.readULE32();
|
||||
await endReadSection(fd);
|
||||
await endReadSection$1(fd);
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async function readConstraints(fd,sections, r1cs, logger, loggerCtx) {
|
||||
const bR1cs = await readSection(fd, sections, 2);
|
||||
const bR1cs = await readSection$1(fd, sections, 2);
|
||||
let bR1csPos = 0;
|
||||
let constraints;
|
||||
if (r1cs.nConstraints>1<<20) {
|
||||
@ -4173,7 +4241,7 @@ async function readConstraints(fd,sections, r1cs, logger, loggerCtx) {
|
||||
}
|
||||
|
||||
async function readMap(fd, sections, r1cs, logger, loggerCtx) {
|
||||
const bMap = await readSection(fd, sections, 3);
|
||||
const bMap = await readSection$1(fd, sections, 3);
|
||||
let bMapPos = 0;
|
||||
let map;
|
||||
|
||||
@ -4204,7 +4272,7 @@ async function readMap(fd, sections, r1cs, logger, loggerCtx) {
|
||||
|
||||
async function readR1cs(fileName, loadConstraints, loadMap, singleThread, logger, loggerCtx) {
|
||||
|
||||
const {fd, sections} = await readBinFile(fileName, "r1cs", 1, 1<<25, 1<<22);
|
||||
const {fd, sections} = await readBinFile$1(fileName, "r1cs", 1, 1<<25, 1<<22);
|
||||
|
||||
const res = await readR1csHeader(fd, sections, singleThread);
|
||||
|
||||
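The same binfileutils-style helpers back the r1cs reader above: readBinFile$1 scans the section table and readR1csHeader pulls the header fields (n8, prime, label and constraint counts) out of section 1. A sketch with an illustrative file name, using the bundled names as they appear in this file:

// Sketch only: inspect an r1cs header with the helpers defined above.
async function inspectR1cs(fileName) {                              // fileName is hypothetical
    const { fd, sections } = await readBinFile$1(fileName, "r1cs", 1, 1 << 25, 1 << 22);
    const header = await readR1csHeader(fd, sections, true);       // singleThread = true
    await fd.close();
    return header;                                                  // n8, prime, nLabels, nConstraints, ...
}
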
@ -5035,7 +5103,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
|
||||
// IC
|
||||
let buffBasesIC;
|
||||
buffBasesIC = await readFullSection(fdZKey, sectionsZKey, 3);
|
||||
buffBasesIC = await readSection(fdZKey, sectionsZKey, 3);
|
||||
buffBasesIC = await curve.G1.batchLEMtoU(buffBasesIC);
|
||||
|
||||
await writePointArray("G1", buffBasesIC);
|
||||
@ -5043,7 +5111,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
/////////////////////
|
||||
// h Section
|
||||
/////////////////////
|
||||
const buffBasesH_Lodd = await readFullSection(fdZKey, sectionsZKey, 9);
|
||||
const buffBasesH_Lodd = await readSection(fdZKey, sectionsZKey, 9);
|
||||
|
||||
let buffBasesH_Tau;
|
||||
buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", logger);
|
||||
@ -5058,7 +5126,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
// L section
|
||||
/////////////////////
|
||||
let buffBasesC;
|
||||
buffBasesC = await readFullSection(fdZKey, sectionsZKey, 8);
|
||||
buffBasesC = await readSection(fdZKey, sectionsZKey, 8);
|
||||
buffBasesC = await curve.G1.batchLEMtoU(buffBasesC);
|
||||
await writePointArray("G1", buffBasesC);
|
||||
|
||||
@ -5066,7 +5134,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
// A Section (C section)
|
||||
/////////////////////
|
||||
let buffBasesA;
|
||||
buffBasesA = await readFullSection(fdZKey, sectionsZKey, 5);
|
||||
buffBasesA = await readSection(fdZKey, sectionsZKey, 5);
|
||||
buffBasesA = await curve.G1.batchLEMtoU(buffBasesA);
|
||||
await writePointArray("G1", buffBasesA);
|
||||
|
||||
@ -5074,7 +5142,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
// B1 Section
|
||||
/////////////////////
|
||||
let buffBasesB1;
|
||||
buffBasesB1 = await readFullSection(fdZKey, sectionsZKey, 6);
|
||||
buffBasesB1 = await readSection(fdZKey, sectionsZKey, 6);
|
||||
buffBasesB1 = await curve.G1.batchLEMtoU(buffBasesB1);
|
||||
await writePointArray("G1", buffBasesB1);
|
||||
|
||||
@ -5082,7 +5150,7 @@ async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
|
||||
// B2 Section
|
||||
/////////////////////
|
||||
let buffBasesB2;
|
||||
buffBasesB2 = await readFullSection(fdZKey, sectionsZKey, 7);
|
||||
buffBasesB2 = await readSection(fdZKey, sectionsZKey, 7);
|
||||
buffBasesB2 = await curve.G2.batchLEMtoU(buffBasesB2);
|
||||
await writePointArray("G2", buffBasesB2);
|
||||
|
||||
|
build/snarkjs.js (122 changed lines)
File diff suppressed because one or more lines are too long
build/snarkjs.min.js (vendored, 2 changed lines)
File diff suppressed because one or more lines are too long
package-lock.json (generated, 17 changed lines)
@ -36,9 +36,9 @@
|
||||
"integrity": "sha512-Xzdyxqm1bOFF6pdIsiHLLl3HkSLjbhqJHVyqaTxXt3RqXBEnmsUmEW47H7VOi/ak7TdkRpNkxjyK5Zbkm+y52g=="
|
||||
},
|
||||
"@iden3/binfileutils": {
|
||||
"version": "0.0.6",
|
||||
"resolved": "https://registry.npmjs.org/@iden3/binfileutils/-/binfileutils-0.0.6.tgz",
|
||||
"integrity": "sha512-VBlSeTwi4UD+17lwl+pw+Kqi+xyyH3M1lAD2K7B9GucBwZbHMIWDcgWcDqN3a1y41qlcifTKd3Fi8LkhdPBg9Q==",
|
||||
"version": "0.0.7",
|
||||
"resolved": "https://registry.npmjs.org/@iden3/binfileutils/-/binfileutils-0.0.7.tgz",
|
||||
"integrity": "sha512-wE5H3NF3/XNgCGEq0fmTPl5HClzhdA8TEYY58Y1XuOdjvL+/D0ChShHvIq8f9YphO2uYARU9u9IFoEgOa67HeQ==",
|
||||
"requires": {
|
||||
"fastfile": "0.0.18",
|
||||
"ffjavascript": "^0.2.30"
|
||||
@ -1536,6 +1536,17 @@
|
||||
"@iden3/binfileutils": "0.0.6",
|
||||
"fastfile": "0.0.18",
|
||||
"ffjavascript": "0.2.30"
|
||||
},
|
||||
"dependencies": {
|
||||
"@iden3/binfileutils": {
|
||||
"version": "0.0.6",
|
||||
"resolved": "https://registry.npmjs.org/@iden3/binfileutils/-/binfileutils-0.0.6.tgz",
|
||||
"integrity": "sha512-VBlSeTwi4UD+17lwl+pw+Kqi+xyyH3M1lAD2K7B9GucBwZbHMIWDcgWcDqN3a1y41qlcifTKd3Fi8LkhdPBg9Q==",
|
||||
"requires": {
|
||||
"fastfile": "0.0.18",
|
||||
"ffjavascript": "^0.2.30"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"randombytes": {
|
||||
|
@ -38,7 +38,7 @@
|
||||
"url": "https://github.com/iden3/snarkjs.git"
|
||||
},
|
||||
"dependencies": {
|
||||
"@iden3/binfileutils": "0.0.6",
|
||||
"@iden3/binfileutils": "0.0.7",
|
||||
"blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git",
|
||||
"circom_runtime": "0.1.5",
|
||||
"fastfile": "0.0.18",
|
||||
|
@ -30,13 +30,13 @@ export default async function groth16Prove(zkeyFileName, witnessFileName, logger
|
||||
|
||||
const power = log2(zkey.domainSize);
|
||||
|
||||
const buffWitness = await binFileUtils.readFullSection(fdWtns, sectionsWtns, 2);
|
||||
const buffCoeffs = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 4);
|
||||
const buffBasesA = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 5);
|
||||
const buffBasesB1 = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 6);
|
||||
const buffBasesB2 = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 7);
|
||||
const buffBasesC = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 8);
|
||||
const buffBasesH = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 9);
|
||||
const buffWitness = await binFileUtils.readSection(fdWtns, sectionsWtns, 2);
|
||||
const buffCoeffs = await binFileUtils.readSection(fdZKey, sectionsZKey, 4);
|
||||
const buffBasesA = await binFileUtils.readSection(fdZKey, sectionsZKey, 5);
|
||||
const buffBasesB1 = await binFileUtils.readSection(fdZKey, sectionsZKey, 6);
|
||||
const buffBasesB2 = await binFileUtils.readSection(fdZKey, sectionsZKey, 7);
|
||||
const buffBasesC = await binFileUtils.readSection(fdZKey, sectionsZKey, 8);
|
||||
const buffBasesH = await binFileUtils.readSection(fdZKey, sectionsZKey, 9);
|
||||
|
||||
const [buffA_T, buffB_T, buffC_T] = await buldABC(curve, zkey, buffWitness, buffCoeffs);
|
||||
|
||||
|
@ -29,7 +29,7 @@ export default async function phase2exportMPCParams(zkeyName, mpcparamsName, log
|
||||
|
||||
// IC
|
||||
let buffBasesIC;
|
||||
buffBasesIC = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 3);
|
||||
buffBasesIC = await binFileUtils.readSection(fdZKey, sectionsZKey, 3);
|
||||
buffBasesIC = await curve.G1.batchLEMtoU(buffBasesIC);
|
||||
|
||||
await writePointArray("G1", buffBasesIC);
|
||||
@ -37,7 +37,7 @@ export default async function phase2exportMPCParams(zkeyName, mpcparamsName, log
|
||||
/////////////////////
|
||||
// h Section
|
||||
/////////////////////
|
||||
const buffBasesH_Lodd = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 9);
|
||||
const buffBasesH_Lodd = await binFileUtils.readSection(fdZKey, sectionsZKey, 9);
|
||||
|
||||
let buffBasesH_Tau;
|
||||
buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", logger);
|
||||
@ -52,7 +52,7 @@ export default async function phase2exportMPCParams(zkeyName, mpcparamsName, log
|
||||
// L section
|
||||
/////////////////////
|
||||
let buffBasesC;
|
||||
buffBasesC = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 8);
|
||||
buffBasesC = await binFileUtils.readSection(fdZKey, sectionsZKey, 8);
|
||||
buffBasesC = await curve.G1.batchLEMtoU(buffBasesC);
|
||||
await writePointArray("G1", buffBasesC);
|
||||
|
||||
@ -60,7 +60,7 @@ export default async function phase2exportMPCParams(zkeyName, mpcparamsName, log
|
||||
// A Section (C section)
|
||||
/////////////////////
|
||||
let buffBasesA;
|
||||
buffBasesA = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 5);
|
||||
buffBasesA = await binFileUtils.readSection(fdZKey, sectionsZKey, 5);
|
||||
buffBasesA = await curve.G1.batchLEMtoU(buffBasesA);
|
||||
await writePointArray("G1", buffBasesA);
|
||||
|
||||
@ -68,7 +68,7 @@ export default async function phase2exportMPCParams(zkeyName, mpcparamsName, log
|
||||
// B1 Section
|
||||
/////////////////////
|
||||
let buffBasesB1;
|
||||
buffBasesB1 = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 6);
|
||||
buffBasesB1 = await binFileUtils.readSection(fdZKey, sectionsZKey, 6);
|
||||
buffBasesB1 = await curve.G1.batchLEMtoU(buffBasesB1);
|
||||
await writePointArray("G1", buffBasesB1);
|
||||
|
||||
@ -76,7 +76,7 @@ export default async function phase2exportMPCParams(zkeyName, mpcparamsName, log
|
||||
// B2 Section
|
||||
/////////////////////
|
||||
let buffBasesB2;
|
||||
buffBasesB2 = await binFileUtils.readFullSection(fdZKey, sectionsZKey, 7);
|
||||
buffBasesB2 = await binFileUtils.readSection(fdZKey, sectionsZKey, 7);
|
||||
buffBasesB2 = await curve.G2.batchLEMtoU(buffBasesB2);
|
||||
await writePointArray("G2", buffBasesB2);
|
||||
|
||||
|