more advances in powers of tau ceremony

parent 4c7a37c274
commit f587735530

cli.js (161 lines changed)
@@ -41,6 +41,7 @@ const clProcessor = require("./src/clprocessor");
 const powersOfTaw = require("./src/powersoftaw");

 const bn128 = require("ffjavascript").bn128;
+const solidityGenerator = require("./src/soliditygenerator.js");

 const commands = [
     {
@@ -150,6 +151,14 @@ const commands = [
         options: "-verbose|v -name|n -entropy|e",
         action: powersOfTawContribute
     },
+    {
+        cmd: "powersoftaw prepare phase2 <powersoftaw.ptaw> <new_powersoftaw.ptaw>",
+        description: "Prepares phase 2.",
+        longDescription: " This process calculates the evaluation of the Lagrange polynomials at tau for alpha*tau and beta*tau",
+        alias: ["pt2"],
+        options: "-verbose|v",
+        action: powersOfTawPreparePhase2
+    },

 ];

@@ -426,11 +435,11 @@ async function solidityGenVerifier(params, options) {

     let verifierCode;
     if (verificationKey.protocol == "original") {
-        verifierCode = generateVerifier_original(verificationKey);
+        verifierCode = solidityGenerator.generateVerifier_original(verificationKey);
     } else if (verificationKey.protocol == "groth16") {
-        verifierCode = generateVerifier_groth16(verificationKey);
+        verifierCode = solidityGenerator.generateVerifier_groth16(verificationKey);
     } else if (verificationKey.protocol == "kimleeoh") {
-        verifierCode = generateVerifier_kimleeoh(verificationKey);
+        verifierCode = solidityGenerator.generateVerifier_kimleeoh(verificationKey);
     } else {
         throw new Error("InvalidProof");
     }
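cli.js now resolves the Solidity verifier templates through the solidityGenerator namespace. A minimal sketch of the expected ./src/soliditygenerator.js surface, assuming it simply re-exports the three generator functions this commit deletes from cli.js further below:

    // hedged sketch: assumed export surface of ./src/soliditygenerator.js
    module.exports.generateVerifier_original = generateVerifier_original;
    module.exports.generateVerifier_groth16 = generateVerifier_groth16;
    module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;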
@@ -573,8 +582,10 @@ async function powersOfTawVerify(params, options) {
     const res = await powersOfTaw.verify(ptauName, options.verbose);
     if (res) {
         console.log("Powers of tau OK!");
+        return 0;
     } else {
         console.log("=======>INVALID Powers of tau<==========");
+        return 1;
     }
 }

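powersOfTawVerify now reports a shell-style status. A hedged usage sketch; the file name is a placeholder, and it assumes clProcessor forwards the action's return value as the process exit code:

    // 0 = valid powers of tau transcript, 1 = invalid
    powersOfTawVerify(["pot12.ptaw"], {verbose: true}).then(rc => process.exit(rc));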
@@ -602,145 +613,13 @@ async function powersOfTawContribute(params, options) {
     return await powersOfTaw.contribute(oldPtauName, newPtauName, options.name , options.entropy, options.verbose);
 }

-function generateVerifier_original(verificationKey) {
-    let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_original.sol"), "utf-8");
-
-    const vka_str = `[${verificationKey.vk_a[0][1].toString()},`+
-                    `${verificationKey.vk_a[0][0].toString()}], `+
-                    `[${verificationKey.vk_a[1][1].toString()},` +
-                    `${verificationKey.vk_a[1][0].toString()}]`;
-    template = template.replace("<%vk_a%>", vka_str);
-
-    const vkb_str = `${verificationKey.vk_b[0].toString()},`+
-                    `${verificationKey.vk_b[1].toString()}`;
-    template = template.replace("<%vk_b%>", vkb_str);
-
-    const vkc_str = `[${verificationKey.vk_c[0][1].toString()},`+
-                    `${verificationKey.vk_c[0][0].toString()}], `+
-                    `[${verificationKey.vk_c[1][1].toString()},` +
-                    `${verificationKey.vk_c[1][0].toString()}]`;
-    template = template.replace("<%vk_c%>", vkc_str);
-
-    const vkg_str = `[${verificationKey.vk_g[0][1].toString()},`+
-                    `${verificationKey.vk_g[0][0].toString()}], `+
-                    `[${verificationKey.vk_g[1][1].toString()},` +
-                    `${verificationKey.vk_g[1][0].toString()}]`;
-    template = template.replace("<%vk_g%>", vkg_str);
-
-    const vkgb1_str = `${verificationKey.vk_gb_1[0].toString()},`+
-                      `${verificationKey.vk_gb_1[1].toString()}`;
-    template = template.replace("<%vk_gb1%>", vkgb1_str);
-
-    const vkgb2_str = `[${verificationKey.vk_gb_2[0][1].toString()},`+
-                      `${verificationKey.vk_gb_2[0][0].toString()}], `+
-                      `[${verificationKey.vk_gb_2[1][1].toString()},` +
-                      `${verificationKey.vk_gb_2[1][0].toString()}]`;
-    template = template.replace("<%vk_gb2%>", vkgb2_str);
-
-    const vkz_str = `[${verificationKey.vk_z[0][1].toString()},`+
-                    `${verificationKey.vk_z[0][0].toString()}], `+
-                    `[${verificationKey.vk_z[1][1].toString()},` +
-                    `${verificationKey.vk_z[1][0].toString()}]`;
-    template = template.replace("<%vk_z%>", vkz_str);
-
-    // The points
-
-    template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
-    template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
-    let vi = "";
-    for (let i=0; i<verificationKey.IC.length; i++) {
-        if (vi != "") vi = vi + " ";
-        vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
-                  `${verificationKey.IC[i][1].toString()});\n`;
-    }
-    template = template.replace("<%vk_ic_pts%>", vi);
-
-    return template;
-}
-
-function generateVerifier_groth16(verificationKey) {
-    let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_groth16.sol"), "utf-8");
-
-    const vkalfa1_str = `${verificationKey.vk_alfa_1[0].toString()},`+
-                        `${verificationKey.vk_alfa_1[1].toString()}`;
-    template = template.replace("<%vk_alfa1%>", vkalfa1_str);
-
-    const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
-                        `${verificationKey.vk_beta_2[0][0].toString()}], `+
-                        `[${verificationKey.vk_beta_2[1][1].toString()},` +
-                        `${verificationKey.vk_beta_2[1][0].toString()}]`;
-    template = template.replace("<%vk_beta2%>", vkbeta2_str);
-
-    const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+
-                         `${verificationKey.vk_gamma_2[0][0].toString()}], `+
-                         `[${verificationKey.vk_gamma_2[1][1].toString()},` +
-                         `${verificationKey.vk_gamma_2[1][0].toString()}]`;
-    template = template.replace("<%vk_gamma2%>", vkgamma2_str);
-
-    const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+
-                         `${verificationKey.vk_delta_2[0][0].toString()}], `+
-                         `[${verificationKey.vk_delta_2[1][1].toString()},` +
-                         `${verificationKey.vk_delta_2[1][0].toString()}]`;
-    template = template.replace("<%vk_delta2%>", vkdelta2_str);
-
-    // The points
-
-    template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
-    template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
-    let vi = "";
-    for (let i=0; i<verificationKey.IC.length; i++) {
-        if (vi != "") vi = vi + " ";
-        vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
-                  `${verificationKey.IC[i][1].toString()});\n`;
-    }
-    template = template.replace("<%vk_ic_pts%>", vi);
-
-    return template;
-}
-
-function generateVerifier_kimleeoh(verificationKey) {
-
-    assert(false); // Not implemented yet because it requires G2 exponentiation onchain.
-    let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_groth16.sol"), "utf-8");
-
-    const vkalfa1_str = `${verificationKey.vk_alfa_1[0].toString()},`+
-                        `${verificationKey.vk_alfa_1[1].toString()}`;
-    template = template.replace("<%vk_alfa1%>", vkalfa1_str);
-
-    const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
-                        `${verificationKey.vk_beta_2[0][0].toString()}], `+
-                        `[${verificationKey.vk_beta_2[1][1].toString()},` +
-                        `${verificationKey.vk_beta_2[1][0].toString()}]`;
-    template = template.replace("<%vk_beta2%>", vkbeta2_str);
-
-    const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+
-                         `${verificationKey.vk_gamma_2[0][0].toString()}], `+
-                         `[${verificationKey.vk_gamma_2[1][1].toString()},` +
-                         `${verificationKey.vk_gamma_2[1][0].toString()}]`;
-    template = template.replace("<%vk_gamma2%>", vkgamma2_str);
-
-    const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+
-                         `${verificationKey.vk_delta_2[0][0].toString()}], `+
-                         `[${verificationKey.vk_delta_2[1][1].toString()},` +
-                         `${verificationKey.vk_delta_2[1][0].toString()}]`;
-    template = template.replace("<%vk_delta2%>", vkdelta2_str);
-
-    // The points
-
-    template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
-    template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
-    let vi = "";
-    for (let i=0; i<verificationKey.IC.length; i++) {
-        if (vi != "") vi = vi + " ";
-        vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
-                  `${verificationKey.IC[i][1].toString()});\n`;
-    }
-    template = template.replace("<%vk_ic_pts%>", vi);
-
-    return template;
-}
+async function powersOfTawPreparePhase2(params, options) {
+    let oldPtauName;
+    let newPtauName;
+
+    oldPtauName = params[0];
+    newPtauName = params[1];
+
+    return await powersOfTaw.preparePhase2(oldPtauName, newPtauName, options.verbose);
+}

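Taken together, the cli.js changes route the new subcommand (and its alias pt2) into the powersoftaw module. A hedged sketch of the equivalent direct call; the file names are placeholders and the snarkjs binary name is an assumption:

    // CLI: snarkjs powersoftaw prepare phase2 pot12.ptaw pot12_new.ptaw -v
    const powersOfTaw = require("./src/powersoftaw");
    powersOfTaw.preparePhase2("pot12.ptaw", "pot12_new.ptaw", true);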
src/binfileutils.js (new file, 123 lines)
@@ -0,0 +1,123 @@
+const Scalar = require("ffjavascript").Scalar;
+const fastFile = require("fastfile");
+const assert = require("assert");
+
+async function readBinFile(fileName, type, maxVersion) {
+
+    const fd = await fastFile.readExisting(fileName);
+
+    const b = await fd.read(4);
+    let readedType = "";
+    for (let i=0; i<4; i++) readedType += String.fromCharCode(b[i]);
+
+    if (readedType != type) assert(false, fileName + ": Invalid File format");
+
+    let v = await fd.readULE32();
+
+    if (v>maxVersion) assert(false, "Version not supported");
+
+    const nSections = await fd.readULE32();
+
+    // Scan sections
+    let sections = [];
+    for (let i=0; i<nSections; i++) {
+        let ht = await fd.readULE32();
+        let hl = await fd.readULE64();
+        if (typeof sections[ht] == "undefined") sections[ht] = [];
+        sections[ht].push({
+            p: fd.pos,
+            size: hl
+        });
+        fd.pos += hl;
+    }
+
+    return {fd, sections};
+}
+
+async function createBinFile(fileName, type, version, nSections) {
+
+    const fd = await fastFile.createOverride(fileName);
+
+    const buff = new Uint8Array(4);
+    for (let i=0; i<4; i++) buff[i] = type.charCodeAt(i);
+    await fd.write(buff, 0); // Magic "r1cs"
+
+    await fd.writeULE32(version); // Version
+    await fd.writeULE32(nSections); // Number of Sections
+
+    return fd;
+}
+
+async function startWriteSection(fd, idSection) {
+    assert(typeof fd.writingSection === "undefined", "Already writing a section");
+    await fd.writeULE32(idSection); // Header type
+    fd.writingSection = {
+        pSectionSize: fd.pos
+    };
+    await fd.writeULE64(0); // Temporarily set to 0 length
+}
+
+async function endWriteSection(fd) {
+    assert(typeof fd.writingSection != "undefined", "Not writing a section");
+
+    const sectionSize = fd.pos - fd.writingSection.pSectionSize - 8;
+    const oldPos = fd.pos;
+    fd.pos = fd.writingSection.pSectionSize;
+    await fd.writeULE64(sectionSize);
+    fd.pos = oldPos;
+    delete fd.writingSection;
+}
+
+async function startReadUniqueSection(fd, sections, idSection) {
+    assert(typeof fd.readingSection === "undefined", "Already reading a section");
+    if (!sections[idSection]) assert(false, fd.fileName + ": Missing section "+ idSection );
+    if (sections[idSection].length>1) assert(false, fd.fileName +": Section Duplicated " +idSection);
+
+    fd.pos = sections[idSection][0].p;
+
+    fd.readingSection = sections[idSection][0];
+}
+
+async function endReadSection(fd, noCheck) {
+    assert(typeof fd.readingSection != "undefined", "Not reading a section");
+    if (!noCheck) {
+        assert.equal(fd.pos-fd.readingSection.p, fd.readingSection.size);
+    }
+    delete fd.readingSection;
+}
+
+async function writeBigInt(fd, n, n8) {
+    const buff = new Uint8Array(n8);
+    Scalar.toRprLE(buff, 0, n);
+    await fd.write(buff);
+}
+
+async function readBigInt(fd, n8) {
+    const buff = await fd.read(n8);
+    return Scalar.fromRprLE(buff, 0, n8);
+}
+
+async function copySection(fdFrom, sections, fdTo, sectionId) {
+    const chunkSize = fdFrom.pageSize;
+    await startReadUniqueSection(fdFrom, sections, sectionId);
+    await startWriteSection(fdTo, sectionId);
+    for (let p=0; p<sections[sectionId][0].size; p+=chunkSize) {
+        const l = Math.min(sections[sectionId][0].size -p, chunkSize);
+        const buff = await fdFrom.read(l);
+        await fdTo.write(buff);
+    }
+    await endWriteSection(fdTo);
+    await endReadSection(fdFrom);
+}
+
+module.exports.readBinFile = readBinFile;
+module.exports.createBinFile = createBinFile;
+module.exports.writeBigInt = writeBigInt;
+module.exports.readBigInt = readBigInt;
+module.exports.startWriteSection = startWriteSection;
+module.exports.endWriteSection = endWriteSection;
+module.exports.startReadUniqueSection = startReadUniqueSection;
+module.exports.endReadSection = endReadSection;
+module.exports.copySection = copySection;
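A hedged usage sketch of the new module; the file name and the section walk are illustrative only:

    // list the sections of a ptau file with the new helpers
    const binFileUtils = require("./src/binfileutils");

    async function listSections(fileName) {
        const {fd, sections} = await binFileUtils.readBinFile(fileName, "ptau", 1);
        sections.forEach((s, id) => console.log(`section ${id}: ${s[0].size} bytes`));
        await fd.close();
    }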
@@ -30,11 +30,9 @@ async function applyKey(params) {

     let res = [];
     const sG = G.F.n8*2;
-    const buffU = new ArrayBuffer(sG);
-    const buffUv = new Uint8Array(buffU);
+    const buffUv = new Uint8Array(sG);
     const scG = G.F.n8;
-    const buffC = new ArrayBuffer(scG);
-    const buffCv = new Uint8Array(buffC);
+    const buffCv = new Uint8Array(scG);

     const taskManager = await buildTaskManager(contributeThread, {
         ffjavascript: "ffjavascript"
@@ -79,9 +77,9 @@ async function applyKey(params) {
     for (let i=0; i<NPoints; i++) {
         const buff = await fdTo.read(sG);
         const P = G.fromRprLEM(buff, 0);
-        G.toRprBE(buffU, 0, P);
+        G.toRprBE(buffUv, 0, P);
         newChallangeHasher.update(buffUv);
-        G.toRprCompressed(buffC, 0, P);
+        G.toRprCompressed(buffCv, 0, P);
         responseHasher.update(buffCv);
         const idx = returnPoints.indexOf(i);
         if (idx>=0) res[idx] = P;
@@ -103,7 +101,7 @@ function contributeThread(ctx, task) {
     } else if (task.cmd == "MUL") {
         const G = ctx.curve[task.G];
         const sG = G.F.n64*8*2;
-        const buffDest = new ArrayBuffer(sG*task.n);
+        const buffDest = new Uint8Array(sG*task.n);
         let t = ctx.curve.Fr.e(task.first);
         let inc = ctx.curve.Fr.e(task.inc);
         for (let i=0; i<task.n; i++) {
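The three hunks above share one fix: the ffjavascript point serializers (toRprBE, toRprCompressed, and the batch helpers) write into a Uint8Array at a byte offset, so the separate ArrayBuffer-plus-view pairs collapse into single Uint8Array allocations. A minimal sketch of the pattern, under that assumption:

    // assumption: rpr helpers take a Uint8Array view plus a byte offset
    const buffUv = new Uint8Array(sG); // replaces ArrayBuffer + Uint8Array view
    G.toRprBE(buffUv, 0, P);           // serialize point P big-endian into the view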
@@ -146,7 +146,7 @@ async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp,
     currentContribution.nextChallange = newChallangeHasher.digest();
     currentContribution.partialHash = responseHasher.getPartialHash();

-    const buffKey = new ArrayBuffer(curve.F1.n8*2*6+curve.F2.n8*2*3);
+    const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);

     utils.toPtauPubKeyRpr(buffKey, 0, curve, currentContribution.key, false);

@@ -18,7 +18,7 @@

 const fastFile = require("fastfile");
 const assert = require("assert");
-const blake2b = require("blake2b-wasm");
+const Blake2b = require("blake2b-wasm");
 const readline = require("readline");
 const crypto = require("crypto");
 const ChaCha = require("ffjavascript").ChaCha;
@@ -26,7 +26,6 @@ const fs = require("fs");
 const utils = require("./powersoftau_utils");


-const buildTaskManager = require("./taskmanager");
 const keyPair = require("./keypair");


@@ -43,16 +42,12 @@ function askEntropy() {


 async function challangeContribute(curve, challangeFilename, responesFileName, entropy, verbose) {
-    await blake2b.ready();
+    await Blake2b.ready();

-    const MAX_CHUNK_SIZE = 1024;
-
     let stats = await fs.promises.stat(challangeFilename);

     const sG1 = curve.F1.n64*8*2;
-    const scG1 = curve.F1.n64*8; // Compresed size
     const sG2 = curve.F2.n64*8*2;
-    const scG2 = curve.F2.n64*8; // Compresed size
     const domainSize = (stats.size + sG1 - 64 - sG2) / (4*sG1 + sG2);
     let e = domainSize;
     let power = 0;
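The domainSize formula can be checked against the challange layout documented later in this commit (64-byte hash, 2^N*2-1 tauG1, 2^N tauG2, 2^N alphaTauG1, 2^N betaTauG1 and one betaG2 point, all uncompressed). A small sanity sketch:

    // size = 64 + (4*D - 1)*sG1 + (D + 1)*sG2 for D = 2^N points, so
    // D = (size + sG1 - 64 - sG2) / (4*sG1 + sG2), matching the code above
    function challangeFileSize(power, sG1, sG2) {
        const D = 1 << power;
        return 64 + (4*D - 1)*sG1 + (D + 1)*sG2;
    }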
@@ -62,11 +57,11 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
     }

     assert(1<<power == domainSize, "Invalid file size");
+    console.log("Power to tau size: "+power);

     const fdFrom = await fastFile.readExisting(challangeFilename);

     const fdTo = await fastFile.createOverride(responesFileName);
-    let writePointer = 0;

     while (!entropy) {
         entropy = await askEntropy();
@@ -74,26 +69,25 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e

     // Calculate the hash
     console.log("Hashing challange");
-    const challangeHasher = blake2b(64);
+    const challangeHasher = Blake2b(64);
     for (let i=0; i<stats.size; i+= fdFrom.pageSize) {
         const s = Math.min(stats.size - i, fdFrom.pageSize);
         const buff = await fdFrom.read(s);
-        challangeHasher.update(new Uint8Array(buff));
+        challangeHasher.update(buff);
     }

-    const challangeHash = challangeHasher.digest();
-    console.log("Challange Hash: ");
-    console.log(utils.formatHash(challangeHash));
-
-    const claimedHash = new Uint8Array( await fdFrom.read(64, 0));
-    console.log("Claimed Hash: ");
+    const claimedHash = await fdFrom.read(64, 0);
+    console.log("Claimed Previous Challange Hash: ");
     console.log(utils.formatHash(claimedHash));

-    const hasher = blake2b(64);
+    const challangeHash = challangeHasher.digest();
+    console.log("Current Challange Hash: ");
+    console.log(utils.formatHash(challangeHash));
+
+    const hasher = Blake2b(64);

     hasher.update(crypto.randomBytes(64));

     const enc = new TextEncoder(); // always utf-8
     hasher.update(enc.encode(entropy));

@@ -126,162 +120,48 @@ async function challangeContribute(curve, challangeFilename, responesFileName, e
         });
     }

+    const responseHasher = Blake2b(64);
+
     await fdTo.write(challangeHash);
-    writePointer += 64;
+    responseHasher.update(challangeHash);

-    const taskManager = await buildTaskManager(contributeThread, {
-        ffjavascript: "ffjavascript"
-    },{
-        curve: curve.name
-    });
-
-    // TauG1
-    let t = curve.Fr.e(1);
-    for (let i=0; i<domainSize*2-1; i += MAX_CHUNK_SIZE) {
-        if ((verbose)&&i) console.log("TauG1: " + i);
-        const n = Math.min(domainSize*2-1 - i, MAX_CHUNK_SIZE);
-        const buff = await fdFrom.read(n*sG1);
-        await taskManager.addTask({
-            cmd: "MULG1",
-            first: t,
-            inc: key.tau.prvKey.toString(),
-            buff: buff,
-            n: n,
-            writePos: writePointer
-        }, async function(r) {
-            return await fdTo.write(r.buff, r.writePos);
-        });
-        t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
-        writePointer += n*scG1;
-    }
-
-    // TauG2
-    t = curve.Fr.e(1);
-    for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
-        if ((verbose)&&i) console.log("TauG2: " + i);
-        const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
-        const buff = await fdFrom.read(n*sG2);
-        await taskManager.addTask({
-            cmd: "MULG2",
-            first: t,
-            inc: key.tau.prvKey.toString(),
-            buff: buff,
-            n: n,
-            writePos: writePointer
-        }, async function(r) {
-            return await fdTo.write(r.buff, r.writePos);
-        });
-        t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
-        writePointer += n*scG2;
-    }
-
-    // AlphaTauG1
-    t = curve.Fr.e(key.alpha.prvKey);
-    for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
-        if ((verbose)&&i) console.log("AlfaTauG1: " + i);
-        const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
-        const buff = await fdFrom.read(n*sG1);
-        await taskManager.addTask({
-            cmd: "MULG1",
-            first: t,
-            inc: key.tau.prvKey.toString(),
-            buff: buff,
-            n: n,
-            writePos: writePointer
-        }, async function(r) {
-            return await fdTo.write(r.buff, r.writePos);
-        });
-        t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
-        writePointer += n*scG1;
-    }
-
-    // BetaTauG1
-    t = curve.Fr.e(key.beta.prvKey);
-    for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
-        if ((verbose)&&i) console.log("BetaTauG1: " + i);
-        const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
-        const buff = await fdFrom.read(n*sG1);
-        await taskManager.addTask({
-            cmd: "MULG1",
-            first: t,
-            inc: key.tau.prvKey.toString(),
-            buff: buff,
-            n: n,
-            writePos: writePointer
-        }, async function(r) {
-            return await fdTo.write(r.buff, r.writePos);
-        });
-        t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
-        writePointer += n*scG1;
-    }
-
-    // BetaG2
-    const buffOldBeta = await fdFrom.read(sG2);
-    const oldBeta = curve.G2.fromRprBE(buffOldBeta);
-    const newBeta = curve.G2.mulScalar(oldBeta, key.beta.prvKey);
-    const buffNewBeta = new ArrayBuffer(curve.F2.n8*2);
-    curve.G2.toRprCompressed(buffNewBeta, 0, newBeta);
-    await fdTo.write(buffNewBeta, writePointer);
-    writePointer += scG2;
-
-    await taskManager.finish();
-
-    //Write Key
-    fdTo.pos = writePointer;
-    await utils.writePtauPubKey(fdTo, curve, key);
+    await contributeSection("G1", (1<<power)*2-1, curve.Fr.one, key.tau.prvKey, "tauG1" );
+    await contributeSection("G2", (1<<power)   , curve.Fr.one, key.tau.prvKey, "tauG2" );
+    await contributeSection("G1", (1<<power)   , key.alpha.prvKey, key.tau.prvKey, "alphaTauG1" );
+    await contributeSection("G1", (1<<power)   , key.beta.prvKey, key.tau.prvKey, "betaTauG1" );
+    await contributeSection("G2", 1            , key.beta.prvKey, key.tau.prvKey, "betaG2" );
+
+    // Write and hash key
+    const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);
+    utils.toPtauPubKeyRpr(buffKey, 0, curve, key, false);
+    await fdTo.write(buffKey);
+    responseHasher.update(buffKey);
+    const responseHash = responseHasher.digest();
+    console.log("Contribution Response Hash: ");
+    console.log(utils.formatHash(responseHash));

     await fdTo.close();
     await fdFrom.close();

-}
-
-function contributeThread(ctx, task) {
-    if (task.cmd == "INIT") {
-        ctx.assert = ctx.modules.assert;
-        if (task.curve == "bn128") {
-            ctx.curve = ctx.modules.ffjavascript.bn128;
-        } else {
-            ctx.assert(false, "curve not defined");
-        }
-        return {};
-    } else if (task.cmd == "MULG1") {
-        const sG1 = ctx.curve.F1.n64*8*2;
-        const scG1 = ctx.curve.F1.n64*8; // Compresed size
-        const buffDest = new ArrayBuffer(scG1*task.n);
-        let t = ctx.curve.Fr.e(task.first);
-        let inc = ctx.curve.Fr.e(task.inc);
-        for (let i=0; i<task.n; i++) {
-            const P = ctx.curve.G1.fromRprBE(task.buff, i*sG1);
-            const R = ctx.curve.G1.mulScalar(P, t);
-            ctx.curve.G1.toRprCompressed(buffDest, i*scG1, R);
-            t = ctx.curve.Fr.mul(t, inc);
-        }
-        return {
-            buff: buffDest,
-            writePos: task.writePos
-        };
-    } else if (task.cmd == "MULG2") {
-        const sG2 = ctx.curve.F2.n64*8*2;
-        const scG2 = ctx.curve.F2.n64*8; // Compresed size
-        const buffDest = new ArrayBuffer(scG2*task.n);
-        let t = ctx.curve.Fr.e(task.first);
-        let inc = ctx.curve.Fr.e(task.inc);
-        for (let i=0; i<task.n; i++) {
-            const P = ctx.curve.G2.fromRprBE(task.buff, i*sG2);
-            const R = ctx.curve.G2.mulScalar(P, t);
-            ctx.curve.G2.toRprCompressed(buffDest, i*scG2, R);
-            t = ctx.curve.Fr.mul(t, inc);
-        }
-        return {
-            buff: buffDest,
-            writePos: task.writePos
-        };
-    } else {
-        ctx.assert(false, "Op not implemented");
-    }
-}
+    async function contributeSection(groupName, nPoints, first, inc, sectionName) {
+        const G = curve[groupName];
+        const sG = G.F.n8*2;
+        const chunkSize = Math.floor((1<<20) / sG); // 1MB chunks
+        let t = first;
+        for (let i=0 ; i<nPoints ; i+= chunkSize) {
+            if ((verbose)&&i) console.log(`${sectionName}: ` + i);
+            const n= Math.min(nPoints-i, chunkSize );
+            const buffInU = await fdFrom.read(n * sG);
+            const buffInLEM = await G.batchUtoLEM(buffInU);
+            const buffOutLEM = await G.batchApplyKey(buffInLEM, t, inc);
+            const buffOutC = await G.batchLEMtoC(buffOutLEM);
+
+            responseHasher.update(buffOutC);
+            await fdTo.write(buffOutC);
+            t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
+        }
+    }
+}

 module.exports = challangeContribute;
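The inlined contributeSection replaces the task-manager pipeline: each chunk is converted from the uncompressed challange encoding to LEM form, the key is applied, and the result is written back compressed while feeding the response hash. The (first, inc) pairs passed above determine which secret multiplies each point; a hedged summary:

    // point k of a section is multiplied by first * inc^k:
    // tauG1[k], tauG2[k] -> tau^k         (first = 1,     inc = tau)
    // alphaTauG1[k]      -> alpha * tau^k (first = alpha, inc = tau)
    // betaTauG1[k]       -> beta * tau^k  (first = beta,  inc = tau)
    // betaG2             -> beta          (single point)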
@@ -1,10 +1,17 @@
+// Format of the output
+//     Hash of the last contribution  64 Bytes
+//     2^N*2-1 TauG1 Points (uncompressed)
+//     2^N TauG2 Points (uncompressed)
+//     2^N AlphaTauG1 Points (uncompressed)
+//     2^N BetaTauG1 Points (uncompressed)
+
 const Blake2b = require("blake2b-wasm");
 const utils = require("./powersoftau_utils");
-const wasmSnark = require("wasmsnark");
 const ChaCha = require("ffjavascript").ChaCha;
 const crypto = require("crypto");
 const keyPair = require("./keypair");
 const readline = require("readline");
+const binFileUtils = require("./binfileutils");


 const rl = readline.createInterface({
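A hedged worked example of the layout above, also counting the single betaG2 point that contributeSection writes, with bn128 sizes assumed:

    // N = 2: 64-byte hash, 7 TauG1, 4 TauG2, 4 AlphaTauG1, 4 BetaTauG1, 1 BetaG2
    // with uncompressed G1 = 64 bytes and G2 = 128 bytes:
    // 64 + (7+4+4)*64 + (4+1)*128 = 1664 bytes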
@@ -21,12 +28,13 @@ function askEntropy() {
 async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbose) {
     await Blake2b.ready();

-    const {fd: fdOld, sections} = await utils.readBinFile(oldPtauFilename, "ptau", 1);
-    const {curve, power} = await utils.readPTauHeader(fdOld, sections);
-    if (curve.name == "bn128") {
-        wasmCurve = await wasmSnark.buildBn128();
-    } else {
-        throw new Error("Curve not supported");
+    const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
+    const {curve, power, ceremonyPower} = await utils.readPTauHeader(fdOld, sections);
+    if (power != ceremonyPower) {
+        throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
+    }
+    if (sections[12]) {
+        console.log("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
     }
     const contributions = await utils.readContributions(fdOld, curve, sections);
     const curContribution = {
@@ -56,8 +64,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
     for (let i=0;i<8;i++) {
         seed[i] = hash.readUInt32BE(i*4);
     }
-    // const rng = new ChaCha(seed);
-    const rng = new ChaCha();
+    const rng = new ChaCha(seed);
     curContribution.key = keyPair.createPTauKey(curve, lastChallangeHash, rng);


@@ -67,7 +74,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
     const responseHasher = new Blake2b(64);
     responseHasher.update(lastChallangeHash);

-    const fdNew = await utils.createBinFile(newPTauFilename, "ptau", 1, 7);
+    const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 7);
     await utils.writePTauHeader(fdNew, curve, power);

     let firstPoints;
@@ -83,9 +90,13 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
     curContribution.betaG2 = firstPoints[0];

     curContribution.nextChallange = newChallangeHasher.digest();
+
+    console.log("Next Challange Hash: ");
+    console.log(utils.formatHash(curContribution.nextChallange));
+
     curContribution.partialHash = responseHasher.getPartialHash();

-    const buffKey = new ArrayBuffer(curve.F1.n8*2*6+curve.F2.n8*2*3);
+    const buffKey = new Uint8Array(curve.F1.n8*2*6+curve.F2.n8*2*3);

     utils.toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);

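The key buffer size follows from the ptau public-key layout serialized by toPtauPubKeyRpr: six uncompressed G1 points and three uncompressed G2 points. A hedged worked example on bn128:

    // assumption: bn128, where curve.F1.n8 = 32 and curve.F2.n8 = 64
    const keySize = 32*2*6 + 64*2*3; // = 768 bytes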
@@ -103,23 +114,23 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
     await fdNew.close();

     return;

-    async function processSection(sectionId, Gstr, NPoints, first, inc, sectionName) {
+    async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) {
         const res = [];
         fdOld.pos = sections[sectionId][0].p;
         await fdNew.writeULE32(sectionId); // tauG1
         const pSection = fdNew.pos;
         await fdNew.writeULE64(0); // Temporarily set to 0 length

-        const G = curve[Gstr];
+        const G = curve[groupName];
         const sG = G.F.n8*2;
-        const chunkSize = (1<<27) / sG; // 128Mb chunks
+        const chunkSize = Math.floor((1<<20) / sG); // 1MB chunks
         let t = first;
         for (let i=0 ; i<NPoints ; i+= chunkSize) {
             if ((verbose)&&i) console.log(`${sectionName}: ` + i);
             const n= Math.min(NPoints-i, chunkSize );
             const buffIn = await fdOld.read(n * sG);
             const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
-            const promiseWrite = fdNew.write(buffOutLEM.buffer);
+            const promiseWrite = fdNew.write(buffOutLEM);
             const buffOutU = await G.batchLEMtoU(buffOutLEM);
             const buffOutC = await G.batchLEMtoC(buffOutLEM);

@@ -128,7 +139,7 @@ async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbo
             await promiseWrite;
             if (i==0) // Return the 2 first points.
                 for (let j=0; j<Math.min(2, NPoints); j++)
-                    res.push(G.fromRprLEM(buffOutLEM.buffer, j*sG));
+                    res.push(G.fromRprLEM(buffOutLEM, j*sG));
             t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
         }

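The chunk-size change is easy to make concrete (bn128 sizes assumed):

    // (1<<20) = 1048576 bytes per chunk; for G1 with sG = 64 that gives
    // Math.floor(1048576 / 64) = 16384 points per batchApplyKey call
    // (the old (1<<27) value, 128 MiB, was the source of the stale comment)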
@@ -7,121 +7,75 @@
 // BetaG2 (uncompressed)

 const fastFile = require("fastfile");
-const assert = require("assert");
 const Blake2b = require("blake2b-wasm");
 const utils = require("./powersoftau_utils");
+const binFileUtils = require("./binfileutils");

 async function exportChallange(pTauFilename, challangeFilename, verbose) {
     await Blake2b.ready();
-    const {fd: fdFrom, sections} = await utils.readBinFile(pTauFilename, "ptau", 1);
+    const {fd: fdFrom, sections} = await binFileUtils.readBinFile(pTauFilename, "ptau", 1);

     const {curve, power} = await utils.readPTauHeader(fdFrom, sections);

     const contributions = await utils.readContributions(fdFrom, curve, sections);
-    let challangeHash;
+    let lastResponseHash, curChallangeHash;
     if (contributions.length == 0) {
-        challangeHash = Blake2b(64).digest();
+        lastResponseHash = Blake2b(64).digest();
+        curChallangeHash = utils.calculateFirstChallangeHash(curve, power);
     } else {
-        challangeHash = contributions[contributions.length-1].newChallange;
+        lastResponseHash = contributions[contributions.length-1].responseHash;
+        curChallangeHash = contributions[contributions.length-1].nextChallange;
     }

+    console.log("Last Response Hash: ");
+    console.log(utils.formatHash(lastResponseHash));
+
+    console.log("New Challange Hash: ");
+    console.log(utils.formatHash(curChallangeHash));
+
     const fdTo = await fastFile.createOverride(challangeFilename);

     const toHash = Blake2b(64);
-    fdTo.write(challangeHash);
-    toHash.update(challangeHash);
-
-    const buffG1 = new ArrayBuffer(curve.F1.n8*2);
-    const buffG1v = new Uint8Array(buffG1);
-    const buffG2 = new ArrayBuffer(curve.F2.n8*2);
-    const buffG2v = new Uint8Array(buffG2);
-
-    // Process tauG1
-    if (!sections[2]) assert(false, "File has no tauG1 section");
-    if (sections[2].length>1) assert(false, "File has more than one tauG1 section");
-    fdFrom.pos = sections[2][0].p;
-    const nTauG1 = (1 << power) * 2 -1;
-    for (let i=0; i< nTauG1; i++) {
-        const p = await readG1();
-        await writeG1(p);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG1: " + i);
-    }
-    if (fdFrom.pos != sections[2][0].p + sections[2][0].size) assert(false, "Invalid tauG1 section size");
-
-    // Process tauG2
-    if (!sections[3]) assert(false, "File has no tauG2 section");
-    if (sections[3].length>1) assert(false, "File has more than one tauG2 section");
-    fdFrom.pos = sections[3][0].p;
-    const nTauG2 = 1 << power ;
-    for (let i=0; i< nTauG2; i++) {
-        const p = await readG2();
-        await writeG2(p);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG2: " + i);
-    }
-    if (fdFrom.pos != sections[3][0].p + sections[3][0].size) assert(false, "Invalid tauG2 section size");
-
-    // Process alphaTauG1
-    if (!sections[4]) assert(false, "File has no alphaTauG1 section");
-    if (sections[4].length>1) assert(false, "File has more than one alphaTauG1 section");
-    fdFrom.pos = sections[4][0].p;
-    const nAlphaTauG1 = 1 << power ;
-    for (let i=0; i< nAlphaTauG1; i++) {
-        const p = await readG1();
-        await writeG1(p);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("alphaTauG1: " + i);
-    }
-    if (fdFrom.pos != sections[4][0].p + sections[4][0].size) assert(false, "Invalid alphaTauG1 section size");
-
-    // Process betaTauG1
-    if (!sections[5]) assert(false, "File has no betaTauG1 section");
-    if (sections[5].length>1) assert(false, "File has more than one betaTauG1 section");
-    fdFrom.pos = sections[5][0].p;
-    const nBetaTauG1 = 1 << power ;
-    for (let i=0; i< nBetaTauG1; i++) {
-        const p = await readG1();
-        await writeG1(p);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("betaTauG1: " + i);
-    }
-    if (fdFrom.pos != sections[5][0].p + sections[5][0].size) assert(false, "Invalid betaTauG1 section size");
-
-    // Process betaG2
-    if (!sections[6]) assert(false, "File has no betaG2 section");
-    if (sections[6].length>1) assert(false, "File has more than one betaG2 section");
-    fdFrom.pos = sections[6][0].p;
-    const betaG2 = await readG2();
-    await writeG2(betaG2);
-    if (fdFrom.pos != sections[6][0].p + sections[6][0].size) assert(false, "Invalid betaG2 section size");
+    await fdTo.write(lastResponseHash);
+    toHash.update(lastResponseHash);
+
+    await exportSection(2, "G1", (1 << power) * 2 -1, "tauG1");
+    await exportSection(3, "G2", (1 << power) , "tauG2");
+    await exportSection(4, "G1", (1 << power) , "alphaTauG1");
+    await exportSection(5, "G1", (1 << power) , "betaTauG1");
+    await exportSection(6, "G2", 1 , "betaG2");

     await fdFrom.close();
     await fdTo.close();

-    const newChallangeHash = toHash.digest("hex");
-
-    console.log("Challange Hash: " +newChallangeHash);
-
-    async function readG1() {
-        const pBuff = await fdFrom.read(curve.F1.n8*2);
-        return curve.G1.fromRprLEM( pBuff );
-    }
-
-    async function readG2() {
-        const pBuff = await fdFrom.read(curve.F2.n8*2);
-        return curve.G2.fromRprLEM( pBuff );
-    }
-
-    async function writeG1(p) {
-        curve.G1.toRprBE(buffG1, 0, p);
-        await fdTo.write(buffG1);
-        toHash.update(buffG1v);
-    }
-
-    async function writeG2(p) {
-        curve.G2.toRprBE(buffG2, 0, p);
-        await fdTo.write(buffG2);
-        toHash.update(buffG2v);
-    }
+    const calcCurChallangeHash = toHash.digest();
+
+    if (!utils.hashIsEqual (curChallangeHash, calcCurChallangeHash)) {
+        console.log("Calculated Current Challange Hash: ");
+        console.log(utils.formatHash(calcCurChallangeHash));
+
+        throw new Error("PTau file is corrupted. Calculated new challange hash does not match with the declared one");
+    }
+
+    async function exportSection(sectionId, groupName, nPoints, sectionName) {
+        const G = curve[groupName];
+        const sG = G.F.n8*2;
+        const nPointsChunk = Math.floor((1<<27)/sG);
+
+        await binFileUtils.startReadUniqueSection(fdFrom, sections, sectionId);
+        for (let i=0; i< nPoints; i+= nPointsChunk) {
+            if ((verbose)&&i) console.log(`${sectionName}: ` + i);
+            const n = Math.min(nPoints-i, nPointsChunk);
+            let buff;
+            buff = await fdFrom.read(n*sG);
+            buff = await G.batchLEMtoU(buff);
+            await fdTo.write(buff);
+            toHash.update(buff);
+        }
+        await binFileUtils.endReadSection(fdFrom);
+    }
 }

@@ -3,12 +3,13 @@ const fastFile = require("fastfile");
 const Blake2b = require("blake2b-wasm");
 const fs = require("fs");
 const utils = require("./powersoftau_utils");
+const binFileUtils = require("./binfileutils");

 async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {

     await Blake2b.ready();

-    const {fd: fdOld, sections} = await utils.readBinFile(oldPtauFilename, "ptau", 1);
+    const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
     const {curve, power} = await utils.readPTauHeader(fdOld, sections);
     const contributions = await utils.readContributions(fdOld, curve, sections);
     const currentContribution = {};
@@ -38,7 +39,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
         lastChallangeHash = utils.calculateFirstChallangeHash(curve, power);
     }

-    const fdNew = await utils.createBinFile(newPTauFilename, "ptau", 1, 7);
+    const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 7);
     await utils.writePTauHeader(fdNew, curve, power);

     const fdResponse = await fastFile.readExisting(contributionFilename);
@@ -48,18 +49,21 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
         "Wrong contribution. This contribution is not based on the previous hash");

     const hasherResponse = new Blake2b(64);
-    hasherResponse.update(new Uint8Array(contributionPreviousHash));
+    hasherResponse.update(contributionPreviousHash);

-    const hasherNewChallange = new Blake2b(64);
-    hasherNewChallange.update(lastChallangeHash);
-
-    await processSection(fdResponse, fdNew, 2, (1 << power) * 2 -1, "G1", "tauG1", 1);
-    await processSection(fdResponse, fdNew, 3, (1 << power) , "G2", "tauG2", 1);
-    await processSection(fdResponse, fdNew, 4, (1 << power) , "G1", "alphaG1", 0);
-    await processSection(fdResponse, fdNew, 5, (1 << power) , "G1", "betaG1", 0);
-    await processSection(fdResponse, fdNew, 6, 1 , "G2", "betaG2", 0);
-
-    currentContribution.nextChallange = hasherNewChallange.digest();
+    const startSections = [];
+    let res;
+    res = await processSection(fdResponse, fdNew, "G1", 2, (1 << power) * 2 -1, [1], "tauG1");
+    currentContribution.tauG1 = res[0];
+    res = await processSection(fdResponse, fdNew, "G2", 3, (1 << power) , [1], "tauG2");
+    currentContribution.tauG2 = res[0];
+    res = await processSection(fdResponse, fdNew, "G1", 4, (1 << power) , [0], "alphaG1");
+    currentContribution.alphaG1 = res[0];
+    res = await processSection(fdResponse, fdNew, "G1", 5, (1 << power) , [0], "betaG1");
+    currentContribution.betaG1 = res[0];
+    res = await processSection(fdResponse, fdNew, "G2", 6, 1 , [0], "betaG2");
+    currentContribution.betaG2 = res[0];
+
     currentContribution.partialHash = hasherResponse.getPartialHash();

@@ -70,10 +74,22 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
     hasherResponse.update(new Uint8Array(buffKey));
     const hashResponse = hasherResponse.digest();

-    if (verbose) {
-        console.log("Contribution Response Hash imported: ");
-        console.log(utils.formatHash(hashResponse));
-    }
+    console.log("Contribution Response Hash imported: ");
+    console.log(utils.formatHash(hashResponse));
+
+    const nextChallangeHasher = new Blake2b(64);
+    nextChallangeHasher.update(hashResponse);
+
+    await hashSection(fdNew, "G1", 2, (1 << power) * 2 -1, "tauG1");
+    await hashSection(fdNew, "G2", 3, (1 << power) , "tauG2");
+    await hashSection(fdNew, "G1", 4, (1 << power) , "alphaTauG1");
+    await hashSection(fdNew, "G1", 5, (1 << power) , "betaTauG1");
+    await hashSection(fdNew, "G2", 6, 1 , "betaG2");
+
+    currentContribution.nextChallange = nextChallangeHasher.digest();
+
+    console.log("Next Challange Hash: ");
+    console.log(utils.formatHash(currentContribution.nextChallange));

     contributions.push(currentContribution);

@@ -83,31 +99,67 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
     await fdNew.close();
     await fdOld.close();

-    async function processSection(fdFrom, fdTo, sectionId, n, G, name, contributionId) {
-
-        const buffU = new ArrayBuffer(curve[G].F.n8*2);
-        const buffUv = new Uint8Array(buffU);
-        const scG = curve[G].F.n8;
-
-        await fdTo.writeULE32(sectionId); // tauG1
-        const pSection = fdTo.pos;
-        await fdTo.writeULE64(0); // Temporally set to 0 length
-        for (let i=0; i< n; i++) {
-            const buffC = await fdFrom.read(scG);
-            hasherResponse.update(new Uint8Array(buffC));
-            const P = curve[G].fromRprCompressed(buffC);
-            if (i==contributionId) currentContribution[name] = P;
-            curve[G].toRprBE(buffU, 0, P);
-            hasherNewChallange.update(buffUv);
-            curve[G].toRprLEM(buffU, 0, P);
-            await fdTo.write(buffU);
-            if ((verbose)&&((i%100000) == 0)&&i) console.log(name +": " + i);
-        }
-        const sSize = fdTo.pos - pSection -8;
-        const lastPos = fdTo.pos;
-        await fdTo.writeULE64(sSize, pSection);
-        fdTo.pos = lastPos;
+    async function processSection(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
+
+        const G = curve[groupName];
+        const scG = G.F.n8;
+        const sG = G.F.n8*2;
+
+        const singularPoints = [];
+
+        await binFileUtils.startWriteSection(fdTo, sectionId);
+        const nPointsChunk = Math.floor((1<<27)/sG);
+
+        startSections[sectionId] = fdTo.pos;
+
+        for (let i=0; i< nPoints; i += nPointsChunk) {
+            if ((verbose)&&i) console.log(`Importing ${sectionName}: ` + i);
+            const n = Math.min(nPoints-i, nPointsChunk);
+
+            const buffC = await fdFrom.read(n * scG);
+            hasherResponse.update(buffC);
+
+            const buffLEM = await G.batchCtoLEM(buffC);
+
+            await fdTo.write(buffLEM);
+            for (let j=0; j<singularPointIndexes.length; j++) {
+                const sp = singularPointIndexes[j];
+                if ((sp >=i) && (sp < i+n)) {
+                    const P = G.fromRprLEM(buffLEM, (sp-i)*sG);
+                    singularPoints.push(P);
+                }
+            }
+        }
+
+        await binFileUtils.endWriteSection(fdTo);
+
+        return singularPoints;
     }

+    async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName) {
+
+        const G = curve[groupName];
+        const sG = G.F.n8*2;
+        const nPointsChunk = Math.floor((1<<27)/sG);
+
+        const oldPos = fdTo.pos;
+        fdTo.pos = startSections[sectionId];
+
+        for (let i=0; i< nPoints; i += nPointsChunk) {
+            if ((verbose)&&i) console.log(`Hashing ${sectionName}: ` + i);
+            const n = Math.min(nPoints-i, nPointsChunk);
+
+            const buffLEM = await fdTo.read(n * sG);
+
+            const buffU = await G.batchLEMtoU(buffLEM);
+
+            nextChallangeHasher.update(buffU);
+        }
+
+        fdTo.pos = oldPos;
+    }
 }

 module.exports = importResponse;
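The import is now two-pass: processSection streams each response section to disk in LEM form while recording where it starts, and hashSection later seeks back, converts the data to the uncompressed encoding, and feeds it to the next-challange hasher. A compressed sketch of the pattern, names as in the code above:

    // pass 1 (processSection): remember the section start, stream LEM data
    startSections[sectionId] = fdTo.pos;
    await fdTo.write(await G.batchCtoLEM(buffC));
    // pass 2 (hashSection): seek back, re-read, hash the uncompressed form
    fdTo.pos = startSections[sectionId];
    nextChallangeHasher.update(await G.batchLEMtoU(await fdTo.read(n * sG)));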
@@ -47,17 +47,18 @@ contributions(7)
 */

 const ptauUtils = require("./powersoftau_utils");
+const binFileUtils = require("./binfileutils");


 async function newAccumulator(curve, power, fileName, verbose) {

-    const fd = await ptauUtils.createBinFile(fileName, "ptau", 1, 7);
+    const fd = await binFileUtils.createBinFile(fileName, "ptau", 1, 7);

     await ptauUtils.writePTauHeader(fd, curve, power, 0);

-    const buffG1 = new ArrayBuffer(curve.G1.F.n8*2);
-    const buffG2 = new ArrayBuffer(curve.G2.F.n8*2);
+    const buffG1 = new Uint8Array(curve.G1.F.n8*2);
+    const buffG2 = new Uint8Array(curve.G2.F.n8*2);
     curve.G1.toRprLEM(buffG1, 0, curve.G1.g);
     curve.G2.toRprLEM(buffG2, 0, curve.G2.g);

src/powersoftau_preparephase2.js (new file, 48 lines)
@@ -0,0 +1,48 @@
|
const binFileUtils = require("./binfileutils");
|
||||||
|
const utils = require("./powersoftau_utils");
|
||||||
|
|
||||||
|
async function preparePhase2(oldPtauFilename, newPTauFilename, verbose) {
|
||||||
|
|
||||||
|
const {fd: fdOld, sections} = await binFileUtils.readBinFile(oldPtauFilename, "ptau", 1);
|
||||||
|
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
|
||||||
|
|
||||||
|
const fdNew = await binFileUtils.createBinFile(newPTauFilename, "ptau", 1, 11);
|
||||||
|
await utils.writePTauHeader(fdNew, curve, power);
|
||||||
|
|
||||||
|
await binFileUtils.copySection(fdOld, sections, fdNew, 2);
|
||||||
|
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
|
||||||
|
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
|
||||||
|
await binFileUtils.copySection(fdOld, sections, fdNew, 5);
|
||||||
|
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
|
||||||
|
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
|
||||||
|
|
||||||
|
await processSection(2, 12, "G1", (1<<power) , "tauG1" );
|
||||||
|
await processSection(3, 13, "G2", (1<<power) , "tauG2" );
|
||||||
|
await processSection(4, 14, "G1", (1<<power) , "alphaTauG1" );
|
||||||
|
await processSection(5, 15, "G1", (1<<power) , "betaTauG1" );
|
||||||
|
|
||||||
|
await fdOld.close();
|
||||||
|
await fdNew.close();
|
||||||
|
|
||||||
|
return;
|
||||||
|
|
||||||
|
async function processSection(oldSectionId, newSectionId, Gstr, NPoints, sectionName) {
|
||||||
|
|
||||||
|
if (verbose) console.log("Starting section: "+sectionName);
|
||||||
|
const G = curve[Gstr];
|
||||||
|
const sG = G.F.n8*2;
|
||||||
|
|
||||||
|
let buff;
|
||||||
|
await binFileUtils.startReadUniqueSection(fdOld, sections, oldSectionId);
|
||||||
|
buff = await fdOld.read(sG*NPoints);
|
||||||
|
await binFileUtils.endReadSection(fdOld, true);
|
||||||
|
|
||||||
|
buff = await G.ifft(buff, verbose ? console.log : null);
|
||||||
|
|
||||||
|
await binFileUtils.startWriteSection(fdNew, newSectionId);
|
||||||
|
await fdNew.write(buff);
|
||||||
|
await binFileUtils.endWriteSection(fdNew);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = preparePhase2;
|
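preparePhase2 converts each section of powers {G, tau*G, tau^2*G, ...} into the evaluations of the Lagrange basis polynomials at tau, {L_0(tau)*G, ..., L_(n-1)(tau)*G}, with a single inverse FFT applied "in the exponent": on a domain of n-th roots of unity w, L_i(X) = (1/n) * sum_j w^(-ij) X^j, so the vector (L_i(tau))_i is exactly the inverse DFT of (tau^j)_j, and G.ifft applies that same linear map to the encoded points. A toy check of the identity in a small scalar field (all constants are mine, chosen so 4th roots of unity exist mod 17):

    // Inverse DFT of (1, t, t^2, t^3) gives (L_0(t), ..., L_3(t)),
    // so sum_i f(w^i) * L_i(t) reproduces f(t) for deg f < 4.
    const p = 17n;
    const n = 4n, w = 4n;        // 4 has order 4 mod 17 (4^2 = 16 = -1)
    const mod = (a) => ((a % p) + p) % p;
    const pow = (b, e) => { let r = 1n; for (let i = 0n; i < e; i++) r = mod(r * b); return r; };
    const inv = (a) => pow(a, p - 2n);                 // Fermat inverse

    const t = 7n;                                      // stand-in for tau
    const powers = [0n, 1n, 2n, 3n].map(j => pow(t, j));

    // naive inverse DFT: L[i] = n^-1 * sum_j w^(-i*j) * powers[j]
    const L = [0n, 1n, 2n, 3n].map(i =>
        mod(inv(n) * [0n, 1n, 2n, 3n].reduce((acc, j) =>
            mod(acc + inv(pow(w, mod(i * j))) * powers[j]), 0n)));

    const f = (x) => mod(3n + 5n * x + 2n * pow(x, 2n)); // arbitrary polynomial
    const viaLagrange = [0n, 1n, 2n, 3n].reduce((acc, i) =>
        mod(acc + f(pow(w, i)) * L[i]), 0n);
    console.log(viaLagrange === f(t));                   // true

The phase-2 (Groth16) setup consumes the circuit polynomials in evaluation form, which is why these precomputed sections 12-15 are worth the extra file size.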
@ -7,66 +7,22 @@ const ChaCha = require("ffjavascript").ChaCha;
 const keyPair = require("./keypair");
 const crypto = require("crypto");
-async function readBinFile(fileName, type, maxVersion) {
-    const fd = await fastFile.readExisting(fileName);
-    const b = await fd.read(4);
-    const bv = new Uint8Array(b);
-    let readedType = "";
-    for (let i=0; i<4; i++) readedType += String.fromCharCode(bv[i]);
-    if (readedType != type) assert(false, fileName + ": Invalid File format");
-    let v = await fd.readULE32();
-    if (v>maxVersion) assert(false, "Version not supported");
-    const nSections = await fd.readULE32();
-    // Scan sections
-    let sections = [];
-    for (let i=0; i<nSections; i++) {
-        let ht = await fd.readULE32();
-        let hl = await fd.readULE64();
-        if (typeof sections[ht] == "undefined") sections[ht] = [];
-        sections[ht].push({
-            p: fd.pos,
-            size: hl
-        });
-        fd.pos += hl;
-    }
-    return {fd, sections};
-}
-
-async function createBinFile(fileName, type, version, nSections) {
-    const fd = await fastFile.createOverride(fileName);
-    const buff = new Uint8Array(4);
-    for (let i=0; i<4; i++) buff[i] = type.charCodeAt(i);
-    await fd.write(buff.buffer, 0); // Magic "r1cs"
-    await fd.writeULE32(version); // Version
-    await fd.writeULE32(nSections); // Number of Sections
-    return fd;
-}
-
-async function writePTauHeader(fd, curve, power) {
+async function writePTauHeader(fd, curve, power, ceremonyPower) {
     // Write the header
     ///////////
+    if (typeof(ceremonyPower) === "undefined") ceremonyPower = power;
     await fd.writeULE32(1); // Header type
     const pHeaderSize = fd.pos;
     await fd.writeULE64(0); // Temporally set to 0 length

     await fd.writeULE32(curve.F1.n64*8);

-    const buff = new ArrayBuffer(curve.F1.n8);
+    const buff = new Uint8Array(curve.F1.n8);
     Scalar.toRprLE(buff, 0, curve.q, curve.F1.n8);
     await fd.write(buff);
     await fd.writeULE32(power); // power
+    await fd.writeULE32(ceremonyPower); // power

     const headerSize = fd.pos - pHeaderSize - 8;
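The readBinFile/createBinFile pair removed here (now centralized in binfileutils) define the container every one of these files shares: a 4-byte magic, a u32 version, a u32 section count, then for each section a u32 type id, a u64 little-endian byte length, and the payload. An illustrative parser for just that framing (this is not the project's binfileutils API; names and error handling are simplified, and the whole file is read into memory for brevity):

    const fs = require("fs");

    function scanSections(path, expectedMagic) {
        const data = fs.readFileSync(path);
        if (data.toString("latin1", 0, 4) !== expectedMagic)
            throw new Error("Invalid file format");
        const version = data.readUInt32LE(4);
        const nSections = data.readUInt32LE(8);
        const sections = {};
        let pos = 12;
        for (let i = 0; i < nSections; i++) {
            const type = data.readUInt32LE(pos);
            const size = Number(data.readBigUInt64LE(pos + 4)); // u64 LE length
            (sections[type] = sections[type] || []).push({ p: pos + 12, size });
            pos += 12 + size;                                   // skip payload
        }
        return { version, sections };
    }

Indexing sections by type id, with each entry carrying its own length, is what lets later commits add sections 12-15 without breaking older readers.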
@ -94,10 +50,11 @@ async function readPTauHeader(fd, sections) {
     assert(curve.F1.n64*8 == n8, fd.fileName +": Invalid size");

     const power = await fd.readULE32();
+    const ceremonyPower = await fd.readULE32();

     assert.equal(fd.pos-sections[1][0].p, sections[1][0].size);

-    return {curve, power};
+    return {curve, power, ceremonyPower};
 }
@ -185,7 +142,7 @@ function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
 }

 async function writePtauPubKey(fd, curve, key, montgomery) {
-    const buff = new ArrayBuffer(curve.F1.n8*2*6 + curve.F2.n8*2*3);
+    const buff = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);
     toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
     await fd.write(buff);
 }
@ -199,10 +156,18 @@ async function readContribution(fd, curve) {
     c.betaG1 = await readG1();
     c.betaG2 = await readG2();
     c.key = await readPtauPubKey(fd, curve, true);
-    c.partialHash = new Uint8Array(await fd.read(216));
-    c.nextChallange = new Uint8Array(await fd.read(64));
+    c.partialHash = await fd.read(216);
+    c.nextChallange = await fd.read(64);
     c.type = await fd.readULE32();

+    const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
+    toPtauPubKeyRpr(buffV, 0, curve, c.key, false);
+
+    const responseHasher = Blake2b(64);
+    responseHasher.setPartialHash(c.partialHash);
+    responseHasher.update(buffV);
+    c.responseHash = responseHasher.digest();
+
     const paramLength = await fd.readULE32();
     const curPos = fd.pos;
     let lastType =0;
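The lines added above reconstruct each contributor's response hash from a saved hash midstate: a response file ends with the public key, so the ptau file stores the Blake2b state taken just before the key (partialHash), and the verifier finishes the digest by appending the key's canonical encoding. The blake2b-wasm build used by this codebase exposes setPartialHash for exactly this; a sketch of the idea, with everything around that call being my own framing:

    // Resume a Blake2b hash from a saved midstate (blake2b-wasm style API,
    // as used elsewhere in this codebase; treat details as an assumption).
    const blake2b = require("blake2b-wasm");

    async function responseHashFrom(partialHash, pubKeyBytes) {
        await blake2b.ready();            // wait for the wasm module
        const h = blake2b(64);            // 64-byte digest, as in the ceremony
        h.setPartialHash(partialHash);    // restore the state saved in the file
        h.update(pubKeyBytes);            // append the public key encoding
        return h.digest();                // equals hashing the whole response
    }

This avoids storing (or re-downloading) the multi-gigabyte response body just to recover its hash.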
@ -266,8 +231,8 @@ async function readContributions(fd, curve, sections) {

 async function writeContribution(fd, curve, contribution) {

-    const buffG1 = new ArrayBuffer(curve.F1.n8*2);
-    const buffG2 = new ArrayBuffer(curve.F2.n8*2);
+    const buffG1 = new Uint8Array(curve.F1.n8*2);
+    const buffG2 = new Uint8Array(curve.F2.n8*2);
     await writeG1(contribution.tauG1);
     await writeG2(contribution.tauG2);
     await writeG1(contribution.alphaG1);
@ -361,12 +326,10 @@ function hashIsEqual(h1, h2) {
 function calculateFirstChallangeHash(curve, power) {
     const hasher = new Blake2b(64);

-    const buffG1 = new ArrayBuffer(curve.G1.F.n8*2);
-    const vG1 = new Uint8Array(buffG1);
-    const buffG2 = new ArrayBuffer(curve.G2.F.n8*2);
-    const vG2 = new Uint8Array(buffG2);
-    curve.G1.toRprBE(buffG1, 0, curve.G1.g);
-    curve.G2.toRprBE(buffG2, 0, curve.G2.g);
+    const vG1 = new Uint8Array(curve.G1.F.n8*2);
+    const vG2 = new Uint8Array(curve.G2.F.n8*2);
+    curve.G1.toRprBE(vG1, 0, curve.G1.g);
+    curve.G2.toRprBE(vG2, 0, curve.G2.g);

     const blankHasher = new Blake2b(64);
     hasher.update(blankHasher.digest());
@ -415,8 +378,6 @@ function keyFromBeacon(curve, challangeHash, beaconHash, numIterationsExp) {
     return key;
 }

-module.exports.readBinFile = readBinFile;
-module.exports.createBinFile = createBinFile;
 module.exports.readPTauHeader = readPTauHeader;
 module.exports.writePTauHeader = writePTauHeader;
 module.exports.readPtauPubKey = readPtauPubKey;
@ -4,8 +4,14 @@ const keyPair = require("./keypair");
 const assert = require("assert");
 const crypto = require("crypto");
 const buildTaskManager = require("./taskmanager");
+const binFileUtils = require("./binfileutils");
+const ChaCha = require("ffjavascript").ChaCha;

 function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
+    if (curve.G1.isZero(g1s)) return false;
+    if (curve.G1.isZero(g1sx)) return false;
+    if (curve.G2.isZero(g2s)) return false;
+    if (curve.G2.isZero(g2sx)) return false;
     return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s));
 }
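sameRatio is the pairing check at the heart of the verifier: it accepts exactly when the discrete-log ratio between the two G1 points equals the ratio between the two G2 points. In plain notation, for nonzero a and b:

    e(a*G1, b'*G2) = e(a'*G1, b*G2)   <=>   a*b' = a'*b   <=>   a'/a = b'/b

The four isZero guards added above close a degenerate case: any pairing with the point at infinity equals the identity in F12, so an all-zero input would otherwise satisfy the equation vacuously and let a corrupted section slip through.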
@ -104,8 +110,8 @@ function verifyContribution(curve, cur, prev) {
 async function verify(tauFilename, verbose) {
     await Blake2b.ready();

-    const {fd, sections} = await utils.readBinFile(tauFilename, "ptau", 1);
-    const {curve, power} = await utils.readPTauHeader(fd, sections);
+    const {fd, sections} = await binFileUtils.readBinFile(tauFilename, "ptau", 1);
+    const {curve, power, ceremonyPower} = await utils.readPTauHeader(fd, sections);
     const contrs = await utils.readContributions(fd, curve, sections);

     if (verbose) console.log("power: 2**" + power);
@ -118,7 +124,8 @@ async function verify(tauFilename, verbose) {
         alphaG1: curve.G1.g,
         betaG1: curve.G1.g,
         betaG2: curve.G2.g,
-        nextChallange: utils.calculateFirstChallangeHash(curve, power)
+        nextChallange: utils.calculateFirstChallangeHash(curve, ceremonyPower),
+        responseHash: Blake2b(64).digest()
     };

     if (contrs.length == 0) {
@ -126,7 +133,6 @@ async function verify(tauFilename, verbose) {
         return false;
     }

     let prevContr;
     if (contrs.length>1) {
         prevContr = contrs[contrs.length-2];
|
|||||||
|
|
||||||
|
|
||||||
const nextContributionHasher = Blake2b(64);
|
const nextContributionHasher = Blake2b(64);
|
||||||
nextContributionHasher.update(prevContr.nextChallange);
|
nextContributionHasher.update(curContr.responseHash);
|
||||||
const key = curContr.key;
|
const key = curContr.key;
|
||||||
|
|
||||||
// Verify powers and compute nextChallangeHash
|
// Verify powers and compute nextChallangeHash
|
||||||
@ -150,7 +156,7 @@ async function verify(tauFilename, verbose) {
|
|||||||
// Verify Section tau*G1
|
// Verify Section tau*G1
|
||||||
if (verbose) console.log("Verifying powers in tau*G1 section");
|
if (verbose) console.log("Verifying powers in tau*G1 section");
|
||||||
const rTau1 = await processSection(2, "G1", "tauG1", (1 << power)*2-1, [0, 1]);
|
const rTau1 = await processSection(2, "G1", "tauG1", (1 << power)*2-1, [0, 1]);
|
||||||
if (!sameRatio(curve, rTau1.R1, rTau1.R2, key.tau.g2_sp, key.tau.g2_spx)) {
|
if (!sameRatio(curve, rTau1.R1, rTau1.R2, curve.G2.g, curContr.tauG2)) {
|
||||||
console.log("tauG1 section. Powers do not match");
|
console.log("tauG1 section. Powers do not match");
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@ -168,7 +174,7 @@ async function verify(tauFilename, verbose) {
|
|||||||
// Verify Section tau*G2
|
// Verify Section tau*G2
|
||||||
if (verbose) console.log("Verifying powers in tau*G2 section");
|
if (verbose) console.log("Verifying powers in tau*G2 section");
|
||||||
const rTau2 = await processSection(3, "G2", "tauG2", 1 << power, [0, 1]);
|
const rTau2 = await processSection(3, "G2", "tauG2", 1 << power, [0, 1]);
|
||||||
if (!sameRatio(curve, key.tau.g1_s, key.tau.g1_sx, rTau2.R1, rTau2.R2)) {
|
if (!sameRatio(curve, curve.G1.g, curContr.tauG1, rTau2.R1, rTau2.R2)) {
|
||||||
console.log("tauG2 section. Powers do not match");
|
console.log("tauG2 section. Powers do not match");
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@ -184,7 +190,7 @@ async function verify(tauFilename, verbose) {
     // Verify Section alpha*tau*G1
     if (verbose) console.log("Verifying powers in alpha*tau*G1 section");
     const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 1 << power, [0]);
-    if (!sameRatio(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, key.tau.g2_sp, key.tau.g2_spx)) {
+    if (!sameRatio(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, curve.G2.g, curContr.tauG2)) {
         console.log("alphaTauG1 section. Powers do not match");
         return false;
     }
@ -196,7 +202,7 @@ async function verify(tauFilename, verbose) {
     // Verify Section beta*tau*G1
     if (verbose) console.log("Verifying powers in beta*tau*G1 section");
     const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 1 << power, [0]);
-    if (!sameRatio(curve, rBetaTauG1.R1, rBetaTauG1.R2, key.tau.g2_sp, key.tau.g2_spx)) {
+    if (!sameRatio(curve, rBetaTauG1.R1, rBetaTauG1.R2, curve.G2.g, curContr.tauG2)) {
         console.log("betaTauG1 section. Powers do not match");
         return false;
     }
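Each processSection call above reduces a whole section {P_0, ..., P_(k-1)} to two random linear combinations, R1 = sum r_i * P_i and R2 = sum r_i * P_(i+1). If every consecutive pair is related by the same tau, then R2 = tau * R1, which the subsequent sameRatio against (G2, tau*G2) confirms; a section with even one inconsistent power fails with overwhelming probability over the random r_i. A toy model over plain integers standing in for curve points (the real code works in G1/G2 and checks the ratio with a pairing):

    // section[i] = tau^i; the two random combinations satisfy R2 = tau * R1
    // exactly when the powers are consistent.
    function powersCheck(section, tau) {
        let R1 = 0n, R2 = 0n;
        for (let i = 0; i + 1 < section.length; i++) {
            const r = BigInt(1 + Math.floor(Math.random() * 1000)); // random weight
            R1 += r * section[i];
            R2 += r * section[i + 1];
        }
        return R2 === tau * R1; // real code: sameRatio(R1, R2, G2, tau*G2)
    }

    const tau = 5n;
    console.log(powersCheck([1n, 5n, 25n, 125n, 625n], tau)); // true

Two multiexponentiations per section replace what would otherwise be one pairing check per power.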
@ -211,14 +217,13 @@ async function verify(tauFilename, verbose) {
         console.log("betaG2 element in betaG2 section does not match the one in the contribution section");
         return false;
     }
-    await fd.close();

     const nextContributionHash = nextContributionHasher.digest();

     // Check the nextChallangeHash
     if (!utils.hashIsEqual(nextContributionHash,curContr.nextChallange)) {
-        console.log("Hash of the values does not math the next challange of the last contributor in the contributions section");
+        console.log("Hash of the values does not match the next challange of the last contributor in the contributions section");
         return false;
     }
@ -230,7 +235,6 @@ async function verify(tauFilename, verbose) {
     // Verify Previous contributions

     printContribution(curContr, prevContr);

     for (let i = contrs.length-2; i>=0; i--) {
         const curContr = contrs[i];
         const prevContr = (curContr>0) ? contrs[i-1] : initialContribution;
@ -238,17 +242,34 @@ async function verify(tauFilename, verbose) {
         printContribution(curContr, prevContr);
     }
     console.log("-----------------------------------------------------");

+    if ((!sections[12]) || (!sections[13]) || (!sections[14]) || (!sections[15])) {
+        console.log("this file does not contain phase2 precalculated values. Please run: ");
+        console.log("   snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony." );
+    } else {
+        let res;
+        res = await verifyLagrangeEvaluations("G1", 1 << power, 2, 12, "tauG1");
+        if (!res) return false;
+        res = await verifyLagrangeEvaluations("G2", 1 << power, 3, 13, "tauG2");
+        if (!res) return false;
+        res = await verifyLagrangeEvaluations("G1", 1 << power, 4, 14, "alphaTauG1");
+        if (!res) return false;
+        res = await verifyLagrangeEvaluations("G1", 1 << power, 5, 15, "betaTauG1");
+        if (!res) return false;
+    }
+
+    await fd.close();

     return true;
 function printContribution(curContr, prevContr) {
     console.log("-----------------------------------------------------");
     console.log(`Contribution #${curContr.id}: ${curContr.name ||""}`);
-    console.log("\tNext Challange");
-    console.log(utils.formatHash(curContr.nextChallange));
+    console.log("\tBased on challange");
+    console.log(utils.formatHash(prevContr.nextChallange));

-    const buff = new ArrayBuffer(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
-    const buffV = new Uint8Array(buff);
-    utils.toPtauPubKeyRpr(buff, 0, curve, key, false);
+    const buffV = new Uint8Array(curve.G1.F.n8*2*6+curve.G2.F.n8*2*3);
+    utils.toPtauPubKeyRpr(buffV, 0, curve, key, false);

     const responseHasher = Blake2b(64);
     responseHasher.setPartialHash(curContr.partialHash);
@ -258,15 +279,14 @@ async function verify(tauFilename, verbose) {
     console.log("\tResponse Hash");
     console.log(utils.formatHash(responseHash));

-    console.log("\tBased on challange");
-    console.log(utils.formatHash(prevContr.nextChallange));
+    console.log("\tNext Challange");
+    console.log(utils.formatHash(curContr.nextChallange));
 }
 async function processSectionBetaG2() {
     const G = curve.G2;
     const sG = G.F.n8*2;
-    const buffU = new ArrayBuffer(sG);
-    const buffUv = new Uint8Array(buffU);
+    const buffUv = new Uint8Array(sG);

     if (!sections[6])  assert(false, "File has no BetaG2 section");
     if (sections[6].length>1) assert(false, "File has more than one GetaG2 section");
@ -275,7 +295,7 @@ async function verify(tauFilename, verbose) {
     const buff = await fd.read(sG);
     const P = G.fromRprLEM(buff);

-    G.toRprBE(buffU, 0, P);
+    G.toRprBE(buffUv, 0, P);
     nextContributionHasher.update(buffUv);

     return P;
@ -285,8 +305,7 @@ async function verify(tauFilename, verbose) {
     const MAX_CHUNK_SIZE = 1024;
     const G = curve[gName];
     const sG = G.F.n8*2;
-    const buffU = new ArrayBuffer(G.F.n8*2);
-    const buffUv = new Uint8Array(buffU);
+    const buffUv = new Uint8Array(G.F.n8*2);

     const singularPoints = [];
@ -326,7 +345,7 @@ async function verify(tauFilename, verbose) {
     });
     for (let j=i; j<i+n; j++) {
         const P = G.fromRprLEM(buff, (j-i)*sG);
-        G.toRprBE(buffU, 0, P);
+        G.toRprBE(buffUv, 0, P);
         nextContributionHasher.update(buffUv);
         if (singularPointIds.indexOf(j)>=0) singularPoints.push(P);
     }
@ -342,38 +361,51 @@ async function verify(tauFilename, verbose) {
         };
     }

-    async function test() {
-        const NN=2;
-        fd.pos = sections[3][0].p + curve.G2.F.n8*2*6;
-        const buff = await fd.read(curve.G2.F.n8*2*NN);
-        const ctx= {
-            modules: {
-                ffjavascript: require("ffjavascript"),
-                assert: require("assert")
-            }
-        };
-        verifyThread(ctx, {cmd: "INIT", curve: "bn128", seed: [0,0,0,0,0,0,0,0]});
-        const r = verifyThread(ctx, {
-            cmd: "MUL",
-            G: "G2",
-            n: NN,
-            TotalPoints: NN,
-            buff: buff.slice(),
-            offset: 0
-        });
-        if (!sameRatio(curve, key.tau.g1_s, key.tau.g1_sx, r.R1, r.R2)) {
-            console.log("Test does not match");
-        } else {
-            console.log("!!!!!!TEST OK!!!!!!!");
-        }
-    }
+    async function verifyLagrangeEvaluations(gName, nPoints, tauSection, lagrangeSection, sectionName) {
+        if (verbose) console.log(`Verifying phase2 calculated values ${sectionName}...`);
+        const n8r = curve.Fr.n8;
+        let buff_r = new Uint8Array(nPoints * n8r);
+        let buffG;
+        const G = curve[gName];
+        const sG = G.F.n8*2;
+
+        const seed= new Array(8);
+        for (let i=0; i<8; i++) {
+            seed[i] = crypto.randomBytes(4).readUInt32BE(0, true);
+        }
+        const rng = new ChaCha(seed);
+
+        for (let i=0; i<nPoints; i++) {
+            const e = curve.Fr.fromRng(rng);
+            curve.Fr.toRprLE(buff_r, i*n8r, e);
+        }
+
+        binFileUtils.startReadUniqueSection(fd, sections, tauSection);
+        buffG = await fd.read(nPoints*sG);
+        binFileUtils.endReadSection(fd, true);
+
+        const resTau = await G.multiExpAffine(buffG, buff_r);
+
+        buff_r = await curve.Fr.batchToMontgomery(buff_r);
+        buff_r = await curve.Fr.fft(buff_r);
+        buff_r = await curve.Fr.batchFromMontgomery(buff_r);
+
+        binFileUtils.startReadUniqueSection(fd, sections, lagrangeSection);
+        buffG = await fd.read(nPoints*sG);
+        binFileUtils.endReadSection(fd, true);
+
+        const resLagrange = await G.multiExpAffine(buffG, buff_r);
+
+        if (!G.eq(resTau, resLagrange)) {
+            console.log("Phase2 caclutation does not match with powers of tau");
+            return false;
+        }
+
+        return true;
+    }
 }
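verifyLagrangeEvaluations checks the phase-2 sections without redoing the group iFFT. It draws a random scalar vector r from a ChaCha stream, computes one multiexponentiation of r against the power section, applies the ordinary scalar FFT to r, and computes a second multiexponentiation against the Lagrange section. Writing V for the DFT matrix over Fr (which is symmetric), the two results agree exactly when the Lagrange section really is V^-1 applied to the powers in the exponent:

    < r , p > = r^T (V V^-1) p = (V^T r)^T (V^-1 p) = < FFT(r) , iFFT(p) >    since V^T = V

so a file whose sections 12-15 were not produced by preparePhase2 passes with probability only about 1/|Fr| per section, while the verifier pays two multiexps instead of a full group iFFT.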
@ -7,3 +7,4 @@ module.exports.verify = require("./powersoftau_verify");
 module.exports.challangeContribute = require("./powersoftau_challangecontribute");
 module.exports.beacon = require("./powersoftau_beacon");
 module.exports.contribute = require("./powersoftau_contribute");
+module.exports.preparePhase2 = require("./powersoftau_preparephase2");
src/soliditygenerator.js (new file, 150 lines)
@ -0,0 +1,150 @@
+const path = require("path");
+const fs = require("fs");
+
+module.exports.generateVerifier_original = generateVerifier_original;
+module.exports.generateVerifier_groth16 = generateVerifier_groth16;
+
+// Not ready yet
+// module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;
+
+function generateVerifier_original(verificationKey) {
+    let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_original.sol"), "utf-8");
+
+    const vka_str = `[${verificationKey.vk_a[0][1].toString()},`+
+                    `${verificationKey.vk_a[0][0].toString()}], `+
+                    `[${verificationKey.vk_a[1][1].toString()},` +
+                    `${verificationKey.vk_a[1][0].toString()}]`;
+    template = template.replace("<%vk_a%>", vka_str);
+
+    const vkb_str = `${verificationKey.vk_b[0].toString()},`+
+                    `${verificationKey.vk_b[1].toString()}`;
+    template = template.replace("<%vk_b%>", vkb_str);
+
+    const vkc_str = `[${verificationKey.vk_c[0][1].toString()},`+
+                    `${verificationKey.vk_c[0][0].toString()}], `+
+                    `[${verificationKey.vk_c[1][1].toString()},` +
+                    `${verificationKey.vk_c[1][0].toString()}]`;
+    template = template.replace("<%vk_c%>", vkc_str);
+
+    const vkg_str = `[${verificationKey.vk_g[0][1].toString()},`+
+                    `${verificationKey.vk_g[0][0].toString()}], `+
+                    `[${verificationKey.vk_g[1][1].toString()},` +
+                    `${verificationKey.vk_g[1][0].toString()}]`;
+    template = template.replace("<%vk_g%>", vkg_str);
+
+    const vkgb1_str = `${verificationKey.vk_gb_1[0].toString()},`+
+                      `${verificationKey.vk_gb_1[1].toString()}`;
+    template = template.replace("<%vk_gb1%>", vkgb1_str);
+
+    const vkgb2_str = `[${verificationKey.vk_gb_2[0][1].toString()},`+
+                      `${verificationKey.vk_gb_2[0][0].toString()}], `+
+                      `[${verificationKey.vk_gb_2[1][1].toString()},` +
+                      `${verificationKey.vk_gb_2[1][0].toString()}]`;
+    template = template.replace("<%vk_gb2%>", vkgb2_str);
+
+    const vkz_str = `[${verificationKey.vk_z[0][1].toString()},`+
+                    `${verificationKey.vk_z[0][0].toString()}], `+
+                    `[${verificationKey.vk_z[1][1].toString()},` +
+                    `${verificationKey.vk_z[1][0].toString()}]`;
+    template = template.replace("<%vk_z%>", vkz_str);
+
+    // The points
+    template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
+    template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
+    let vi = "";
+    for (let i=0; i<verificationKey.IC.length; i++) {
+        if (vi != "") vi = vi + " ";
+        vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
+                  `${verificationKey.IC[i][1].toString()});\n`;
+    }
+    template = template.replace("<%vk_ic_pts%>", vi);
+
+    return template;
+}
+
+function generateVerifier_groth16(verificationKey) {
+    let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_groth16.sol"), "utf-8");
+
+    const vkalfa1_str = `${verificationKey.vk_alfa_1[0].toString()},`+
+                        `${verificationKey.vk_alfa_1[1].toString()}`;
+    template = template.replace("<%vk_alfa1%>", vkalfa1_str);
+
+    const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
+                        `${verificationKey.vk_beta_2[0][0].toString()}], `+
+                        `[${verificationKey.vk_beta_2[1][1].toString()},` +
+                        `${verificationKey.vk_beta_2[1][0].toString()}]`;
+    template = template.replace("<%vk_beta2%>", vkbeta2_str);
+
+    const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+
+                         `${verificationKey.vk_gamma_2[0][0].toString()}], `+
+                         `[${verificationKey.vk_gamma_2[1][1].toString()},` +
+                         `${verificationKey.vk_gamma_2[1][0].toString()}]`;
+    template = template.replace("<%vk_gamma2%>", vkgamma2_str);
+
+    const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+
+                         `${verificationKey.vk_delta_2[0][0].toString()}], `+
+                         `[${verificationKey.vk_delta_2[1][1].toString()},` +
+                         `${verificationKey.vk_delta_2[1][0].toString()}]`;
+    template = template.replace("<%vk_delta2%>", vkdelta2_str);
+
+    // The points
+    template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
+    template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
+    let vi = "";
+    for (let i=0; i<verificationKey.IC.length; i++) {
+        if (vi != "") vi = vi + " ";
+        vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
+                  `${verificationKey.IC[i][1].toString()});\n`;
+    }
+    template = template.replace("<%vk_ic_pts%>", vi);
+
+    return template;
+}
+
+function generateVerifier_kimleeoh(verificationKey) {
+
+    assert(false); // Not implemented yet because it requires G2 exponentiation onchain.
+
+    let template = fs.readFileSync(path.join( __dirname, "..", "templates", "verifier_groth16.sol"), "utf-8");
+
+    const vkalfa1_str = `${verificationKey.vk_alfa_1[0].toString()},`+
+                        `${verificationKey.vk_alfa_1[1].toString()}`;
+    template = template.replace("<%vk_alfa1%>", vkalfa1_str);
+
+    const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},`+
+                        `${verificationKey.vk_beta_2[0][0].toString()}], `+
+                        `[${verificationKey.vk_beta_2[1][1].toString()},` +
+                        `${verificationKey.vk_beta_2[1][0].toString()}]`;
+    template = template.replace("<%vk_beta2%>", vkbeta2_str);
+
+    const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},`+
+                         `${verificationKey.vk_gamma_2[0][0].toString()}], `+
+                         `[${verificationKey.vk_gamma_2[1][1].toString()},` +
+                         `${verificationKey.vk_gamma_2[1][0].toString()}]`;
+    template = template.replace("<%vk_gamma2%>", vkgamma2_str);
+
+    const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},`+
+                         `${verificationKey.vk_delta_2[0][0].toString()}], `+
+                         `[${verificationKey.vk_delta_2[1][1].toString()},` +
+                         `${verificationKey.vk_delta_2[1][0].toString()}]`;
+    template = template.replace("<%vk_delta2%>", vkdelta2_str);
+
+    // The points
+    template = template.replace("<%vk_input_length%>", (verificationKey.IC.length-1).toString());
+    template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());
+    let vi = "";
+    for (let i=0; i<verificationKey.IC.length; i++) {
+        if (vi != "") vi = vi + " ";
+        vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},`+
+                  `${verificationKey.IC[i][1].toString()});\n`;
+    }
+    template = template.replace("<%vk_ic_pts%>", vi);
+
+    return template;
+}
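The generator is plain string templating: it reads a .sol template containing <%vk_...%> placeholders and splices in the decimal coordinates of the verification key, with G2 coordinates emitted in the [imaginary, real] order that the on-chain pairing code expects. A hedged usage sketch (file names are examples of mine, not fixed by the CLI):

    // Hypothetical standalone use of the generator above: turn an exported
    // verification key into a Solidity verifier source file.
    const fs = require("fs");
    const solidityGenerator = require("./src/soliditygenerator.js");

    const vk = JSON.parse(fs.readFileSync("verification_key.json", "utf-8"));
    const code = vk.protocol == "groth16"
        ? solidityGenerator.generateVerifier_groth16(vk)
        : solidityGenerator.generateVerifier_original(vk);
    fs.writeFileSync("verifier.sol", code, "utf-8");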
@ -49,7 +49,7 @@ function thread(self, fn, modules) {
     if (res) {
         if (res.buff) {
-            self.postMessage(res, [res.buff]);
+            self.postMessage(res, [res.buff.buffer]);
         } else {
             self.postMessage(res);
         }
@ -135,7 +135,7 @@ async function buildTaskManager(fn, mods, initTask) {
     tm.workers[i].state = "WORKING";
     if (task.buff) {
-        tm.workers[i].worker.postMessage(task, [task.buff]);
+        tm.workers[i].worker.postMessage(task, [task.buff.buffer]);
     } else {
         tm.workers[i].worker.postMessage(task);
     }
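Both fixes address the same worker pitfall: the transfer list of postMessage accepts ArrayBuffer objects, not typed-array views, so passing a Uint8Array there throws; the underlying .buffer must be transferred instead. A minimal Node worker_threads illustration:

    // The transfer list wants the underlying ArrayBuffer, not the view.
    const { Worker, isMainThread, parentPort } = require("worker_threads");

    if (isMainThread) {
        const w = new Worker(__filename);
        const view = new Uint8Array(8).fill(7);
        w.postMessage({ buff: view }, [view.buffer]); // transfer, don't copy
        w.on("message", (m) => { console.log(m); w.terminate(); });
    } else {
        parentPort.on("message", (task) => parentPort.postMessage(task.buff.length));
    }

Transferring rather than structured-cloning matters here because the task buffers are large point sections; the view becomes detached in the sender after the transfer, which is fine since ownership moves with the task.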
src/wtnsfile.js (169 lines changed)
@ -1,155 +1,70 @@
const Scalar = require("ffjavascript").Scalar;
|
const Scalar = require("ffjavascript").Scalar;
|
||||||
const fastFile = require("fastfile");
|
|
||||||
const assert = require("assert");
|
const assert = require("assert");
|
||||||
|
const binFileUtils = require("./binfileutils");
|
||||||
|
|
||||||
module.exports.write = async function writeZKey(fileName, witness, prime) {
|
|
||||||
|
|
||||||
const fd = await fastFile.createOverride(fileName);
|
module.exports.write = async function writeWtns(fileName, witness, prime) {
|
||||||
|
|
||||||
await fd.write(Buffer.from("wtns"), 0); // Magic "r1cs"
|
const fd = await binFileUtils.createOverride(fileName,"wtns", 2, 2);
|
||||||
|
|
||||||
let p = 4;
|
|
||||||
await writeU32(1); // Version
|
|
||||||
|
|
||||||
|
await binFileUtils.startWriteSection(fd, 1);
|
||||||
const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
|
const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
|
||||||
|
await fd.writeULE32(n8);
|
||||||
|
await binFileUtils.writeBigInt(fd, prime, n8);
|
||||||
|
await fd.writeULE32(witness.length);
|
||||||
|
await binFileUtils.endWriteSection(fd);
|
||||||
|
|
||||||
await writeU32(n8);
|
await binFileUtils.startWriteSection(fd, 2);
|
||||||
await writeBigInt(prime);
|
|
||||||
|
|
||||||
await writeU32(witness.length);
|
|
||||||
|
|
||||||
for (let i=0; i<witness.length; i++) {
|
for (let i=0; i<witness.length; i++) {
|
||||||
await writeBigInt(witness[i]);
|
await binFileUtils.writeBigInt(fd, witness[i], n8);
|
||||||
}
|
}
|
||||||
|
await binFileUtils.endWriteSection(fd, 2);
|
||||||
|
|
||||||
await fd.close();
|
await fd.close();
|
||||||
|
|
||||||
|
|
||||||
async function writeU32(v, pos) {
|
|
||||||
let o = (typeof pos == "undefined") ? p : pos;
|
|
||||||
|
|
||||||
const b = Buffer.allocUnsafe(4);
|
|
||||||
b.writeInt32LE(v);
|
|
||||||
|
|
||||||
await fd.write(b, o);
|
|
||||||
|
|
||||||
if (typeof(pos) == "undefined") p += 4;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
async function writeBigInt(n, pos) {
|
|
||||||
|
|
||||||
let o = (typeof pos == "undefined") ? p : pos;
|
|
||||||
|
|
||||||
const s = n.toString(16);
|
|
||||||
const b = Buffer.from(s.padStart(n8*2, "0"), "hex");
|
|
||||||
const buff = Buffer.allocUnsafe(b.length);
|
|
||||||
for (let i=0; i<b.length; i++) buff[i] = b[b.length-1-i];
|
|
||||||
|
|
||||||
await fd.write(buff, o);
|
|
||||||
|
|
||||||
if (typeof(pos) == "undefined") p += n8;
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
module.exports.writeBin = async function writeZKey(fileName, witnessBin, prime) {
|
module.exports.writeBin = async function writeWtnsBin(fileName, witnessBin, prime) {
|
||||||
|
|
||||||
witnessBin = Buffer.from(witnessBin);
|
witnessBin = Buffer.from(witnessBin);
|
||||||
|
|
||||||
const fd = await fastFile.createOverride(fileName);
|
const fd = await binFileUtils.createBinFile(fileName, "wtns", 2, 2);
|
||||||
|
|
||||||
await fd.write(Buffer.from("wtns"), 0); // Magic "r1cs"
|
|
||||||
|
|
||||||
let p = 4;
|
|
||||||
await writeU32(1); // Version
|
|
||||||
|
|
||||||
|
await binFileUtils.startWriteSection(fd, 1);
|
||||||
const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
|
const n8 = (Math.floor( (Scalar.bitLength(prime) - 1) / 64) +1)*8;
|
||||||
|
await fd.writeULE32(n8);
|
||||||
await writeU32(n8);
|
await binFileUtils.writeBigInt(fd, prime, n8);
|
||||||
await writeBigInt(prime);
|
|
||||||
|
|
||||||
assert(witnessBin.length % n8 == 0);
|
assert(witnessBin.length % n8 == 0);
|
||||||
|
await fd.writeULE32(witnessBin.byteLength / n8);
|
||||||
|
await binFileUtils.endWriteSection(fd);
|
||||||
|
|
||||||
await writeU32(witnessBin.length / n8);
|
|
||||||
|
|
||||||
await fd.write(witnessBin, p);
|
await binFileUtils.startWriteSection(fd, 2);
|
||||||
|
await fd.write(witnessBin);
|
||||||
|
await binFileUtils.endWriteSection(fd);
|
||||||
|
|
||||||
|
await fd.close();
|
||||||
|
};
|
||||||
|
|
||||||
|
module.exports.read = async function readWtns(fileName) {
|
||||||
|
|
||||||
|
const {fd, sections} = await binFileUtils.readBinFile(fileName, "wtns", 2);
|
||||||
|
|
||||||
|
await binFileUtils.startReadUniqueSection(fd, sections, 1);
|
||||||
|
const n8 = await fd.readULE32();
|
||||||
|
await binFileUtils.readBigInt(fd, n8);
|
||||||
|
const nWitness = await fd.readULE32();
|
||||||
|
await binFileUtils.endReadSection(fd);
|
||||||
|
|
||||||
|
await binFileUtils.startReadUniqueSection(fd, sections, 2);
|
||||||
|
const res = [];
|
||||||
|
for (let i=0; i<nWitness; i++) {
|
||||||
|
const v = await binFileUtils.readBigInt(fd, n8);
|
||||||
|
res.push(v);
|
||||||
|
}
|
||||||
|
await binFileUtils.endReadSection(fd);
|
||||||
|
|
||||||
await fd.close();
|
await fd.close();
|
||||||
|
|
||||||
|
|
||||||
async function writeU32(v, pos) {
|
|
||||||
let o = (typeof pos == "undefined") ? p : pos;
|
|
||||||
|
|
||||||
const b = Buffer.allocUnsafe(4);
|
|
||||||
b.writeInt32LE(v);
|
|
||||||
|
|
||||||
await fd.write(b, o);
|
|
||||||
|
|
||||||
if (typeof(pos) == "undefined") p += 4;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function writeBigInt(n, pos) {
|
|
||||||
|
|
||||||
let o = (typeof pos == "undefined") ? p : pos;
|
|
||||||
|
|
||||||
const s = n.toString(16);
|
|
||||||
const b = Buffer.from(s.padStart(n8*2, "0"), "hex");
|
|
||||||
const buff = Buffer.allocUnsafe(b.length);
|
|
||||||
for (let i=0; i<b.length; i++) buff[i] = b[b.length-1-i];
|
|
||||||
|
|
||||||
await fd.write(buff, o);
|
|
||||||
|
|
||||||
if (typeof(pos) == "undefined") p += n8;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
module.exports.read = async function writeZKey(fileName) {
|
|
||||||
|
|
||||||
const res = [];
|
|
||||||
const fd = await fastFile.readExisting(fileName);
|
|
||||||
|
|
||||||
const b = await fd.read(0, 4);
|
|
||||||
|
|
||||||
if (b.toString() != "wtns") assert(false, "Invalid File format");
|
|
||||||
|
|
||||||
let p=4;
|
|
||||||
|
|
||||||
let v = await readU32();
|
|
||||||
|
|
||||||
if (v>1) assert(false, "Version not supported");
|
|
||||||
|
|
||||||
const n8 = await readU32();
|
|
||||||
await readBigInt();
|
|
||||||
|
|
||||||
const nWitness = await readU32();
|
|
||||||
|
|
||||||
for (let i=0; i<nWitness; i++) {
|
|
||||||
const v = await readBigInt();
|
|
||||||
res.push(v);
|
|
||||||
}
|
|
||||||
|
|
||||||
return res;
|
return res;
|
||||||
|
|
||||||
|
|
||||||
async function readU32() {
|
|
||||||
const b = await fd.read(p, 4);
|
|
||||||
|
|
||||||
p+=4;
|
|
||||||
|
|
||||||
return b.readUInt32LE(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function readBigInt() {
|
|
||||||
const buff = await fd.read(p, n8);
|
|
||||||
assert(buff.length == n8);
|
|
||||||
const buffR = Buffer.allocUnsafe(n8);
|
|
||||||
for (let i=0; i<n8; i++) buffR[i] = buff[n8-1-i];
|
|
||||||
|
|
||||||
p += n8;
|
|
||||||
|
|
||||||
return Scalar.fromString(buffR.toString("hex"), 16);
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
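After this rewrite a .wtns file is just the shared binary container with two sections, so its byte layout reduces to:

    "wtns" magic | u32 version = 2 | u32 nSections = 2
    section 1:  u32 n8 | prime as n8 little-endian bytes | u32 nWitness
    section 2:  nWitness values, each n8 little-endian bytes

(each section is still preceded by the container's u32 type id and u64 length, as described earlier). Moving the ad-hoc writeU32/writeBigInt helpers into binFileUtils also removes the hand-maintained file cursor p, which was an easy source of off-by-n bugs.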
src/zkeyfile.js (284 lines changed)
@ -30,33 +30,23 @@
const Scalar = require("ffjavascript").Scalar;
|
const Scalar = require("ffjavascript").Scalar;
|
||||||
const F1Field = require("ffjavascript").F1Field;
|
const F1Field = require("ffjavascript").F1Field;
|
||||||
const fastFile = require("fastfile");
|
|
||||||
const assert = require("assert");
|
const assert = require("assert");
|
||||||
|
const binFileUtils = require("./binfileutils");
|
||||||
|
|
||||||
module.exports.write = async function writeZKey(fileName, zkey) {
|
module.exports.write = async function writeZKey(fileName, zkey) {
|
||||||
|
|
||||||
const fd = await fastFile.createOverride(fileName);
|
const fd = await binFileUtils.createOverride(fileName,"zkey", 6, 1);
|
||||||
|
|
||||||
await fd.write(Buffer.from("zkey"), 0); // Magic "r1cs"
|
|
||||||
|
|
||||||
let p = 4;
|
|
||||||
await writeU32(1); // Version
|
|
||||||
await writeU32(6); // Number of Sections
|
|
||||||
|
|
||||||
// Write the header
|
// Write the header
|
||||||
///////////
|
///////////
|
||||||
await writeU32(1); // Header type
|
await binFileUtils.startWriteSection(fd, 1);
|
||||||
const pHeaderSize = p;
|
await fd.writeULE32(1); // Groth
|
||||||
await writeU64(0); // Temporally set to 0 length
|
await binFileUtils.endWriteSection(fd);
|
||||||
|
|
||||||
await writeU32(1); // Groth
|
|
||||||
|
|
||||||
const headerSize = p - pHeaderSize - 8;
|
|
||||||
|
|
||||||
|
|
||||||
// Write the Groth header section
|
// Write the Groth header section
|
||||||
///////////
|
///////////
|
||||||
|
|
||||||
|
await binFileUtils.startWriteSection(fd, 2);
|
||||||
const primeQ = zkey.q;
|
const primeQ = zkey.q;
|
||||||
const Fq = new F1Field(zkey.q);
|
const Fq = new F1Field(zkey.q);
|
||||||
const n8q = (Math.floor( (Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
|
const n8q = (Math.floor( (Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
|
||||||
@ -68,21 +58,13 @@ module.exports.write = async function writeZKey(fileName, zkey) {
     const Rr = Scalar.mod(Scalar.shl(1, n8r*8), primeR);
     const R2r = Scalar.mod(Scalar.mul(Rr,Rr), primeR);

-    // Field Def
-    await writeU32(2); // Constraints type
-    const pGrothHeader = p;
-    await writeU64(0); // Temporally set to 0 length
-    await writeU32(n8q);
-    await writeBigIntQ(primeQ);
-    await writeU32(n8r);
-    await writeBigIntR(primeR);
-    await writeU32(zkey.nVars); // Total number of bars
-    await writeU32(zkey.nPublic); // Total number of public vars (not including ONE)
-    await writeU32(zkey.domainSize); // domainSize
+    await fd.writeULE32(n8q);
+    await binFileUtils.writeBigInt(primeQ, n8q);
+    await fd.writeULE32(n8r);
+    await binFileUtils.writeBigInt(primeR, n8r);
+    await fd.writeULE32(zkey.nVars); // Total number of bars
+    await fd.writeULE32(zkey.nPublic); // Total number of public vars (not including ONE)
+    await fd.writeULE32(zkey.domainSize); // domainSize
     await writePointG1(zkey.vk_alfa_1);
     await writePointG1(zkey.vk_beta_1);
     await writePointG1(zkey.vk_delta_1);
@ -90,42 +72,35 @@ module.exports.write = async function writeZKey(fileName, zkey) {
     await writePointG2(zkey.vk_gamma_2);
     await writePointG2(zkey.vk_delta_2);

-    const grothHeaderSize = p - pGrothHeader - 8;
+    await binFileUtils.endWriteSection(fd);

     // Write IC Section
     ///////////
-    await writeU32(3); // IC
-    const pIc = p;
-    await writeU64(0); // Temporally set to 0 length
+    await binFileUtils.startWriteSection(fd, 3);
     for (let i=0; i<= zkey.nPublic; i++) {
         await writePointG1(zkey.IC[i] );
     }
-    const icSize = p - pIc -8;
+    await binFileUtils.endWriteSection(fd);

-    // Write Pol A
+    // Write Pols (A and B (C can be ommited))
     ///////////
-    await writeU32(4); // A Pols
-    const pCoefs = p;
-    await writeU64(0); // Temporally set to 0 length
-    await writeU32(zkey.ccoefs.length);
+    await binFileUtils.startWriteSection(fd, 4);
+    await fd.writeULE32(zkey.ccoefs.length);
     for (let i=0; i<zkey.ccoefs.length; i++) {
         const coef = zkey.ccoefs[i];
-        await writeU32(coef.matrix);
-        await writeU32(coef.constraint);
-        await writeU32(coef.signal);
+        await fd.writeULE32(coef.matrix);
+        await fd.writeULE32(coef.constraint);
+        await fd.writeULE32(coef.signal);
         await writeFr2(coef.value);
     }
-    const coefsSize = p - pCoefs -8;
+    await binFileUtils.endWriteSection(fd);

     // Write A B1 B2 C points
     ///////////
-    await writeU32(5); // A B1 B2 C points
-    const pPointsAB1B2C = p;
-    await writeU64(0); // Temporally set to 0 length
+    await binFileUtils.startWriteSection(fd, 5);
     for (let i=0; i<zkey.nVars; i++) {
         await writePointG1(zkey.A[i]);
         await writePointG1(zkey.B1[i]);
@ -136,95 +111,30 @@ module.exports.write = async function writeZKey(fileName, zkey) {
         await writePointG1(zkey.C[i]);
         }
     }
-    const pointsAB1B2CSize = p - pPointsAB1B2C - 8;
+    await binFileUtils.endWriteSection(fd);

     // Write H points
     ///////////
-    await writeU32(6); // H Points
-    const pPointsH = p;
-    await writeU64(0); // Temporally set to 0 length
+    await binFileUtils.startWriteSection(fd, 6);
     for (let i=0; i<zkey.domainSize; i++) {
         await writePointG1(zkey.hExps[i]);
     }
-    const pointsHsize = p - pPointsH -8;
+    await binFileUtils.endWriteSection(fd);

-    // Write sizes
-    await writeU64(headerSize, pHeaderSize);
-    await writeU64(grothHeaderSize, pGrothHeader);
-    await writeU64(icSize, pIc);
-    await writeU64(coefsSize, pCoefs);
-    await writeU64(pointsAB1B2CSize, pPointsAB1B2C);
-    await writeU64(pointsHsize, pPointsH);
-
     await fd.close();

-    async function writeU32(v, pos) {
-        let o = (typeof pos == "undefined") ? p : pos;
-        const b = Buffer.allocUnsafe(4);
-        b.writeInt32LE(v);
-        await fd.write(b, o);
-        if (typeof(pos) == "undefined") p += 4;
-    }
-
-    async function writeU64(v, pos) {
-        let o = (typeof pos == "undefined") ? p : pos;
-        const b = Buffer.allocUnsafe(8);
-        const LSB = v & 0xFFFFFFFF;
-        const MSB = Math.floor(v / 0x100000000);
-        b.writeInt32LE(LSB, 0);
-        b.writeInt32LE(MSB, 4);
-        await fd.write(b, o);
-        if (typeof(pos) == "undefined") p += 8;
-    }
-
-    async function writeBigIntQ(n, pos) {
-        let o = (typeof pos == "undefined") ? p : pos;
-        const s = n.toString(16);
-        const b = Buffer.from(s.padStart(n8q*2, "0"), "hex");
-        const buff = Buffer.allocUnsafe(b.length);
-        for (let i=0; i<b.length; i++) buff[i] = b[b.length-1-i];
-        await fd.write(buff, o);
-        if (typeof(pos) == "undefined") p += n8q;
-    }
-
-    async function writeBigIntR(n, pos) {
-        let o = (typeof pos == "undefined") ? p : pos;
-        const s = n.toString(16);
-        const b = Buffer.from(s.padStart(n8r*2, "0"), "hex");
-        const buff = Buffer.allocUnsafe(b.length);
-        for (let i=0; i<b.length; i++) buff[i] = b[b.length-1-i];
-        await fd.write(buff, o);
-        if (typeof(pos) == "undefined") p += n8r;
-    }
-
     async function writeFr2(n) {
         // Convert to montgomery
         n = Scalar.mod( Scalar.mul(n, R2r), primeR);

-        await writeBigIntR(n);
+        await binFileUtils.writeBigInt(fd, n, n8r);
     }

     async function writeFq(n) {
         // Convert to montgomery
         n = Scalar.mod( Scalar.mul(n, Rq), primeQ);

-        await writeBigIntQ(n);
+        await binFileUtils.writeBigInt(fd, n, n8q);
     }

     async function writePointG1(p) {
@ -260,101 +170,66 @@ module.exports.write = async function writeZKey(fileName, zkey) {

 module.exports.read = async function readZKey(fileName) {
     const zkey = {};
-    const fd = await fastFile.readExisting(fileName);
-
-    const b = await fd.read(0, 4);
-    if (b.toString() != "zkey") assert(false, "Invalid File format");
-
-    let p=4;
-
-    let v = await readU32();
-    if (v>1) assert(false, "Version not supported");
-
-    const nSections = await readU32();
-
-    // Scan sections
-    let sections = [];
-    for (let i=0; i<nSections; i++) {
-        let ht = await readU32();
-        let hl = await readU64();
-        if (typeof sections[ht] == "undefined") sections[ht] = [];
-        sections[ht].push({
-            p: p,
-            size: hl
-        });
-        p += hl;
-    }
+    const {fd, sections} = await binFileUtils.readBinFile(fileName, "zkey", 1);

     // Read Header
     /////////////////////
-    if (sections[1].length==0) assert(false, "File has no header");
-    if (sections[1].length>1) assert(false, "File has more than one header");
-
-    p = sections[1][0].p;
-    const protocol = await readU32();
+    await binFileUtils.startReadUniqueSection(fd, sections, 1);
+    const protocol = await fd.readULE32();
     if (protocol != 1) assert("File is not groth");
-    if (p != sections[1][0].p + sections[1][0].size) assert(false, "Invalid header section size");
+    zkey.protocol = "groth16";
+    await binFileUtils.endReadSection(fd);

     // Read Groth Header
     /////////////////////
-    if (sections[2].length==0) assert(false, "File has no groth header");
-    if (sections[2].length>1) assert(false, "File has more than one groth header");
-
-    zkey.protocol = "groth16";
-
-    p = sections[2][0].p;
-    const n8q = await readU32();
-    zkey.q = await readBigIntQ();
+    await binFileUtils.startReadUniqueSection(fd, sections, 2);
+    const n8q = await fd.readULE32();
+    zkey.q = await binFileUtils.readBigInt(fd, n8q);
     const Fq = new F1Field(zkey.q);
     const Rq = Scalar.mod(Scalar.shl(1, n8q*8), zkey.q);
     const Rqi = Fq.inv(Rq);

-    const n8r = await readU32();
-    zkey.r = await readBigIntR();
+    const n8r = await fd.readULE32();
+    zkey.r = await binFileUtils.readBigInt(fd, n8r);
     const Fr = new F1Field(zkey.r);
     const Rr = Scalar.mod(Scalar.shl(1, n8q*8), zkey.r);
     const Rri = Fr.inv(Rr);
     const Rri2 = Fr.mul(Rri, Rri);

-    zkey.nVars = await readU32();
-    zkey.nPublic = await readU32();
-    zkey.domainSize = await readU32();
+    zkey.nVars = await fd.readULE32();
+    zkey.nPublic = await fd.readULE32();
+    zkey.domainSize = await fd.readULE32();
     zkey.vk_alfa_1 = await readG1();
     zkey.vk_beta_1 = await readG1();
     zkey.vk_delta_1 = await readG1();
     zkey.vk_beta_2 = await readG2();
     zkey.vk_gamma_2 = await readG2();
     zkey.vk_delta_2 = await readG2();
-    if (p != sections[2][0].p + sections[2][0].size) assert(false, "Invalid groth header section size");
+    await binFileUtils.endReadSection(fd);

     // Read IC Section
     ///////////
-    if (sections[3].length==0) assert(false, "File has no IC section");
-    if (sections[3].length>1) assert(false, "File has more than one IC section");
-    p = sections[3][0].p;
+    await binFileUtils.startReadUniqueSection(fd, sections, 3);
     zkey.IC = [];
     for (let i=0; i<= zkey.nPublic; i++) {
         const P = await readG1();
         zkey.IC.push(P);
     }
-    if (p != sections[3][0].p + sections[3][0].size) assert(false, "Invalid IC section size");
+    await binFileUtils.endReadSection(fd);

     // Read Coefs
     ///////////
-    if (sections[4].length==0) assert(false, "File has no PolA section");
-    if (sections[4].length>1) assert(false, "File has more than one PolA section");
-    p = sections[4][0].p;
-    const nCCoefs = await readU32();
+    await binFileUtils.startReadUniqueSection(fd, sections, 4);
+    const nCCoefs = await fd.readULE32();
     zkey.ccoefs = [];
     for (let i=0; i<nCCoefs; i++) {
-        const m = await readU32();
-        const c = await readU32();
-        const s = await readU32();
+        const m = await fd.readULE32();
+        const c = await fd.readULE32();
+        const s = await fd.readULE32();
         const v = await readFr2();
         zkey.ccoefs.push({
             matrix: m,
@ -363,13 +238,11 @@ module.exports.read = async function readZKey(fileName) {
             value: v
         });
     }
-    if (p != sections[4][0].p + sections[4][0].size) assert(false, "Invalid PolsA section size");
+    await binFileUtils.endReadSection(fd);

     // Read A B1 B2 C points
     ///////////
-    if (sections[5].length==0) assert(false, "File has no AB1B2C section");
-    if (sections[5].length>1) assert(false, "File has more than one AB1B2C section");
-    p = sections[5][0].p;
+    await binFileUtils.startReadUniqueSection(fd, sections, 5);
     zkey.A = [];
     zkey.B1 = [];
     zkey.B2 = [];
@ -388,72 +261,29 @@ module.exports.read = async function readZKey(fileName) {
         assert(Fr.isZero(C[2]), "C value for public is not zero");
         }
     }
-    if (p != sections[5][0].p + sections[5][0].size) assert(false, "Invalid AB1B2C section size");
+    await binFileUtils.endReadSection(fd);

     // Read H points
     ///////////
-    if (sections[6].length==0) assert(false, "File has no H section");
-    if (sections[6].length>1) assert(false, "File has more than one H section");
-    p = sections[6][0].p;
+    await binFileUtils.startReadUniqueSection(fd, sections, 6);
     zkey.hExps = [];
     for (let i=0; i<zkey.domainSize; i++) {
         const H = await readG1();
         zkey.hExps.push(H);
     }
-    if (p != sections[6][0].p + sections[6][0].size) assert(false, "Invalid H section size");
+    await binFileUtils.endReadSection(fd);

     await fd.close();

     return zkey;

-    async function readU32() {
-        const b = await fd.read(p, 4);
-        p+=4;
-        return b.readUInt32LE(0);
-    }
-
-    async function readU64() {
-        const b = await fd.read(p, 8);
-        p+=8;
-        const LS = b.readUInt32LE(0);
-        const MS = b.readUInt32LE(4);
-        return MS * 0x100000000 + LS;
-    }
-
-    async function readBigIntQ() {
-        const buff = await fd.read(p, n8q);
-        assert(buff.length == n8q);
-        const buffR = Buffer.allocUnsafe(n8q);
-        for (let i=0; i<n8q; i++) buffR[i] = buff[n8q-1-i];
-        p += n8q;
-        return Scalar.fromString(buffR.toString("hex"), 16);
-    }
-
-    async function readBigIntR() {
-        const buff = await fd.read(p, n8r);
-        assert(buff.length == n8r);
-        const buffR = Buffer.allocUnsafe(n8r);
-        for (let i=0; i<n8r; i++) buffR[i] = buff[n8r-1-i];
-        p += n8r;
-        return Scalar.fromString(buffR.toString("hex"), 16);
-    }
-
     async function readFq() {
-        const n = await readBigIntQ();
+        const n = await binFileUtils.readBigInt(fd, n8q);
         return Fq.mul(n, Rqi);
     }

     async function readFr2() {
-        const n = await readBigIntR();
+        const n = await binFileUtils.readBigInt(fd, n8r);
         return Fr.mul(n, Rri2);
     }
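The field helpers here keep values in Montgomery form inside the file: writeFq stores n*R mod q (with R = 2^(8*n8q)) and readFq undoes it with Rqi = R^-1, while writeFr2 stores n*R^2 mod r and readFr2 undoes it with Rri2 = R^-2, so both round-trips recover n exactly. A toy BigInt check of those two encodings (constants are mine):

    // Round trip of the Montgomery encodings above, on plain BigInts.
    const r = 101n;                    // toy modulus standing in for the prime
    const R = 256n % r;                // R = 2^(8*n8) mod prime (one "byte" here)
    const R2 = (R * R) % r;
    const modpow = (b, e, m) => { let x = 1n; for (; e > 0n; e--) x = (x * b) % m; return x; };
    const Ri  = modpow(R, r - 2n, r); // R^-1 via Fermat's little theorem
    const Ri2 = (Ri * Ri) % r;

    const n = 42n;
    const storedFq  = (n * R) % r;    // writeFq:  store n*R
    const storedFr2 = (n * R2) % r;   // writeFr2: store n*R^2
    console.log((storedFq * Ri) % r === n, (storedFr2 * Ri2) % r === n); // true true

Storing in Montgomery form spares the prover a per-element conversion when it later multiplies these coefficients inside the field arithmetic, which already works in that representation.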