QAP single thread

Jordi Baylina 2020-10-23 11:34:00 +02:00
parent a4424afd5f
commit 7e4cae9c25
5 changed files with 154 additions and 368 deletions
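
The hunks below repeat the same change in the two bundled builds and in src/groth16_prove.js: groth16Prove now builds the A, B and C evaluation buffers with a new single-threaded buldABC1 right after reading the witness and the coefficient section, instead of calling the worker-pool based buldABC later on. Each record in the coefficient section is sCoef = 4*3 + zkey.n8r bytes: three little-endian uint32 header fields followed by an n8r-byte Fr element, with a 4-byte record count at the start of the section (hence nCoef = (byteLength-4)/sCoef). A minimal sketch of that layout, using a hypothetical parseCoef helper that is not part of the commit and assumes coeffs is a plain Uint8Array rather than a paged BigBuffer:

// Hypothetical helper sketching the record layout consumed by buldABC1.
// Assumes `coeffs` is a plain Uint8Array holding zkey section 4 and n8r is
// the byte length of an Fr element (32 for bn128).
function parseCoef(coeffs, i, n8r) {
    const sCoef = 4*3 + n8r;            // 12-byte header + field element
    const base = 4 + i*sCoef;           // skip the 4-byte record count
    const dv = new DataView(coeffs.buffer, coeffs.byteOffset + base, 12);
    return {
        m: dv.getUint32(0, true),       // 0 = A matrix, 1 = B matrix
        c: dv.getUint32(4, true),       // constraint (row) index
        s: dv.getUint32(8, true),       // signal (witness) index
        coef: coeffs.slice(base + 12, base + 12 + n8r)  // Fr coefficient
    };
}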

@@ -6096,6 +6096,10 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
const buffWitness = await readSection$1(fdWtns, sectionsWtns, 2);
if (logger) logger.debug("Reading Coeffs");
const buffCoeffs = await readSection$1(fdZKey, sectionsZKey, 4);
if (logger) logger.debug("Building ABC");
const [buffA_T, buffB_T, buffC_T] = await buldABC1(curve, zkey, buffWitness, buffCoeffs, logger);
if (logger) logger.debug("Reading A Points");
const buffBasesA = await readSection$1(fdZKey, sectionsZKey, 5);
if (logger) logger.debug("Reading B1 Points");
@@ -6107,9 +6111,6 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
if (logger) logger.debug("Reading H Points");
const buffBasesH = await readSection$1(fdZKey, sectionsZKey, 9);
if (logger) logger.debug("Building ABC");
const [buffA_T, buffB_T, buffC_T] = await buldABC(curve, zkey, buffWitness, buffCoeffs, logger);
const inc = power == Fr.s ? curve.Fr.shift : curve.Fr.w[power+1];
const buffA = await Fr.ifft(buffA_T, "", "", logger, "FFT_A");
@@ -6177,133 +6178,46 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
}
async function buldABC(curve, zkey, witness, coeffs, logger) {
const concurrency = curve.tm.concurrency;
async function buldABC1(curve, zkey, witness, coeffs, logger) {
const n8 = curve.Fr.n8;
const sCoef = 4*3 + zkey.n8r;
const nCoef = (coeffs.byteLength-4) / sCoef;
let getUint32;
const outBuffA = new ffjavascript.BigBuffer(zkey.domainSize * n8);
const outBuffB = new ffjavascript.BigBuffer(zkey.domainSize * n8);
const outBuffC = new ffjavascript.BigBuffer(zkey.domainSize * n8);
if (coeffs instanceof ffjavascript.BigBuffer) {
const coeffsDV = [];
const PAGE_LEN = coeffs.buffers[0].length;
for (let i=0; i< coeffs.buffers.length; i++) {
coeffsDV.push(new DataView(coeffs.buffers[i].buffer));
}
getUint32 = function (pos) {
return coeffsDV[Math.floor(pos/PAGE_LEN)].getUint32(pos % PAGE_LEN, true);
};
} else {
const coeffsDV = new DataView(coeffs.buffer, coeffs.byteOffset, coeffs.byteLength);
getUint32 = function (pos) {
return coeffsDV.getUint32(pos, true);
};
const outBuf = [ outBuffA, outBuffB ];
for (let i=0; i<nCoef; i++) {
if ((logger)&&(i%100000 == 0)) logger.debug(`QAP AB: ${i}/${nCoef}`);
const buffCoef = coeffs.slice(4+i*sCoef, 4+i*sCoef+sCoef);
const buffCoefV = new DataView(buffCoef.buffer);
const m= buffCoefV.getUint32(0, true);
const c= buffCoefV.getUint32(4, true);
const s= buffCoefV.getUint32(8, true);
const coef = buffCoef.slice(12);
outBuf[m].set(
curve.Fr.add(
outBuf[m].slice(c*n8, c*n8+n8),
curve.Fr.mul(coef, witness.slice(s*n8, s*n8+n8))
),
c*n8
);
}
const elementsPerChunk = Math.floor(zkey.domainSize/concurrency);
const promises = [];
const cutPoints = [];
for (let i=0; i<concurrency; i++) {
cutPoints.push( getCutPoint( Math.floor(i*elementsPerChunk) ));
}
cutPoints.push(coeffs.byteLength);
const chunkSize = 2**26;
for (let s=0 ; s<zkey.nVars ; s+= chunkSize) {
if (logger) logger.debug(`QAP ${s}: ${s}/${zkey.nVars}`);
const ns= Math.min(zkey.nVars-s, chunkSize );
for (let i=0; i<concurrency; i++) {
let n;
if (i< concurrency-1) {
n = elementsPerChunk;
} else {
n = zkey.domainSize - i*elementsPerChunk;
}
if (n==0) continue;
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: coeffs.slice(cutPoints[i], cutPoints[i+1])});
task.push({cmd: "ALLOCSET", var: 1, buff: witness.slice(s*curve.Fr.n8, (s+ns)*curve.Fr.n8)});
task.push({cmd: "ALLOC", var: 2, len: n*curve.Fr.n8});
task.push({cmd: "ALLOC", var: 3, len: n*curve.Fr.n8});
task.push({cmd: "ALLOC", var: 4, len: n*curve.Fr.n8});
task.push({cmd: "CALL", fnName: "qap_buildABC", params:[
{var: 0},
{val: (cutPoints[i+1] - cutPoints[i])/sCoef},
{var: 1},
{var: 2},
{var: 3},
{var: 4},
{val: i*elementsPerChunk},
{val: n},
{val: s},
{val: ns}
]});
task.push({cmd: "GET", out: 0, var: 2, len: n*curve.Fr.n8});
task.push({cmd: "GET", out: 1, var: 3, len: n*curve.Fr.n8});
task.push({cmd: "GET", out: 2, var: 4, len: n*curve.Fr.n8});
promises.push(curve.tm.queueAction(task));
}
}
let result = await Promise.all(promises);
const nGroups = result.length / concurrency;
if (nGroups>1) {
const promises2 = [];
for (let i=0; i<concurrency; i++) {
const task=[];
task.push({cmd: "ALLOC", var: 0, len: result[i][0].byteLength});
task.push({cmd: "ALLOC", var: 1, len: result[i][0].byteLength});
for (let m=0; m<3; m++) {
task.push({cmd: "SET", var: 0, buff: result[i][m]});
for (let s=1; s<nGroups; s++) {
task.push({cmd: "SET", var: 1, buff: result[s*concurrency + i][m]});
task.push({cmd: "CALL", fnName: "qap_batchAdd", params:[
{var: 0},
{var: 1},
{val: result[i][m].length/curve.Fr.n8},
{var: 0}
]});
}
task.push({cmd: "GET", out: m, var: 0, len: result[i][m].length});
}
promises2.push(curve.tm.queueAction(task));
}
result = await Promise.all(promises2);
}
const outBuffA = new ffjavascript.BigBuffer(zkey.domainSize * curve.Fr.n8);
const outBuffB = new ffjavascript.BigBuffer(zkey.domainSize * curve.Fr.n8);
const outBuffC = new ffjavascript.BigBuffer(zkey.domainSize * curve.Fr.n8);
let p=0;
for (let i=0; i<result.length; i++) {
outBuffA.set(result[i][0], p);
outBuffB.set(result[i][1], p);
outBuffC.set(result[i][2], p);
p += result[i][0].byteLength;
for (let i=0; i<zkey.domainSize; i++) {
if ((logger)&&(i%100000 == 0)) logger.debug(`QAP C: ${i}/${zkey.domainSize}`);
outBuffC.set(
curve.Fr.mul(
outBuffA.slice(i*n8, i*n8+n8),
outBuffB.slice(i*n8, i*n8+n8),
),
i*n8
);
}
return [outBuffA, outBuffB, outBuffC];
function getCutPoint(v) {
let m = 0;
let n = getUint32(0);
while (m < n) {
var k = Math.floor((n + m) / 2);
const va = getUint32(4 + k*sCoef + 4);
if (va > v) {
n = k - 1;
} else if (va < v) {
m = k + 1;
} else {
n = k;
}
}
return 4 + m*sCoef;
}
}
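
A note on the two loops of buldABC1 shown above: the coefficient section only carries entries for the A and B matrices (m is 0 or 1, indexing outBuf = [outBuffA, outBuffB]), so the first loop accumulates coef * witness[s] into row c of the selected buffer, and the second loop fills outBuffC with the pointwise product of A and B. For a satisfying witness, the C evaluation at every point of the interpolation domain equals that product, which is why the C matrix does not need to be stored in the zkey. A small sanity-check sketch of the invariant the second loop establishes (an illustration, not part of the commit):

// Verifies that buffC_T is the pointwise product of buffA_T and buffB_T,
// which is how buldABC1 constructs it. `curve` is an ffjavascript curve.
function checkABC(curve, zkey, buffA_T, buffB_T, buffC_T) {
    const n8 = curve.Fr.n8;
    for (let i = 0; i < zkey.domainSize; i++) {
        const a = buffA_T.slice(i*n8, i*n8 + n8);
        const b = buffB_T.slice(i*n8, i*n8 + n8);
        const c = buffC_T.slice(i*n8, i*n8 + n8);
        if (!curve.Fr.eq(curve.Fr.mul(a, b), c)) {
            throw new Error(`A*B != C at domain index ${i}`);
        }
    }
}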

@@ -1584,6 +1584,10 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
const buffWitness = await readSection(fdWtns, sectionsWtns, 2);
if (logger) logger.debug("Reading Coeffs");
const buffCoeffs = await readSection(fdZKey, sectionsZKey, 4);
if (logger) logger.debug("Building ABC");
const [buffA_T, buffB_T, buffC_T] = await buldABC1(curve, zkey, buffWitness, buffCoeffs, logger);
if (logger) logger.debug("Reading A Points");
const buffBasesA = await readSection(fdZKey, sectionsZKey, 5);
if (logger) logger.debug("Reading B1 Points");
@@ -1595,9 +1599,6 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
if (logger) logger.debug("Reading H Points");
const buffBasesH = await readSection(fdZKey, sectionsZKey, 9);
if (logger) logger.debug("Building ABC");
const [buffA_T, buffB_T, buffC_T] = await buldABC(curve, zkey, buffWitness, buffCoeffs, logger);
const inc = power == Fr.s ? curve.Fr.shift : curve.Fr.w[power+1];
const buffA = await Fr.ifft(buffA_T, "", "", logger, "FFT_A");
@@ -1665,133 +1666,46 @@ async function groth16Prove(zkeyFileName, witnessFileName, logger) {
}
async function buldABC(curve, zkey, witness, coeffs, logger) {
const concurrency = curve.tm.concurrency;
async function buldABC1(curve, zkey, witness, coeffs, logger) {
const n8 = curve.Fr.n8;
const sCoef = 4*3 + zkey.n8r;
const nCoef = (coeffs.byteLength-4) / sCoef;
let getUint32;
const outBuffA = new ffjavascript.BigBuffer(zkey.domainSize * n8);
const outBuffB = new ffjavascript.BigBuffer(zkey.domainSize * n8);
const outBuffC = new ffjavascript.BigBuffer(zkey.domainSize * n8);
if (coeffs instanceof ffjavascript.BigBuffer) {
const coeffsDV = [];
const PAGE_LEN = coeffs.buffers[0].length;
for (let i=0; i< coeffs.buffers.length; i++) {
coeffsDV.push(new DataView(coeffs.buffers[i].buffer));
}
getUint32 = function (pos) {
return coeffsDV[Math.floor(pos/PAGE_LEN)].getUint32(pos % PAGE_LEN, true);
};
} else {
const coeffsDV = new DataView(coeffs.buffer, coeffs.byteOffset, coeffs.byteLength);
getUint32 = function (pos) {
return coeffsDV.getUint32(pos, true);
};
const outBuf = [ outBuffA, outBuffB ];
for (let i=0; i<nCoef; i++) {
if ((logger)&&(i%100000 == 0)) logger.debug(`QAP AB: ${i}/${nCoef}`);
const buffCoef = coeffs.slice(4+i*sCoef, 4+i*sCoef+sCoef);
const buffCoefV = new DataView(buffCoef.buffer);
const m= buffCoefV.getUint32(0, true);
const c= buffCoefV.getUint32(4, true);
const s= buffCoefV.getUint32(8, true);
const coef = buffCoef.slice(12);
outBuf[m].set(
curve.Fr.add(
outBuf[m].slice(c*n8, c*n8+n8),
curve.Fr.mul(coef, witness.slice(s*n8, s*n8+n8))
),
c*n8
);
}
const elementsPerChunk = Math.floor(zkey.domainSize/concurrency);
const promises = [];
const cutPoints = [];
for (let i=0; i<concurrency; i++) {
cutPoints.push( getCutPoint( Math.floor(i*elementsPerChunk) ));
}
cutPoints.push(coeffs.byteLength);
const chunkSize = 2**26;
for (let s=0 ; s<zkey.nVars ; s+= chunkSize) {
if (logger) logger.debug(`QAP ${s}: ${s}/${zkey.nVars}`);
const ns= Math.min(zkey.nVars-s, chunkSize );
for (let i=0; i<concurrency; i++) {
let n;
if (i< concurrency-1) {
n = elementsPerChunk;
} else {
n = zkey.domainSize - i*elementsPerChunk;
}
if (n==0) continue;
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: coeffs.slice(cutPoints[i], cutPoints[i+1])});
task.push({cmd: "ALLOCSET", var: 1, buff: witness.slice(s*curve.Fr.n8, (s+ns)*curve.Fr.n8)});
task.push({cmd: "ALLOC", var: 2, len: n*curve.Fr.n8});
task.push({cmd: "ALLOC", var: 3, len: n*curve.Fr.n8});
task.push({cmd: "ALLOC", var: 4, len: n*curve.Fr.n8});
task.push({cmd: "CALL", fnName: "qap_buildABC", params:[
{var: 0},
{val: (cutPoints[i+1] - cutPoints[i])/sCoef},
{var: 1},
{var: 2},
{var: 3},
{var: 4},
{val: i*elementsPerChunk},
{val: n},
{val: s},
{val: ns}
]});
task.push({cmd: "GET", out: 0, var: 2, len: n*curve.Fr.n8});
task.push({cmd: "GET", out: 1, var: 3, len: n*curve.Fr.n8});
task.push({cmd: "GET", out: 2, var: 4, len: n*curve.Fr.n8});
promises.push(curve.tm.queueAction(task));
}
}
let result = await Promise.all(promises);
const nGroups = result.length / concurrency;
if (nGroups>1) {
const promises2 = [];
for (let i=0; i<concurrency; i++) {
const task=[];
task.push({cmd: "ALLOC", var: 0, len: result[i][0].byteLength});
task.push({cmd: "ALLOC", var: 1, len: result[i][0].byteLength});
for (let m=0; m<3; m++) {
task.push({cmd: "SET", var: 0, buff: result[i][m]});
for (let s=1; s<nGroups; s++) {
task.push({cmd: "SET", var: 1, buff: result[s*concurrency + i][m]});
task.push({cmd: "CALL", fnName: "qap_batchAdd", params:[
{var: 0},
{var: 1},
{val: result[i][m].length/curve.Fr.n8},
{var: 0}
]});
}
task.push({cmd: "GET", out: m, var: 0, len: result[i][m].length});
}
promises2.push(curve.tm.queueAction(task));
}
result = await Promise.all(promises2);
}
const outBuffA = new ffjavascript.BigBuffer(zkey.domainSize * curve.Fr.n8);
const outBuffB = new ffjavascript.BigBuffer(zkey.domainSize * curve.Fr.n8);
const outBuffC = new ffjavascript.BigBuffer(zkey.domainSize * curve.Fr.n8);
let p=0;
for (let i=0; i<result.length; i++) {
outBuffA.set(result[i][0], p);
outBuffB.set(result[i][1], p);
outBuffC.set(result[i][2], p);
p += result[i][0].byteLength;
for (let i=0; i<zkey.domainSize; i++) {
if ((logger)&&(i%100000 == 0)) logger.debug(`QAP C: ${i}/${zkey.domainSize}`);
outBuffC.set(
curve.Fr.mul(
outBuffA.slice(i*n8, i*n8+n8),
outBuffB.slice(i*n8, i*n8+n8),
),
i*n8
);
}
return [outBuffA, outBuffB, outBuffC];
function getCutPoint(v) {
let m = 0;
let n = getUint32(0);
while (m < n) {
var k = Math.floor((n + m) / 2);
const va = getUint32(4 + k*sCoef + 4);
if (va > v) {
n = k - 1;
} else if (va < v) {
m = k + 1;
} else {
n = k;
}
}
return 4 + m*sCoef;
}
}
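
For context, the removed buldABC distributed the same work over curve.tm.concurrency workers: getCutPoint binary-searches the coefficient records (which are ordered by constraint index) to find byte offsets that split the domain into roughly equal slices, each worker runs the wasm routine qap_buildABC over its slice of records and a chunk of the witness, and when the witness is processed in more than one chunk the per-group partial buffers are folded together with qap_batchAdd before the final A, B and C buffers are reassembled. qap_batchAdd itself is wasm; in effect it adds two vectors of Fr elements, roughly as in this plain-JS sketch (an illustration under that assumption, not the removed code):

// Folds several equal-length partial result buffers by element-wise Fr
// addition, mirroring what the removed code delegated to qap_batchAdd.
// `Fr` is an ffjavascript field, `partials` an array of Uint8Arrays.
function foldPartials(Fr, partials) {
    const acc = partials[0].slice();
    const n8 = Fr.n8;
    for (let g = 1; g < partials.length; g++) {
        for (let i = 0; i < acc.length; i += n8) {
            acc.set(Fr.add(acc.slice(i, i + n8), partials[g].slice(i, i + n8)), i);
        }
    }
    return acc;
}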

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@@ -34,6 +34,10 @@ export default async function groth16Prove(zkeyFileName, witnessFileName, logger) {
const buffWitness = await binFileUtils.readSection(fdWtns, sectionsWtns, 2);
if (logger) logger.debug("Reading Coeffs");
const buffCoeffs = await binFileUtils.readSection(fdZKey, sectionsZKey, 4);
if (logger) logger.debug("Building ABC");
const [buffA_T, buffB_T, buffC_T] = await buldABC1(curve, zkey, buffWitness, buffCoeffs, logger);
if (logger) logger.debug("Reading A Points");
const buffBasesA = await binFileUtils.readSection(fdZKey, sectionsZKey, 5);
if (logger) logger.debug("Reading B1 Points");
@@ -45,9 +49,6 @@ export default async function groth16Prove(zkeyFileName, witnessFileName, logger) {
if (logger) logger.debug("Reading H Points");
const buffBasesH = await binFileUtils.readSection(fdZKey, sectionsZKey, 9);
if (logger) logger.debug("Building ABC");
const [buffA_T, buffB_T, buffC_T] = await buldABC(curve, zkey, buffWitness, buffCoeffs, logger);
const inc = power == Fr.s ? curve.Fr.shift : curve.Fr.w[power+1];
const buffA = await Fr.ifft(buffA_T, "", "", logger, "FFT_A");
@@ -115,6 +116,49 @@ export default async function groth16Prove(zkeyFileName, witnessFileName, logger) {
}
async function buldABC1(curve, zkey, witness, coeffs, logger) {
const n8 = curve.Fr.n8;
const sCoef = 4*3 + zkey.n8r;
const nCoef = (coeffs.byteLength-4) / sCoef;
const outBuffA = new BigBuffer(zkey.domainSize * n8);
const outBuffB = new BigBuffer(zkey.domainSize * n8);
const outBuffC = new BigBuffer(zkey.domainSize * n8);
const outBuf = [ outBuffA, outBuffB ];
for (let i=0; i<nCoef; i++) {
if ((logger)&&(i%100000 == 0)) logger.debug(`QAP AB: ${i}/${nCoef}`);
const buffCoef = coeffs.slice(4+i*sCoef, 4+i*sCoef+sCoef);
const buffCoefV = new DataView(buffCoef.buffer);
const m= buffCoefV.getUint32(0, true);
const c= buffCoefV.getUint32(4, true);
const s= buffCoefV.getUint32(8, true);
const coef = buffCoef.slice(12);
outBuf[m].set(
curve.Fr.add(
outBuf[m].slice(c*n8, c*n8+n8),
curve.Fr.mul(coef, witness.slice(s*n8, s*n8+n8))
),
c*n8
);
}
for (let i=0; i<zkey.domainSize; i++) {
if ((logger)&&(i%100000 == 0)) logger.debug(`QAP C: ${i}/${zkey.domainSize}`);
outBuffC.set(
curve.Fr.mul(
outBuffA.slice(i*n8, i*n8+n8),
outBuffB.slice(i*n8, i*n8+n8),
),
i*n8
);
}
return [outBuffA, outBuffB, outBuffC];
}
async function buldABC(curve, zkey, witness, coeffs, logger) {
const concurrency = curve.tm.concurrency;
const sCoef = 4*3 + zkey.n8r;