fix sectionIsEqual

This commit is contained in:
Jordi Baylina 2020-07-26 14:05:23 +02:00
parent e5b89dcc26
commit f022621696
No known key found for this signature in database
GPG Key ID: 7480C80C1BE43112
8 changed files with 1499 additions and 72 deletions

@ -231,7 +231,7 @@ This information fits with our mental map of the circuit we created: we had two
### 12. Print the constraints ### 12. Print the constraints
```sh ```sh
snarkjs r1cs print circuit.r1cs snarkjs r1cs print circuit.r1cs circuit.sym
``` ```
To double check, we print the constraints of the circuit. To double check, we print the constraints of the circuit.

@ -321,18 +321,6 @@ class FastFile {
} }
// Build a fresh writable in-memory file descriptor.  The backing buffer
// starts at o.initialSize bytes (default 1 MiB) and grows on demand.
function createNew(o) {
    const fd = new MemFile();
    fd.o = o;
    fd.o.data = new Uint8Array(o.initialSize || (1 << 20));
    fd.allocSize = fd.o.data.byteLength;
    fd.totalSize = 0;
    fd.readOnly = false;
    fd.pos = 0;
    return fd;
}
function readExisting(o) { function readExisting(o) {
const fd = new MemFile(); const fd = new MemFile();
fd.o = o; fd.o = o;
@ -457,24 +445,6 @@ class MemFile {
/* global fetch */ /* global fetch */
// Create (or truncate) a file for writing.  `o` is either an options object
// ({type, fileName, cacheSize} or {type: "mem", ...}) or a plain file name
// string, in which case `b` is taken as the cache size.
async function createOverride(o, b) {
    if (typeof o === "string") {
        o = { type: "file", fileName: o, cacheSize: b };
    }
    switch (o.type) {
    case "file":
        return await open(o.fileName, "w+", o.cacheSize);
    case "mem":
        return createNew(o);
    default:
        throw new Error("Invalid FastFile type: " + o.type);
    }
}
async function readExisting$1(o, b) { async function readExisting$1(o, b) {
if (o instanceof Uint8Array) { if (o instanceof Uint8Array) {
o = { o = {
@ -645,13 +615,519 @@ async function load(fileName, loadConstraints, loadMap) {
} }
} }
// Open `fileName` with one of the whitelisted flags and wrap the descriptor
// in a page-cached FastFile$1.  cacheSize defaults to 256 KiB.
async function open$1(fileName, openFlags, cacheSize) {
    cacheSize = cacheSize || 4096 * 64;
    const validFlags = ["w+", "wx+", "r", "ax+", "a+"];
    if (validFlags.indexOf(openFlags) < 0) {
        throw new Error("Invalid open option");
    }
    const fd = await fs.promises.open(fileName, openFlags);
    const stats = await fd.stat();
    return new FastFile$1(fd, stats, cacheSize, fileName);
}
// Scratch buffers shared by the write*32/64 helpers below.
// NOTE(review): these are module-level singletons; two interleaved write*()
// calls that yield between filling the buffer and copying it could clobber
// each other — assumed safe because callers await each write. Confirm.
const tmpBuff32 = new Uint8Array(4);
const tmpBuff32v = new DataView(tmpBuff32.buffer);
const tmpBuff64 = new Uint8Array(8);
const tmpBuff64v = new DataView(tmpBuff64.buffer);

/**
 * Page-cached random-access wrapper around an fs FileHandle.
 * Reads and writes go through an in-memory page cache (`this.pages`);
 * dirty pages are flushed asynchronously by `_triggerWrite`, and `close()`
 * resolves only once every dirty page has been written back.
 *
 * BUGFIX vs. original: `writeUBE32` wrote the value little-endian
 * (`setUint32(..., true)`), so `readUBE32` (which reads big-endian) could
 * not round-trip it.  It now writes big-endian as the name promises.
 * `readULE32`/`readULE64` were also switched from native-endian
 * `Uint32Array` views to explicit little-endian `DataView` reads.
 */
class FastFile$1 {

    constructor(fd, stats, cacheSize, fileName) {
        this.fileName = fileName;
        this.fd = fd;
        this.pos = 0;
        // Page size: at least 256 bytes and at least 4x the fs block size.
        this.pageBits = 8;
        this.pageSize = (1 << this.pageBits);
        while (this.pageSize < stats.blksize*4) {
            this.pageBits ++;
            this.pageSize *= 2;
        }
        this.totalSize = stats.size;
        this.totalPages = Math.floor((stats.size -1) / this.pageSize)+1;
        this.maxPagesLoaded = Math.floor( cacheSize / this.pageSize)+1;
        this.pages = {};
        this.pendingLoads = [];
        this.writing = false;
        this.reading = false;
    }

    // Queue a load of page `p`; resolves when the page is in `this.pages`.
    _loadPage(p) {
        const self = this;
        return new Promise((resolve, reject)=> {
            self.pendingLoads.push({
                page: p,
                resolve: resolve,
                reject: reject
            });
            setImmediate(self._triggerLoad.bind(self));
        });
    }

    // Drain the pending-load queue: resolve loads whose page is already
    // cached, evict a clean page if the cache is full, and start at most
    // one fd.read at a time.
    _triggerLoad() {
        const self = this;
        processPendingLoads();
        if (self.pendingLoads.length == 0) return;
        if (Object.keys(self.pages).length >= self.maxPagesLoaded) {
            const dp = getDeletablePage();
            if (dp<0) {  // No evictable page available; retry on a later tick.
                return;
            }
            delete self.pages[dp];
        }
        const load = self.pendingLoads.shift();
        if (load.page>=self.totalPages) {
            // Page past EOF: materialize an empty page, nothing to read.
            self.pages[load.page] = {
                dirty: false,
                buff: new Uint8Array(self.pageSize),
                pendingOps: 1,
                size: 0
            };
            load.resolve();
            setImmediate(self._triggerLoad.bind(self));
            return;
        }
        if (self.reading) {
            self.pendingLoads.unshift(load);
            return;  // Only one read at a time.
        }
        self.reading = true;
        const page = {
            dirty: false,
            buff: new Uint8Array(self.pageSize),
            pendingOps: 1,
            size: 0
        };
        self.fd.read(page.buff, 0, self.pageSize, load.page*self.pageSize).then((res)=> {
            page.size = res.bytesRead;
            self.pages[load.page] = page;
            self.reading = false;
            load.resolve();
            setImmediate(self._triggerLoad.bind(self));
        }, (err) => {
            load.reject(err);
        });

        // Resolve queued loads whose page is already cached.
        function processPendingLoads() {
            const newPendingLoads = [];
            for (let i=0; i<self.pendingLoads.length; i++) {
                const load = self.pendingLoads[i];
                if (typeof self.pages[load.page] != "undefined") {
                    self.pages[load.page].pendingOps ++;
                    load.resolve();
                } else {
                    newPendingLoads.push(load);
                }
            }
            self.pendingLoads = newPendingLoads;
        }

        // A page may be evicted only when it is clean and unreferenced.
        function getDeletablePage() {
            for (let p in self.pages) {
                const page = self.pages[p];
                if ((page.dirty == false)&&(page.pendingOps==0)) return p;
            }
            return -1;
        }
    }

    // Flush one dirty page to disk; reschedules itself until none remain,
    // then completes a pending close() if there is one.
    _triggerWrite() {
        const self = this;
        if (self.writing) return;
        const p = self._getDirtyPage();
        if (p<0) {
            if (self.pendingClose) self.pendingClose();
            return;
        }
        self.writing=true;
        self.pages[p].dirty = false;
        self.fd.write(self.pages[p].buff, 0, self.pages[p].size, p*self.pageSize).then(() => {
            self.writing = false;
            setImmediate(self._triggerWrite.bind(self));
            setImmediate(self._triggerLoad.bind(self));
        }, (err) => {
            console.log("ERROR Writing: "+err);
            self.error = err;
            self._tryClose();
        });
    }

    _getDirtyPage() {
        for (let p in this.pages) {
            if (this.pages[p].dirty) return p;
        }
        return -1;
    }

    /**
     * Write `buff` at `pos` (defaults to the current cursor) through the
     * page cache.  Pages are loaded one at a time as needed, marked dirty,
     * and flushed asynchronously.
     */
    async write(buff, pos) {
        if (buff.byteLength == 0) return;
        const self = this;
        if (typeof pos == "undefined") pos = self.pos;
        self.pos = pos+buff.byteLength;
        if (self.totalSize < pos + buff.byteLength) self.totalSize = pos + buff.byteLength;
        if (self.pendingClose)
            throw new Error("Writing a closing file");
        const firstPage = Math.floor(pos / self.pageSize);
        let p = firstPage;
        let o = pos % self.pageSize;
        let r = buff.byteLength;  // bytes still to copy
        while (r>0) {
            await self._loadPage(p);
            const l = (o+r > self.pageSize) ? (self.pageSize -o) : r;
            const srcView = new Uint8Array(buff.buffer, buff.byteLength - r, l);
            const dstView = new Uint8Array(self.pages[p].buff.buffer, o, l);
            dstView.set(srcView);
            self.pages[p].dirty = true;
            self.pages[p].pendingOps --;
            self.pages[p].size = Math.max(o+l, self.pages[p].size);
            if (p>=self.totalPages) {
                self.totalPages = p+1;
            }
            r = r-l;
            p ++;
            o = 0;
        }
        setImmediate(self._triggerWrite.bind(self));
    }

    /**
     * Read `len` bytes starting at `pos` (defaults to the cursor).
     * Bytes past the current totalSize are left zeroed in the result.
     */
    async read(len, pos) {
        if (len == 0) {
            return new Uint8Array(0);
        }
        const self = this;
        // Grow the cache if a single read would not fit in 80% of it.
        if (len > self.pageSize*self.maxPagesLoaded*0.8) {
            const cacheSize = Math.floor(len * 1.1);
            this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1;
        }
        if (typeof pos == "undefined") pos = self.pos;
        self.pos = pos+len;
        if (self.pendingClose)
            throw new Error("Reading a closing file");
        const firstPage = Math.floor(pos / self.pageSize);
        const lastPage = Math.floor((pos+len-1) / self.pageSize);
        for (let i=firstPage; i<=lastPage; i++) await self._loadPage(i);
        let buff = new Uint8Array(len);
        let dstView = new Uint8Array(buff);
        let p = firstPage;
        let o = pos % self.pageSize;
        // Remaining bytes to read (clamped to the data that actually exists).
        let r = pos + len > self.totalSize ? len - (pos + len - self.totalSize): len;
        while (r>0) {
            // bytes to copy from this page
            const l = (o+r > self.pageSize) ? (self.pageSize -o) : r;
            const srcView = new Uint8Array(self.pages[p].buff.buffer, o, l);
            buff.set(srcView, dstView.byteLength-r);
            self.pages[p].pendingOps --;
            r = r-l;
            p ++;
            o = 0;
        }
        setImmediate(self._triggerLoad.bind(self));
        return buff;
    }

    // Resolve/reject a pending close() once all I/O has settled.
    _tryClose() {
        const self = this;
        if (!self.pendingClose) return;
        if (self.error) {
            self.pendingCloseReject(self.error);
        }
        const p = self._getDirtyPage();
        if ((p>=0) || (self.writing) || (self.reading) || (self.pendingLoads.length>0)) return;
        self.pendingClose();
    }

    // Flush everything and close the underlying descriptor.
    close() {
        const self = this;
        if (self.pendingClose)
            throw new Error("Closing the file twice");
        return new Promise((resolve, reject) => {
            self.pendingClose = resolve;
            self.pendingCloseReject = reject;
            self._tryClose();
        }).then(()=> {
            self.fd.close();
        }, (err) => {
            self.fd.close();
            throw (err);
        });
    }

    // Close and delete the file from disk.
    async discard() {
        const self = this;
        await self.close();
        await fs.promises.unlink(this.fileName);
    }

    // Write an unsigned 32-bit little-endian integer.
    async writeULE32(v, pos) {
        const self = this;
        tmpBuff32v.setUint32(0, v, true);
        await self.write(tmpBuff32, pos);
    }

    // Write an unsigned 32-bit BIG-endian integer.
    // Fixed: previously wrote little-endian, breaking readUBE32 round-trips.
    async writeUBE32(v, pos) {
        const self = this;
        tmpBuff32v.setUint32(0, v, false);
        await self.write(tmpBuff32, pos);
    }

    // Write an unsigned (<= 53-bit) integer as two little-endian 32-bit words.
    async writeULE64(v, pos) {
        const self = this;
        tmpBuff64v.setUint32(0, v & 0xFFFFFFFF, true);
        tmpBuff64v.setUint32(4, Math.floor(v / 0x100000000) , true);
        await self.write(tmpBuff64, pos);
    }

    // Read an unsigned 32-bit little-endian integer (endianness explicit).
    async readULE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        return new DataView(b.buffer, b.byteOffset, 4).getUint32(0, true);
    }

    // Read an unsigned 32-bit big-endian integer.
    async readUBE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        const view = new DataView(b.buffer, b.byteOffset, 4);
        return view.getUint32(0, false);
    }

    // Read an unsigned 64-bit little-endian integer as a JS number.
    async readULE64(pos) {
        const self = this;
        const b = await self.read(8, pos);
        const view = new DataView(b.buffer, b.byteOffset, 8);
        return view.getUint32(4, true) * 0x100000000 + view.getUint32(0, true);
    }
}
// Build a fresh writable in-memory file descriptor.  The backing buffer
// starts at o.initialSize bytes (default 1 MiB) and grows on demand.
function createNew(o) {
    const fd = new MemFile$1();
    fd.o = o;
    fd.o.data = new Uint8Array(o.initialSize || (1 << 20));
    fd.allocSize = fd.o.data.byteLength;
    fd.totalSize = 0;
    fd.readOnly = false;
    fd.pos = 0;
    return fd;
}
// Wrap an already-populated { data: Uint8Array } object as a read-only
// in-memory file positioned at the start.
function readExisting$2(o) {
    const fd = new MemFile$1();
    const size = o.data.byteLength;
    fd.o = o;
    fd.allocSize = size;
    fd.totalSize = size;
    fd.readOnly = true;
    fd.pos = 0;
    return fd;
}
/**
 * In-memory implementation of the FastFile interface.  Data lives in
 * `this.o.data` (a Uint8Array) which is grown geometrically on demand.
 * Instances are initialized externally (createNew / readExisting$2 set
 * o, allocSize, totalSize, readOnly, pos).
 *
 * FIX: the ULE (little-endian) helpers used `Uint32Array` views, which
 * read/write in the platform's NATIVE byte order — correct on x86/ARM-LE
 * but wrong on big-endian hosts.  They now use DataView with an explicit
 * endianness flag, matching the already-explicit writeUBE32.
 */
class MemFile$1 {

    constructor() {
        this.pageSize = 1 << 14;  // for compatibility with the fd-backed FastFile
    }

    // Grow the backing buffer so it holds at least newLen bytes.
    // Growth is at least 1 MiB or 10% of the current size, whichever is larger.
    _resizeIfNeeded(newLen) {
        if (newLen > this.allocSize) {
            const newAllocSize = Math.max(
                this.allocSize + (1 << 20),
                Math.floor(this.allocSize * 1.1),
                newLen
            );
            const newData = new Uint8Array(newAllocSize);
            newData.set(this.o.data);
            this.o.data = newData;
            this.allocSize = newAllocSize;
        }
    }

    // Copy `buff` into the backing store at `pos` (defaults to the cursor).
    async write(buff, pos) {
        const self =this;
        if (typeof pos == "undefined") pos = self.pos;
        if (this.readOnly) throw new Error("Writing a read only file");
        this._resizeIfNeeded(pos + buff.byteLength);
        this.o.data.set(buff, pos);
        if (pos + buff.byteLength > this.totalSize) this.totalSize = pos + buff.byteLength;
        this.pos = pos + buff.byteLength;
    }

    // Return a copy of `len` bytes starting at `pos` (defaults to the cursor).
    // Read-only files raise on out-of-bounds; writable files grow instead.
    async read(len, pos) {
        const self = this;
        if (typeof pos == "undefined") pos = self.pos;
        if (this.readOnly) {
            if (pos + len > this.totalSize) throw new Error("Reading out of bounds");
        }
        this._resizeIfNeeded(pos + len);
        const buff = this.o.data.slice(pos, pos+len);
        this.pos = pos + len;
        return buff;
    }

    // Trim the backing buffer down to the bytes actually written.
    close() {
        if (this.o.data.byteLength != this.totalSize) {
            this.o.data = this.o.data.slice(0, this.totalSize);
        }
    }

    // Nothing to delete for an in-memory file.
    async discard() {
    }

    // Write an unsigned 32-bit little-endian integer.
    async writeULE32(v, pos) {
        const self = this;
        const buff = new Uint8Array(4);
        new DataView(buff.buffer).setUint32(0, v, true);
        await self.write(buff, pos);
    }

    // Write an unsigned 32-bit big-endian integer.
    async writeUBE32(v, pos) {
        const self = this;
        const buff = new Uint8Array(4);
        const buffV = new DataView(buff.buffer);
        buffV.setUint32(0, v, false);
        await self.write(buff, pos);
    }

    // Write an unsigned (<= 53-bit) integer as two little-endian 32-bit words.
    async writeULE64(v, pos) {
        const self = this;
        const buff = new Uint8Array(8);
        const buffV = new DataView(buff.buffer);
        buffV.setUint32(0, v & 0xFFFFFFFF, true);
        buffV.setUint32(4, Math.floor(v / 0x100000000), true);
        await self.write(buff, pos);
    }

    // Read an unsigned 32-bit little-endian integer.
    async readULE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        return new DataView(b.buffer, b.byteOffset, 4).getUint32(0, true);
    }

    // Read an unsigned 32-bit big-endian integer.
    async readUBE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        return new DataView(b.buffer, b.byteOffset, 4).getUint32(0, false);
    }

    // Read an unsigned 64-bit little-endian integer as a JS number.
    async readULE64(pos) {
        const self = this;
        const b = await self.read(8, pos);
        const view = new DataView(b.buffer, b.byteOffset, 8);
        return view.getUint32(4, true) * 0x100000000 + view.getUint32(0, true);
    }
}
/* global fetch */
// Create (or truncate) a file for writing.  `o` is either an options object
// ({type, fileName, cacheSize} or {type: "mem", ...}) or a plain file name
// string, in which case `b` is taken as the cache size.
async function createOverride(o, b) {
    if (typeof o === "string") {
        o = { type: "file", fileName: o, cacheSize: b };
    }
    switch (o.type) {
    case "file":
        return await open$1(o.fileName, "w+", o.cacheSize);
    case "mem":
        return createNew(o);
    default:
        throw new Error("Invalid FastFile type: " + o.type);
    }
}
// Open an existing source for reading.  Accepts raw bytes (Uint8Array),
// a file name / URL string (with optional cache size `b`), or an options
// object.  In the browser a string is fetched into memory; in node it is
// opened as a file.
async function readExisting$3(o, b) {
    if (o instanceof Uint8Array) {
        o = { type: "mem", data: o };
    }
    if (typeof o === "string") {
        if (process.browser) {
            const res = await fetch(o);
            const ab = await res.arrayBuffer();
            o = { type: "mem", data: new Uint8Array(ab) };
        } else {
            o = { type: "file", fileName: o, cacheSize: b };
        }
    }
    switch (o.type) {
    case "file":
        return await open$1(o.fileName, "r", o.cacheSize);
    case "mem":
        return await readExisting$2(o);
    default:
        throw new Error("Invalid FastFile type: " + o.type);
    }
}
async function loadSymbols(symFileName) { async function loadSymbols(symFileName) {
const sym = { const sym = {
labelIdx2Name: [ "one" ], labelIdx2Name: [ "one" ],
varIdx2Name: [ "one" ], varIdx2Name: [ "one" ],
componentIdx2Name: [] componentIdx2Name: []
}; };
const fd = await readExisting$1(symFileName); const fd = await readExisting$3(symFileName);
const buff = await fd.read(fd.totalSize); const buff = await fd.read(fd.totalSize);
const symsStr = new TextDecoder("utf-8").decode(buff); const symsStr = new TextDecoder("utf-8").decode(buff);
const lines = symsStr.split("\n"); const lines = symsStr.split("\n");
@ -739,7 +1215,7 @@ async function r1csExportJson(r1csFileName, logger) {
var name = "snarkjs"; var name = "snarkjs";
var type = "module"; var type = "module";
var version = "0.3.5"; var version = "0.3.6";
var description = "zkSNARKs implementation in JavaScript"; var description = "zkSNARKs implementation in JavaScript";
var main = "./build/main.cjs"; var main = "./build/main.cjs";
var module$1 = "./main.js"; var module$1 = "./main.js";
@ -778,7 +1254,7 @@ var repository = {
var dependencies = { var dependencies = {
"blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git", "blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git",
circom_runtime: "0.0.9", circom_runtime: "0.0.9",
fastfile: "0.0.6", fastfile: "0.0.9",
ffjavascript: "0.2.4", ffjavascript: "0.2.4",
keccak: "^3.0.0", keccak: "^3.0.0",
logplease: "^1.2.15", logplease: "^1.2.15",
@ -1637,7 +2113,7 @@ function keyFromBeacon(curve, challengeHash, beaconHash, numIterationsExp) {
async function readBinFile$1(fileName, type, maxVersion) { async function readBinFile$1(fileName, type, maxVersion) {
const fd = await readExisting$1(fileName); const fd = await readExisting$3(fileName);
const b = await fd.read(4); const b = await fd.read(4);
let readedType = ""; let readedType = "";
@ -1761,7 +2237,7 @@ async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
const n = Math.min(totalBytes-i, MAX_BUFF_SIZE); const n = Math.min(totalBytes-i, MAX_BUFF_SIZE);
const buff1 = await fd1.read(n); const buff1 = await fd1.read(n);
const buff2 = await fd2.read(n); const buff2 = await fd2.read(n);
for (i=0; i<n; i++) if (buff1[i] != buff2[i]) return false; for (let j=0; j<n; j++) if (buff1[j] != buff2[j]) return false;
} }
await endReadSection$1(fd1); await endReadSection$1(fd1);
await endReadSection$1(fd2); await endReadSection$1(fd2);
@ -1977,7 +2453,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
const sG2 = curve.F2.n8*2; const sG2 = curve.F2.n8*2;
const scG2 = curve.F2.n8; // Compresed size const scG2 = curve.F2.n8; // Compresed size
const fdResponse = await readExisting$1(contributionFilename); const fdResponse = await readExisting$3(contributionFilename);
if (fdResponse.totalSize != if (fdResponse.totalSize !=
64 + // Old Hash 64 + // Old Hash
@ -2613,7 +3089,7 @@ async function applyKeyToChallengeSection(fdOld, fdNew, responseHasher, curve, g
async function challengeContribute(curve, challengeFilename, responesFileName, entropy, logger) { async function challengeContribute(curve, challengeFilename, responesFileName, entropy, logger) {
await Blake2b.ready(); await Blake2b.ready();
const fdFrom = await readExisting$1(challengeFilename); const fdFrom = await readExisting$3(challengeFilename);
const sG1 = curve.F1.n64*8*2; const sG1 = curve.F1.n64*8*2;
@ -4155,7 +4631,7 @@ async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, na
const oldMPCParams = await readMPCParams(fdZKeyOld, curve, sectionsZKeyOld); const oldMPCParams = await readMPCParams(fdZKeyOld, curve, sectionsZKeyOld);
const newMPCParams = {}; const newMPCParams = {};
const fdMPCParams = await readExisting$1(mpcparamsName); const fdMPCParams = await readExisting$3(mpcparamsName);
fdMPCParams.pos = fdMPCParams.pos =
sG1*3 + sG2*3 + // vKey sG1*3 + sG2*3 + // vKey
@ -4911,7 +5387,7 @@ async function bellmanContribute(curve, challengeFilename, responesFileName, ent
const sG1 = curve.G1.F.n8*2; const sG1 = curve.G1.F.n8*2;
const sG2 = curve.G2.F.n8*2; const sG2 = curve.G2.F.n8*2;
const fdFrom = await readExisting$1(challengeFilename); const fdFrom = await readExisting$3(challengeFilename);
const fdTo = await createOverride(responesFileName); const fdTo = await createOverride(responesFileName);
@ -5111,7 +5587,7 @@ async function exportSolidityVerifier(zKeyName, templateName, logger) {
const verificationKey = await zkeyExportVerificationKey(zKeyName); const verificationKey = await zkeyExportVerificationKey(zKeyName);
const fd = await readExisting$1(templateName); const fd = await readExisting$3(templateName);
const buff = await fd.read(fd.totalSize); const buff = await fd.read(fd.totalSize);
let template = new TextDecoder("utf-8").decode(buff); let template = new TextDecoder("utf-8").decode(buff);
@ -5461,7 +5937,7 @@ const { WitnessCalculatorBuilder } = circomRuntime;
async function wtnsCalculate(input, wasmFileName, wtnsFileName, options) { async function wtnsCalculate(input, wasmFileName, wtnsFileName, options) {
const fdWasm = await readExisting$1(wasmFileName); const fdWasm = await readExisting$3(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize); const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close(); await fdWasm.close();
@ -5560,7 +6036,7 @@ const { WitnessCalculatorBuilder: WitnessCalculatorBuilder$1 } = circomRuntime;
async function wtnsDebug(input, wasmFileName, wtnsFileName, symName, options, logger) { async function wtnsDebug(input, wasmFileName, wtnsFileName, symName, options, logger) {
const fdWasm = await readExisting$1(wasmFileName); const fdWasm = await readExisting$3(wasmFileName);
const wasm = await fdWasm.read(fdWasm.totalSize); const wasm = await fdWasm.read(fdWasm.totalSize);
await fdWasm.close(); await fdWasm.close();

@ -22,6 +22,11 @@ async function open(fileName, openFlags, cacheSize) {
return new FastFile(fd, stats, cacheSize, fileName); return new FastFile(fd, stats, cacheSize, fileName);
} }
const tmpBuff32 = new Uint8Array(4);
const tmpBuff32v = new DataView(tmpBuff32.buffer);
const tmpBuff64 = new Uint8Array(8);
const tmpBuff64v = new DataView(tmpBuff64.buffer);
class FastFile { class FastFile {
constructor(fd, stats, cacheSize, fileName) { constructor(fd, stats, cacheSize, fileName) {
@ -157,10 +162,12 @@ class FastFile {
async write(buff, pos) { async write(buff, pos) {
if (buff.byteLength == 0) return; if (buff.byteLength == 0) return;
const self = this; const self = this;
/*
if (buff.byteLength > self.pageSize*self.maxPagesLoaded*0.8) { if (buff.byteLength > self.pageSize*self.maxPagesLoaded*0.8) {
const cacheSize = Math.floor(buff.byteLength * 1.1); const cacheSize = Math.floor(buff.byteLength * 1.1);
this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1; this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1;
} }
*/
if (typeof pos == "undefined") pos = self.pos; if (typeof pos == "undefined") pos = self.pos;
self.pos = pos+buff.byteLength; self.pos = pos+buff.byteLength;
if (self.totalSize < pos + buff.byteLength) self.totalSize = pos + buff.byteLength; if (self.totalSize < pos + buff.byteLength) self.totalSize = pos + buff.byteLength;
@ -169,12 +176,13 @@ class FastFile {
const firstPage = Math.floor(pos / self.pageSize); const firstPage = Math.floor(pos / self.pageSize);
const lastPage = Math.floor((pos+buff.byteLength-1) / self.pageSize); const lastPage = Math.floor((pos+buff.byteLength-1) / self.pageSize);
for (let i=firstPage; i<=lastPage; i++) await self._loadPage(i); // for (let i=firstPage; i<=lastPage; i++) await self._loadPage(i);
let p = firstPage; let p = firstPage;
let o = pos % self.pageSize; let o = pos % self.pageSize;
let r = buff.byteLength; let r = buff.byteLength;
while (r>0) { while (r>0) {
await self._loadPage(p);
const l = (o+r > self.pageSize) ? (self.pageSize -o) : r; const l = (o+r > self.pageSize) ? (self.pageSize -o) : r;
const srcView = new Uint8Array(buff.buffer, buff.byteLength - r, l); const srcView = new Uint8Array(buff.buffer, buff.byteLength - r, l);
const dstView = new Uint8Array(self.pages[p].buff.buffer, o, l); const dstView = new Uint8Array(self.pages[p].buff.buffer, o, l);
@ -266,28 +274,27 @@ class FastFile {
async writeULE32(v, pos) { async writeULE32(v, pos) {
const self = this; const self = this;
const b = Uint32Array.of(v); tmpBuff32v.setUint32(0, v, true);
await self.write(new Uint8Array(b.buffer), pos); await self.write(tmpBuff32, pos);
} }
async writeUBE32(v, pos) { async writeUBE32(v, pos) {
const self = this; const self = this;
const buff = new Uint8Array(4); tmpBuff32v.setUint32(0, v, true);
const buffV = new DataView(buff.buffer);
buffV.setUint32(0, v, false);
await self.write(buff, pos); await self.write(tmpBuff32, pos);
} }
async writeULE64(v, pos) { async writeULE64(v, pos) {
const self = this; const self = this;
const b = Uint32Array.of(v & 0xFFFFFFFF, Math.floor(v / 0x100000000)); tmpBuff64v.setUint32(0, v & 0xFFFFFFFF, true);
tmpBuff64v.setUint32(4, Math.floor(v / 0x100000000) , true);
await self.write(new Uint8Array(b.buffer), pos); await self.write(tmpBuff64, pos);
} }
async readULE32(pos) { async readULE32(pos) {
@ -636,7 +643,7 @@ async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
const n = Math.min(totalBytes-i, MAX_BUFF_SIZE); const n = Math.min(totalBytes-i, MAX_BUFF_SIZE);
const buff1 = await fd1.read(n); const buff1 = await fd1.read(n);
const buff2 = await fd2.read(n); const buff2 = await fd2.read(n);
for (i=0; i<n; i++) if (buff1[i] != buff2[i]) return false; for (let j=0; j<n; j++) if (buff1[j] != buff2[j]) return false;
} }
await endReadSection(fd1); await endReadSection(fd1);
await endReadSection(fd2); await endReadSection(fd2);
@ -3457,9 +3464,478 @@ function r1csPrint(r1cs, syms, logger) {
} }
// Open `fileName` with one of the whitelisted flags and wrap the descriptor
// in a page-cached FastFile$1.  cacheSize defaults to 256 KiB.
async function open$1(fileName, openFlags, cacheSize) {
    cacheSize = cacheSize || 4096*64;
    if (["w+", "wx+", "r", "ax+", "a+"].indexOf(openFlags) <0)
        throw new Error("Invalid open option");
    const fd =await fs.promises.open(fileName, openFlags);
    const stats = await fd.stat();
    return new FastFile$1(fd, stats, cacheSize, fileName);
}
// Page-cached random-access wrapper around an fs FileHandle.  Reads and
// writes go through an in-memory page cache (`this.pages`); dirty pages are
// flushed asynchronously by `_triggerWrite`, and `close()` resolves only
// once everything is written back.
// NOTE(review): this is a second bundled copy of the class; keep in sync
// with the other copy in this file.
class FastFile$1 {

    constructor(fd, stats, cacheSize, fileName) {
        this.fileName = fileName;
        this.fd = fd;
        this.pos = 0;
        // Page size: at least 256 bytes and at least 4x the fs block size.
        this.pageBits = 8;
        this.pageSize = (1 << this.pageBits);
        while (this.pageSize < stats.blksize*4) {
            this.pageBits ++;
            this.pageSize *= 2;
        }
        this.totalSize = stats.size;
        this.totalPages = Math.floor((stats.size -1) / this.pageSize)+1;
        this.maxPagesLoaded = Math.floor( cacheSize / this.pageSize)+1;
        this.pages = {};
        this.pendingLoads = [];
        this.writing = false;
        this.reading = false;
    }

    // Queue a load of page `p`; resolves when the page is in `this.pages`.
    _loadPage(p) {
        const self = this;
        return new Promise((resolve, reject)=> {
            self.pendingLoads.push({
                page: p,
                resolve: resolve,
                reject: reject
            });
            setImmediate(self._triggerLoad.bind(self));
        });
    }

    // Drain the pending-load queue: resolve loads whose page is already
    // cached, evict a clean page if the cache is full, and start at most
    // one fd.read at a time.
    _triggerLoad() {
        const self = this;
        processPendingLoads();
        if (self.pendingLoads.length == 0) return;
        if (Object.keys(self.pages).length >= self.maxPagesLoaded) {
            const dp = getDeletablePage();
            if (dp<0) { // No evictable page available; retry on a later tick.
                return;
            }
            delete self.pages[dp];
        }
        const load = self.pendingLoads.shift();
        if (load.page>=self.totalPages) {
            // Page past EOF: materialize an empty page, nothing to read.
            self.pages[load.page] = {
                dirty: false,
                buff: new Uint8Array(self.pageSize),
                pendingOps: 1,
                size: 0
            };
            load.resolve();
            setImmediate(self._triggerLoad.bind(self));
            return;
        }
        if (self.reading) {
            self.pendingLoads.unshift(load);
            return; // Only one read at a time.
        }
        self.reading = true;
        const page = {
            dirty: false,
            buff: new Uint8Array(self.pageSize),
            pendingOps: 1,
            size: 0
        };
        self.fd.read(page.buff, 0, self.pageSize, load.page*self.pageSize).then((res)=> {
            page.size = res.bytesRead;
            self.pages[load.page] = page;
            self.reading = false;
            load.resolve();
            setImmediate(self._triggerLoad.bind(self));
        }, (err) => {
            load.reject(err);
        });

        // Resolve queued loads whose page is already cached.
        function processPendingLoads() {
            const newPendingLoads = [];
            for (let i=0; i<self.pendingLoads.length; i++) {
                const load = self.pendingLoads[i];
                if (typeof self.pages[load.page] != "undefined") {
                    self.pages[load.page].pendingOps ++;
                    load.resolve();
                } else {
                    newPendingLoads.push(load);
                }
            }
            self.pendingLoads = newPendingLoads;
        }

        // A page may be evicted only when it is clean and unreferenced.
        function getDeletablePage() {
            for (let p in self.pages) {
                const page = self.pages[p];
                if ((page.dirty == false)&&(page.pendingOps==0)) return p;
            }
            return -1;
        }
    }

    // Flush one dirty page to disk; reschedules itself until none remain,
    // then completes a pending close() if there is one.
    _triggerWrite() {
        const self = this;
        if (self.writing) return;
        const p = self._getDirtyPage();
        if (p<0) {
            if (self.pendingClose) self.pendingClose();
            return;
        }
        self.writing=true;
        self.pages[p].dirty = false;
        self.fd.write(self.pages[p].buff, 0, self.pages[p].size, p*self.pageSize).then(() => {
            self.writing = false;
            setImmediate(self._triggerWrite.bind(self));
            setImmediate(self._triggerLoad.bind(self));
        }, (err) => {
            console.log("ERROR Writing: "+err);
            self.error = err;
            self._tryClose();
        });
    }

    _getDirtyPage() {
        for (let p in this.pages) {
            if (this.pages[p].dirty) return p;
        }
        return -1;
    }

    // Write `buff` at `pos` (defaults to the cursor) through the page cache.
    // All touched pages are loaded up front, then copied into in sequence.
    async write(buff, pos) {
        if (buff.byteLength == 0) return;
        const self = this;
        // Grow the cache if a single write would not fit in 80% of it.
        if (buff.byteLength > self.pageSize*self.maxPagesLoaded*0.8) {
            const cacheSize = Math.floor(buff.byteLength * 1.1);
            this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1;
        }
        if (typeof pos == "undefined") pos = self.pos;
        self.pos = pos+buff.byteLength;
        if (self.totalSize < pos + buff.byteLength) self.totalSize = pos + buff.byteLength;
        if (self.pendingClose)
            throw new Error("Writing a closing file");
        const firstPage = Math.floor(pos / self.pageSize);
        const lastPage = Math.floor((pos+buff.byteLength-1) / self.pageSize);
        for (let i=firstPage; i<=lastPage; i++) await self._loadPage(i);
        let p = firstPage;
        let o = pos % self.pageSize;
        let r = buff.byteLength;  // bytes still to copy
        while (r>0) {
            const l = (o+r > self.pageSize) ? (self.pageSize -o) : r;
            const srcView = new Uint8Array(buff.buffer, buff.byteLength - r, l);
            const dstView = new Uint8Array(self.pages[p].buff.buffer, o, l);
            dstView.set(srcView);
            self.pages[p].dirty = true;
            self.pages[p].pendingOps --;
            self.pages[p].size = Math.max(o+l, self.pages[p].size);
            if (p>=self.totalPages) {
                self.totalPages = p+1;
            }
            r = r-l;
            p ++;
            o = 0;
        }
        setImmediate(self._triggerWrite.bind(self));
    }

    // Read `len` bytes starting at `pos` (defaults to the cursor).  Bytes
    // past the current totalSize are left zeroed in the result.
    async read(len, pos) {
        if (len == 0) {
            return new Uint8Array(0);
        }
        const self = this;
        // Grow the cache if a single read would not fit in 80% of it.
        if (len > self.pageSize*self.maxPagesLoaded*0.8) {
            const cacheSize = Math.floor(len * 1.1);
            this.maxPagesLoaded = Math.floor( cacheSize / self.pageSize)+1;
        }
        if (typeof pos == "undefined") pos = self.pos;
        self.pos = pos+len;
        if (self.pendingClose)
            throw new Error("Reading a closing file");
        const firstPage = Math.floor(pos / self.pageSize);
        const lastPage = Math.floor((pos+len-1) / self.pageSize);
        for (let i=firstPage; i<=lastPage; i++) await self._loadPage(i);
        let buff = new Uint8Array(len);
        let dstView = new Uint8Array(buff);
        let p = firstPage;
        let o = pos % self.pageSize;
        // Remaining bytes to read (clamped to the data that actually exists).
        let r = pos + len > self.totalSize ? len - (pos + len - self.totalSize): len;
        while (r>0) {
            // bytes to copy from this page
            const l = (o+r > self.pageSize) ? (self.pageSize -o) : r;
            const srcView = new Uint8Array(self.pages[p].buff.buffer, o, l);
            buff.set(srcView, dstView.byteLength-r);
            self.pages[p].pendingOps --;
            r = r-l;
            p ++;
            o = 0;
        }
        setImmediate(self._triggerLoad.bind(self));
        return buff;
    }

    // Resolve/reject a pending close() once all I/O has settled.
    _tryClose() {
        const self = this;
        if (!self.pendingClose) return;
        if (self.error) {
            self.pendingCloseReject(self.error);
        }
        const p = self._getDirtyPage();
        if ((p>=0) || (self.writing) || (self.reading) || (self.pendingLoads.length>0)) return;
        self.pendingClose();
    }

    // Flush everything and close the underlying descriptor.
    close() {
        const self = this;
        if (self.pendingClose)
            throw new Error("Closing the file twice");
        return new Promise((resolve, reject) => {
            self.pendingClose = resolve;
            self.pendingCloseReject = reject;
            self._tryClose();
        }).then(()=> {
            self.fd.close();
        }, (err) => {
            self.fd.close();
            throw (err);
        });
    }

    // Close and delete the file from disk.
    async discard() {
        const self = this;
        await self.close();
        await fs.promises.unlink(this.fileName);
    }

    // Write an unsigned 32-bit integer in the platform's native byte order.
    // NOTE(review): Uint32Array views are native-endian; LE only on
    // little-endian hosts — confirm targets before relying on "ULE".
    async writeULE32(v, pos) {
        const self = this;
        const b = Uint32Array.of(v);
        await self.write(new Uint8Array(b.buffer), pos);
    }

    // Write an unsigned 32-bit big-endian integer (explicit via DataView).
    async writeUBE32(v, pos) {
        const self = this;
        const buff = new Uint8Array(4);
        const buffV = new DataView(buff.buffer);
        buffV.setUint32(0, v, false);
        await self.write(buff, pos);
    }

    // Write an unsigned (<= 53-bit) integer as two native-order 32-bit words.
    async writeULE64(v, pos) {
        const self = this;
        const b = Uint32Array.of(v & 0xFFFFFFFF, Math.floor(v / 0x100000000));
        await self.write(new Uint8Array(b.buffer), pos);
    }

    // Read an unsigned 32-bit integer (native byte order — see writeULE32).
    async readULE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        const view = new Uint32Array(b.buffer);
        return view[0];
    }

    // Read an unsigned 32-bit big-endian integer.
    async readUBE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        const view = new DataView(b.buffer);
        return view.getUint32(0, false);
    }

    // Read an unsigned 64-bit integer as a JS number (native word order).
    async readULE64(pos) {
        const self = this;
        const b = await self.read(8, pos);
        const view = new Uint32Array(b.buffer);
        return view[1] * 0x100000000 + view[0];
    }
}
// Wrap an already-populated { data: Uint8Array } object as a read-only
// in-memory file positioned at the start.
function readExisting$2(o) {
    const fd = new MemFile$1();
    fd.o = o;
    fd.allocSize = o.data.byteLength;
    fd.totalSize = o.data.byteLength;
    fd.readOnly = true;
    fd.pos = 0;
    return fd;
}
// In-memory implementation of the FastFile interface.  Data lives in
// `this.o.data` (a Uint8Array) grown geometrically on demand.  Instances
// are initialized externally (readExisting$2 sets o, allocSize, totalSize,
// readOnly, pos).
// NOTE(review): second bundled copy — keep in sync with the other copy.
class MemFile$1 {

    constructor() {
        this.pageSize = 1 << 14; // for compatibility with the fd-backed FastFile
    }

    // Grow the backing buffer so it holds at least newLen bytes (at least
    // +1 MiB or +10%, whichever is larger).
    _resizeIfNeeded(newLen) {
        if (newLen > this.allocSize) {
            const newAllocSize = Math.max(
                this.allocSize + (1 << 20),
                Math.floor(this.allocSize * 1.1),
                newLen
            );
            const newData = new Uint8Array(newAllocSize);
            newData.set(this.o.data);
            this.o.data = newData;
            this.allocSize = newAllocSize;
        }
    }

    // Copy `buff` into the backing store at `pos` (defaults to the cursor).
    async write(buff, pos) {
        const self =this;
        if (typeof pos == "undefined") pos = self.pos;
        if (this.readOnly) throw new Error("Writing a read only file");
        this._resizeIfNeeded(pos + buff.byteLength);
        this.o.data.set(buff, pos);
        if (pos + buff.byteLength > this.totalSize) this.totalSize = pos + buff.byteLength;
        this.pos = pos + buff.byteLength;
    }

    // Return a copy of `len` bytes starting at `pos` (defaults to the
    // cursor).  Read-only files raise on out-of-bounds reads.
    async read(len, pos) {
        const self = this;
        if (typeof pos == "undefined") pos = self.pos;
        if (this.readOnly) {
            if (pos + len > this.totalSize) throw new Error("Reading out of bounds");
        }
        this._resizeIfNeeded(pos + len);
        const buff = this.o.data.slice(pos, pos+len);
        this.pos = pos + len;
        return buff;
    }

    // Trim the backing buffer down to the bytes actually written.
    close() {
        if (this.o.data.byteLength != this.totalSize) {
            this.o.data = this.o.data.slice(0, this.totalSize);
        }
    }

    // Nothing to delete for an in-memory file.
    async discard() {
    }

    // Write an unsigned 32-bit integer in the platform's native byte order.
    // NOTE(review): Uint32Array views are native-endian; LE only on
    // little-endian hosts — confirm targets before relying on "ULE".
    async writeULE32(v, pos) {
        const self = this;
        const b = Uint32Array.of(v);
        await self.write(new Uint8Array(b.buffer), pos);
    }

    // Write an unsigned 32-bit big-endian integer (explicit via DataView).
    async writeUBE32(v, pos) {
        const self = this;
        const buff = new Uint8Array(4);
        const buffV = new DataView(buff.buffer);
        buffV.setUint32(0, v, false);
        await self.write(buff, pos);
    }

    // Write an unsigned (<= 53-bit) integer as two native-order 32-bit words.
    async writeULE64(v, pos) {
        const self = this;
        const b = Uint32Array.of(v & 0xFFFFFFFF, Math.floor(v / 0x100000000));
        await self.write(new Uint8Array(b.buffer), pos);
    }

    // Read an unsigned 32-bit integer (native byte order — see writeULE32).
    async readULE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        const view = new Uint32Array(b.buffer);
        return view[0];
    }

    // Read an unsigned 32-bit big-endian integer.
    async readUBE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        const view = new DataView(b.buffer);
        return view.getUint32(0, false);
    }

    // Read an unsigned 64-bit integer as a JS number (native word order).
    async readULE64(pos) {
        const self = this;
        const b = await self.read(8, pos);
        const view = new Uint32Array(b.buffer);
        return view[1] * 0x100000000 + view[0];
    }
}
/* global fetch */
// Open an existing source for reading.  Accepts raw bytes (Uint8Array),
// a file name / URL string (with optional cache size `b`), or an options
// object.  In the browser a string is fetched into memory; in node it is
// opened as a file.
async function readExisting$3(o, b) {
    if (o instanceof Uint8Array) {
        o = {
            type: "mem",
            data: o
        };
    }
    if (process.browser) {
        if (typeof o === "string") {
            // In the browser the string is a URL: download it into memory.
            const buff = await fetch(o).then( function(res) {
                return res.arrayBuffer();
            }).then(function (ab) {
                return new Uint8Array(ab);
            });
            o = {
                type: "mem",
                data: buff
            };
        }
    } else {
        if (typeof o === "string") {
            o = {
                type: "file",
                fileName: o,
                cacheSize: b
            };
        }
    }
    if (o.type == "file") {
        return await open$1(o.fileName, "r", o.cacheSize);
    } else if (o.type == "mem") {
        return await readExisting$2(o);
    } else {
        throw new Error("Invalid FastFile type: "+o.type);
    }
}
async function readBinFile$1(fileName, type, maxVersion) { async function readBinFile$1(fileName, type, maxVersion) {
const fd = await readExisting$1(fileName); const fd = await readExisting$3(fileName);
const b = await fd.read(4); const b = await fd.read(4);
let readedType = ""; let readedType = "";

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

13
package-lock.json generated

@ -658,9 +658,9 @@
"dev": true "dev": true
}, },
"fastfile": { "fastfile": {
"version": "0.0.6", "version": "0.0.9",
"resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.6.tgz", "resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.9.tgz",
"integrity": "sha512-6cOUdePcue0DAssqGKPhmcSgdLTaB2IzxNgg2WAADOuta00Os88+ShpDItSkQ/eLCiAeYjsPasdBLYozVz+4Ug==" "integrity": "sha512-njh6lH2SJiS0u0JofJQf2YfEOSgGfbYPtmFnpEXXy6OilWoX1wGw3klaSKIwhq8+E5MqYpqJXMiaqmptaU2wig=="
}, },
"ffjavascript": { "ffjavascript": {
"version": "0.2.4", "version": "0.2.4",
@ -1694,6 +1694,13 @@
"requires": { "requires": {
"fastfile": "0.0.6", "fastfile": "0.0.6",
"ffjavascript": "0.2.4" "ffjavascript": "0.2.4"
},
"dependencies": {
"fastfile": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.6.tgz",
"integrity": "sha512-6cOUdePcue0DAssqGKPhmcSgdLTaB2IzxNgg2WAADOuta00Os88+ShpDItSkQ/eLCiAeYjsPasdBLYozVz+4Ug=="
}
} }
}, },
"randombytes": { "randombytes": {

@ -40,7 +40,7 @@
"dependencies": { "dependencies": {
"blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git", "blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git",
"circom_runtime": "0.0.9", "circom_runtime": "0.0.9",
"fastfile": "0.0.6", "fastfile": "0.0.9",
"ffjavascript": "0.2.4", "ffjavascript": "0.2.4",
"keccak": "^3.0.0", "keccak": "^3.0.0",
"logplease": "^1.2.15", "logplease": "^1.2.15",

@ -128,7 +128,7 @@ export async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection)
const n = Math.min(totalBytes-i, MAX_BUFF_SIZE); const n = Math.min(totalBytes-i, MAX_BUFF_SIZE);
const buff1 = await fd1.read(n); const buff1 = await fd1.read(n);
const buff2 = await fd2.read(n); const buff2 = await fd2.read(n);
for (i=0; i<n; i++) if (buff1[i] != buff2[i]) return false; for (let j=0; j<n; j++) if (buff1[j] != buff2[j]) return false;
} }
await endReadSection(fd1); await endReadSection(fd1);
await endReadSection(fd2); await endReadSection(fd2);