#! /usr/bin/env node
'use strict' ;
function _interopDefault ( ex ) { return ( ex && ( typeof ex === 'object' ) && 'default' in ex ) ? ex [ 'default' ] : ex ; }
var fs = _interopDefault ( require ( 'fs' ) ) ;
var ffjavascript = require ( 'ffjavascript' ) ;
var Blake2b = _interopDefault ( require ( 'blake2b-wasm' ) ) ;
var readline = _interopDefault ( require ( 'readline' ) ) ;
var crypto = _interopDefault ( require ( 'crypto' ) ) ;
var circomRuntime = _interopDefault ( require ( 'circom_runtime' ) ) ;
// 2020-07-11 11:36:57 +03:00
var path = _interopDefault ( require ( 'path' ) ) ;
// 2020-07-11 11:31:52 +03:00
var Logger = _interopDefault ( require ( 'logplease' ) ) ;
/**
 * Opens a file on disk and wraps it in a page-cached FastFile.
 * @param {string} fileName - path of the file to open
 * @param {string} openFlags - one of "w+", "wx+", "r", "ax+", "a+"
 * @param {number} [cacheSize] - page-cache size in bytes (default 256 KiB)
 * @returns {Promise<FastFile>}
 * @throws {Error} if openFlags is not one of the accepted modes
 */
async function open(fileName, openFlags, cacheSize) {
    if (!cacheSize) cacheSize = 4096 * 64;
    const validFlags = ["w+", "wx+", "r", "ax+", "a+"];
    if (!validFlags.includes(openFlags)) {
        throw new Error("Invalid open option");
    }
    const fd = await fs.promises.open(fileName, openFlags);
    const stats = await fd.stat();
    return new FastFile(fd, stats, cacheSize, fileName);
}
/**
 * Page-cached random-access file on top of a Node fs FileHandle.
 *
 * Reads and writes go through a cache of fixed-size pages; dirty pages are
 * flushed asynchronously by _triggerWrite and at most one physical read is
 * in flight at a time.  Integer helpers: ULE = unsigned little-endian,
 * UBE = unsigned big-endian.
 */
class FastFile {

    constructor(fd, stats, cacheSize, fileName) {
        this.fileName = fileName;
        this.fd = fd;
        this.pos = 0;
        // Page size: at least 256 bytes, doubled until >= 4x the fs block size.
        this.pageBits = 8;
        this.pageSize = (1 << this.pageBits);
        while (this.pageSize < stats.blksize * 4) {
            this.pageBits++;
            this.pageSize *= 2;
        }
        this.totalSize = stats.size;
        this.totalPages = Math.floor((stats.size - 1) / this.pageSize) + 1;
        this.maxPagesLoaded = Math.floor(cacheSize / this.pageSize) + 1;
        this.pages = {};
        this.pendingLoads = [];
        this.writing = false;
        this.reading = false;
    }

    // Queues page p for loading; resolves once the page is in the cache
    // (with its pendingOps count incremented).
    _loadPage(p) {
        const self = this;
        return new Promise((resolve, reject) => {
            self.pendingLoads.push({
                page: p,
                resolve: resolve,
                reject: reject
            });
            setImmediate(self._triggerLoad.bind(self));
        });
    }

    // Services the pending-load queue: resolves loads whose page is already
    // cached, evicts one clean unused page if the cache is full, and starts
    // at most one physical read.
    _triggerLoad() {
        const self = this;
        processPendingLoads();
        if (self.pendingLoads.length == 0) return;
        if (Object.keys(self.pages).length >= self.maxPagesLoaded) {
            const dp = getDeletablePage();
            if (dp < 0) {
                // Nothing evictable yet (all pages dirty or in use); a later
                // write/read completion will re-trigger the loader.
                return;
            }
            delete self.pages[dp];
        }
        const load = self.pendingLoads.shift();
        if (load.page >= self.totalPages) {
            // Page beyond EOF: materialize an empty page, nothing to read.
            self.pages[load.page] = {
                dirty: false,
                buff: new Uint8Array(self.pageSize),
                pendingOps: 1,
                size: 0
            };
            load.resolve();
            setImmediate(self._triggerLoad.bind(self));
            return;
        }
        if (self.reading) {
            self.pendingLoads.unshift(load);
            return; // Only one read at a time.
        }
        self.reading = true;
        const page = {
            dirty: false,
            buff: new Uint8Array(self.pageSize),
            pendingOps: 1,
            size: 0
        };
        self.fd.read(page.buff, 0, self.pageSize, load.page * self.pageSize).then((res) => {
            page.size = res.bytesRead;
            self.pages[load.page] = page;
            self.reading = false;
            load.resolve();
            setImmediate(self._triggerLoad.bind(self));
        }, (err) => {
            load.reject(err);
        });

        // Resolves queued loads whose page is already cached.
        function processPendingLoads() {
            const newPendingLoads = [];
            for (let i = 0; i < self.pendingLoads.length; i++) {
                const load = self.pendingLoads[i];
                if (typeof self.pages[load.page] != "undefined") {
                    self.pages[load.page].pendingOps++;
                    load.resolve();
                } else {
                    newPendingLoads.push(load);
                }
            }
            self.pendingLoads = newPendingLoads;
        }

        // Returns a cached page index that is clean and unused, or -1.
        function getDeletablePage() {
            for (let p in self.pages) {
                const page = self.pages[p];
                if ((page.dirty == false) && (page.pendingOps == 0)) return p;
            }
            return -1;
        }
    }

    // Flushes one dirty page to disk; chains itself until none remain.
    _triggerWrite() {
        const self = this;
        if (self.writing) return;
        const p = self._getDirtyPage();
        if (p < 0) {
            if (self.pendingClose) self.pendingClose();
            return;
        }
        self.writing = true;
        self.pages[p].dirty = false;
        self.fd.write(self.pages[p].buff, 0, self.pages[p].size, p * self.pageSize).then(() => {
            self.writing = false;
            setImmediate(self._triggerWrite.bind(self));
            setImmediate(self._triggerLoad.bind(self));
        }, (err) => {
            console.log("ERROR Writing: " + err);
            self.error = err;
            self._tryClose();
        });
    }

    _getDirtyPage() {
        for (let p in this.pages) {
            if (this.pages[p].dirty) return p;
        }
        return -1;
    }

    /**
     * Writes buff at pos (default: current position), paging it through the
     * cache one page at a time.
     * @throws {Error} if the file is being closed
     */
    async write(buff, pos) {
        if (buff.byteLength == 0) return;
        const self = this;
        // Grow the cache when a single write approaches its capacity.
        if (buff.byteLength > self.pageSize * self.maxPagesLoaded * 0.8) {
            const cacheSize = Math.floor(buff.byteLength * 1.1);
            this.maxPagesLoaded = Math.floor(cacheSize / self.pageSize) + 1;
        }
        if (typeof pos == "undefined") pos = self.pos;
        self.pos = pos + buff.byteLength;
        if (self.totalSize < pos + buff.byteLength) self.totalSize = pos + buff.byteLength;
        if (self.pendingClose)
            throw new Error("Writing a closing file");
        const firstPage = Math.floor(pos / self.pageSize);
        let p = firstPage;
        let o = pos % self.pageSize;
        let r = buff.byteLength;
        while (r > 0) {
            // Load pages one at a time.  (Preloading the whole range, as this
            // class originally did, pins every page via pendingOps and can
            // stall the loader when the range exceeds maxPagesLoaded; this
            // matches the fix already present in FastFile$1.write.)
            await self._loadPage(p);
            const l = (o + r > self.pageSize) ? (self.pageSize - o) : r;
            // Respect buff.byteOffset so views into larger buffers copy correctly.
            const srcView = new Uint8Array(buff.buffer, buff.byteOffset + buff.byteLength - r, l);
            const dstView = new Uint8Array(self.pages[p].buff.buffer, o, l);
            dstView.set(srcView);
            self.pages[p].dirty = true;
            self.pages[p].pendingOps--;
            self.pages[p].size = Math.max(o + l, self.pages[p].size);
            if (p >= self.totalPages) {
                self.totalPages = p + 1;
            }
            r = r - l;
            p++;
            o = 0;
        }
        setImmediate(self._triggerWrite.bind(self));
    }

    /**
     * Reads len bytes from pos (default: current position).
     * @returns {Promise<Uint8Array>} a fresh buffer of length len; bytes past
     *     the current totalSize are left zero.
     */
    async read(len, pos) {
        if (len == 0) {
            return new Uint8Array(0);
        }
        const self = this;
        // Grow the cache when a single read approaches its capacity, so the
        // whole-range preload below cannot exhaust it.
        if (len > self.pageSize * self.maxPagesLoaded * 0.8) {
            const cacheSize = Math.floor(len * 1.1);
            this.maxPagesLoaded = Math.floor(cacheSize / self.pageSize) + 1;
        }
        if (typeof pos == "undefined") pos = self.pos;
        self.pos = pos + len;
        if (self.pendingClose)
            throw new Error("Reading a closing file");
        const firstPage = Math.floor(pos / self.pageSize);
        const lastPage = Math.floor((pos + len - 1) / self.pageSize);
        for (let i = firstPage; i <= lastPage; i++) await self._loadPage(i);
        const buff = new Uint8Array(len);
        let p = firstPage;
        let o = pos % self.pageSize;
        // Remaining bytes to copy, clamped so we never copy past EOF.
        let r = pos + len > self.totalSize ? len - (pos + len - self.totalSize) : len;
        while (r > 0) {
            // Bytes to copy from this page.
            const l = (o + r > self.pageSize) ? (self.pageSize - o) : r;
            const srcView = new Uint8Array(self.pages[p].buff.buffer, o, l);
            buff.set(srcView, len - r);
            self.pages[p].pendingOps--;
            r = r - l;
            p++;
            o = 0;
        }
        setImmediate(self._triggerLoad.bind(self));
        return buff;
    }

    // Completes a pending close() once no dirty pages, writes, reads or
    // pending loads remain.  A stored error rejects the close promise.
    _tryClose() {
        const self = this;
        if (!self.pendingClose) return;
        if (self.error) {
            self.pendingCloseReject(self.error);
        }
        const p = self._getDirtyPage();
        if ((p >= 0) || (self.writing) || (self.reading) || (self.pendingLoads.length > 0)) return;
        self.pendingClose();
    }

    /**
     * Flushes all dirty pages and closes the underlying FileHandle.
     * @throws {Error} if called twice
     */
    close() {
        const self = this;
        if (self.pendingClose)
            throw new Error("Closing the file twice");
        return new Promise((resolve, reject) => {
            self.pendingClose = resolve;
            self.pendingCloseReject = reject;
            self._tryClose();
        }).then(() => {
            self.fd.close();
        }, (err) => {
            self.fd.close();
            throw (err);
        });
    }

    /** Closes the file and deletes it from disk. */
    async discard() {
        const self = this;
        await self.close();
        await fs.promises.unlink(this.fileName);
    }

    /** Writes v as a 32-bit little-endian unsigned integer. */
    async writeULE32(v, pos) {
        const self = this;
        const b = Uint32Array.of(v);
        await self.write(new Uint8Array(b.buffer), pos);
    }

    /** Writes v as a 32-bit big-endian unsigned integer. */
    async writeUBE32(v, pos) {
        const self = this;
        const buff = new Uint8Array(4);
        const buffV = new DataView(buff.buffer);
        buffV.setUint32(0, v, false);
        await self.write(buff, pos);
    }

    /** Writes v (< 2^53) as a 64-bit little-endian unsigned integer. */
    async writeULE64(v, pos) {
        const self = this;
        const b = Uint32Array.of(v & 0xFFFFFFFF, Math.floor(v / 0x100000000));
        await self.write(new Uint8Array(b.buffer), pos);
    }

    /** Reads a 32-bit little-endian unsigned integer. */
    async readULE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        const view = new Uint32Array(b.buffer);
        return view[0];
    }

    /** Reads a 32-bit big-endian unsigned integer. */
    async readUBE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        const view = new DataView(b.buffer);
        return view.getUint32(0, false);
    }

    /** Reads a 64-bit little-endian unsigned integer (as a double; < 2^53). */
    async readULE64(pos) {
        const self = this;
        const b = await self.read(8, pos);
        const view = new Uint32Array(b.buffer);
        return view[1] * 0x100000000 + view[0];
    }
}
/** Wraps an existing in-memory data object in a read-only MemFile. */
function readExisting(o) {
    const size = o.data.byteLength;
    const fd = new MemFile();
    fd.o = o;
    Object.assign(fd, { allocSize: size, totalSize: size, readOnly: true, pos: 0 });
    return fd;
}
/**
 * In-memory implementation of the FastFile interface, backed by a growable
 * Uint8Array stored on `this.o.data`.
 */
class MemFile {

    constructor() {
        this.pageSize = 1 << 14; // for compatibility
    }

    /** Grows the backing buffer so it can hold at least newLen bytes. */
    _resizeIfNeeded(newLen) {
        if (newLen <= this.allocSize) return;
        // Grow by at least 1 MiB or 10% of current capacity, whichever is larger.
        const grown = Math.max(
            this.allocSize + (1 << 20),
            Math.floor(this.allocSize * 1.1),
            newLen
        );
        const newData = new Uint8Array(grown);
        newData.set(this.o.data);
        this.o.data = newData;
        this.allocSize = grown;
    }

    /** Writes buff at pos (default: current position), growing as needed. */
    async write(buff, pos) {
        if (pos === undefined) pos = this.pos;
        if (this.readOnly) throw new Error("Writing a read only file");
        this._resizeIfNeeded(pos + buff.byteLength);
        this.o.data.set(buff, pos);
        this.totalSize = Math.max(this.totalSize, pos + buff.byteLength);
        this.pos = pos + buff.byteLength;
    }

    /** Reads len bytes from pos (default: current position) into a fresh array. */
    async read(len, pos) {
        if (pos === undefined) pos = this.pos;
        if (this.readOnly && pos + len > this.totalSize) {
            throw new Error("Reading out of bounds");
        }
        this._resizeIfNeeded(pos + len);
        const chunk = this.o.data.slice(pos, pos + len);
        this.pos = pos + len;
        return chunk;
    }

    /** Trims the backing buffer down to the bytes actually written. */
    close() {
        if (this.o.data.byteLength !== this.totalSize) {
            this.o.data = this.o.data.slice(0, this.totalSize);
        }
    }

    async discard() {
        // Nothing to release for an in-memory file.
    }

    /** Writes v as a 32-bit little-endian unsigned integer. */
    async writeULE32(v, pos) {
        const word = Uint32Array.of(v);
        await this.write(new Uint8Array(word.buffer), pos);
    }

    /** Writes v as a 32-bit big-endian unsigned integer. */
    async writeUBE32(v, pos) {
        const bytes = new Uint8Array(4);
        new DataView(bytes.buffer).setUint32(0, v, false);
        await this.write(bytes, pos);
    }

    /** Writes v (< 2^53) as a 64-bit little-endian unsigned integer. */
    async writeULE64(v, pos) {
        const words = Uint32Array.of(v & 0xFFFFFFFF, Math.floor(v / 0x100000000));
        await this.write(new Uint8Array(words.buffer), pos);
    }

    /** Reads a 32-bit little-endian unsigned integer. */
    async readULE32(pos) {
        const bytes = await this.read(4, pos);
        return new Uint32Array(bytes.buffer)[0];
    }

    /** Reads a 32-bit big-endian unsigned integer. */
    async readUBE32(pos) {
        const bytes = await this.read(4, pos);
        return new DataView(bytes.buffer).getUint32(0, false);
    }

    /** Reads a 64-bit little-endian unsigned integer (as a double; < 2^53). */
    async readULE64(pos) {
        const bytes = await this.read(8, pos);
        const words = new Uint32Array(bytes.buffer);
        return words[1] * 0x100000000 + words[0];
    }
}
// 2020-07-13 08:21:03 +03:00
/* global fetch */
/**
 * Opens an existing "file" for reading.  Accepts a Uint8Array (wrapped as an
 * in-memory file), a file name (fetched over HTTP in the browser, opened from
 * disk in Node), or an options object with an explicit `type`.
 * (Fix: removed stray VCS timestamp lines that made this function a syntax error.)
 * @param {Uint8Array|string|object} o - data, file name, or options object
 * @param {number} [b] - cache size in bytes when opening a disk file
 */
async function readExisting$1(o, b) {
    if (o instanceof Uint8Array) {
        o = {
            type: "mem",
            data: o
        };
    }
    if (process.browser) {
        if (typeof o === "string") {
            // In the browser a string is a URL: download it into memory.
            const buff = await fetch(o).then(function (res) {
                return res.arrayBuffer();
            }).then(function (ab) {
                return new Uint8Array(ab);
            });
            o = {
                type: "mem",
                data: buff
            };
        }
    } else {
        if (typeof o === "string") {
            o = {
                type: "file",
                fileName: o,
                cacheSize: b
            };
        }
    }
    if (o.type == "file") {
        return await open(o.fileName, "r", o.cacheSize);
    } else if (o.type == "mem") {
        return await readExisting(o);
    } else {
        throw new Error("Invalid FastFile type: " + o.type);
    }
}
/**
 * Opens a binary snarkjs file, checks its 4-character magic string and
 * version, and scans the section table.
 * @returns {Promise<{fd: object, sections: Array}>} the open fd plus, per
 *     section id, a list of {p: startPos, size} descriptors.
 */
async function readBinFile(fileName, type, maxVersion) {
    const fd = await readExisting$1(fileName);
    const magic = await fd.read(4);
    let readedType = "";
    for (const byte of magic) readedType += String.fromCharCode(byte);
    if (readedType != type) throw new Error(fileName + ": Invalid File format");
    const version = await fd.readULE32();
    if (version > maxVersion) throw new Error("Version not supported");
    const nSections = await fd.readULE32();
    // Scan the section table: each entry is a ULE32 id followed by a ULE64
    // byte length; the section body follows immediately after.
    const sections = [];
    for (let i = 0; i < nSections; i++) {
        const ht = await fd.readULE32();
        const hl = await fd.readULE64();
        if (typeof sections[ht] == "undefined") sections[ht] = [];
        sections[ht].push({
            p: fd.pos,
            size: hl
        });
        fd.pos += hl;
    }
    return { fd, sections };
}
/**
 * Positions fd at the start of the single section with id `idSection` and
 * marks it as the section currently being read.
 * @throws {Error} if a section is already being read, the section is missing,
 *     or it appears more than once in the file.
 */
async function startReadUniqueSection(fd, sections, idSection) {
    if (typeof fd.readingSection !== "undefined") {
        throw new Error("Already reading a section");
    }
    const candidates = sections[idSection];
    if (!candidates) {
        throw new Error(fd.fileName + ": Missing section " + idSection);
    }
    if (candidates.length > 1) {
        throw new Error(fd.fileName + ": Section Duplicated " + idSection);
    }
    const [section] = candidates;
    fd.pos = section.p;
    fd.readingSection = section;
}
/**
 * Finishes reading the current section, optionally verifying that exactly
 * the declared number of bytes was consumed.
 * @param {boolean} [noCheck] - skip the consumed-size check
 * @throws {Error} if no section is being read, or the size check fails.
 */
async function endReadSection(fd, noCheck) {
    if (typeof fd.readingSection === "undefined") {
        throw new Error("Not reading a section");
    }
    const consumed = fd.pos - fd.readingSection.p;
    if (!noCheck && consumed !== fd.readingSection.size) {
        throw new Error("Invalid section size");
    }
    delete fd.readingSection;
}
/** Reads an n8-byte little-endian big integer from fd at pos (or the current position). */
async function readBigInt(fd, n8, pos) {
    const raw = await fd.read(n8, pos);
    return ffjavascript.Scalar.fromRprLE(raw, 0, n8);
}
/**
 * Reads section 1 (the header) of an .r1cs file.
 * @returns {Promise<object>} header fields: n8, prime, Fr, nVars, nOutputs,
 *     nPubInputs, nPrvInputs, nLabels, nConstraints.
 */
async function loadHeader(fd, sections) {
    await startReadUniqueSection(fd, sections, 1);
    const res = {};
    res.n8 = await fd.readULE32();
    res.prime = await readBigInt(fd, res.n8);
    res.Fr = new ffjavascript.ZqField(res.prime);
    res.nVars = await fd.readULE32();
    res.nOutputs = await fd.readULE32();
    res.nPubInputs = await fd.readULE32();
    res.nPrvInputs = await fd.readULE32();
    res.nLabels = await fd.readULE64();
    res.nConstraints = await fd.readULE32();
    await endReadSection(fd);
    return res;
}
/**
 * Loads an .r1cs file: the header always; the constraints (section 2) and the
 * label map (section 3) only on request.
 * @param {string} fileName
 * @param {boolean} [loadConstraints] - also read section 2 into res.constraints
 * @param {boolean} [loadMap] - also read section 3 into res.map
 */
async function load(fileName, loadConstraints, loadMap) {
    const { fd, sections } = await readBinFile(fileName, "r1cs", 1);
    const res = await loadHeader(fd, sections);

    // Reads one linear combination: a ULE32 pair count followed by
    // (varIdx, coefficient) pairs.
    async function readLC() {
        const lc = {};
        const nIdx = await fd.readULE32();
        for (let i = 0; i < nIdx; i++) {
            const idx = await fd.readULE32();
            lc[idx] = res.Fr.e(await readBigInt(fd, res.n8));
        }
        return lc;
    }

    // A constraint is three linear combinations: A*B - C = 0.
    async function readConstraint() {
        return [await readLC(), await readLC(), await readLC()];
    }

    if (loadConstraints) {
        await startReadUniqueSection(fd, sections, 2);
        res.constraints = [];
        for (let i = 0; i < res.nConstraints; i++) {
            res.constraints.push(await readConstraint());
        }
        await endReadSection(fd);
    }

    // Read the label map (variable index -> label index).
    if (loadMap) {
        await startReadUniqueSection(fd, sections, 3);
        res.map = [];
        for (let i = 0; i < res.nVars; i++) {
            res.map.push(await fd.readULE64());
        }
        await endReadSection(fd);
    }

    await fd.close();
    return res;
}
// 2020-07-26 15:05:23 +03:00
/**
 * Opens a file on disk and wraps it in a page-cached FastFile$1.
 * @param {string} fileName - path of the file to open
 * @param {string} openFlags - one of "w+", "wx+", "r", "ax+", "a+"
 * @param {number} [cacheSize] - page-cache size in bytes (default 256 KiB)
 * @returns {Promise<FastFile$1>}
 * @throws {Error} if openFlags is not one of the accepted modes
 */
async function open$1(fileName, openFlags, cacheSize) {
    if (!cacheSize) cacheSize = 4096 * 64;
    const validFlags = ["w+", "wx+", "r", "ax+", "a+"];
    if (!validFlags.includes(openFlags)) {
        throw new Error("Invalid open option");
    }
    const fd = await fs.promises.open(fileName, openFlags);
    const stats = await fd.stat();
    return new FastFile$1(fd, stats, cacheSize, fileName);
}
const tmpBuff32 = new Uint8Array ( 4 ) ;
const tmpBuff32v = new DataView ( tmpBuff32 . buffer ) ;
const tmpBuff64 = new Uint8Array ( 8 ) ;
const tmpBuff64v = new DataView ( tmpBuff64 . buffer ) ;
/**
 * Page-cached random-access file on top of a Node fs FileHandle (second
 * bundled copy of fastfile; differs from FastFile by loading write pages one
 * at a time and using the shared tmpBuff scratch buffers).
 * Integer helpers: ULE = unsigned little-endian, UBE = unsigned big-endian.
 */
class FastFile$1 {

    constructor(fd, stats, cacheSize, fileName) {
        this.fileName = fileName;
        this.fd = fd;
        this.pos = 0;
        // Page size: at least 256 bytes, doubled until >= 4x the fs block size.
        this.pageBits = 8;
        this.pageSize = (1 << this.pageBits);
        while (this.pageSize < stats.blksize * 4) {
            this.pageBits++;
            this.pageSize *= 2;
        }
        this.totalSize = stats.size;
        this.totalPages = Math.floor((stats.size - 1) / this.pageSize) + 1;
        this.maxPagesLoaded = Math.floor(cacheSize / this.pageSize) + 1;
        this.pages = {};
        this.pendingLoads = [];
        this.writing = false;
        this.reading = false;
    }

    // Queues page p for loading; resolves once the page is in the cache
    // (with its pendingOps count incremented).
    _loadPage(p) {
        const self = this;
        return new Promise((resolve, reject) => {
            self.pendingLoads.push({
                page: p,
                resolve: resolve,
                reject: reject
            });
            setImmediate(self._triggerLoad.bind(self));
        });
    }

    // Services the pending-load queue: resolves loads whose page is already
    // cached, evicts one clean unused page if the cache is full, and starts
    // at most one physical read.
    _triggerLoad() {
        const self = this;
        processPendingLoads();
        if (self.pendingLoads.length == 0) return;
        if (Object.keys(self.pages).length >= self.maxPagesLoaded) {
            const dp = getDeletablePage();
            if (dp < 0) {
                // Nothing evictable yet (all pages dirty or in use); a later
                // write/read completion will re-trigger the loader.
                return;
            }
            delete self.pages[dp];
        }
        const load = self.pendingLoads.shift();
        if (load.page >= self.totalPages) {
            // Page beyond EOF: materialize an empty page, nothing to read.
            self.pages[load.page] = {
                dirty: false,
                buff: new Uint8Array(self.pageSize),
                pendingOps: 1,
                size: 0
            };
            load.resolve();
            setImmediate(self._triggerLoad.bind(self));
            return;
        }
        if (self.reading) {
            self.pendingLoads.unshift(load);
            return; // Only one read at a time.
        }
        self.reading = true;
        const page = {
            dirty: false,
            buff: new Uint8Array(self.pageSize),
            pendingOps: 1,
            size: 0
        };
        self.fd.read(page.buff, 0, self.pageSize, load.page * self.pageSize).then((res) => {
            page.size = res.bytesRead;
            self.pages[load.page] = page;
            self.reading = false;
            load.resolve();
            setImmediate(self._triggerLoad.bind(self));
        }, (err) => {
            load.reject(err);
        });

        // Resolves queued loads whose page is already cached.
        function processPendingLoads() {
            const newPendingLoads = [];
            for (let i = 0; i < self.pendingLoads.length; i++) {
                const load = self.pendingLoads[i];
                if (typeof self.pages[load.page] != "undefined") {
                    self.pages[load.page].pendingOps++;
                    load.resolve();
                } else {
                    newPendingLoads.push(load);
                }
            }
            self.pendingLoads = newPendingLoads;
        }

        // Returns a cached page index that is clean and unused, or -1.
        function getDeletablePage() {
            for (let p in self.pages) {
                const page = self.pages[p];
                if ((page.dirty == false) && (page.pendingOps == 0)) return p;
            }
            return -1;
        }
    }

    // Flushes one dirty page to disk; chains itself until none remain.
    _triggerWrite() {
        const self = this;
        if (self.writing) return;
        const p = self._getDirtyPage();
        if (p < 0) {
            if (self.pendingClose) self.pendingClose();
            return;
        }
        self.writing = true;
        self.pages[p].dirty = false;
        self.fd.write(self.pages[p].buff, 0, self.pages[p].size, p * self.pageSize).then(() => {
            self.writing = false;
            setImmediate(self._triggerWrite.bind(self));
            setImmediate(self._triggerLoad.bind(self));
        }, (err) => {
            console.log("ERROR Writing: " + err);
            self.error = err;
            self._tryClose();
        });
    }

    _getDirtyPage() {
        for (let p in this.pages) {
            if (this.pages[p].dirty) return p;
        }
        return -1;
    }

    /**
     * Writes buff at pos (default: current position), paging it through the
     * cache one page at a time.
     * (Fix: repaired the broken commented-out cache-grow block whose split
     * "/ *" delimiters were a syntax error; the dead code is removed — pages
     * are loaded one at a time below, so large writes never need the grow.)
     * @throws {Error} if the file is being closed
     */
    async write(buff, pos) {
        if (buff.byteLength == 0) return;
        const self = this;
        if (typeof pos == "undefined") pos = self.pos;
        self.pos = pos + buff.byteLength;
        if (self.totalSize < pos + buff.byteLength) self.totalSize = pos + buff.byteLength;
        if (self.pendingClose)
            throw new Error("Writing a closing file");
        const firstPage = Math.floor(pos / self.pageSize);
        let p = firstPage;
        let o = pos % self.pageSize;
        let r = buff.byteLength;
        while (r > 0) {
            await self._loadPage(p);
            const l = (o + r > self.pageSize) ? (self.pageSize - o) : r;
            // Respect buff.byteOffset so views into larger buffers copy correctly.
            const srcView = new Uint8Array(buff.buffer, buff.byteOffset + buff.byteLength - r, l);
            const dstView = new Uint8Array(self.pages[p].buff.buffer, o, l);
            dstView.set(srcView);
            self.pages[p].dirty = true;
            self.pages[p].pendingOps--;
            self.pages[p].size = Math.max(o + l, self.pages[p].size);
            if (p >= self.totalPages) {
                self.totalPages = p + 1;
            }
            r = r - l;
            p++;
            o = 0;
        }
        setImmediate(self._triggerWrite.bind(self));
    }

    /**
     * Reads len bytes from pos (default: current position).
     * @returns {Promise<Uint8Array>} a fresh buffer of length len; bytes past
     *     the current totalSize are left zero.
     */
    async read(len, pos) {
        if (len == 0) {
            return new Uint8Array(0);
        }
        const self = this;
        // Grow the cache when a single read approaches its capacity, so the
        // whole-range preload below cannot exhaust it.
        if (len > self.pageSize * self.maxPagesLoaded * 0.8) {
            const cacheSize = Math.floor(len * 1.1);
            this.maxPagesLoaded = Math.floor(cacheSize / self.pageSize) + 1;
        }
        if (typeof pos == "undefined") pos = self.pos;
        self.pos = pos + len;
        if (self.pendingClose)
            throw new Error("Reading a closing file");
        const firstPage = Math.floor(pos / self.pageSize);
        const lastPage = Math.floor((pos + len - 1) / self.pageSize);
        for (let i = firstPage; i <= lastPage; i++) await self._loadPage(i);
        const buff = new Uint8Array(len);
        let p = firstPage;
        let o = pos % self.pageSize;
        // Remaining bytes to copy, clamped so we never copy past EOF.
        let r = pos + len > self.totalSize ? len - (pos + len - self.totalSize) : len;
        while (r > 0) {
            // Bytes to copy from this page.
            const l = (o + r > self.pageSize) ? (self.pageSize - o) : r;
            const srcView = new Uint8Array(self.pages[p].buff.buffer, o, l);
            buff.set(srcView, len - r);
            self.pages[p].pendingOps--;
            r = r - l;
            p++;
            o = 0;
        }
        setImmediate(self._triggerLoad.bind(self));
        return buff;
    }

    // Completes a pending close() once no dirty pages, writes, reads or
    // pending loads remain.  A stored error rejects the close promise.
    _tryClose() {
        const self = this;
        if (!self.pendingClose) return;
        if (self.error) {
            self.pendingCloseReject(self.error);
        }
        const p = self._getDirtyPage();
        if ((p >= 0) || (self.writing) || (self.reading) || (self.pendingLoads.length > 0)) return;
        self.pendingClose();
    }

    /**
     * Flushes all dirty pages and closes the underlying FileHandle.
     * @throws {Error} if called twice
     */
    close() {
        const self = this;
        if (self.pendingClose)
            throw new Error("Closing the file twice");
        return new Promise((resolve, reject) => {
            self.pendingClose = resolve;
            self.pendingCloseReject = reject;
            self._tryClose();
        }).then(() => {
            self.fd.close();
        }, (err) => {
            self.fd.close();
            throw (err);
        });
    }

    /** Closes the file and deletes it from disk. */
    async discard() {
        const self = this;
        await self.close();
        await fs.promises.unlink(this.fileName);
    }

    /** Writes v as a 32-bit little-endian unsigned integer. */
    async writeULE32(v, pos) {
        const self = this;
        tmpBuff32v.setUint32(0, v, true);
        await self.write(tmpBuff32, pos);
    }

    /** Writes v as a 32-bit big-endian unsigned integer. */
    async writeUBE32(v, pos) {
        const self = this;
        // Fix: must be big-endian (false) to match readUBE32; it was written
        // little-endian, making UBE round-trips byte-swapped.
        tmpBuff32v.setUint32(0, v, false);
        await self.write(tmpBuff32, pos);
    }

    /** Writes v (< 2^53) as a 64-bit little-endian unsigned integer. */
    async writeULE64(v, pos) {
        const self = this;
        tmpBuff64v.setUint32(0, v & 0xFFFFFFFF, true);
        tmpBuff64v.setUint32(4, Math.floor(v / 0x100000000), true);
        await self.write(tmpBuff64, pos);
    }

    /** Reads a 32-bit little-endian unsigned integer. */
    async readULE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        const view = new Uint32Array(b.buffer);
        return view[0];
    }

    /** Reads a 32-bit big-endian unsigned integer. */
    async readUBE32(pos) {
        const self = this;
        const b = await self.read(4, pos);
        const view = new DataView(b.buffer);
        return view.getUint32(0, false);
    }

    /** Reads a 64-bit little-endian unsigned integer (as a double; < 2^53). */
    async readULE64(pos) {
        const self = this;
        const b = await self.read(8, pos);
        const view = new Uint32Array(b.buffer);
        return view[1] * 0x100000000 + view[0];
    }
}
/** Creates a new, writable in-memory file (default initial capacity 1 MiB). */
function createNew(o) {
    const initialSize = o.initialSize || (1 << 20);
    const fd = new MemFile$1();
    fd.o = o;
    fd.o.data = new Uint8Array(initialSize);
    Object.assign(fd, { allocSize: initialSize, totalSize: 0, readOnly: false, pos: 0 });
    return fd;
}
/** Wraps an existing in-memory data object in a read-only MemFile$1. */
function readExisting$2(o) {
    const size = o.data.byteLength;
    const fd = new MemFile$1();
    fd.o = o;
    Object.assign(fd, { allocSize: size, totalSize: size, readOnly: true, pos: 0 });
    return fd;
}
/**
 * In-memory implementation of the FastFile interface (second bundled copy),
 * backed by a growable Uint8Array stored on `this.o.data`.
 */
class MemFile$1 {

    constructor() {
        this.pageSize = 1 << 14; // for compatibility
    }

    /** Grows the backing buffer so it can hold at least newLen bytes. */
    _resizeIfNeeded(newLen) {
        if (newLen <= this.allocSize) return;
        // Grow by at least 1 MiB or 10% of current capacity, whichever is larger.
        const target = Math.max(
            this.allocSize + (1 << 20),
            Math.floor(this.allocSize * 1.1),
            newLen
        );
        const replacement = new Uint8Array(target);
        replacement.set(this.o.data);
        this.o.data = replacement;
        this.allocSize = target;
    }

    /** Writes buff at pos (default: current position), growing as needed. */
    async write(buff, pos) {
        if (pos === undefined) pos = this.pos;
        if (this.readOnly) throw new Error("Writing a read only file");
        const end = pos + buff.byteLength;
        this._resizeIfNeeded(end);
        this.o.data.set(buff, pos);
        if (end > this.totalSize) this.totalSize = end;
        this.pos = end;
    }

    /** Reads len bytes from pos (default: current position) into a fresh array. */
    async read(len, pos) {
        if (pos === undefined) pos = this.pos;
        if (this.readOnly && pos + len > this.totalSize) {
            throw new Error("Reading out of bounds");
        }
        this._resizeIfNeeded(pos + len);
        const out = this.o.data.slice(pos, pos + len);
        this.pos = pos + len;
        return out;
    }

    /** Trims the backing buffer down to the bytes actually written. */
    close() {
        if (this.o.data.byteLength !== this.totalSize) {
            this.o.data = this.o.data.slice(0, this.totalSize);
        }
    }

    async discard() {
        // Nothing to release for an in-memory file.
    }

    /** Writes v as a 32-bit little-endian unsigned integer. */
    async writeULE32(v, pos) {
        const word = Uint32Array.of(v);
        await this.write(new Uint8Array(word.buffer), pos);
    }

    /** Writes v as a 32-bit big-endian unsigned integer. */
    async writeUBE32(v, pos) {
        const bytes = new Uint8Array(4);
        new DataView(bytes.buffer).setUint32(0, v, false);
        await this.write(bytes, pos);
    }

    /** Writes v (< 2^53) as a 64-bit little-endian unsigned integer. */
    async writeULE64(v, pos) {
        const words = Uint32Array.of(v & 0xFFFFFFFF, Math.floor(v / 0x100000000));
        await this.write(new Uint8Array(words.buffer), pos);
    }

    /** Reads a 32-bit little-endian unsigned integer. */
    async readULE32(pos) {
        const bytes = await this.read(4, pos);
        return new Uint32Array(bytes.buffer)[0];
    }

    /** Reads a 32-bit big-endian unsigned integer. */
    async readUBE32(pos) {
        const bytes = await this.read(4, pos);
        return new DataView(bytes.buffer).getUint32(0, false);
    }

    /** Reads a 64-bit little-endian unsigned integer (as a double; < 2^53). */
    async readULE64(pos) {
        const bytes = await this.read(8, pos);
        const words = new Uint32Array(bytes.buffer);
        return words[1] * 0x100000000 + words[0];
    }
}
/* global fetch */
/**
 * Creates (or truncates) a writable "file": a path string opens a disk file
 * with flags "w+"; an options object may select a disk or in-memory file.
 * @param {string|object} o - file name or options object
 * @param {number} [b] - cache size in bytes when opening a disk file
 */
async function createOverride(o, b) {
    if (typeof o === "string") {
        o = { type: "file", fileName: o, cacheSize: b };
    }
    switch (o.type) {
    case "file":
        return open$1(o.fileName, "w+", o.cacheSize);
    case "mem":
        return createNew(o);
    default:
        throw new Error("Invalid FastFile type: " + o.type);
    }
}
/**
 * Opens an existing "file" for reading (second fastfile copy).  Accepts a
 * Uint8Array (wrapped as an in-memory file), a file name (fetched over HTTP
 * in the browser, opened from disk in Node), or an options object.
 * @param {Uint8Array|string|object} o - data, file name, or options object
 * @param {number} [b] - cache size in bytes when opening a disk file
 */
async function readExisting$3(o, b) {
    if (o instanceof Uint8Array) {
        o = { type: "mem", data: o };
    }
    if (typeof o === "string") {
        if (process.browser) {
            // In the browser a string is a URL: download it into memory.
            const response = await fetch(o);
            const ab = await response.arrayBuffer();
            o = { type: "mem", data: new Uint8Array(ab) };
        } else {
            o = { type: "file", fileName: o, cacheSize: b };
        }
    }
    switch (o.type) {
    case "file":
        return open$1(o.fileName, "r", o.cacheSize);
    case "mem":
        return readExisting$2(o);
    default:
        throw new Error("Invalid FastFile type: " + o.type);
    }
}
// 2020-07-11 11:31:52 +03:00
/**
 * Parses a circom .sym file into lookup tables:
 *  - labelIdx2Name: label index -> signal name
 *  - varIdx2Name: variable index -> signal name(s), "|"-joined when several
 *    labels collapse onto one variable
 *  - componentIdx2Name: component index -> component path
 * (Fix: removed stray VCS timestamp lines that made this function a syntax error.)
 */
async function loadSymbols(symFileName) {
    const sym = {
        labelIdx2Name: ["one"],
        varIdx2Name: ["one"],
        componentIdx2Name: []
    };
    const fd = await readExisting$3(symFileName);
    const buff = await fd.read(fd.totalSize);
    const symsStr = new TextDecoder("utf-8").decode(buff);
    const lines = symsStr.split("\n");
    for (let i = 0; i < lines.length; i++) {
        // Each line: labelIdx,varIdx,componentIdx,name
        const arr = lines[i].split(",");
        if (arr.length != 4) continue;
        if (sym.varIdx2Name[arr[1]]) {
            sym.varIdx2Name[arr[1]] += "|" + arr[3];
        } else {
            sym.varIdx2Name[arr[1]] = arr[3];
        }
        sym.labelIdx2Name[arr[0]] = arr[3];
        if (!sym.componentIdx2Name[arr[2]]) {
            sym.componentIdx2Name[arr[2]] = extractComponent(arr[3]);
        }
    }
    await fd.close();
    return sym;

    function extractComponent(name) {
        const arr = name.split(".");
        arr.pop(); // Remove the last element (the signal name) to keep the component path.
        return arr.join(".");
    }
}
/**
 * Pretty-prints every constraint of a loaded r1cs circuit through the logger,
 * using the symbol table to show signal names.
 */
function r1csPrint(r1cs, syms, logger) {
    // Renders one linear combination as "c1*s1 + c2*s2 ...".
    const lc2str = (lc) => {
        let S = "";
        for (const k of Object.keys(lc)) {
            let name = syms.varIdx2Name[k];
            if (name == "one") name = "";
            let vs = r1cs.Fr.toString(lc[k]);
            if (vs == "1") vs = "";   // Do not show unit coefficients
            if (vs == "-1") vs = "-";
            if ((S != "") && (vs[0] != "-")) vs = "+" + vs;
            if (S != "") vs = " " + vs;
            S = S + vs + name;
        }
        return S;
    };
    for (const c of r1cs.constraints) {
        const line = `[${lc2str(c[0])}] * [${lc2str(c[1])}] - [${lc2str(c[2])}] = 0`;
        if (logger) logger.info(line);
    }
}
const bls12381r = ffjavascript . Scalar . e ( "73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001" , 16 ) ;
const bn128r = ffjavascript . Scalar . e ( "21888242871839275222246405745257275088548364400416034343698204186575808495617" ) ;
/**
 * Loads an .r1cs file and logs a summary (curve plus circuit statistics).
 * @returns {Promise<object>} the loaded header object.
 */
async function r1csInfo(r1csName, logger) {
    const cir = await load(r1csName);
    if (logger) {
        if (ffjavascript.Scalar.eq(cir.prime, bn128r)) {
            logger.info("Curve: bn-128");
        } else if (ffjavascript.Scalar.eq(cir.prime, bls12381r)) {
            logger.info("Curve: bls12-381");
        } else {
            logger.info(`Unknown Curve. Prime: ${ffjavascript.Scalar.toString(cir.prime)}`);
        }
        logger.info(`# of Wires: ${cir.nVars}`);
        logger.info(`# of Constraints: ${cir.nConstraints}`);
        logger.info(`# of Private Inputs: ${cir.nPrvInputs}`);
        logger.info(`# of Public Inputs: ${cir.nPubInputs}`);
        logger.info(`# of Outputs: ${cir.nOutputs}`);
    }
    return cir;
}
/**
 * Loads an .r1cs file with constraints and the label map included, ready to
 * be serialized as JSON.  (The logger parameter is currently unused.)
 */
async function r1csExportJson(r1csFileName, logger) {
    return load(r1csFileName, true, true);
}
// Inlined package.json (bundled by rollup's json plugin).
// (Fix: repaired the `circom _runtime` identifier that was split by a stray
// space, and removed embedded VCS timestamp lines — both syntax errors.)
var name = "snarkjs";
var type = "module";
var version = "0.3.6";
var description = "zkSNARKs implementation in JavaScript";
var main = "./build/main.cjs";
var module$1 = "./main.js";
var exports$1 = {
    "import": "./main.js",
    require: "./build/main.cjs"
};
var scripts = {
    test: "mocha",
    build: "rollup -c config/rollup.cjs.config.js",
    buildcli: "rollup -c config/rollup.cli.config.js",
    buildiife: "BROWSER=true rollup -c config/rollup.iife.config.js",
    buildiifemin: "BROWSER=true rollup -c config/rollup.iife_min.config.js"
};
var bin = {
    snarkjs: "build/cli.cjs"
};
var directories = {
    templates: "templates"
};
var keywords = [
    "zksnark",
    "zcash",
    "ethereum",
    "zero",
    "knowlage",
    "cryptography",
    "circuit"
];
var author = "Jordi Baylina";
var license = "GPL-3.0";
var repository = {
    type: "git",
    url: "https://github.com/iden3/snarkjs.git"
};
var dependencies = {
    "blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git",
    circom_runtime: "0.0.9",
    fastfile: "0.0.9",
    ffjavascript: "0.2.4",
    keccak: "^3.0.0",
    logplease: "^1.2.15",
    r1csfile: "0.0.12",
    yargs: "^12.0.5"
};
var devDependencies = {
    chai: "^4.2.0",
    eslint: "^6.8.0",
    lodash: "^4.17.15",
    mocha: "^7.1.1",
    rollup: "^2.20.0",
    "rollup-plugin-commonjs": "^10.1.0",
    "rollup-plugin-ignore": "^1.0.6",
    "rollup-plugin-json": "^4.0.0",
    "rollup-plugin-node-resolve": "^5.2.0",
    "rollup-plugin-replace": "^2.2.0",
    "rollup-plugin-terser": "^6.1.0"
};
var pkg = {
    name: name,
    type: type,
    version: version,
    description: description,
    main: main,
    module: module$1,
    exports: exports$1,
    scripts: scripts,
    bin: bin,
    directories: directories,
    keywords: keywords,
    author: author,
    license: license,
    repository: repository,
    dependencies: dependencies,
    devDependencies: devDependencies
};
const version$1 = pkg . version ;
// NOTE(review): selectedCommand is read by helpCmd as a fallback but is never
// assigned anywhere in this bundle, so the fallback path always sees null.
let selectedCommand = null ;
// Command-line dispatcher. Splits process.argv into positional words (cl) and
// "-"/"--" options (argv), finds the first command whose name or alias
// matches, and runs its action with the bound parameters and options.
// Prints contextual help on -h/--help, bad parameters, or no match.
async function clProcessor ( commands ) {
const cl = [ ] ;
const argv = { } ;
// Parse argv: "-x" / "--x" become argv.x = true, "--x=v" becomes argv.x = "v";
// anything else is a positional command word.
for ( let i = 2 ; i < process . argv . length ; i ++ ) {
if ( process . argv [ i ] [ 0 ] == "-" ) {
let S = process . argv [ i ] ;
while ( S [ 0 ] == "-" ) S = S . slice ( 1 ) ;
const arr = S . split ( "=" ) ;
if ( arr . length > 1 ) {
// Rejoin so values containing "=" are kept intact.
argv [ arr [ 0 ] ] = arr . slice ( 1 ) . join ( "=" ) ;
} else {
argv [ arr [ 0 ] ] = true ;
}
} else {
cl . push ( process . argv [ i ] ) ;
}
}
// First matching command wins; on a match we always return.
for ( let i = 0 ; i < commands . length ; i ++ ) {
const cmd = commands [ i ] ;
const m = calculateMatch ( commands [ i ] , cl ) ;
if ( m ) {
if ( ( argv . h ) || ( argv . help ) ) {
helpCmd ( cmd ) ;
return ;
}
if ( areParamsValid ( cmd . cmd , m ) ) {
if ( cmd . options ) {
const options = getOptions ( cmd . options ) ;
await cmd . action ( m , options ) ;
} else {
await cmd . action ( m , { } ) ;
}
} else {
if ( m . length > 0 ) console . log ( "Invalid number of parameters" ) ;
helpCmd ( cmd ) ;
}
return ;
}
}
if ( cl . length > 0 ) console . log ( "Invalid command" ) ;
helpAll ( ) ;
// Tries to match the positional words against the command's name and each of
// its aliases. Returns the bound parameter array on a match, else null.
// NOTE: parsed alias entries are consumed destructively (shift) and pruned
// (splice) as words are compared, so each candidate is parsed fresh above.
function calculateMatch ( cmd , cl ) {
const alias = [ ] ;
alias . push ( parseLine ( cmd . cmd ) ) ;
if ( cmd . alias ) {
if ( Array . isArray ( cmd . alias ) ) {
for ( let i = 0 ; i < cmd . alias . length ; i ++ ) {
alias . push ( parseLine ( cmd . alias [ i ] ) ) ;
}
} else {
alias . push ( parseLine ( cmd . alias ) ) ;
}
}
for ( let i = 0 ; i < cl . length ; i ++ ) {
for ( let j = 0 ; j < alias . length ; j ++ ) {
// Case-insensitive comparison, one command word per outer iteration.
const w = alias [ j ] . cmd . shift ( ) ;
if ( cl [ i ] . toUpperCase ( ) == w . toUpperCase ( ) ) {
if ( alias [ j ] . cmd . length == 0 ) {
// All command words consumed: bind remaining words to parameters.
return buildRemaining ( alias [ j ] . params , cl . slice ( i + 1 ) ) ;
}
} else {
// Mismatch: drop this candidate and re-check the shifted index.
alias . splice ( j , 1 ) ;
j -- ;
}
}
}
return null ;
// Binds declared parameters to the remaining positional words: "-opt" slots
// take the option's value, others take the next word (or null if exhausted);
// surplus words are appended at the end.
function buildRemaining ( defParams , cl ) {
const res = [ ] ;
let p = 0 ;
for ( let i = 0 ; i < defParams . length ; i ++ ) {
if ( defParams [ i ] [ 0 ] == "-" ) {
res . push ( getOption ( defParams [ i ] ) . val ) ;
} else {
if ( p < cl . length ) {
res . push ( cl [ p ++ ] ) ;
} else {
res . push ( null ) ;
}
}
}
while ( p < cl . length ) {
res . push ( cl [ p ++ ] ) ;
}
return res ;
}
}
// Splits a command spec like "foo bar <in> [out] -v" into its command words
// and its parameter tokens (everything from the first <, [ or - onwards).
function parseLine ( l ) {
const words = l . match ( /(\S+)/g ) ;
for ( let i = 0 ; i < words . length ; i ++ ) {
if ( ( words [ i ] [ 0 ] == "<" )
|| ( words [ i ] [ 0 ] == "[" )
|| ( words [ i ] [ 0 ] == "-" ) )
{
return {
cmd : words . slice ( 0 , i ) ,
params : words . slice ( i )
} ;
}
}
return {
cmd : words ,
params : [ ]
} ;
}
// Resolves an option spec "-a|alias:default" against the parsed argv: the
// first matching name wins; otherwise the default (after ":") or null.
// The returned key is always the first name in the "|" list.
function getOption ( o ) {
const arr1 = o . slice ( 1 ) . split ( ":" ) ;
const arr2 = arr1 [ 0 ] . split ( "|" ) ;
for ( let i = 0 ; i < arr2 . length ; i ++ ) {
if ( argv [ arr2 [ i ] ] ) return {
key : arr2 [ 0 ] ,
val : argv [ arr2 [ i ] ]
} ;
}
return {
key : arr2 [ 0 ] ,
val : ( arr1 . length > 1 ) ? arr1 [ 1 ] : null
} ;
}
// Validates the bound parameter count: no more than declared, at least the
// number of mandatory ones (trailing "[optional]" params do not count), and
// every "<required>" slot must be defined.
function areParamsValid ( cmd , params ) {
const pl = parseLine ( cmd ) ;
if ( params . length > pl . params . length ) return false ;
let minParams = pl . params . length ;
while ( ( minParams > 0 ) && ( pl . params [ minParams - 1 ] [ 0 ] == "[" ) ) minParams -- ;
if ( params . length < minParams ) return false ;
for ( let i = 0 ; ( i < pl . params . length ) && ( pl . params [ i ] [ 0 ] == "<" ) ; i ++ ) {
if ( typeof params [ i ] == "undefined" ) return false ;
}
return true ;
}
// Resolves a whitespace-separated list of option specs into a {key: value}
// object via getOption.
function getOptions ( options ) {
const res = { } ;
const opts = options . match ( /(\S+)/g ) ;
for ( let i = 0 ; i < opts . length ; i ++ ) {
const o = getOption ( opts [ i ] ) ;
res [ o . key ] = o . val ;
}
return res ;
}
// Prints "snarkjs@<version>".
function printVersion ( ) {
console . log ( "snarkjs@" + version$1 ) ;
}
// Prints the license banner shown with every help screen.
function epilog ( ) {
console . log ( ` Copyright (C) 2018 0kims association
This program comes with ABSOLUTELY NO WARRANTY ;
This is free software , and you are welcome to redistribute it
under certain conditions ; see the COPYING file in the official
repo directory at https : //github.com/iden3/snarkjs `);
}
// Prints the full command table: each command name padded to 30 columns with
// its description, followed by a usage line (preferring the first alias).
function helpAll ( ) {
printVersion ( ) ;
epilog ( ) ;
console . log ( "" ) ;
console . log ( "Usage:" ) ;
console . log ( " snarkjs <full command> ... <options>" ) ;
console . log ( " or snarkjs <shorcut> ... <options>" ) ;
console . log ( "" ) ;
console . log ( "Type snarkjs <command> --help to get more information for that command" ) ;
console . log ( "" ) ;
console . log ( "Full Command Description" ) ;
console . log ( "============ =================" ) ;
for ( let i = 0 ; i < commands . length ; i ++ ) {
const cmd = commands [ i ] ;
let S = "" ;
const pl = parseLine ( cmd . cmd ) ;
S += pl . cmd . join ( " " ) ;
while ( S . length < 30 ) S = S + " " ;
S += cmd . description ;
console . log ( S ) ;
S = " Usage: snarkjs " ;
if ( cmd . alias ) {
if ( Array . isArray ( cmd . alias ) ) {
S += cmd . alias [ 0 ] ;
} else {
S += cmd . alias ;
}
} else {
S += pl . cmd . join ( " " ) ;
}
S += " " + pl . params . join ( " " ) ;
console . log ( S ) ;
}
}
// Prints detailed help for one command (long description if present), with
// both the full command form and the alias/short form usage lines.
function helpCmd ( cmd ) {
if ( typeof cmd == "undefined" ) cmd = selectedCommand ;
if ( typeof cmd == "undefined" ) return helpAll ( ) ;
printVersion ( ) ;
epilog ( ) ;
console . log ( "" ) ;
if ( cmd . longDescription ) {
console . log ( cmd . longDescription ) ;
} else {
console . log ( cmd . description ) ;
}
console . log ( "Usage: " ) ;
console . log ( " snarkjs " + cmd . cmd ) ;
const pl = parseLine ( cmd . cmd ) ;
let S = " or snarkjs " ;
if ( cmd . alias ) {
if ( Array . isArray ( cmd . alias ) ) {
S += cmd . alias [ 0 ] ;
} else {
S += cmd . alias ;
}
} else {
S += pl . cmd . join ( " " ) ;
}
S += " " + pl . params . join ( " " ) ;
console . log ( S ) ;
console . log ( "" ) ;
}
}
/**
 * Deterministically maps a hash to a G2 group element: the first 32 bytes
 * (eight big-endian 32-bit words) seed a ChaCha stream which drives
 * curve.G2.fromRng.
 */
function hashToG2(curve, hash) {
    const view = new DataView(hash.buffer, hash.byteOffset, hash.byteLength);
    const seed = [];
    for (let w = 0; w < 8; w++) {
        seed.push(view.getUint32(w * 4));
    }
    return curve.G2.fromRng(new ffjavascript.ChaCha(seed));
}
/**
 * Computes the deterministic G2 base point "g2_sp" for verifying a
 * contribution: Blake2b(personalization byte || challenge || g1_s || g1_sx)
 * mapped into G2 via hashToG2.
 * Fixes: removed stray VCS timestamp lines that corrupted this function and
 * corrected the misspelled internal parameter name ("persinalization").
 */
function getG2sp(curve, personalization, challenge, g1s, g1sx) {
    const h = Blake2b(64);
    // Domain-separation byte: 0 = tau, 1 = alpha, 2 = beta (see createPTauKey).
    h.update(new Uint8Array([personalization]));
    h.update(challenge);
    h.update(curve.G1.toUncompressed(g1s));
    h.update(curve.G1.toUncompressed(g1sx));
    return hashToG2(curve, h.digest());
}
/**
 * Fills in the public part of one contribution sub-key `k` (which already
 * holds k.prvKey):
 *   g1_s   - random affine G1 point,     g1_sx  = prvKey * g1_s
 *   g2_sp  - deterministic G2 point from the challenge and the g1 pair,
 *   g2_spx = prvKey * g2_sp
 * Mutates and returns k.
 * (Fix: removed stray VCS timestamp lines interleaved in the body.)
 */
function calculatePubKey(k, curve, personalization, challengeHash, rng) {
    k.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
    k.g1_sx = curve.G1.toAffine(curve.G1.timesFr(k.g1_s, k.prvKey));
    k.g2_sp = curve.G2.toAffine(getG2sp(curve, personalization, challengeHash, k.g1_s, k.g1_sx));
    k.g2_spx = curve.G2.toAffine(curve.G2.timesFr(k.g2_sp, k.prvKey));
    return k;
}
/**
 * Creates a fresh powers-of-tau contribution key: random private scalars for
 * tau, alpha and beta, plus their public key material (see calculatePubKey).
 * Personalization bytes: 0 = tau, 1 = alpha, 2 = beta.
 * (Fix: removed stray VCS timestamp lines interleaved in the body.)
 */
function createPTauKey(curve, challengeHash, rng) {
    const key = {
        tau: {},
        alpha: {},
        beta: {}
    };
    key.tau.prvKey = curve.Fr.fromRng(rng);
    key.alpha.prvKey = curve.Fr.fromRng(rng);
    key.beta.prvKey = curve.Fr.fromRng(rng);
    calculatePubKey(key.tau, curve, 0, challengeHash, rng);
    calculatePubKey(key.alpha, curve, 1, challengeHash, rng);
    calculatePubKey(key.beta, curve, 2, challengeHash, rng);
    return key;
}
/* global window */

// _revTable[b] is the 8-bit bit-reversal of byte b, precomputed once.
const _revTable = [];
for (let b = 0; b < 256; b++) {
    _revTable[b] = _revSlow(b, 8);
}

// Reverses the low `bits` bits of `idx` one bit at a time (table builder).
function _revSlow(idx, bits) {
    let out = 0;
    let rest = idx;
    for (let k = 0; k < bits; k++) {
        out = (out << 1) | (rest & 1);
        rest >>= 1;
    }
    return out;
}

// Reverses the low `bits` bits of a 32-bit integer using the byte table:
// reverse all four bytes, then shift the result down to the requested width.
function bitReverse(idx, bits) {
    const rev32 =
        _revTable[idx >>> 24] |
        (_revTable[(idx >>> 16) & 0xFF] << 8) |
        (_revTable[(idx >>> 8) & 0xFF] << 16) |
        (_revTable[idx & 0xFF] << 24);
    return rev32 >>> (32 - bits);
}
// Floor of log2 for a 32-bit unsigned value. As in the original bit-mask
// implementation, log2(0) yields 0.
function log2(V) {
    let v = V >>> 0; // coerce to uint32 like the original's bitwise masks
    let r = 0;
    while (v > 1) {
        v >>>= 1;
        r++;
    }
    return r;
}
/**
 * Pretty-prints a 64-byte hash as four lines of four big-endian 32-bit words
 * in zero-padded hex, each line prefixed with two tabs. An optional title is
 * placed on its own first line.
 */
function formatHash(b, title) {
    const view = new DataView(b.buffer, b.byteOffset, b.byteLength);
    const rows = [];
    for (let r = 0; r < 4; r++) {
        const words = [];
        for (let c = 0; c < 4; c++) {
            words.push(view.getUint32(r * 16 + c * 4).toString(16).padStart(8, "0"));
        }
        rows.push("\t\t" + words.join(" "));
    }
    const body = rows.join("\n");
    return title ? title + "\n" + body : body;
}
// Byte-wise equality of two hashes (buffers or typed arrays of equal
// byteLength). NOTE(review): not timing-safe; acceptable here because the
// compared hashes are public values.
function hashIsEqual(h1, h2) {
    if (h1.byteLength != h2.byteLength) return false;
    const a = new Int8Array(h1);
    const b = new Int8Array(h2);
    for (let i = 0; i < h1.byteLength; i++) {
        if (a[i] != b[i]) return false;
    }
    return true;
}
// Duplicates a running Blake2b hasher by copying its partial-hash state, so
// the original can keep absorbing data independently of the clone.
function cloneHasher(h) {
    const clone = Blake2b(64);
    clone.setPartialHash(h.getPartialHash());
    return clone;
}
/**
 * Checks that (g1s, g1sx) and (g2s, g2sx) encode the same discrete-log
 * ratio, i.e. e(g1s, g2sx) == e(g1sx, g2s), via a single multi-pairing
 * e(g1s, g2sx) * e(-g1sx, g2s) == 1. Returns false if any point is the
 * group identity.
 */
async function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
    if (curve.G1.isZero(g1s) ||
        curve.G1.isZero(g1sx) ||
        curve.G2.isZero(g2s) ||
        curve.G2.isZero(g2sx)) {
        return false;
    }
    return await curve.pairingEq(g1s, g2sx, curve.G1.neg(g1sx), g2s);
}
/**
 * Prompts the user for random entropy text: window.prompt in the browser,
 * a readline question on stdin/stdout in Node.
 * Fix: the readline interface is now closed once the answer arrives;
 * previously it was left open, so the stdin handle could keep the Node
 * process alive after the prompt completed.
 * @returns {string|Promise<string>} The entered text.
 */
function askEntropy() {
    if (process.browser) {
        return window.prompt("Enter a random text. (Entropy): ", "");
    } else {
        const rl = readline.createInterface({
            input: process.stdin,
            output: process.stdout
        });
        return new Promise((resolve) => {
            rl.question("Enter a random text. (Entropy): ", (input) => {
                rl.close(); // release stdin so the event loop can exit
                resolve(input);
            });
        });
    }
}
/**
 * Builds a ChaCha RNG seeded from Blake2b(64 system-random bytes ||
 * user-supplied entropy). Keeps prompting until a non-empty entropy string
 * is provided.
 */
async function getRandomRng(entropy) {
    while (!entropy) {
        entropy = await askEntropy();
    }
    const hasher = Blake2b(64);
    hasher.update(crypto.randomBytes(64));        // OS randomness
    hasher.update(new TextEncoder().encode(entropy)); // user entropy, utf-8
    const hash = Buffer.from(hasher.digest());

    // Seed: eight big-endian 32-bit words from the digest.
    const seed = [];
    for (let w = 0; w < 8; w++) {
        seed.push(hash.readUInt32BE(w * 4));
    }
    return new ffjavascript.ChaCha(seed);
}
/**
 * Derives a deterministic ChaCha RNG from a beacon: SHA-256 is iterated
 * 2^numIterationsExp times over the beacon hash and the final digest seeds
 * the stream (eight big-endian 32-bit words). The count is split into
 * outer*inner loops so exponents >= 32 do not overflow 32-bit shifts.
 */
function rngFromBeaconParams(beaconHash, numIterationsExp) {
    let nIterationsInner;
    let nIterationsOuter;
    if (numIterationsExp < 32) {
        nIterationsInner = (1 << numIterationsExp) >>> 0;
        nIterationsOuter = 1;
    } else {
        nIterationsInner = 0x100000000; // 2^32
        nIterationsOuter = (1 << (numIterationsExp - 32)) >>> 0;
    }

    let curHash = beaconHash;
    for (let i = 0; i < nIterationsOuter; i++) {
        for (let j = 0; j < nIterationsInner; j++) {
            curHash = crypto.createHash("sha256").update(curHash).digest();
        }
    }

    const view = new DataView(curHash.buffer, curHash.byteOffset, curHash.byteLength);
    const seed = [];
    for (let w = 0; w < 8; w++) {
        seed.push(view.getUint32(w * 4, false)); // big-endian
    }
    return new ffjavascript.ChaCha(seed);
}
// Parses a hex string (optionally "0x"-prefixed) into a Uint8Array, two
// digits per byte. Uint8Array inputs are passed through unchanged.
function hex2ByteArray(s) {
    if (s instanceof Uint8Array) return s;
    const hex = s.slice(0, 2) == "0x" ? s.slice(2) : s;
    const pairs = hex.match(/[\da-f]{2}/gi);
    return new Uint8Array(pairs.map((pair) => parseInt(pair, 16)));
}
// Serializes an array-like of bytes to a lowercase hex string, two digits
// per byte.
function byteArray2hex(byteArray) {
    let hex = "";
    for (let i = 0; i < byteArray.length; i++) {
        hex += (byteArray[i] & 0xFF).toString(16).padStart(2, "0");
    }
    return hex;
}
// Subgroup orders (r) and base-field primes (q) of the supported curves.
// The "$1" suffixes come from rollup renaming duplicated top-level names.
// The q values are used by getCurveFromQ to pick the curve implementation.
const bls12381r$1 = ffjavascript . Scalar . e ( "73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001" , 16 ) ;
const bn128r$1 = ffjavascript . Scalar . e ( "21888242871839275222246405745257275088548364400416034343698204186575808495617" ) ;
const bls12381q = ffjavascript . Scalar . e ( "1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab" , 16 ) ;
const bn128q = ffjavascript . Scalar . e ( "21888242871839275222246405745257275088696311157297823662689037894645226208583" ) ;
/**
 * Instantiates the curve whose base-field prime equals q.
 * @throws {Error} if q matches neither bn128 nor bls12-381.
 */
async function getCurveFromQ(q) {
    if (ffjavascript.Scalar.eq(q, bn128q)) {
        return ffjavascript.buildBn128();
    }
    if (ffjavascript.Scalar.eq(q, bls12381q)) {
        return ffjavascript.buildBls12381();
    }
    throw new Error(`Curve not supported: ${ffjavascript.Scalar.toString(q)}`);
}
/**
 * Instantiates a curve by name, ignoring case and punctuation.
 * Accepts bn128 / bn254 / altbn128 as aliases for bn128, and bls12381.
 * @throws {Error} for any other name.
 */
async function getCurveFromName(name) {
    // Normalize: strip non-alphanumerics, upper-case (e.g. "bls12-381" -> "BLS12381").
    const normName = name.toUpperCase().match(/[A-Za-z0-9]+/g).join("");
    if (["BN128", "BN254", "ALTBN128"].includes(normName)) {
        return ffjavascript.buildBn128();
    }
    if (normName === "BLS12381") {
        return ffjavascript.buildBls12381();
    }
    throw new Error(`Curve not supported: ${name}`);
}
/**
 * Writes the ptau header section (id 1): field-element byte size, the
 * base-field prime (little-endian), the tau power and the ceremony power.
 * The section length is written as 0 first and back-patched once known.
 * Fix: the back-patching writeULE64 is now awaited — it was a floating
 * promise, racing the `fd.pos = oldPos` restore and escaping error handling.
 */
async function writePTauHeader(fd, curve, power, ceremonyPower) {
    if (!ceremonyPower) ceremonyPower = power;

    await fd.writeULE32(1); // Header type
    const pHeaderSize = fd.pos;
    await fd.writeULE64(0); // Placeholder length, patched below

    await fd.writeULE32(curve.F1.n64 * 8); // bytes per field element
    const buff = new Uint8Array(curve.F1.n8);
    ffjavascript.Scalar.toRprLE(buff, 0, curve.q, curve.F1.n8);
    await fd.write(buff); // base-field prime
    await fd.writeULE32(power);
    await fd.writeULE32(ceremonyPower);

    // Back-patch the real section size, then restore the cursor.
    const headerSize = fd.pos - pHeaderSize - 8;
    const oldPos = fd.pos;
    await fd.writeULE64(headerSize, pHeaderSize);
    fd.pos = oldPos;
}
/**
 * Reads and validates the ptau header section (id 1) and instantiates the
 * matching curve.
 * @returns {Promise<{curve: Object, power: number, ceremonyPower: number}>}
 * @throws {Error} on a missing/duplicated header or a size mismatch.
 */
async function readPTauHeader(fd, sections) {
    if (!sections[1]) throw new Error(fd.fileName + ": File has no header");
    if (sections[1].length > 1) throw new Error(fd.fileName + ": File has more than one header");

    fd.pos = sections[1][0].p;
    const n8 = await fd.readULE32();
    const primeBuff = await fd.read(n8);
    const q = ffjavascript.Scalar.fromRprLE(primeBuff);

    const curve = await getCurveFromQ(q);
    if (curve.F1.n64 * 8 != n8) throw new Error(fd.fileName + ": Invalid size");

    const power = await fd.readULE32();
    const ceremonyPower = await fd.readULE32();
    if (fd.pos - sections[1][0].p != sections[1][0].size) throw new Error("Invalid PTau header size");

    return {curve, power, ceremonyPower};
}
// Reads a serialized contribution public key (6 G1 + 3 G2 points) from the
// current file position and deserializes it.
async function readPtauPubKey(fd, curve, montgomery) {
    const nBytes = curve.F1.n8 * 2 * 6 + curve.F2.n8 * 2 * 3;
    const buff = await fd.read(nBytes);
    return fromPtauPubKeyRpr(buff, 0, curve, montgomery);
}
/**
 * Deserializes a contribution public key from `buff` starting at `pos`.
 * Layout: tau.g1_s, tau.g1_sx, alpha.g1_s, alpha.g1_sx, beta.g1_s,
 * beta.g1_sx, then tau.g2_spx, alpha.g2_spx, beta.g2_spx.
 * Points are decoded in Montgomery (LEM) or uncompressed form.
 */
function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
    function nextG1() {
        const p = montgomery
            ? curve.G1.fromRprLEM(buff, pos)
            : curve.G1.fromRprUncompressed(buff, pos);
        pos += curve.G1.F.n8 * 2;
        return p;
    }

    function nextG2() {
        const p = montgomery
            ? curve.G2.fromRprLEM(buff, pos)
            : curve.G2.fromRprUncompressed(buff, pos);
        pos += curve.G2.F.n8 * 2;
        return p;
    }

    const key = {
        tau: {},
        alpha: {},
        beta: {}
    };
    key.tau.g1_s = nextG1();
    key.tau.g1_sx = nextG1();
    key.alpha.g1_s = nextG1();
    key.alpha.g1_sx = nextG1();
    key.beta.g1_s = nextG1();
    key.beta.g1_sx = nextG1();
    key.tau.g2_spx = nextG2();
    key.alpha.g2_spx = nextG2();
    key.beta.g2_spx = nextG2();
    return key;
}
/**
 * Serializes a contribution public key into `buff` at `pos` (inverse of
 * fromPtauPubKeyRpr; same point order and encodings). Returns buff.
 * Fix: the write helpers were declared async but invoked without await;
 * they contain no asynchronous work, so they are now plain functions and
 * all writes are guaranteed complete before the function returns.
 */
function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {
    function writeG1(p) {
        if (montgomery) {
            curve.G1.toRprLEM(buff, pos, p);
        } else {
            curve.G1.toRprUncompressed(buff, pos, p);
        }
        pos += curve.F1.n8 * 2;
    }

    function writeG2(p) {
        if (montgomery) {
            curve.G2.toRprLEM(buff, pos, p);
        } else {
            curve.G2.toRprUncompressed(buff, pos, p);
        }
        pos += curve.F2.n8 * 2;
    }

    writeG1(key.tau.g1_s);
    writeG1(key.tau.g1_sx);
    writeG1(key.alpha.g1_s);
    writeG1(key.alpha.g1_sx);
    writeG1(key.beta.g1_s);
    writeG1(key.beta.g1_sx);
    writeG2(key.tau.g2_spx);
    writeG2(key.alpha.g2_spx);
    writeG2(key.beta.g2_spx);
    return buff;
}
// Serializes a contribution public key and writes it at the current file
// position (6 G1 + 3 G2 points).
async function writePtauPubKey(fd, curve, key, montgomery) {
    const nBytes = curve.F1.n8 * 2 * 6 + curve.F2.n8 * 2 * 3;
    const buff = new Uint8Array(nBytes);
    toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
    await fd.write(buff);
}
/**
 * Reads one contribution record from the current file position: the five
 * accumulated points (Montgomery form), the public key, the 216-byte Blake2b
 * partial state, the 64-byte next-challenge hash, the contribution type and
 * an optional TLV parameter block (1 = name, 2 = beacon iterations exponent,
 * 3 = beacon hash; type bytes must be strictly increasing). Also recomputes
 * c.responseHash from the partial state plus the uncompressed public key.
 * (Fix: removed stray VCS timestamp lines interleaved in the body.)
 */
async function readContribution(fd, curve) {
    async function readG1() {
        const pBuff = await fd.read(curve.G1.F.n8 * 2);
        return curve.G1.fromRprLEM(pBuff);
    }
    async function readG2() {
        const pBuff = await fd.read(curve.G2.F.n8 * 2);
        return curve.G2.fromRprLEM(pBuff);
    }
    async function readDV(n) {
        const b = await fd.read(n);
        return new Uint8Array(b);
    }

    const c = {};
    c.tauG1 = await readG1();
    c.tauG2 = await readG2();
    c.alphaG1 = await readG1();
    c.betaG1 = await readG1();
    c.betaG2 = await readG2();
    c.key = await readPtauPubKey(fd, curve, true);
    c.partialHash = await fd.read(216); // blake2b-wasm partial state
    c.nextChallenge = await fd.read(64);
    c.type = await fd.readULE32();

    // responseHash = Blake2b state up to this record + the uncompressed pubkey.
    const buffV = new Uint8Array(curve.G1.F.n8 * 2 * 6 + curve.G2.F.n8 * 2 * 3);
    toPtauPubKeyRpr(buffV, 0, curve, c.key, false);
    const responseHasher = Blake2b(64);
    responseHasher.setPartialHash(c.partialHash);
    responseHasher.update(buffV);
    c.responseHash = responseHasher.digest();

    // Optional TLV parameters.
    const paramLength = await fd.readULE32();
    const curPos = fd.pos;
    let lastType = 0;
    while (fd.pos - curPos < paramLength) {
        const buffType = await readDV(1);
        if (buffType[0] <= lastType) throw new Error("Parameters in the contribution must be sorted");
        lastType = buffType[0];
        if (buffType[0] == 1) { // Name
            const buffLen = await readDV(1);
            const buffStr = await readDV(buffLen[0]);
            c.name = new TextDecoder().decode(buffStr);
        } else if (buffType[0] == 2) { // Beacon: numIterationsExp
            const buffExp = await readDV(1);
            c.numIterationsExp = buffExp[0];
        } else if (buffType[0] == 3) { // Beacon: hash
            const buffLen = await readDV(1);
            c.beaconHash = await readDV(buffLen[0]);
        } else {
            throw new Error("Parameter not recognized");
        }
    }
    if (fd.pos != curPos + paramLength) {
        throw new Error("Parametes do not match"); // [sic] original message kept
    }
    return c;
}
/**
 * Reads the contributions section (id 7): a 32-bit count followed by that
 * many contribution records (see readContribution); ids are assigned 1..n.
 * Fix: the duplicate-section guard tested sections[7][0].length — a property
 * that does not exist on the {p, size} descriptor, so it could never fire.
 * It now checks sections[7].length, matching readPTauHeader and
 * startReadUniqueSection$1.
 */
async function readContributions(fd, curve, sections) {
    if (!sections[7]) throw new Error(fd.fileName + ": File has no contributions");
    if (sections[7].length > 1) throw new Error(fd.fileName + ": File has more than one contributions section");

    fd.pos = sections[7][0].p;
    const nContributions = await fd.readULE32();
    const contributions = [];
    for (let i = 0; i < nContributions; i++) {
        const c = await readContribution(fd, curve);
        c.id = i + 1;
        contributions.push(c);
    }
    if (fd.pos - sections[7][0].p != sections[7][0].size) throw new Error("Invalid contribution section size");
    return contributions;
}
/**
 * Writes one contribution record (inverse of readContribution): the five
 * points in Montgomery form, the pubkey, partial hash, next-challenge hash
 * and type, followed by a TLV parameter block (name and/or beacon params)
 * or a zero length when there are none.
 * (Fix: removed stray VCS timestamp lines interleaved in the body.)
 */
async function writeContribution(fd, curve, contribution) {
    const buffG1 = new Uint8Array(curve.F1.n8 * 2);
    const buffG2 = new Uint8Array(curve.F2.n8 * 2);

    async function writeG1(p) {
        curve.G1.toRprLEM(buffG1, 0, p);
        await fd.write(buffG1);
    }
    async function writeG2(p) {
        curve.G2.toRprLEM(buffG2, 0, p);
        await fd.write(buffG2);
    }

    await writeG1(contribution.tauG1);
    await writeG2(contribution.tauG2);
    await writeG1(contribution.alphaG1);
    await writeG1(contribution.betaG1);
    await writeG2(contribution.betaG2);
    await writePtauPubKey(fd, curve, contribution.key, true);
    await fd.write(contribution.partialHash);
    await fd.write(contribution.nextChallenge);
    await fd.writeULE32(contribution.type || 0);

    // Optional TLV parameters; type bytes must be written in ascending order
    // (readContribution enforces this on the way back in).
    const params = [];
    if (contribution.name) {
        params.push(1); // Param: name (truncated to 64 chars before encoding)
        const nameData = new TextEncoder("utf-8").encode(contribution.name.substring(0, 64));
        params.push(nameData.byteLength);
        for (let i = 0; i < nameData.byteLength; i++) params.push(nameData[i]);
    }
    if (contribution.type == 1) { // Beacon contribution
        params.push(2); // Param: numIterationsExp
        params.push(contribution.numIterationsExp);
        params.push(3); // Param: beacon hash
        params.push(contribution.beaconHash.byteLength);
        for (let i = 0; i < contribution.beaconHash.byteLength; i++) params.push(contribution.beaconHash[i]);
    }
    if (params.length > 0) {
        const paramsBuff = new Uint8Array(params);
        await fd.writeULE32(paramsBuff.byteLength);
        await fd.write(paramsBuff);
    } else {
        await fd.writeULE32(0);
    }
}
/**
 * Writes the contributions section (id 7): the count followed by each
 * record, with the section length back-patched once known.
 * Fix: the size-patching writeULE64 is now awaited — it was a floating
 * promise racing the cursor restore and escaping error handling.
 */
async function writeContributions(fd, curve, contributions) {
    await fd.writeULE32(7); // Section id
    const pContributionsSize = fd.pos;
    await fd.writeULE64(0); // Placeholder length, patched below

    await fd.writeULE32(contributions.length);
    for (let i = 0; i < contributions.length; i++) {
        await writeContribution(fd, curve, contributions[i]);
    }

    const contributionsSize = fd.pos - pContributionsSize - 8;
    const oldPos = fd.pos;
    await fd.writeULE64(contributionsSize, pContributionsSize);
    fd.pos = oldPos;
}
/**
 * Computes the challenge hash the first contributor must respond to for a
 * brand-new accumulator of the given power: Blake2b(64) over the blank hash,
 * then (2^power * 2 - 1) copies of the G1 generator (tauG1), 2^power copies
 * each of G2 (tauG2), G1 (alphaTauG1) and G1 (betaTauG1), and one final G2
 * (betaG2).
 * Fixes: removed stray VCS timestamp lines; the hasher is created with the
 * same Blake2b(64) factory call used everywhere else in this bundle
 * (previously `new Blake2b(64)`).
 */
function calculateFirstChallengeHash(curve, power, logger) {
    if (logger) logger.debug("Calculating First Challenge Hash");

    const hasher = Blake2b(64);

    const vG1 = new Uint8Array(curve.G1.F.n8 * 2);
    const vG2 = new Uint8Array(curve.G2.F.n8 * 2);
    curve.G1.toRprUncompressed(vG1, 0, curve.G1.g);
    curve.G2.toRprUncompressed(vG2, 0, curve.G2.g);

    hasher.update(Blake2b(64).digest()); // hash of the empty (blank) challenge

    // Absorbs n copies of buff, batching 500000 copies into one big buffer
    // per update call to cut down on hasher overhead.
    function hashBlock(buff, n) {
        const blockSize = 500000;
        const nBlocks = Math.floor(n / blockSize);
        const rem = n % blockSize;
        const bigBuff = new Uint8Array(blockSize * buff.byteLength);
        for (let i = 0; i < blockSize; i++) {
            bigBuff.set(buff, i * buff.byteLength);
        }
        for (let i = 0; i < nBlocks; i++) {
            hasher.update(bigBuff);
            if (logger) logger.debug("Initial hash: " + i * blockSize);
        }
        for (let i = 0; i < rem; i++) {
            hasher.update(buff);
        }
    }

    let n;
    n = (1 << power) * 2 - 1; // tauG1 holds 2^(power+1) - 1 points
    if (logger) logger.debug("Calculate Initial Hash: tauG1");
    hashBlock(vG1, n);

    n = 1 << power;
    if (logger) logger.debug("Calculate Initial Hash: tauG2");
    hashBlock(vG2, n);
    if (logger) logger.debug("Calculate Initial Hash: alphaTauG1");
    hashBlock(vG1, n);
    if (logger) logger.debug("Calculate Initial Hash: betaTauG1");
    hashBlock(vG1, n);

    hasher.update(vG2); // the single betaG2 point

    return hasher.digest();
}
/**
 * Derives a fully deterministic contribution key from public beacon
 * parameters: the RNG is seeded by iterated SHA-256 of the beacon hash, and
 * that RNG drives the tau/alpha/beta key generation against the challenge.
 * (Fix: removed stray VCS timestamp lines interleaved in the body.)
 */
function keyFromBeacon(curve, challengeHash, beaconHash, numIterationsExp) {
    const rng = rngFromBeaconParams(beaconHash, numIterationsExp);
    return createPTauKey(curve, challengeHash, rng);
}
/**
 * Opens a sectioned binary file, validates its 4-character magic and its
 * version, and scans the section table: the result's sections[id] is a list
 * of {p, size} payload descriptors (a section id may occur more than once).
 * (Fix: removed stray VCS timestamp lines interleaved in the body.)
 */
async function readBinFile$1(fileName, type, maxVersion) {
    const fd = await readExisting$3(fileName);

    const b = await fd.read(4);
    let readedType = "";
    for (let i = 0; i < 4; i++) readedType += String.fromCharCode(b[i]);
    if (readedType != type) throw new Error(fileName + ": Invalid File format");

    const v = await fd.readULE32();
    if (v > maxVersion) throw new Error("Version not supported");

    // Scan sections: each is [type id: u32][length: u64][payload].
    const nSections = await fd.readULE32();
    const sections = [];
    for (let i = 0; i < nSections; i++) {
        const ht = await fd.readULE32();
        const hl = await fd.readULE64();
        if (typeof sections[ht] == "undefined") sections[ht] = [];
        sections[ht].push({
            p: fd.pos,
            size: hl
        });
        fd.pos += hl; // skip over the payload to the next section header
    }
    return {fd, sections};
}
// Creates (overwriting) a sectioned binary file and writes the common
// prologue: 4-char magic, format version, and declared number of sections.
async function createBinFile(fileName, type, version, nSections) {
    const fd = await createOverride(fileName);

    const magic = new Uint8Array(4);
    for (let i = 0; i < 4; i++) magic[i] = type.charCodeAt(i);
    await fd.write(magic, 0); // Magic (e.g. "ptau" / "r1cs")
    await fd.writeULE32(version);
    await fd.writeULE32(nSections);
    return fd;
}
// Begins writing a section: emits the section id plus a zero-length
// placeholder, and records where that length lives so endWriteSection can
// back-patch it. Only one section may be open for writing at a time.
async function startWriteSection(fd, idSection) {
    if (typeof fd.writingSection !== "undefined") throw new Error("Already writing a section");
    await fd.writeULE32(idSection); // Section id
    fd.writingSection = {
        pSectionSize: fd.pos
    };
    await fd.writeULE64(0); // Placeholder, patched by endWriteSection
}
/**
 * Closes the currently open write-section: back-patches its real byte
 * length at the recorded placeholder position, restores the cursor, and
 * clears the writing state.
 * Fix: the patching writeULE64 is now awaited — it was a floating promise
 * racing the `fd.pos = oldPos` restore and escaping error handling.
 */
async function endWriteSection(fd) {
    if (typeof fd.writingSection === "undefined") throw new Error("Not writing a section");

    const sectionSize = fd.pos - fd.writingSection.pSectionSize - 8;
    const oldPos = fd.pos;
    fd.pos = fd.writingSection.pSectionSize;
    await fd.writeULE64(sectionSize);
    fd.pos = oldPos;
    delete fd.writingSection;
}
// Positions the cursor at the start of a section that must appear exactly
// once, and remembers its {p, size} descriptor so endReadSection$1 can check
// that exactly the declared size was consumed.
async function startReadUniqueSection$1(fd, sections, idSection) {
    if (typeof fd.readingSection !== "undefined") throw new Error("Already reading a section");
    if (!sections[idSection]) throw new Error(fd.fileName + ": Missing section " + idSection);
    if (sections[idSection].length > 1) throw new Error(fd.fileName + ": Section Duplicated " + idSection);

    fd.pos = sections[idSection][0].p;
    fd.readingSection = sections[idSection][0];
}
// Finishes reading the current section; unless noCheck is set, verifies the
// cursor advanced by exactly the section's declared size.
async function endReadSection$1(fd, noCheck) {
    if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
    if (!noCheck && fd.pos - fd.readingSection.p != fd.readingSection.size) {
        throw new Error("Invalid section size reading");
    }
    delete fd.readingSection;
}
// Writes scalar n as an n8-byte little-endian integer (at pos when given,
// otherwise at the current cursor).
async function writeBigInt(fd, n, n8, pos) {
    const raw = new Uint8Array(n8);
    ffjavascript.Scalar.toRprLE(raw, 0, n, n8);
    await fd.write(raw, pos);
}
// Reads an n8-byte little-endian integer (from pos when given) as a scalar.
async function readBigInt$1(fd, n8, pos) {
    const raw = await fd.read(n8, pos);
    return ffjavascript.Scalar.fromRprLE(raw, 0, n8);
}
// Streams a whole section from one file to another in page-sized chunks,
// wrapping the copy in a fresh section header in the destination.
async function copySection(fdFrom, sections, fdTo, sectionId) {
    const chunkSize = fdFrom.pageSize;
    await startReadUniqueSection$1(fdFrom, sections, sectionId);
    await startWriteSection(fdTo, sectionId);

    const totalSize = sections[sectionId][0].size;
    for (let offset = 0; offset < totalSize; offset += chunkSize) {
        const len = Math.min(totalSize - offset, chunkSize);
        const chunk = await fdFrom.read(len);
        await fdTo.write(chunk);
    }

    await endWriteSection(fdTo);
    await endReadSection$1(fdFrom);
}
// Reads an entire (unique) section into a single buffer and returns it.
async function readFullSection(fd, sections, idSection) {
    await startReadUniqueSection$1(fd, sections, idSection);
    const data = await fd.read(fd.readingSection.size);
    await endReadSection$1(fd);
    return data;
}
/**
 * Compares the payload of section `idSection` in two files byte by byte,
 * streaming at most 16 pages at a time. Returns true iff identical.
 * Fixes: removed stray VCS timestamp lines, and the early `return false`
 * paths now release both fds' section-reading state (previously they left
 * fd.readingSection set, making any later startReadUniqueSection throw
 * "Already reading a section"). The full-match path keeps the original
 * end-of-section size check.
 */
async function sectionIsEqual(fd1, sections1, fd2, sections2, idSection) {
    const MAX_BUFF_SIZE = fd1.pageSize * 16;
    await startReadUniqueSection$1(fd1, sections1, idSection);
    await startReadUniqueSection$1(fd2, sections2, idSection);

    let equal = true;
    if (sections1[idSection][0].size != sections2[idSection][0].size) {
        equal = false;
    } else {
        const totalBytes = sections1[idSection][0].size;
        for (let i = 0; equal && i < totalBytes; i += MAX_BUFF_SIZE) {
            const n = Math.min(totalBytes - i, MAX_BUFF_SIZE);
            const buff1 = await fd1.read(n);
            const buff2 = await fd2.read(n);
            for (let j = 0; j < n; j++) {
                if (buff1[j] != buff2[j]) {
                    equal = false;
                    break;
                }
            }
        }
    }

    // On a mismatch the cursor may be mid-section, so skip the size check.
    await endReadSection$1(fd1, !equal);
    await endReadSection$1(fd2, !equal);
    return equal;
}
/*
    ptau file layout:

    Header (1)
        n8
        prime
        power
    tauG1 (2)
        {(1 << power) * 2 - 1} [
            G1, tau*G1, tau^2*G1, ...
        ]
    tauG2 (3)
        {1 << power} [
            G2, tau*G2, tau^2*G2, ...
        ]
    alphaTauG1 (4)
        {1 << power} [
            alpha*G1, alpha*tau*G1, alpha*tau^2*G1, ...
        ]
    betaTauG1 (5)
        {1 << power} [
            beta*G1, beta*tau*G1, beta*tau^2*G1, ...
        ]
    betaG2 (6)
        {1} [
            beta*G2
        ]
    contributions (7)
        NContributions
        {NContributions} [
            tau*G1
            tau*G2
            alpha*G1
            beta*G1
            beta*G2
            pubKey
                tau_g1s
                tau_g1sx
                tau_g2spx
                alpha_g1s
                alpha_g1sx
                alpha_g2spx
                beta_g1s
                beta_g1sx
                beta_g2spx
            partialHash (216 bytes) See https://github.com/mafintosh/blake2b-wasm/blob/23bee06945806309977af802bc374727542617c7/blake2b.wat#L9
            hashNewChallenge
        ]
*/
/**
 * Creates a brand-new ptau accumulator file in which every point is the
 * group generator (i.e. tau = alpha = beta = 1), with an empty contributions
 * section, and returns the first challenge hash contributors must respond to.
 * Fixes: removed stray VCS timestamp lines, and progress logging now uses
 * logger.info consistently — logplease loggers expose debug/info/warn/error
 * but no .log, so the previous logger.log calls would throw once a section
 * reached 100000 written points with a logger attached.
 */
async function newAccumulator(curve, power, fileName, logger) {
    await Blake2b.ready();

    const fd = await createBinFile(fileName, "ptau", 1, 7);
    await writePTauHeader(fd, curve, power, 0);

    const buffG1 = curve.G1.oneAffine;
    const buffG2 = curve.G2.oneAffine;

    // Section 2: tauG1 — 2^(power+1) - 1 generator points.
    await startWriteSection(fd, 2);
    const nTauG1 = (1 << power) * 2 - 1;
    for (let i = 0; i < nTauG1; i++) {
        await fd.write(buffG1);
        if ((logger) && ((i % 100000) == 0) && i) logger.info("tauG1: " + i);
    }
    await endWriteSection(fd);

    // Section 3: tauG2 — 2^power generator points.
    await startWriteSection(fd, 3);
    const nTauG2 = (1 << power);
    for (let i = 0; i < nTauG2; i++) {
        await fd.write(buffG2);
        if ((logger) && ((i % 100000) == 0) && i) logger.info("tauG2: " + i);
    }
    await endWriteSection(fd);

    // Section 4: alphaTauG1 — 2^power generator points.
    await startWriteSection(fd, 4);
    const nAlfaTauG1 = (1 << power);
    for (let i = 0; i < nAlfaTauG1; i++) {
        await fd.write(buffG1);
        if ((logger) && ((i % 100000) == 0) && i) logger.info("alphaTauG1: " + i);
    }
    await endWriteSection(fd);

    // Section 5: betaTauG1 — 2^power generator points.
    await startWriteSection(fd, 5);
    const nBetaTauG1 = (1 << power);
    for (let i = 0; i < nBetaTauG1; i++) {
        await fd.write(buffG1);
        if ((logger) && ((i % 100000) == 0) && i) logger.info("betaTauG1: " + i);
    }
    await endWriteSection(fd);

    // Section 6: betaG2 — a single generator point.
    await startWriteSection(fd, 6);
    await fd.write(buffG2);
    await endWriteSection(fd);

    // Section 7: contributions — none yet.
    await startWriteSection(fd, 7);
    await fd.writeULE32(0); // 0 Contributions
    await endWriteSection(fd);

    await fd.close();

    const firstChallengeHash = calculateFirstChallengeHash(curve, power, logger);
    if (logger) logger.debug(formatHash(Blake2b(64).digest(), "Blank Contribution Hash:"));
    if (logger) logger.info(formatHash(firstChallengeHash, "First Contribution Hash:"));
    return firstChallengeHash;
}
// Format of the output
/**
 * Exports the current challenge of a ptau file to a challenge file
 * (64-byte previous-response hash followed by all points in uncompressed form).
 * Verifies that the recomputed challenge hash matches the one recorded in the
 * ptau contributions section.
 *
 * @param {string} pTauFilename       Source ptau file.
 * @param {string} challengeFilename  Destination challenge file (overwritten).
 * @param {object} [logger]           Optional logger.
 * @returns {Uint8Array} The current challenge hash.
 * @throws {Error} If the recomputed challenge hash does not match the declared one.
 */
async function exportChallenge(pTauFilename, challengeFilename, logger) {

    await Blake2b.ready();
    const { fd: fdFrom, sections } = await readBinFile$1(pTauFilename, "ptau", 1);
    const { curve, power } = await readPTauHeader(fdFrom, sections);

    const contributions = await readContributions(fdFrom, curve, sections);
    let lastResponseHash, curChallengeHash;
    if (contributions.length == 0) {
        // No contributions yet: the "response" is the blank hash and the
        // challenge is the deterministic hash of the initial accumulator.
        lastResponseHash = Blake2b(64).digest();
        curChallengeHash = calculateFirstChallengeHash(curve, power);
    } else {
        lastResponseHash = contributions[contributions.length - 1].responseHash;
        curChallengeHash = contributions[contributions.length - 1].nextChallenge;
    }

    if (logger) logger.info(formatHash(lastResponseHash, "Last Response Hash: "));

    if (logger) logger.info(formatHash(curChallengeHash, "New Challenge Hash: "));

    const fdTo = await createOverride(challengeFilename);

    const toHash = Blake2b(64);
    await fdTo.write(lastResponseHash);
    toHash.update(lastResponseHash);

    await exportSection(2, "G1", (1 << power) * 2 - 1, "tauG1");
    await exportSection(3, "G2", (1 << power), "tauG2");
    await exportSection(4, "G1", (1 << power), "alphaTauG1");
    await exportSection(5, "G1", (1 << power), "betaTauG1");
    await exportSection(6, "G2", 1, "betaG2");

    await fdFrom.close();
    await fdTo.close();

    const calcCurChallengeHash = toHash.digest();

    if (!hashIsEqual(curChallengeHash, calcCurChallengeHash)) {
        if (logger) logger.info(formatHash(calcCurChallengeHash, "Calculated Current Challenge Hash: "));

        if (logger) logger.error("PTau file is corrupted. Calculated new challenge hash does not match with the declared one");
        throw new Error("PTau file is corrupted. Calculated new challenge hash does not match with the declared one");
    }

    return curChallengeHash;

    // Streams one ptau section to the challenge file, converting each chunk
    // from little-endian-Montgomery to uncompressed form and feeding it to toHash.
    async function exportSection(sectionId, groupName, nPoints, sectionName) {
        const G = curve[groupName];
        const sG = G.F.n8 * 2;
        const nPointsChunk = Math.floor((1 << 24) / sG); // ~16 MiB of points per chunk

        await startReadUniqueSection$1(fdFrom, sections, sectionId);
        for (let i = 0; i < nPoints; i += nPointsChunk) {
            if (logger) logger.debug(`Exporting ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints - i, nPointsChunk);
            let buff;
            buff = await fdFrom.read(n * sG);
            buff = await G.batchLEMtoU(buff);
            await fdTo.write(buff);
            toHash.update(buff);
        }
        await endReadSection$1(fdFrom);
    }
}
/**
 * Imports a contribution response file into a new ptau file.
 *
 * Validates the response size and that it chains from the last challenge,
 * decompresses and writes every section, recovers the contributor's public
 * key and singular points, and computes the next challenge hash.
 *
 * @param {string} oldPtauFilename       ptau file the response is based on.
 * @param {string} contributionFilename  Response file to import.
 * @param {string} newPTauFilename       ptau file to create.
 * @param {string} [name]                Optional contributor name.
 * @param {boolean} importPoints         Kept for interface compatibility (not used here).
 * @param {object} [logger]              Optional logger.
 * @returns {Uint8Array} The next challenge hash.
 * @throws {Error} On invalid size or hash-chain mismatch.
 */
async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, logger) {

    await Blake2b.ready();

    const { fd: fdOld, sections } = await readBinFile$1(oldPtauFilename, "ptau", 1);
    const { curve, power } = await readPTauHeader(fdOld, sections);
    const contributions = await readContributions(fdOld, curve, sections);
    const currentContribution = {};

    if (name) currentContribution.name = name;

    const sG1 = curve.F1.n8 * 2;
    const scG1 = curve.F1.n8;   // Compressed size
    const sG2 = curve.F2.n8 * 2;
    const scG2 = curve.F2.n8;   // Compressed size

    const fdResponse = await readExisting$3(contributionFilename);

    // Responses carry all points compressed, plus the 64-byte previous hash
    // and the uncompressed public key at the end.
    if (fdResponse.totalSize !=
        64 +                             // Old Hash
        ((1 << power) * 2 - 1) * scG1 +  // tauG1
        (1 << power) * scG2 +            // tauG2
        (1 << power) * scG1 +            // alphaTauG1
        (1 << power) * scG1 +            // betaTauG1
        scG2 +                           // betaG2
        sG1 * 6 + sG2 * 3)               // public key
        throw new Error("Size of the contribution is invalid");

    let lastChallengeHash;
    if (contributions.length > 0) {
        lastChallengeHash = contributions[contributions.length - 1].nextChallenge;
    } else {
        lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
    }

    const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 7);
    await writePTauHeader(fdNew, curve, power);

    const contributionPreviousHash = await fdResponse.read(64);

    if (!hashIsEqual(contributionPreviousHash, lastChallengeHash))
        throw new Error("Wrong contribution. this contribution is not based on the previous hash");

    const hasherResponse = new Blake2b(64);
    hasherResponse.update(contributionPreviousHash);

    // startSections[id] records where each section's data begins in fdNew,
    // so hashSection below can re-read what was just written.
    const startSections = [];
    let res;
    res = await processSection(fdResponse, fdNew, "G1", 2, (1 << power) * 2 - 1, [1], "tauG1");
    currentContribution.tauG1 = res[0];
    res = await processSection(fdResponse, fdNew, "G2", 3, (1 << power), [1], "tauG2");
    currentContribution.tauG2 = res[0];
    res = await processSection(fdResponse, fdNew, "G1", 4, (1 << power), [0], "alphaG1");
    currentContribution.alphaG1 = res[0];
    res = await processSection(fdResponse, fdNew, "G1", 5, (1 << power), [0], "betaG1");
    currentContribution.betaG1 = res[0];
    res = await processSection(fdResponse, fdNew, "G2", 6, 1, [0], "betaG2");
    currentContribution.betaG2 = res[0];

    // Partial hash state taken *before* hashing the key, so a verifier can
    // re-derive the response hash later (see printContribution in verify).
    currentContribution.partialHash = hasherResponse.getPartialHash();

    const buffKey = await fdResponse.read(curve.F1.n8 * 2 * 6 + curve.F2.n8 * 2 * 3);

    currentContribution.key = fromPtauPubKeyRpr(buffKey, 0, curve, false);

    hasherResponse.update(new Uint8Array(buffKey));
    const hashResponse = hasherResponse.digest();

    if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));

    const nextChallengeHasher = new Blake2b(64);
    nextChallengeHasher.update(hashResponse);

    await hashSection(fdNew, "G1", 2, (1 << power) * 2 - 1, "tauG1", logger);
    await hashSection(fdNew, "G2", 3, (1 << power), "tauG2", logger);
    await hashSection(fdNew, "G1", 4, (1 << power), "alphaTauG1", logger);
    await hashSection(fdNew, "G1", 5, (1 << power), "betaTauG1", logger);
    await hashSection(fdNew, "G2", 6, 1, "betaG2", logger);

    currentContribution.nextChallenge = nextChallengeHasher.digest();

    if (logger) logger.info(formatHash(currentContribution.nextChallenge, "Next Challenge Hash: "));

    contributions.push(currentContribution);

    await writeContributions(fdNew, curve, contributions);

    await fdResponse.close();
    await fdNew.close();
    await fdOld.close();

    return currentContribution.nextChallenge;

    // Streams one compressed section from the response into fdTo (decompressed
    // to LEM form), hashing the compressed bytes, and returns the points found
    // at singularPointIndexes (e.g. tau*G1 at index 1).
    async function processSection(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
        const G = curve[groupName];
        const scG = G.F.n8;        // compressed point size
        const sG = G.F.n8 * 2;     // LEM point size

        const singularPoints = [];

        await startWriteSection(fdTo, sectionId);
        const nPointsChunk = Math.floor((1 << 24) / sG);

        startSections[sectionId] = fdTo.pos;

        for (let i = 0; i < nPoints; i += nPointsChunk) {
            if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints - i, nPointsChunk);

            const buffC = await fdFrom.read(n * scG);
            hasherResponse.update(buffC);

            const buffLEM = await G.batchCtoLEM(buffC);

            await fdTo.write(buffLEM);

            for (let j = 0; j < singularPointIndexes.length; j++) {
                const sp = singularPointIndexes[j];
                if ((sp >= i) && (sp < i + n)) {
                    const P = G.fromRprLEM(buffLEM, (sp - i) * sG);
                    singularPoints.push(P);
                }
            }
        }

        await endWriteSection(fdTo);

        return singularPoints;
    }

    // Re-reads a freshly written section from fdTo and feeds its uncompressed
    // representation into nextChallengeHasher. Restores fdTo.pos afterwards.
    async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) {
        const G = curve[groupName];
        const sG = G.F.n8 * 2;
        const nPointsChunk = Math.floor((1 << 24) / sG);

        const oldPos = fdTo.pos;
        fdTo.pos = startSections[sectionId];

        for (let i = 0; i < nPoints; i += nPointsChunk) {
            if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints - i, nPointsChunk);

            const buffLEM = await fdTo.read(n * sG);

            const buffU = await G.batchLEMtoU(buffLEM);

            nextChallengeHasher.update(buffU);
        }

        fdTo.pos = oldPos;
    }
}
// Bundler-renamed alias for the sameRatio pairing check defined earlier in this file.
const sameRatio$1 = sameRatio ;
/**
 * Verifies a single contribution `cur` against the previous one `prev`.
 *
 * For beacon contributions (type == 1) it first re-derives the beacon key and
 * checks every published key point against it. It then recomputes the g2_sp
 * points from the previous challenge and runs the sameRatio pairing checks
 * that tie the published key and the new accumulator points to the previous
 * contribution.
 *
 * @returns {boolean} true if the contribution is valid, false otherwise.
 */
async function verifyContribution(curve, cur, prev, logger) {
    let sr;
    if (cur.type == 1) { // Verify the beacon.
        const beaconKey = keyFromBeacon(curve, prev.nextChallenge, cur.beaconHash, cur.numIterationsExp);

        // Every published key point must equal the deterministically
        // re-derived beacon key point. [keySection, pointName, group, label]
        const beaconChecks = [
            ["tau",   "g1_s",   "G1", "tauG1_s"],
            ["tau",   "g1_sx",  "G1", "tauG1_sx"],
            ["tau",   "g2_spx", "G2", "tauG2_spx"],
            ["alpha", "g1_s",   "G1", "alphaG1_s"],
            ["alpha", "g1_sx",  "G1", "alphaG1_sx"],
            ["alpha", "g2_spx", "G2", "alphaG2_spx"],
            ["beta",  "g1_s",   "G1", "betaG1_s"],
            ["beta",  "g1_sx",  "G1", "betaG1_sx"],
            ["beta",  "g2_spx", "G2", "betaG2_spx"],
        ];
        for (const [kName, pName, gName, label] of beaconChecks) {
            if (!curve[gName].eq(cur.key[kName][pName], beaconKey[kName][pName])) {
                if (logger) logger.error(`BEACON key (${label}) is not generated correctly in challenge #${cur.id} ${cur.name || ""}`);
                return false;
            }
        }
    }

    // Recompute the deterministic g2_sp points from the previous challenge.
    cur.key.tau.g2_sp = curve.G2.toAffine(getG2sp(curve, 0, prev.nextChallenge, cur.key.tau.g1_s, cur.key.tau.g1_sx));
    cur.key.alpha.g2_sp = curve.G2.toAffine(getG2sp(curve, 1, prev.nextChallenge, cur.key.alpha.g1_s, cur.key.alpha.g1_sx));
    cur.key.beta.g2_sp = curve.G2.toAffine(getG2sp(curve, 2, prev.nextChallenge, cur.key.beta.g1_s, cur.key.beta.g1_sx));

    // Key self-consistency: each secret is used consistently in G1 and G2.
    sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID key (tau) in challenge #" + cur.id);
        return false;
    }

    sr = await sameRatio$1(curve, cur.key.alpha.g1_s, cur.key.alpha.g1_sx, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID key (alpha) in challenge #" + cur.id);
        return false;
    }

    sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID key (beta) in challenge #" + cur.id);
        return false;
    }

    // Chain checks: the new accumulator points must follow from the previous
    // contribution by exactly the secret embedded in the published key.
    sr = await sameRatio$1(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID tau*G1. challenge #" + cur.id + " It does not follow the previous contribution");
        return false;
    }

    sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2);
    if (sr !== true) {
        if (logger) logger.error("INVALID tau*G2. challenge #" + cur.id + " It does not follow the previous contribution");
        return false;
    }

    sr = await sameRatio$1(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID alpha*G1. challenge #" + cur.id + " It does not follow the previous contribution");
        return false;
    }

    sr = await sameRatio$1(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID beta*G1. challenge #" + cur.id + " It does not follow the previous contribution");
        return false;
    }

    sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, prev.betaG2, cur.betaG2);
    if (sr !== true) {
        // fixed: missing space before "It" in the original message
        if (logger) logger.error("INVALID beta*G2. challenge #" + cur.id + " It does not follow the previous contribution");
        return false;
    }

    if (logger) logger.info("Powers Of tau file OK!");
    return true;
}
/**
 * Fully verifies a ptau file: the last contribution, the consistency of all
 * power sections (via random linear combinations), the recorded challenge
 * hash chain, every previous contribution, and — when present — the phase2
 * precalculated Lagrange sections.
 *
 * @param {string} tauFilename  ptau file to verify.
 * @param {object} [logger]     Optional logger.
 * @returns {boolean} true when the file verifies, false otherwise.
 */
async function verify(tauFilename, logger) {
    let sr;
    await Blake2b.ready();

    const { fd, sections } = await readBinFile$1(tauFilename, "ptau", 1);
    const { curve, power, ceremonyPower } = await readPTauHeader(fd, sections);
    const contrs = await readContributions(fd, curve, sections);

    if (logger) logger.debug("power: 2**" + power);

    // Verify Last contribution
    if (logger) logger.debug("Computing initial contribution hash");
    const initialContribution = {
        tauG1: curve.G1.g,
        tauG2: curve.G2.g,
        alphaG1: curve.G1.g,
        betaG1: curve.G1.g,
        betaG2: curve.G2.g,
        nextChallenge: calculateFirstChallengeHash(curve, ceremonyPower, logger),
        responseHash: Blake2b(64).digest()
    };

    if (contrs.length == 0) {
        if (logger) logger.error("This file has no contribution! It cannot be used in production");
        return false;
    }

    let prevContr;
    if (contrs.length > 1) {
        prevContr = contrs[contrs.length - 2];
    } else {
        prevContr = initialContribution;
    }
    const curContr = contrs[contrs.length - 1];
    if (logger) logger.debug("Validating contribution #" + contrs[contrs.length - 1].id);
    const res = await verifyContribution(curve, curContr, prevContr, logger);
    if (!res) return false;

    const nextContributionHasher = Blake2b(64);
    nextContributionHasher.update(curContr.responseHash);

    // Verify powers and compute nextChallengeHash

    // Verify Section tau*G1
    if (logger) logger.debug("Verifying powers in tau*G1 section");
    const rTau1 = await processSection(2, "G1", "tauG1", (1 << power) * 2 - 1, [0, 1], logger);
    sr = await sameRatio$1(curve, rTau1.R1, rTau1.R2, curve.G2.g, curContr.tauG2);
    if (sr !== true) {
        if (logger) logger.error("tauG1 section. Powers do not match");
        return false;
    }
    if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) {
        if (logger) logger.error("First element of tau*G1 section must be the generator");
        return false;
    }
    if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) {
        if (logger) logger.error("Second element of tau*G1 section does not match the one in the contribution section");
        return false;
    }

    // Verify Section tau*G2
    if (logger) logger.debug("Verifying powers in tau*G2 section");
    const rTau2 = await processSection(3, "G2", "tauG2", 1 << power, [0, 1], logger);
    sr = await sameRatio$1(curve, curve.G1.g, curContr.tauG1, rTau2.R1, rTau2.R2);
    if (sr !== true) {
        if (logger) logger.error("tauG2 section. Powers do not match");
        return false;
    }
    if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) {
        if (logger) logger.error("First element of tau*G2 section must be the generator");
        return false;
    }
    if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) {
        if (logger) logger.error("Second element of tau*G2 section does not match the one in the contribution section");
        return false;
    }

    // Verify Section alpha*tau*G1
    if (logger) logger.debug("Verifying powers in alpha*tau*G1 section");
    const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 1 << power, [0], logger);
    sr = await sameRatio$1(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, curve.G2.g, curContr.tauG2);
    if (sr !== true) {
        if (logger) logger.error("alphaTauG1 section. Powers do not match");
        return false;
    }
    if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) {
        if (logger) logger.error("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section");
        return false;
    }

    // Verify Section beta*tau*G1
    if (logger) logger.debug("Verifying powers in beta*tau*G1 section");
    const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 1 << power, [0], logger);
    sr = await sameRatio$1(curve, rBetaTauG1.R1, rBetaTauG1.R2, curve.G2.g, curContr.tauG2);
    if (sr !== true) {
        if (logger) logger.error("betaTauG1 section. Powers do not match");
        return false;
    }
    if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) {
        if (logger) logger.error("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section");
        return false;
    }

    // Verify Beta G2
    const betaG2 = await processSectionBetaG2(logger);
    if (!curve.G2.eq(curContr.betaG2, betaG2)) {
        if (logger) logger.error("betaG2 element in betaG2 section does not match the one in the contribution section");
        return false;
    }

    const nextContributionHash = nextContributionHasher.digest();

    // Check the nextChallengeHash
    if (!hashIsEqual(nextContributionHash, curContr.nextChallenge)) {
        if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
        return false;
    }

    if (logger) logger.info(formatHash(nextContributionHash, "Next challenge hash: "));

    // Verify Previous contributions

    printContribution(curContr, prevContr);
    for (let i = contrs.length - 2; i >= 0; i--) {
        const curContr = contrs[i];
        const prevContr = (i > 0) ? contrs[i - 1] : initialContribution;
        const res = await verifyContribution(curve, curContr, prevContr, logger);
        if (!res) return false;
        printContribution(curContr, prevContr);
    }
    if (logger) logger.info("-----------------------------------------------------");

    if ((!sections[12]) || (!sections[13]) || (!sections[14]) || (!sections[15])) {
        if (logger) logger.warn(
            "this file does not contain phase2 precalculated values. Please run: \n" +
            "   snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony."
        );
    } else {
        let res;
        res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1", logger);
        if (!res) return false;
        res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2", logger);
        if (!res) return false;
        res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1", logger);
        if (!res) return false;
        res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1", logger);
        if (!res) return false;
    }

    await fd.close();

    return true;

    // Logs the identifying hashes of one contribution (no-op without a logger).
    function printContribution(curContr, prevContr) {
        if (!logger) return;
        logger.info("-----------------------------------------------------");
        logger.info(`Contribution #${curContr.id}: ${curContr.name || ""}`);
        logger.info(formatHash(curContr.nextChallenge, "Next Challenge: "));

        const buffV = new Uint8Array(curve.G1.F.n8 * 2 * 6 + curve.G2.F.n8 * 2 * 3);
        toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false);

        // Re-derive the response hash from the stored partial hash state + key.
        const responseHasher = Blake2b(64);
        responseHasher.setPartialHash(curContr.partialHash);
        responseHasher.update(buffV);
        const responseHash = responseHasher.digest();

        logger.info(formatHash(responseHash, "Response Hash:"));

        // fixed: this line was mislabeled "Response Hash:" in the original
        logger.info(formatHash(prevContr.nextChallenge, "Prev Challenge:"));

        if (curContr.type == 1) {
            logger.info(`Beacon generator: ${byteArray2hex(curContr.beaconHash)}`);
            logger.info(`Beacon iterations Exp: ${curContr.numIterationsExp}`);
        }
    }

    // Reads the single betaG2 point, hashes its uncompressed form, returns it.
    async function processSectionBetaG2(logger) {
        const G = curve.G2;
        const sG = G.F.n8 * 2;
        const buffUv = new Uint8Array(sG);

        if (!sections[6]) {
            logger.error("File has no BetaG2 section");
            throw new Error("File has no BetaG2 section");
        }
        if (sections[6].length > 1) {
            // fixed: original logged the wrong message ("no BetaG2 section")
            // and the throw contained the typo "GetaG2".
            logger.error("File has more than one BetaG2 section");
            throw new Error("File has more than one BetaG2 section");
        }

        fd.pos = sections[6][0].p;
        const buff = await fd.read(sG);
        const P = G.fromRprLEM(buff);

        G.toRprUncompressed(buffUv, 0, P);
        nextContributionHasher.update(buffUv);

        return P;
    }

    // Streams a section, feeding its uncompressed form to nextContributionHasher,
    // and builds two random linear combinations R1 (bases 0..k-1) and R2 (bases
    // 1..k) so a single sameRatio pairing check validates all consecutive pairs.
    // Also extracts the points at singularPointIndexes.
    async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes, logger) {
        const MAX_CHUNK_SIZE = 1 << 16;
        const G = curve[groupName];
        const sG = G.F.n8 * 2;
        await startReadUniqueSection$1(fd, sections, idSection);

        const singularPoints = [];

        let R1 = G.zero;
        let R2 = G.zero;

        let lastBase = G.zero;

        for (let i = 0; i < nPoints; i += MAX_CHUNK_SIZE) {
            if (logger) logger.debug(`points relations: ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
            const bases = await fd.read(n * sG);

            const basesU = await G.batchLEMtoU(bases);
            nextContributionHasher.update(basesU);

            const scalars = new Uint8Array(4 * (n - 1));
            crypto.randomFillSync(scalars);

            if (i > 0) {
                // Stitch the boundary pair between the previous chunk and this one.
                const firstBase = G.fromRprLEM(bases, 0);
                const r = crypto.randomBytes(4).readUInt32BE(0);

                R1 = G.add(R1, G.timesScalar(lastBase, r));
                R2 = G.add(R2, G.timesScalar(firstBase, r));
            }

            const r1 = await G.multiExpAffine(bases.slice(0, (n - 1) * sG), scalars);
            const r2 = await G.multiExpAffine(bases.slice(sG), scalars);

            R1 = G.add(R1, r1);
            R2 = G.add(R2, r2);

            lastBase = G.fromRprLEM(bases, (n - 1) * sG);

            for (let j = 0; j < singularPointIndexes.length; j++) {
                const sp = singularPointIndexes[j];
                if ((sp >= i) && (sp < i + n)) {
                    const P = G.fromRprLEM(bases, (sp - i) * sG);
                    singularPoints.push(P);
                }
            }
        }
        await endReadSection$1(fd);

        return {
            R1: R1,
            R2: R2,
            singularPoints: singularPoints
        };
    }

    // Checks a phase2 Lagrange section against its tau section: evaluating a
    // random polynomial in the power basis and in the Lagrange basis (via FFT
    // of the coefficients) must give the same group element for every power.
    async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName, logger) {
        if (logger) logger.debug(`Verifying phase2 calculated values ${sectionName}...`);
        const G = curve[gName];
        const sG = G.F.n8 * 2;

        const seed = new Array(8);
        for (let i = 0; i < 8; i++) {
            seed[i] = crypto.randomBytes(4).readUInt32BE(0);
        }
        const rng = new ffjavascript.ChaCha(seed);

        for (let p = 0; p <= power; p++) {
            const res = await verifyPower(p);
            if (!res) return false;
        }
        return true;

        async function verifyPower(p) {
            if (logger) logger.debug(`Power ${p}...`);
            const n8r = curve.Fr.n8;
            const nPoints = 1 << p;
            let buff_r = new Uint8Array(nPoints * n8r);
            let buffG;

            for (let i = 0; i < nPoints; i++) {
                const e = curve.Fr.fromRng(rng);
                curve.Fr.toRprLE(buff_r, i * n8r, e);
            }

            await startReadUniqueSection$1(fd, sections, tauSection);
            buffG = await fd.read(nPoints * sG);
            await endReadSection$1(fd, true);

            const resTau = await G.multiExpAffine(buffG, buff_r);

            buff_r = await curve.Fr.batchToMontgomery(buff_r);
            buff_r = await curve.Fr.fft(buff_r);
            buff_r = await curve.Fr.batchFromMontgomery(buff_r);

            await startReadUniqueSection$1(fd, sections, lagrangeSection);
            fd.pos += sG * ((1 << p) - 1); // skip to the sub-block for this power
            buffG = await fd.read(nPoints * sG);
            await endReadSection$1(fd, true);

            const resLagrange = await G.multiExpAffine(buffG, buff_r);

            if (!G.eq(resTau, resLagrange)) {
                if (logger) logger.error("Phase2 calculation does not match with powers of tau");
                return false;
            }

            return true;
        }
    }
}
/*
    This function creates a new section in the fdTo file with id idSection.
    It multiplies the points in fdFrom by first, first*inc, first*inc^2, ...,
    nPoint times.

    (The companion applyKeyToChallengeSection below additionally feeds the
    rewritten points into a response hasher.)
*/
/**
 * Copies section `idSection` from fdOld into fdNew, replacing the i-th point P_i
 * with (first * inc^i) * P_i. The section size determines the point count.
 *
 * @param {object} fdOld       Source ptau file descriptor.
 * @param {Array}  sections    Section index of the source file.
 * @param {object} fdNew       Destination ptau file descriptor.
 * @param {number} idSection   Section id to rewrite.
 * @param {object} curve       Curve object.
 * @param {string} groupName   "G1" or "G2".
 * @param {*}      first       Scalar applied to the first point.
 * @param {*}      inc         Multiplicative increment between consecutive points.
 * @param {string} sectionName Name used in progress logging.
 * @param {object} [logger]    Optional logger.
 */
async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, logger) {
    const CHUNK_POINTS = 1 << 16;
    const G = curve[groupName];
    const sG = G.F.n8 * 2;
    const nPoints = sections[idSection][0].size / sG;

    await startReadUniqueSection$1(fdOld, sections, idSection);
    await startWriteSection(fdNew, idSection);

    let scalar = first;
    let done = 0;
    while (done < nPoints) {
        if (logger) logger.debug(`Applying key: ${sectionName}: ${done}/${nPoints}`);
        const n = Math.min(nPoints - done, CHUNK_POINTS);

        const chunk = await fdOld.read(n * sG);
        const rewritten = await G.batchApplyKey(chunk, scalar, inc);
        await fdNew.write(rewritten);

        // Advance the running scalar past the n points just processed.
        scalar = curve.Fr.mul(scalar, curve.Fr.exp(inc, n));
        done += n;
    }

    await endWriteSection(fdNew);
    await endReadSection$1(fdOld);
}
/**
 * Applies a key to one section of a challenge/response stream: reads nPoints
 * uncompressed points from fdOld, multiplies the i-th point by first * inc^i,
 * writes the result to fdNew either compressed ("COMPRESSED") or uncompressed,
 * and feeds the written bytes to responseHasher when one is provided.
 *
 * @param {object} fdOld          Source file descriptor (uncompressed points).
 * @param {object} fdNew          Destination file descriptor.
 * @param {object} [responseHasher] Optional hasher updated with the output bytes.
 * @param {object} curve          Curve object.
 * @param {string} groupName      "G1" or "G2".
 * @param {number} nPoints        Number of points in the section.
 * @param {*}      first          Scalar applied to the first point.
 * @param {*}      inc            Multiplicative increment between points.
 * @param {string} formatOut      "COMPRESSED" or anything else for uncompressed.
 * @param {string} sectionName    Name used in progress logging.
 * @param {object} [logger]       Optional logger.
 */
async function applyKeyToChallengeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, logger) {
    const G = curve[groupName];
    const sG = G.F.n8 * 2;
    const chunkSize = Math.floor((1 << 20) / sG);   // ~1 MiB of points per chunk (fixed: comment said 128Mb)
    let t = first;
    for (let i = 0; i < nPoints; i += chunkSize) {
        if (logger) logger.debug(`Applying key ${sectionName}: ${i}/${nPoints}`);
        const n = Math.min(nPoints - i, chunkSize);
        const buffInU = await fdOld.read(n * sG);
        const buffInLEM = await G.batchUtoLEM(buffInU);
        const buffOutLEM = await G.batchApplyKey(buffInLEM, t, inc);
        let buffOut;
        if (formatOut == "COMPRESSED") {
            buffOut = await G.batchLEMtoC(buffOutLEM);
        } else {
            buffOut = await G.batchLEMtoU(buffOutLEM);
        }

        if (responseHasher) responseHasher.update(buffOut);
        await fdNew.write(buffOut);
        // Advance the running scalar past the n points just processed.
        t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
    }
}
// Format of the output
async function challengeContribute ( curve , challengeFilename , responesFileName , entropy , logger ) {
2020-07-11 11:31:52 +03:00
await Blake2b . ready ( ) ;
2020-07-26 15:05:23 +03:00
const fdFrom = await readExisting$3 ( challengeFilename ) ;
2020-07-11 11:31:52 +03:00
const sG1 = curve . F1 . n64 * 8 * 2 ;
const sG2 = curve . F2 . n64 * 8 * 2 ;
const domainSize = ( fdFrom . totalSize + sG1 - 64 - sG2 ) / ( 4 * sG1 + sG2 ) ;
let e = domainSize ;
let power = 0 ;
while ( e > 1 ) {
e = e / 2 ;
power += 1 ;
}
if ( 1 << power != domainSize ) throw new Error ( "Invalid file size" ) ;
if ( logger ) logger . debug ( "Power to tau size: " + power ) ;
const rng = await getRandomRng ( entropy ) ;
const fdTo = await createOverride ( responesFileName ) ;
// Calculate the hash
2020-07-14 12:55:12 +03:00
const challengeHasher = Blake2b ( 64 ) ;
2020-07-11 11:31:52 +03:00
for ( let i = 0 ; i < fdFrom . totalSize ; i += fdFrom . pageSize ) {
2020-07-15 07:30:52 +03:00
if ( logger ) logger . debug ( ` Hashing challenge ${ i } / ${ fdFrom . totalSize } ` ) ;
2020-07-11 11:31:52 +03:00
const s = Math . min ( fdFrom . totalSize - i , fdFrom . pageSize ) ;
const buff = await fdFrom . read ( s ) ;
2020-07-14 12:55:12 +03:00
challengeHasher . update ( buff ) ;
2020-07-11 11:31:52 +03:00
}
const claimedHash = await fdFrom . read ( 64 , 0 ) ;
if ( logger ) logger . info ( formatHash ( claimedHash , "Claimed Previus Response Hash: " ) ) ;
2020-07-14 12:55:12 +03:00
const challengeHash = challengeHasher . digest ( ) ;
if ( logger ) logger . info ( formatHash ( challengeHash , "Current Challenge Hash: " ) ) ;
2020-07-11 11:31:52 +03:00
2020-07-14 12:55:12 +03:00
const key = createPTauKey ( curve , challengeHash , rng ) ;
2020-07-11 11:31:52 +03:00
if ( logger ) {
[ "tau" , "alpha" , "beta" ] . forEach ( ( k ) => {
logger . debug ( k + ".g1_s: " + curve . G1 . toString ( key [ k ] . g1 _s , 16 ) ) ;
logger . debug ( k + ".g1_sx: " + curve . G1 . toString ( key [ k ] . g1 _sx , 16 ) ) ;
logger . debug ( k + ".g2_sp: " + curve . G2 . toString ( key [ k ] . g2 _sp , 16 ) ) ;
logger . debug ( k + ".g2_spx: " + curve . G2 . toString ( key [ k ] . g2 _spx , 16 ) ) ;
logger . debug ( "" ) ;
} ) ;
}
const responseHasher = Blake2b ( 64 ) ;
2020-07-14 12:55:12 +03:00
await fdTo . write ( challengeHash ) ;
responseHasher . update ( challengeHash ) ;
2020-07-11 11:31:52 +03:00
2020-07-14 12:55:12 +03:00
await applyKeyToChallengeSection ( fdFrom , fdTo , responseHasher , curve , "G1" , ( 1 << power ) * 2 - 1 , curve . Fr . one , key . tau . prvKey , "COMPRESSED" , "tauG1" , logger ) ;
await applyKeyToChallengeSection ( fdFrom , fdTo , responseHasher , curve , "G2" , ( 1 << power ) , curve . Fr . one , key . tau . prvKey , "COMPRESSED" , "tauG2" , logger ) ;
await applyKeyToChallengeSection ( fdFrom , fdTo , responseHasher , curve , "G1" , ( 1 << power ) , key . alpha . prvKey , key . tau . prvKey , "COMPRESSED" , "alphaTauG1" , logger ) ;
await applyKeyToChallengeSection ( fdFrom , fdTo , responseHasher , curve , "G1" , ( 1 << power ) , key . beta . prvKey , key . tau . prvKey , "COMPRESSED" , "betaTauG1" , logger ) ;
await applyKeyToChallengeSection ( fdFrom , fdTo , responseHasher , curve , "G2" , 1 , key . beta . prvKey , key . tau . prvKey , "COMPRESSED" , "betaTauG2" , logger ) ;
2020-07-11 11:31:52 +03:00
// Write and hash key
const buffKey = new Uint8Array ( curve . F1 . n8 * 2 * 6 + curve . F2 . n8 * 2 * 3 ) ;
toPtauPubKeyRpr ( buffKey , 0 , curve , key , false ) ;
await fdTo . write ( buffKey ) ;
responseHasher . update ( buffKey ) ;
const responseHash = responseHasher . digest ( ) ;
if ( logger ) logger . info ( formatHash ( responseHash , "Contribution Response Hash: " ) ) ;
await fdTo . close ( ) ;
await fdFrom . close ( ) ;
}
// Applies a random-beacon contribution to a ptau file. The key is derived
// deterministically from (lastChallengeHash, beaconHash, numIterationsExp),
// so anyone can verify the beacon was applied honestly.
// Returns the response hash on success, false on invalid arguments.
async function beacon(oldPtauFilename, newPTauFilename, name, beaconHashStr, numIterationsExp, logger) {
    const beaconHash = hex2ByteArray(beaconHashStr);
    if ((beaconHash.byteLength == 0)
        || (beaconHash.byteLength * 2 != beaconHashStr.length)) {
        if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
        return false;
    }
    if (beaconHash.length >= 256) {
        if (logger) logger.error("Maximum length of beacon hash is 255 bytes");
        return false;
    }

    numIterationsExp = parseInt(numIterationsExp, 10);
    if ((numIterationsExp < 10) || (numIterationsExp > 63)) {
        if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
        return false;
    }

    await Blake2b.ready();

    const {fd: fdOld, sections} = await readBinFile$1(oldPtauFilename, "ptau", 1);
    const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
    if (power != ceremonyPower) {
        if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
        return false;
    }
    if (sections[12]) {
        if (logger) logger.warn("Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
    }
    const contributions = await readContributions(fdOld, curve, sections);
    const curContribution = {
        name: name,
        type: 1, // Beacon
        numIterationsExp: numIterationsExp,
        beaconHash: beaconHash
    };

    // The beacon key is derived from the challenge of the last contribution,
    // or from the genesis challenge when the ceremony is untouched.
    let lastChallengeHash;
    if (contributions.length > 0) {
        lastChallengeHash = contributions[contributions.length - 1].nextChallenge;
    } else {
        lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
    }

    curContribution.key = keyFromBeacon(curve, lastChallengeHash, beaconHash, numIterationsExp);

    const responseHasher = new Blake2b(64);
    responseHasher.update(lastChallengeHash);

    const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 7);
    await writePTauHeader(fdNew, curve, power);

    // Start offset of each rewritten section, consumed later by hashSection.
    const startSections = [];

    let firstPoints;
    firstPoints = await processSection(2, "G1", (1 << power) * 2 - 1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1", logger);
    curContribution.tauG1 = firstPoints[1];
    firstPoints = await processSection(3, "G2", (1 << power), curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2", logger);
    curContribution.tauG2 = firstPoints[1];
    firstPoints = await processSection(4, "G1", (1 << power), curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1", logger);
    curContribution.alphaG1 = firstPoints[0];
    firstPoints = await processSection(5, "G1", (1 << power), curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1", logger);
    curContribution.betaG1 = firstPoints[0];
    firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2", logger);
    curContribution.betaG2 = firstPoints[0];

    curContribution.partialHash = responseHasher.getPartialHash();

    const buffKey = new Uint8Array(curve.F1.n8 * 2 * 6 + curve.F2.n8 * 2 * 3);
    toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);
    responseHasher.update(new Uint8Array(buffKey));
    const hashResponse = responseHasher.digest();

    if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));

    // The next challenge hash covers the response hash followed by every new
    // section in uncompressed (U) form.
    const nextChallengeHasher = new Blake2b(64);
    nextChallengeHasher.update(hashResponse);

    await hashSection(fdNew, "G1", 2, (1 << power) * 2 - 1, "tauG1", logger);
    await hashSection(fdNew, "G2", 3, (1 << power), "tauG2", logger);
    await hashSection(fdNew, "G1", 4, (1 << power), "alphaTauG1", logger);
    await hashSection(fdNew, "G1", 5, (1 << power), "betaTauG1", logger);
    await hashSection(fdNew, "G2", 6, 1, "betaG2", logger);

    curContribution.nextChallenge = nextChallengeHasher.digest();

    if (logger) logger.info(formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));

    contributions.push(curContribution);

    await writeContributions(fdNew, curve, contributions);

    await fdOld.close();
    await fdNew.close();

    return hashResponse;

    // Re-encrypts one ptau section with successive powers of `inc` starting at
    // `first`; folds the compressed output into responseHasher and returns up
    // to the first two rewritten points (in LEM form).
    async function processSection(sectionId, groupName, NPoints, first, inc, sectionName, logger) {
        const res = [];
        fdOld.pos = sections[sectionId][0].p;

        await startWriteSection(fdNew, sectionId);

        startSections[sectionId] = fdNew.pos;

        const G = curve[groupName];
        const sG = G.F.n8 * 2;
        const chunkSize = Math.floor((1 << 20) / sG);   // ~1MB of points per chunk

        let t = first;
        for (let i = 0; i < NPoints; i += chunkSize) {
            if (logger) logger.debug(`applying key ${sectionName}: ${i}/${NPoints}`);
            const n = Math.min(NPoints - i, chunkSize);
            const buffIn = await fdOld.read(n * sG);
            const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);

            /* Code to test the case where we don't have the 2^m-2 component
            if (sectionName== "tauG1") {
                const bz = new Uint8Array(64);
                buffOutLEM.set(bz, 64*((1 << power)-1));
            }
            */

            // Overlap the file write with the LEM->compressed conversion.
            const promiseWrite = fdNew.write(buffOutLEM);
            const buffOutC = await G.batchLEMtoC(buffOutLEM);

            responseHasher.update(buffOutC);
            await promiseWrite;
            if (i == 0)   // Return the 2 first points.
                for (let j = 0; j < Math.min(2, NPoints); j++)
                    res.push(G.fromRprLEM(buffOutLEM, j * sG));
            t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
        }

        await endWriteSection(fdNew);

        return res;
    }

    // Re-reads a just-written section and folds its uncompressed form into
    // nextChallengeHasher; restores the file position afterwards.
    async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) {
        const G = curve[groupName];
        const sG = G.F.n8 * 2;
        const nPointsChunk = Math.floor((1 << 24) / sG);

        const oldPos = fdTo.pos;
        fdTo.pos = startSections[sectionId];

        for (let i = 0; i < nPoints; i += nPointsChunk) {
            if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints - i, nPointsChunk);
            const buffLEM = await fdTo.read(n * sG);
            const buffU = await G.batchLEMtoU(buffLEM);

            nextChallengeHasher.update(buffU);
        }
        fdTo.pos = oldPos;
    }
}
// Format of the output
// Applies a regular (entropy-based) contribution to a ptau file: derives a
// random key bound to the previous challenge hash, rewrites all point
// sections with it, records the contribution metadata and returns the
// response hash. Throws if the file has been reduced.
async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logger) {
    await Blake2b.ready();

    const {fd: fdOld, sections} = await readBinFile$1(oldPtauFilename, "ptau", 1);
    const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
    if (power != ceremonyPower) {
        if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
        throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
    }
    if (sections[12]) {
        if (logger) logger.warn("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
    }
    const contributions = await readContributions(fdOld, curve, sections);
    const curContribution = {
        name: name,
        type: 0, // Regular contribution (type 1 is a beacon)
    };

    // Challenge the new key is bound to: last contribution's, or the genesis
    // challenge for a fresh ceremony.
    let lastChallengeHash;

    const rng = await getRandomRng(entropy);

    if (contributions.length > 0) {
        lastChallengeHash = contributions[contributions.length - 1].nextChallenge;
    } else {
        lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
    }

    // Generate a random key
    curContribution.key = createPTauKey(curve, lastChallengeHash, rng);

    const responseHasher = new Blake2b(64);
    responseHasher.update(lastChallengeHash);

    const fdNew = await createBinFile(newPTauFilename, "ptau", 1, 7);
    await writePTauHeader(fdNew, curve, power);

    // Start offset of each rewritten section, consumed later by hashSection.
    const startSections = [];

    let firstPoints;
    firstPoints = await processSection(2, "G1", (1 << power) * 2 - 1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1");
    curContribution.tauG1 = firstPoints[1];
    firstPoints = await processSection(3, "G2", (1 << power), curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2");
    curContribution.tauG2 = firstPoints[1];
    firstPoints = await processSection(4, "G1", (1 << power), curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1");
    curContribution.alphaG1 = firstPoints[0];
    firstPoints = await processSection(5, "G1", (1 << power), curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1");
    curContribution.betaG1 = firstPoints[0];
    firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2");
    curContribution.betaG2 = firstPoints[0];

    curContribution.partialHash = responseHasher.getPartialHash();

    const buffKey = new Uint8Array(curve.F1.n8 * 2 * 6 + curve.F2.n8 * 2 * 3);
    toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);
    responseHasher.update(new Uint8Array(buffKey));
    const hashResponse = responseHasher.digest();

    if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));

    // The next challenge hash covers the response hash followed by every new
    // section in uncompressed (U) form.
    const nextChallengeHasher = new Blake2b(64);
    nextChallengeHasher.update(hashResponse);

    await hashSection(fdNew, "G1", 2, (1 << power) * 2 - 1, "tauG1");
    await hashSection(fdNew, "G2", 3, (1 << power), "tauG2");
    await hashSection(fdNew, "G1", 4, (1 << power), "alphaTauG1");
    await hashSection(fdNew, "G1", 5, (1 << power), "betaTauG1");
    await hashSection(fdNew, "G2", 6, 1, "betaG2");

    curContribution.nextChallenge = nextChallengeHasher.digest();

    if (logger) logger.info(formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));

    contributions.push(curContribution);

    await writeContributions(fdNew, curve, contributions);

    await fdOld.close();
    await fdNew.close();

    return hashResponse;

    // Re-encrypts one ptau section with successive powers of `inc` starting at
    // `first`; folds the compressed output into responseHasher and returns up
    // to the first two rewritten points (in LEM form).
    async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) {
        const res = [];
        fdOld.pos = sections[sectionId][0].p;

        await startWriteSection(fdNew, sectionId);

        startSections[sectionId] = fdNew.pos;

        const G = curve[groupName];
        const sG = G.F.n8 * 2;
        const chunkSize = Math.floor((1 << 20) / sG);   // ~1MB of points per chunk

        let t = first;
        for (let i = 0; i < NPoints; i += chunkSize) {
            if (logger) logger.debug(`processing: ${sectionName}: ${i}/${NPoints}`);
            const n = Math.min(NPoints - i, chunkSize);
            const buffIn = await fdOld.read(n * sG);
            const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);

            /* Code to test the case where we don't have the 2^m-2 component
            if (sectionName== "tauG1") {
                const bz = new Uint8Array(64);
                buffOutLEM.set(bz, 64*((1 << power)-1));
            }
            */

            // Overlap the file write with the LEM->compressed conversion.
            const promiseWrite = fdNew.write(buffOutLEM);
            const buffOutC = await G.batchLEMtoC(buffOutLEM);

            responseHasher.update(buffOutC);
            await promiseWrite;
            if (i == 0)   // Return the 2 first points.
                for (let j = 0; j < Math.min(2, NPoints); j++)
                    res.push(G.fromRprLEM(buffOutLEM, j * sG));
            t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
        }

        await endWriteSection(fdNew);

        return res;
    }

    // Re-reads a just-written section and folds its uncompressed form into
    // nextChallengeHasher; restores the file position afterwards.
    async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName) {
        const G = curve[groupName];
        const sG = G.F.n8 * 2;
        const nPointsChunk = Math.floor((1 << 24) / sG);

        const oldPos = fdTo.pos;
        fdTo.pos = startSections[sectionId];

        for (let i = 0; i < nPoints; i += nPointsChunk) {
            if ((logger) && i) logger.debug(`Hashing ${sectionName}: ` + i);
            const n = Math.min(nPoints - i, nPointsChunk);
            const buffLEM = await fdTo.read(n * sG);
            const buffU = await G.batchLEMtoU(buffLEM);

            nextChallengeHasher.update(buffU);
        }
        fdTo.pos = oldPos;
    }
}
// Prepares a powers-of-tau file for phase 2: copies the original sections 2-7
// unchanged, then for each of tauG1/tauG2/alphaTauG1/betaTauG1 computes an
// out-of-core inverse FFT for every subgroup size 2^0 .. 2^power and appends
// the results as new sections 12-15.
async function preparePhase2 ( oldPtauFilename , newPTauFilename , logger ) {
const { fd : fdOld , sections } = await readBinFile$1 ( oldPtauFilename , "ptau" , 1 ) ;
const { curve , power } = await readPTauHeader ( fdOld , sections ) ;
// The new file reserves 11 sections: the 7 original plus 4 Lagrange sections.
const fdNew = await createBinFile ( newPTauFilename , "ptau" , 1 , 11 ) ;
await writePTauHeader ( fdNew , curve , power ) ;
// const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
// In-memory scratch "file" holding Jacobian points between FFT passes.
const fdTmp = await createOverride ( { type : "mem" } ) ;
await copySection ( fdOld , sections , fdNew , 2 ) ;
await copySection ( fdOld , sections , fdNew , 3 ) ;
await copySection ( fdOld , sections , fdNew , 4 ) ;
await copySection ( fdOld , sections , fdNew , 5 ) ;
await copySection ( fdOld , sections , fdNew , 6 ) ;
await copySection ( fdOld , sections , fdNew , 7 ) ;
await processSection ( 2 , 12 , "G1" , "tauG1" ) ;
await processSection ( 3 , 13 , "G2" , "tauG2" ) ;
await processSection ( 4 , 14 , "G1" , "alphaTauG1" ) ;
await processSection ( 5 , 15 , "G1" , "betaTauG1" ) ;
await fdOld . close ( ) ;
await fdNew . close ( ) ;
await fdTmp . close ( ) ;
// await fs.promises.unlink(newPTauFilename+ ".tmp");
return ;
// Writes section newSectionId containing, for each p in 0..power, the inverse
// FFT of the first 2^p points of oldSectionId.
async function processSection ( oldSectionId , newSectionId , Gstr , sectionName ) {
// At most 2^CHUNKPOW points are held in memory at a time.
const CHUNKPOW = 16 ;
if ( logger ) logger . debug ( "Starting section: " + sectionName ) ;
await startWriteSection ( fdNew , newSectionId ) ;
for ( let p = 0 ; p <= power ; p ++ ) {
await processSectionPower ( p ) ;
}
await endWriteSection ( fdNew ) ;
// One inverse FFT of size 2^p, computed chunk-by-chunk over fdTmp.
async function processSectionPower ( p ) {
const chunkPower = p > CHUNKPOW ? CHUNKPOW : p ;
const pointsPerChunk = 1 << chunkPower ;
const nPoints = 1 << p ;
const nChunks = nPoints / pointsPerChunk ;
const G = curve [ Gstr ] ;
const Fr = curve . Fr ;
const PFr = curve . PFr ;
// sGin: size of an input affine point; sGmid: size of a Jacobian point.
const sGin = G . F . n8 * 2 ;
const sGmid = G . F . n8 * 3 ;
await startReadUniqueSection$1 ( fdOld , sections , oldSectionId ) ;
// Build the initial tmp Buff
// Read the points, convert them to Jacobian and scatter them into fdTmp in
// bit-reversed order (the layout the in-chunk FFT below expects).
fdTmp . pos = 0 ;
for ( let i = 0 ; i < nChunks ; i ++ ) {
let buff ;
if ( logger ) logger . debug ( ` ${ sectionName } Prepare ${ i + 1 } / ${ nChunks } ` ) ;
buff = await fdOld . read ( pointsPerChunk * sGin ) ;
buff = await G . batchToJacobian ( buff ) ;
for ( let j = 0 ; j < pointsPerChunk ; j ++ ) {
fdTmp . pos = bitReverse ( i * pointsPerChunk + j , p ) * sGmid ;
await fdTmp . write ( buff . slice ( j * sGmid , ( j + 1 ) * sGmid ) ) ;
}
}
await endReadSection$1 ( fdOld , true ) ;
// First FFT stage: mix each chunk independently in place.
for ( let j = 0 ; j < nChunks ; j ++ ) {
if ( logger ) logger . debug ( ` ${ sectionName } ${ p } FFTMix ${ j + 1 } / ${ nChunks } ` ) ;
let buff ;
fdTmp . pos = ( j * pointsPerChunk ) * sGmid ;
buff = await fdTmp . read ( pointsPerChunk * sGmid ) ;
buff = await G . fftMix ( buff ) ;
fdTmp . pos = ( j * pointsPerChunk ) * sGmid ;
await fdTmp . write ( buff ) ;
}
// Remaining stages: join pairs of chunks across progressively larger groups
// using the roots of unity PFr.w[i].
for ( let i = chunkPower + 1 ; i <= p ; i ++ ) {
const nGroups = 1 << ( p - i ) ;
const nChunksPerGroup = nChunks / nGroups ;
for ( let j = 0 ; j < nGroups ; j ++ ) {
for ( let k = 0 ; k < nChunksPerGroup / 2 ; k ++ ) {
if ( logger ) logger . debug ( ` ${ sectionName } ${ i } / ${ p } FFTJoin ${ j + 1 } / ${ nGroups } ${ k } / ${ nChunksPerGroup / 2 } ` ) ;
const first = Fr . pow ( PFr . w [ i ] , k * pointsPerChunk ) ;
const inc = PFr . w [ i ] ;
const o1 = j * nChunksPerGroup + k ;
const o2 = j * nChunksPerGroup + k + nChunksPerGroup / 2 ;
let buff1 , buff2 ;
fdTmp . pos = o1 * pointsPerChunk * sGmid ;
buff1 = await fdTmp . read ( pointsPerChunk * sGmid ) ;
fdTmp . pos = o2 * pointsPerChunk * sGmid ;
buff2 = await fdTmp . read ( pointsPerChunk * sGmid ) ;
[ buff1 , buff2 ] = await G . fftJoin ( buff1 , buff2 , first , inc ) ;
fdTmp . pos = o1 * pointsPerChunk * sGmid ;
await fdTmp . write ( buff1 ) ;
fdTmp . pos = o2 * pointsPerChunk * sGmid ;
await fdTmp . write ( buff2 ) ;
}
}
}
await finalInverse ( p ) ;
}
// Scales every point by 1/2^p, converts back to affine and writes the 2^p
// results into fdNew.
// NOTE(review): the i==0 branch and the (nChunks-1-i) offsets write the
// output in a rotated/reversed order — presumably the index reversal of the
// inverse FFT; confirm against the readers of sections 12-15.
async function finalInverse ( p ) {
const G = curve [ Gstr ] ;
const Fr = curve . Fr ;
const sGmid = G . F . n8 * 3 ;
const sGout = G . F . n8 * 2 ;
const chunkPower = p > CHUNKPOW ? CHUNKPOW : p ;
const pointsPerChunk = 1 << chunkPower ;
const nPoints = 1 << p ;
const nChunks = nPoints / pointsPerChunk ;
const o = fdNew . pos ;
fdTmp . pos = 0 ;
// 1/2^p: the inverse-FFT normalization factor.
const factor = Fr . inv ( Fr . e ( 1 << p ) ) ;
for ( let i = 0 ; i < nChunks ; i ++ ) {
if ( logger ) logger . debug ( ` ${ sectionName } ${ p } FFTFinal ${ i + 1 } / ${ nChunks } ` ) ;
let buff ;
buff = await fdTmp . read ( pointsPerChunk * sGmid ) ;
buff = await G . fftFinal ( buff , factor ) ;
if ( i == 0 ) {
fdNew . pos = o ;
await fdNew . write ( buff . slice ( ( pointsPerChunk - 1 ) * sGout ) ) ;
fdNew . pos = o + ( ( nChunks - 1 ) * pointsPerChunk + 1 ) * sGout ;
await fdNew . write ( buff . slice ( 0 , ( pointsPerChunk - 1 ) * sGout ) ) ;
} else {
fdNew . pos = o + ( ( nChunks - 1 - i ) * pointsPerChunk + 1 ) * sGout ;
await fdNew . write ( buff ) ;
}
}
// Leave the write cursor at the end of the section just written.
fdNew . pos = o + nChunks * pointsPerChunk * sGout ;
}
}
}
// Exports the full contents of a ptau file as a plain JS object: header info,
// contribution list, each point section, and (if present) the Lagrange
// sections 12-15. `verbose` enables console progress output.
async function exportJson(pTauFilename, verbose) {
    const {fd, sections} = await readBinFile$1(pTauFilename, "ptau", 1);
    const {curve, power} = await readPTauHeader(fd, sections);

    const pTau = {};
    pTau.q = curve.q;
    pTau.power = power;
    pTau.contributions = await readContributions(fd, curve, sections);

    pTau.tauG1 = await exportSection(2, "G1", (1 << power) * 2 - 1, "tauG1");
    pTau.tauG2 = await exportSection(3, "G2", (1 << power), "tauG2");
    pTau.alphaTauG1 = await exportSection(4, "G1", (1 << power), "alphaTauG1");
    pTau.betaTauG1 = await exportSection(5, "G1", (1 << power), "betaTauG1");
    pTau.betaG2 = await exportSection(6, "G2", 1, "betaG2");
    pTau.lTauG1 = await exportLagrange(12, "G1", "lTauG1");
    pTau.lTauG2 = await exportLagrange(13, "G2", "lTauG2");
    // Progress labels fixed: sections 14 and 15 are G1 but were previously
    // logged as "lAlphaTauG2"/"lBetaTauG2".
    pTau.lAlphaTauG1 = await exportLagrange(14, "G1", "lAlphaTauG1");
    pTau.lBetaTauG1 = await exportLagrange(15, "G1", "lBetaTauG1");

    await fd.close();

    return pTau;

    // Reads nPoints group elements from a unique section, decoded as points.
    async function exportSection(sectionId, groupName, nPoints, sectionName) {
        const G = curve[groupName];
        const sG = G.F.n8 * 2;

        const res = [];
        await startReadUniqueSection$1(fd, sections, sectionId);
        for (let i = 0; i < nPoints; i++) {
            if ((verbose) && i && (i % 10000 == 0)) console.log(`${sectionName}: ` + i);
            const buff = await fd.read(sG);
            res.push(G.fromRprLEM(buff, 0));
        }
        await endReadSection$1(fd);

        return res;
    }

    // Reads a Lagrange section: one array of 2^p points for each p in 0..power.
    async function exportLagrange(sectionId, groupName, sectionName) {
        const G = curve[groupName];
        const sG = G.F.n8 * 2;

        const res = [];
        await startReadUniqueSection$1(fd, sections, sectionId);
        for (let p = 0; p <= power; p++) {
            if (verbose) console.log(`${sectionName}: Power: ${p}`);
            res[p] = [];
            const nPoints = (1 << p);
            for (let i = 0; i < nPoints; i++) {
                if ((verbose) && i && (i % 10000 == 0)) console.log(`${sectionName}: ${i}/${nPoints}`);
                const buff = await fd.read(sG);
                res[p].push(G.fromRprLEM(buff, 0));
            }
        }
        await endReadSection$1(fd);

        return res;
    }
}
async function newZKey ( r1csName , ptauName , zkeyName , logger ) {
await Blake2b . ready ( ) ;
const csHasher = Blake2b ( 64 ) ;
const { fd : fdR1cs , sections : sectionsR1cs } = await readBinFile$1 ( r1csName , "r1cs" , 1 ) ;
const r1cs = await loadHeader ( fdR1cs , sectionsR1cs ) ;
const { fd : fdPTau , sections : sectionsPTau } = await readBinFile$1 ( ptauName , "ptau" , 1 ) ;
const { curve , power } = await readPTauHeader ( fdPTau , sectionsPTau ) ;
const fdZKey = await createBinFile ( zkeyName , "zkey" , 1 , 10 ) ;
const sG1 = curve . G1 . F . n8 * 2 ;
const sG2 = curve . G2 . F . n8 * 2 ;
if ( r1cs . prime != curve . r ) {
if ( logger ) logger . error ( "r1cs curve does not match powers of tau ceremony curve" ) ;
return - 1 ;
}
const cirPower = log2 ( r1cs . nConstraints + r1cs . nPubInputs + r1cs . nOutputs + 1 - 1 ) + 1 ;
if ( cirPower > power ) {
if ( logger ) logger . error ( ` circuit too big for this power of tau ceremony. ${ r1cs . nConstraints } > 2** ${ power } ` ) ;
return - 1 ;
}
if ( ! sectionsPTau [ 12 ] ) {
if ( logger ) logger . error ( "Powers of tau is not prepared." ) ;
return - 1 ;
}
const nPublic = r1cs . nOutputs + r1cs . nPubInputs ;
const domainSize = 1 << cirPower ;
// Write the header
///////////
await startWriteSection ( fdZKey , 1 ) ;
await fdZKey . writeULE32 ( 1 ) ; // Groth
await endWriteSection ( fdZKey ) ;
// Write the Groth header section
///////////
await startWriteSection ( fdZKey , 2 ) ;
const primeQ = curve . q ;
const n8q = ( Math . floor ( ( ffjavascript . Scalar . bitLength ( primeQ ) - 1 ) / 64 ) + 1 ) * 8 ;
const primeR = curve . r ;
const n8r = ( Math . floor ( ( ffjavascript . Scalar . bitLength ( primeR ) - 1 ) / 64 ) + 1 ) * 8 ;
const Rr = ffjavascript . Scalar . mod ( ffjavascript . Scalar . shl ( 1 , n8r * 8 ) , primeR ) ;
const R2r = curve . Fr . e ( ffjavascript . Scalar . mod ( ffjavascript . Scalar . mul ( Rr , Rr ) , primeR ) ) ;
await fdZKey . writeULE32 ( n8q ) ;
await writeBigInt ( fdZKey , primeQ , n8q ) ;
await fdZKey . writeULE32 ( n8r ) ;
await writeBigInt ( fdZKey , primeR , n8r ) ;
await fdZKey . writeULE32 ( r1cs . nVars ) ; // Total number of bars
await fdZKey . writeULE32 ( nPublic ) ; // Total number of public vars (not including ONE)
await fdZKey . writeULE32 ( domainSize ) ; // domainSize
let bAlpha1 ;
bAlpha1 = await fdPTau . read ( sG1 , sectionsPTau [ 4 ] [ 0 ] . p ) ;
await fdZKey . write ( bAlpha1 ) ;
bAlpha1 = await curve . G1 . batchLEMtoU ( bAlpha1 ) ;
csHasher . update ( bAlpha1 ) ;
let bBeta1 ;
bBeta1 = await fdPTau . read ( sG1 , sectionsPTau [ 5 ] [ 0 ] . p ) ;
await fdZKey . write ( bBeta1 ) ;
bBeta1 = await curve . G1 . batchLEMtoU ( bBeta1 ) ;
csHasher . update ( bBeta1 ) ;
let bBeta2 ;
bBeta2 = await fdPTau . read ( sG2 , sectionsPTau [ 6 ] [ 0 ] . p ) ;
await fdZKey . write ( bBeta2 ) ;
bBeta2 = await curve . G2 . batchLEMtoU ( bBeta2 ) ;
csHasher . update ( bBeta2 ) ;
const bg1 = new Uint8Array ( sG1 ) ;
curve . G1 . toRprLEM ( bg1 , 0 , curve . G1 . g ) ;
const bg2 = new Uint8Array ( sG2 ) ;
curve . G2 . toRprLEM ( bg2 , 0 , curve . G2 . g ) ;
const bg1U = new Uint8Array ( sG1 ) ;
curve . G1 . toRprUncompressed ( bg1U , 0 , curve . G1 . g ) ;
const bg2U = new Uint8Array ( sG2 ) ;
curve . G2 . toRprUncompressed ( bg2U , 0 , curve . G2 . g ) ;
await fdZKey . write ( bg2 ) ; // gamma2
await fdZKey . write ( bg1 ) ; // delta1
await fdZKey . write ( bg2 ) ; // delta2
csHasher . update ( bg2U ) ; // gamma2
csHasher . update ( bg1U ) ; // delta1
csHasher . update ( bg2U ) ; // delta2
await endWriteSection ( fdZKey ) ;
const A = new Array ( r1cs . nVars ) ;
const B1 = new Array ( r1cs . nVars ) ;
const B2 = new Array ( r1cs . nVars ) ;
const C = new Array ( r1cs . nVars - nPublic - 1 ) ;
const IC = new Array ( nPublic + 1 ) ;
const lTauG1 = sectionsPTau [ 12 ] [ 0 ] . p + ( ( 1 << cirPower ) - 1 ) * sG1 ;
const lTauG2 = sectionsPTau [ 13 ] [ 0 ] . p + ( ( 1 << cirPower ) - 1 ) * sG2 ;
const lAlphaTauG1 = sectionsPTau [ 14 ] [ 0 ] . p + ( ( 1 << cirPower ) - 1 ) * sG1 ;
const lBetaTauG1 = sectionsPTau [ 15 ] [ 0 ] . p + ( ( 1 << cirPower ) - 1 ) * sG1 ;
await startWriteSection ( fdZKey , 4 ) ;
await startReadUniqueSection$1 ( fdR1cs , sectionsR1cs , 2 ) ;
const pNCoefs = fdZKey . pos ;
let nCoefs = 0 ;
fdZKey . pos += 4 ;
for ( let c = 0 ; c < r1cs . nConstraints ; c ++ ) {
if ( ( logger ) & ( c % 10000 == 0 ) ) logger . debug ( ` processing constraints: ${ c } / ${ r1cs . nConstraints } ` ) ;
const nA = await fdR1cs . readULE32 ( ) ;
for ( let i = 0 ; i < nA ; i ++ ) {
const s = await fdR1cs . readULE32 ( ) ;
const coef = await fdR1cs . read ( r1cs . n8 ) ;
const l1 = lTauG1 + sG1 * c ;
const l2 = lBetaTauG1 + sG1 * c ;
if ( typeof A [ s ] === "undefined" ) A [ s ] = [ ] ;
A [ s ] . push ( [ l1 , coef ] ) ;
if ( s <= nPublic ) {
if ( typeof IC [ s ] === "undefined" ) IC [ s ] = [ ] ;
IC [ s ] . push ( [ l2 , coef ] ) ;
} else {
if ( typeof C [ s - nPublic - 1 ] === "undefined" ) C [ s - nPublic - 1 ] = [ ] ;
C [ s - nPublic - 1 ] . push ( [ l2 , coef ] ) ;
}
await fdZKey . writeULE32 ( 0 ) ;
await fdZKey . writeULE32 ( c ) ;
await fdZKey . writeULE32 ( s ) ;
await writeFr2 ( coef ) ;
nCoefs ++ ;
}
const nB = await fdR1cs . readULE32 ( ) ;
for ( let i = 0 ; i < nB ; i ++ ) {
const s = await fdR1cs . readULE32 ( ) ;
const coef = await fdR1cs . read ( r1cs . n8 ) ;
const l1 = lTauG1 + sG1 * c ;
const l2 = lTauG2 + sG2 * c ;
const l3 = lAlphaTauG1 + sG1 * c ;
if ( typeof B1 [ s ] === "undefined" ) B1 [ s ] = [ ] ;
B1 [ s ] . push ( [ l1 , coef ] ) ;
if ( typeof B2 [ s ] === "undefined" ) B2 [ s ] = [ ] ;
B2 [ s ] . push ( [ l2 , coef ] ) ;
if ( s <= nPublic ) {
if ( typeof IC [ s ] === "undefined" ) IC [ s ] = [ ] ;
IC [ s ] . push ( [ l3 , coef ] ) ;
} else {
if ( typeof C [ s - nPublic - 1 ] === "undefined" ) C [ s - nPublic - 1 ] = [ ] ;
C [ s - nPublic - 1 ] . push ( [ l3 , coef ] ) ;
}
await fdZKey . writeULE32 ( 1 ) ;
await fdZKey . writeULE32 ( c ) ;
await fdZKey . writeULE32 ( s ) ;
await writeFr2 ( coef ) ;
nCoefs ++ ;
}
const nC = await fdR1cs . readULE32 ( ) ;
for ( let i = 0 ; i < nC ; i ++ ) {
const s = await fdR1cs . readULE32 ( ) ;
const coef = await fdR1cs . read ( r1cs . n8 ) ;
const l1 = lTauG1 + sG1 * c ;
if ( s <= nPublic ) {
if ( typeof IC [ s ] === "undefined" ) IC [ s ] = [ ] ;
IC [ s ] . push ( [ l1 , coef ] ) ;
} else {
if ( typeof C [ s - nPublic - 1 ] === "undefined" ) C [ s - nPublic - 1 ] = [ ] ;
C [ s - nPublic - 1 ] . push ( [ l1 , coef ] ) ;
}
}
}
const bOne = new Uint8Array ( curve . Fr . n8 ) ;
curve . Fr . toRprLE ( bOne , 0 , curve . Fr . e ( 1 ) ) ;
for ( let s = 0 ; s <= nPublic ; s ++ ) {
const l1 = lTauG1 + sG1 * ( r1cs . nConstraints + s ) ;
const l2 = lBetaTauG1 + sG1 * ( r1cs . nConstraints + s ) ;
if ( typeof A [ s ] === "undefined" ) A [ s ] = [ ] ;
A [ s ] . push ( [ l1 , bOne ] ) ;
if ( typeof IC [ s ] === "undefined" ) IC [ s ] = [ ] ;
IC [ s ] . push ( [ l2 , bOne ] ) ;
await fdZKey . writeULE32 ( 0 ) ;
await fdZKey . writeULE32 ( r1cs . nConstraints + s ) ;
await fdZKey . writeULE32 ( s ) ;
await writeFr2 ( bOne ) ;
nCoefs ++ ;
}
const oldPos = fdZKey . pos ;
await fdZKey . writeULE32 ( nCoefs , pNCoefs ) ;
fdZKey . pos = oldPos ;
await endWriteSection ( fdZKey ) ;
await endReadSection$1 ( fdR1cs ) ;
/ *
zKey . hExps = new Array ( zKey . domainSize - 1 ) ;
for ( let i = 0 ; i < zKey . domainSize ; i ++ ) {
const t1 = await readEvaluation ( "tauG1" , i ) ;
const t2 = await readEvaluation ( "tauG1" , i + zKey . domainSize ) ;
zKey . hExps [ i ] = curve . G1 . sub ( t2 , t1 ) ;
}
* /
await composeAndWritePoints ( 3 , "G1" , IC , "IC" ) ;
// Write Hs
await startWriteSection ( fdZKey , 9 ) ;
const o = sectionsPTau [ 12 ] [ 0 ] . p + ( ( 1 << ( cirPower + 1 ) ) - 1 ) * sG1 ;
for ( let i = 0 ; i < domainSize ; i ++ ) {
const buff = await fdPTau . read ( sG1 , o + ( i * 2 + 1 ) * sG1 ) ;
await fdZKey . write ( buff ) ;
}
await endWriteSection ( fdZKey ) ;
await hashHPoints ( ) ;
await composeAndWritePoints ( 8 , "G1" , C , "C" ) ;
await composeAndWritePoints ( 5 , "G1" , A , "A" ) ;
await composeAndWritePoints ( 6 , "G1" , B1 , "B1" ) ;
await composeAndWritePoints ( 7 , "G2" , B2 , "B2" ) ;
const csHash = csHasher . digest ( ) ;
// Contributions section
await startWriteSection ( fdZKey , 10 ) ;
await fdZKey . write ( csHash ) ;
await fdZKey . writeULE32 ( 0 ) ;
await endWriteSection ( fdZKey ) ;
if ( logger ) logger . info ( formatHash ( csHash , "Circuit hash: " ) ) ;
await fdZKey . close ( ) ;
await fdPTau . close ( ) ;
await fdR1cs . close ( ) ;
return csHash ;
// Montgomery-encodes one field element and appends it to the zkey file:
// decodes the little-endian Fr value, multiplies by R^2 mod r, and writes
// the result back in little-endian form.
async function writeFr2(buff) {
    const value = curve.Fr.fromRprLE(buff, 0);
    const montgomery = curve.Fr.mul(value, R2r);
    const out = new Uint8Array(curve.Fr.n8);
    curve.Fr.toRprLE(out, 0, montgomery);
    await fdZKey.write(out);
}
// Builds one zkey point section from per-signal coefficient lists, processing
// the array in fixed-size chunks to bound memory use. The element count is
// folded into the circuit hash before the points themselves.
// (Fixes the mangled identifier `CHUNK _SIZE` -> `CHUNK_SIZE`.)
async function composeAndWritePoints(idSection, groupName, arr, sectionName) {
    const CHUNK_SIZE = 1 << 18; // elements per chunk
    hashU32(arr.length);
    await startWriteSection(fdZKey, idSection);

    for (let i = 0; i < arr.length; i += CHUNK_SIZE) {
        if (logger) logger.debug(`Writing points ${sectionName}: ${i}/${arr.length}`);
        const n = Math.min(arr.length - i, CHUNK_SIZE);
        const subArr = arr.slice(i, i + n);
        await composeAndWritePointsChunk(groupName, subArr);
    }
    await endWriteSection(fdZKey);
}
// Splits one chunk of coefficient lists across the curve's worker threads,
// multi-exponentiates each slice in parallel, then writes the results in
// order and folds their uncompressed form into the circuit hash.
async function composeAndWritePointsChunk(groupName, arr) {
    const concurrency = curve.tm.concurrency;
    const perThread = Math.floor(arr.length / concurrency);
    const G = curve[groupName];

    const opPromises = [];
    for (let t = 0; t < concurrency; t++) {
        // The last thread picks up the remainder.
        const count = (t < concurrency - 1) ? perThread : arr.length - t * perThread;
        if (count == 0) continue;
        const slice = arr.slice(t * perThread, t * perThread + count);
        opPromises.push(composeAndWritePointsThread(groupName, slice));
    }

    const results = await Promise.all(opPromises);

    for (let r = 0; r < results.length; r++) {
        await fdZKey.write(results[r][0]);
        const buffU = await G.batchLEMtoU(results[r][0]);
        csHasher.update(buffU);
    }
}
async function composeAndWritePointsThread ( groupName , arr ) {
const G = curve [ groupName ] ;
const sGin = G . F . n8 * 2 ;
const sGmid = G . F . n8 * 3 ;
const sGout = G . F . n8 * 2 ;
let fnExp , fnMultiExp , fnBatchToAffine , fnZero ;
if ( groupName == "G1" ) {
fnExp = "g1m_timesScalarAffine" ;
fnMultiExp = "g1m_multiexpAffine" ;
fnBatchToAffine = "g1m_batchToAffine" ;
fnZero = "g1m_zero" ;
} else if ( groupName == "G2" ) {
fnExp = "g2m_timesScalarAffine" ;
fnMultiExp = "g2m_multiexpAffine" ;
fnBatchToAffine = "g2m_batchToAffine" ;
fnZero = "g2m_zero" ;
} else {
throw new Error ( "Invalid group" ) ;
}
let acc = 0 ;
for ( let i = 0 ; i < arr . length ; i ++ ) acc += arr [ i ] ? arr [ i ] . length : 0 ;
const bBases = new Uint8Array ( acc * sGin ) ;
const bScalars = new Uint8Array ( acc * curve . Fr . n8 ) ;
let pB = 0 ;
let pS = 0 ;
for ( let i = 0 ; i < arr . length ; i ++ ) {
if ( ! arr [ i ] ) continue ;
for ( let j = 0 ; j < arr [ i ] . length ; j ++ ) {
const bBase = await fdPTau . read ( sGin , arr [ i ] [ j ] [ 0 ] ) ;
bBases . set ( bBase , pB ) ;
pB += sGin ;
bScalars . set ( arr [ i ] [ j ] [ 1 ] , pS ) ;
pS += curve . Fr . n8 ;
}
}
const task = [ ] ;
task . push ( { cmd : "ALLOCSET" , var : 0 , buff : bBases } ) ;
task . push ( { cmd : "ALLOCSET" , var : 1 , buff : bScalars } ) ;
task . push ( { cmd : "ALLOC" , var : 2 , len : arr . length * sGmid } ) ;
pB = 0 ;
pS = 0 ;
let pD = 0 ;
for ( let i = 0 ; i < arr . length ; i ++ ) {
if ( ! arr [ i ] ) {
task . push ( { cmd : "CALL" , fnName : fnZero , params : [
{ var : 2 , offset : pD }
] } ) ;
pD += sGmid ;
continue ;
}
if ( arr [ i ] . length == 1 ) {
task . push ( { cmd : "CALL" , fnName : fnExp , params : [
{ var : 0 , offset : pB } ,
{ var : 1 , offset : pS } ,
{ val : curve . Fr . n8 } ,
{ var : 2 , offset : pD }
] } ) ;
} else {
task . push ( { cmd : "CALL" , fnName : fnMultiExp , params : [
{ var : 0 , offset : pB } ,
{ var : 1 , offset : pS } ,
{ val : curve . Fr . n8 } ,
{ val : arr [ i ] . length } ,
{ var : 2 , offset : pD }
] } ) ;
}
pB += sGin * arr [ i ] . length ;
pS += curve . Fr . n8 * arr [ i ] . length ;
pD += sGmid ;
}
task . push ( { cmd : "CALL" , fnName : fnBatchToAffine , params : [
{ var : 2 } ,
{ val : arr . length } ,
{ var : 2 } ,
] } ) ;
task . push ( { cmd : "GET" , out : 0 , var : 2 , len : arr . length * sGout } ) ;
const res = await curve . tm . queueAction ( task ) ;
return res ;
}
// Folds the H-section points (pairwise differences of ptau powers) into the
// circuit hash, processing them in bounded-size chunks.
async function hashHPoints() {
    const CHUNK_SIZE = 1 << 14;
    hashU32(domainSize - 1);
    for (let i = 0; i < domainSize - 1; i += CHUNK_SIZE) {
        if (logger) logger.debug(`HashingHPoints: ${i}/${domainSize}`);
        // Fix: clamp the final chunk to the points actually remaining.
        // Previously this was Math.min(domainSize - 1, CHUNK_SIZE), which on
        // the last iteration (whenever domainSize - 1 > CHUNK_SIZE) read past
        // the end of the point range and hashed out-of-range data.
        // NOTE(review): this changes the computed circuit hash for circuits
        // with domainSize - 1 > 2^14 relative to the old (buggy) behavior —
        // confirm compatibility requirements before shipping.
        const n = Math.min(domainSize - 1 - i, CHUNK_SIZE);
        await hashHPointsChunk(i, n);
    }
}
// Hashes one chunk of H points: reads the two base ranges whose pairwise
// difference defines each point, splits the work across workers, and folds
// the per-worker results into the circuit hash in order.
async function hashHPointsChunk(offset, nPoints) {
    const sectionStart = sectionsPTau[2][0].p;
    const buff1 = await fdPTau.read(nPoints * sG1, sectionStart + (offset + domainSize) * sG1);
    const buff2 = await fdPTau.read(nPoints * sG1, sectionStart + offset * sG1);
    const nWorkers = curve.tm.concurrency;
    const perWorker = Math.floor(nPoints / nWorkers);
    const jobs = [];
    for (let w = 0; w < nWorkers; w++) {
        const start = w * perWorker;
        // The last worker takes the remainder.
        const count = (w === nWorkers - 1) ? nPoints - start : perWorker;
        if (count === 0) continue;
        jobs.push(hashHPointsThread(
            buff1.slice(start * sG1, (start + count) * sG1),
            buff2.slice(start * sG1, (start + count) * sG1)
        ));
    }
    const results = await Promise.all(jobs);
    for (const r of results) {
        csHasher.update(r[0]);
    }
}
// Worker job: computes buff1[i] - buff2[i] for every point pair, converts the
// results to affine uncompressed big-endian form, and returns the bytes so
// the caller can hash them.
async function hashHPointsThread(buff1, buff2) {
    const nPoints = buff1.byteLength / sG1;
    const sGmid = curve.G1.F.n8 * 3;
    const task = [
        { cmd: "ALLOCSET", var: 0, buff: buff1 },
        { cmd: "ALLOCSET", var: 1, buff: buff2 },
        { cmd: "ALLOC", var: 2, len: nPoints * sGmid },
    ];
    for (let idx = 0; idx < nPoints; idx++) {
        task.push({
            cmd: "CALL",
            fnName: "g1m_subAffine",
            params: [
                { var: 0, offset: idx * sG1 },
                { var: 1, offset: idx * sG1 },
                { var: 2, offset: idx * sGmid },
            ]
        });
    }
    task.push({ cmd: "CALL", fnName: "g1m_batchToAffine", params: [
        { var: 2 },
        { val: nPoints },
        { var: 2 },
    ] });
    task.push({ cmd: "CALL", fnName: "g1m_batchLEMtoU", params: [
        { var: 2 },
        { val: nPoints },
        { var: 2 },
    ] });
    task.push({ cmd: "GET", out: 0, var: 2, len: nPoints * sG1 });
    return await curve.tm.queueAction(task);
}
// Folds a 32-bit integer (big-endian) into the circuit hash.
function hashU32(n) {
    const bytes = new Uint8Array(4);
    new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength).setUint32(0, n, false);
    csHasher.update(bytes);
}
}
// Writes the zkey header sections: section 1 carries the protocol id
// (1 = Groth16); section 2 carries the field sizes, primes, circuit counts
// and the verification-key points.
async function writeHeader(fd, zkey) {
    // Section 1: protocol identifier.
    await startWriteSection(fd, 1);
    await fd.writeULE32(1); // Groth
    await endWriteSection(fd);

    // Section 2: Groth16 header.
    const curve = await getCurveFromQ(zkey.q);
    // Bytes needed to store a field element, rounded up to 64-bit words.
    const fieldByteLen = (p) => (Math.floor((ffjavascript.Scalar.bitLength(p) - 1) / 64) + 1) * 8;
    await startWriteSection(fd, 2);
    const n8q = fieldByteLen(curve.q);
    await fd.writeULE32(n8q);
    await writeBigInt(fd, curve.q, n8q);
    const n8r = fieldByteLen(curve.r);
    await fd.writeULE32(n8r);
    await writeBigInt(fd, curve.r, n8r);
    await fd.writeULE32(zkey.nVars);    // total number of variables
    await fd.writeULE32(zkey.nPublic);  // public variables (not counting ONE)
    await fd.writeULE32(zkey.domainSize);
    await writeG1(fd, curve, zkey.vk_alpha_1);
    await writeG1(fd, curve, zkey.vk_beta_1);
    await writeG2(fd, curve, zkey.vk_beta_2);
    await writeG2(fd, curve, zkey.vk_gamma_2);
    await writeG1(fd, curve, zkey.vk_delta_1);
    await writeG2(fd, curve, zkey.vk_delta_2);
    await endWriteSection(fd);
}
// Serializes a G1 point in LEM (little-endian Montgomery) form and writes it.
async function writeG1(fd, curve, p) {
    const out = new Uint8Array(curve.G1.F.n8 * 2);
    curve.G1.toRprLEM(out, 0, p);
    await fd.write(out);
}
// Serializes a G2 point in LEM (little-endian Montgomery) form and writes it.
async function writeG2(fd, curve, p) {
    const out = new Uint8Array(curve.G2.F.n8 * 2);
    curve.G2.toRprLEM(out, 0, p);
    await fd.write(out);
}
// Reads one G1 point stored in LEM form.
async function readG1(fd, curve) {
    const raw = await fd.read(curve.G1.F.n8 * 2);
    return curve.G1.fromRprLEM(raw, 0);
}
// Reads one G2 point stored in LEM form.
async function readG2(fd, curve) {
    const raw = await fd.read(curve.G2.F.n8 * 2);
    return curve.G2.fromRprLEM(raw, 0);
}
// Reads and validates the zkey header (sections 1 and 2) for Groth16.
// Returns the parsed header: primes, element sizes, circuit counts and the
// verification-key points. Throws on any other protocol.
async function readHeader(fd, sections, protocol) {
    if (protocol != "groth16") throw new Error("Protocol not supported: " + protocol);

    const zkey = {};

    // Section 1: protocol identifier (must be 1 = Groth16).
    await startReadUniqueSection$1(fd, sections, 1);
    const protocolId = await fd.readULE32();
    if (protocolId != 1) throw new Error("File is not groth");
    zkey.protocol = "groth16";
    await endReadSection$1(fd);

    // Section 2: Groth16 header.
    await startReadUniqueSection$1(fd, sections, 2);
    zkey.n8q = await fd.readULE32();
    zkey.q = await readBigInt$1(fd, zkey.n8q);
    zkey.n8r = await fd.readULE32();
    zkey.r = await readBigInt$1(fd, zkey.n8r);
    const curve = await getCurveFromQ(zkey.q);
    zkey.nVars = await fd.readULE32();
    zkey.nPublic = await fd.readULE32();
    zkey.domainSize = await fd.readULE32();
    zkey.power = log2(zkey.domainSize);
    zkey.vk_alpha_1 = await readG1(fd, curve);
    zkey.vk_beta_1 = await readG1(fd, curve);
    zkey.vk_beta_2 = await readG2(fd, curve);
    zkey.vk_gamma_2 = await readG2(fd, curve);
    zkey.vk_delta_1 = await readG1(fd, curve);
    zkey.vk_delta_2 = await readG2(fd, curve);
    await endReadSection$1(fd);
    return zkey;
}
// Loads a complete Groth16 zkey file into memory: header, IC, coefficients
// and the A/B1/B2/C/H point arrays. Coefficient values are converted out of
// the stored representation by multiplying with Rri2 (the squared inverse of
// R = 2^(8*n8r) mod r).
async function readZKey(fileName) {
    const { fd, sections } = await readBinFile$1(fileName, "zkey", 1);
    const zkey = await readHeader(fd, sections, "groth16");
    const Fr = new ffjavascript.F1Field(zkey.r);
    const Rr = ffjavascript.Scalar.mod(ffjavascript.Scalar.shl(1, zkey.n8r * 8), zkey.r);
    const Rri = Fr.inv(Rr);
    const Rri2 = Fr.mul(Rri, Rri);
    // Fix: getCurveFromQ is async and was not awaited here (it is awaited at
    // every other call site), so `curve` was a Promise and every
    // readG1/readG2 call below would have operated on an invalid object.
    const curve = await getCurveFromQ(zkey.q);

    // IC (section 3): nPublic + 1 points.
    await startReadUniqueSection$1(fd, sections, 3);
    zkey.IC = [];
    for (let i = 0; i <= zkey.nPublic; i++) {
        const P = await readG1(fd, curve);
        zkey.IC.push(P);
    }
    await endReadSection$1(fd);

    // Constraint coefficients (section 4).
    await startReadUniqueSection$1(fd, sections, 4);
    const nCCoefs = await fd.readULE32();
    zkey.ccoefs = [];
    for (let i = 0; i < nCCoefs; i++) {
        const m = await fd.readULE32();
        const c = await fd.readULE32();
        const s = await fd.readULE32();
        const v = await readFr2();
        zkey.ccoefs.push({
            matrix: m,
            constraint: c,
            signal: s,
            value: v
        });
    }
    await endReadSection$1(fd);

    // A points (section 5): one per variable.
    await startReadUniqueSection$1(fd, sections, 5);
    zkey.A = [];
    for (let i = 0; i < zkey.nVars; i++) {
        zkey.A[i] = await readG1(fd, curve);
    }
    await endReadSection$1(fd);

    // B1 points (section 6).
    await startReadUniqueSection$1(fd, sections, 6);
    zkey.B1 = [];
    for (let i = 0; i < zkey.nVars; i++) {
        zkey.B1[i] = await readG1(fd, curve);
    }
    await endReadSection$1(fd);

    // B2 points (section 7).
    await startReadUniqueSection$1(fd, sections, 7);
    zkey.B2 = [];
    for (let i = 0; i < zkey.nVars; i++) {
        zkey.B2[i] = await readG2(fd, curve);
    }
    await endReadSection$1(fd);

    // C points (section 8): stored only for private signals, so the array is
    // intentionally sparse below index nPublic + 1.
    await startReadUniqueSection$1(fd, sections, 8);
    zkey.C = [];
    for (let i = zkey.nPublic + 1; i < zkey.nVars; i++) {
        zkey.C[i] = await readG1(fd, curve);
    }
    await endReadSection$1(fd);

    // H points (section 9): domainSize points.
    await startReadUniqueSection$1(fd, sections, 9);
    zkey.hExps = [];
    for (let i = 0; i < zkey.domainSize; i++) {
        zkey.hExps.push(await readG1(fd, curve));
    }
    await endReadSection$1(fd);

    await fd.close();
    return zkey;

    // Reads one Fr element and converts it from the stored form back to the
    // canonical representation by multiplying with Rri2.
    async function readFr2() {
        const n = await readBigInt$1(fd, zkey.n8r);
        return Fr.mul(n, Rri2);
    }
}
// Reads one MPC contribution record: the updated delta, the contributor's
// public key, the 64-byte transcript hash and an optional typed parameter
// block (type 1 = name, 2 = beacon iterations exponent, 3 = beacon hash).
async function readContribution$1(fd, curve) {
    const c = { delta: {} };
    c.deltaAfter = await readG1(fd, curve);
    c.delta.g1_s = await readG1(fd, curve);
    c.delta.g1_sx = await readG1(fd, curve);
    c.delta.g2_spx = await readG2(fd, curve);
    c.transcript = await fd.read(64);
    c.type = await fd.readULE32();

    const paramLength = await fd.readULE32();
    const curPos = fd.pos;
    let lastType = 0;
    while (fd.pos - curPos < paramLength) {
        const buffType = await fd.read(1);
        // Parameters must appear in strictly increasing type order.
        if (buffType[0] <= lastType) throw new Error("Parameters in the contribution must be sorted");
        lastType = buffType[0];
        if (buffType[0] == 1) {         // Name (length-prefixed UTF-8)
            const buffLen = await fd.read(1);
            const buffStr = await fd.read(buffLen[0]);
            c.name = new TextDecoder().decode(buffStr);
        } else if (buffType[0] == 2) {  // Beacon iterations exponent
            const buffExp = await fd.read(1);
            c.numIterationsExp = buffExp[0];
        } else if (buffType[0] == 3) {  // Beacon hash (length-prefixed)
            const buffLen = await fd.read(1);
            c.beaconHash = await fd.read(buffLen[0]);
        } else {
            throw new Error("Parameter not recognized");
        }
    }
    if (fd.pos != curPos + paramLength) {
        // Fix: corrected misspelled error message ("Parametes").
        throw new Error("Parameters do not match");
    }
    return c;
}
// Reads the MPC parameters section (10): the 64-byte circuit hash followed
// by the ordered list of contribution records.
async function readMPCParams(fd, curve, sections) {
    await startReadUniqueSection$1(fd, sections, 10);
    const csHash = await fd.read(64);
    const nContributions = await fd.readULE32();
    const contributions = [];
    for (let i = 0; i < nContributions; i++) {
        contributions.push(await readContribution$1(fd, curve));
    }
    await endReadSection$1(fd);
    return { csHash, contributions };
}
// Writes one MPC contribution record (inverse of readContribution$1):
// delta, public key, transcript, then the optional typed parameter block.
async function writeContribution$1(fd, curve, c) {
    await writeG1(fd, curve, c.deltaAfter);
    await writeG1(fd, curve, c.delta.g1_s);
    await writeG1(fd, curve, c.delta.g1_sx);
    await writeG2(fd, curve, c.delta.g2_spx);
    await fd.write(c.transcript);
    await fd.writeULE32(c.type || 0);

    const params = [];
    if (c.name) {
        params.push(1); // Param Name
        let nameData = new TextEncoder().encode(c.name.substring(0, 64));
        // Fix: 64 UTF-16 characters can encode to more than 255 UTF-8 bytes,
        // which would silently overflow the single length byte below
        // (Uint8Array stores values mod 256). Cap the encoded name at 255
        // bytes so the length prefix is always valid.
        if (nameData.byteLength > 255) nameData = nameData.slice(0, 255);
        params.push(nameData.byteLength);
        for (let i = 0; i < nameData.byteLength; i++) params.push(nameData[i]);
    }
    if (c.type == 1) {
        params.push(2); // Param numIterationsExp
        params.push(c.numIterationsExp);
        params.push(3); // Beacon Hash
        params.push(c.beaconHash.byteLength);
        for (let i = 0; i < c.beaconHash.byteLength; i++) params.push(c.beaconHash[i]);
    }
    if (params.length > 0) {
        const paramsBuff = new Uint8Array(params);
        await fd.writeULE32(paramsBuff.byteLength);
        await fd.write(paramsBuff);
    } else {
        await fd.writeULE32(0);
    }
}
// Writes the MPC parameters section (10): circuit hash, contribution count,
// then every contribution record in order.
async function writeMPCParams(fd, curve, mpcParams) {
    await startWriteSection(fd, 10);
    await fd.write(mpcParams.csHash);
    await fd.writeULE32(mpcParams.contributions.length);
    for (const contribution of mpcParams.contributions) {
        await writeContribution$1(fd, curve, contribution);
    }
    await endWriteSection(fd);
}
// Folds a G1 point, serialized uncompressed, into a running hash.
function hashG1(hasher, curve, p) {
    const serialized = new Uint8Array(curve.G1.F.n8 * 2);
    curve.G1.toRprUncompressed(serialized, 0, p);
    hasher.update(serialized);
}
// Folds a G2 point, serialized uncompressed, into a running hash.
function hashG2(hasher, curve, p) {
    const serialized = new Uint8Array(curve.G2.F.n8 * 2);
    curve.G2.toRprUncompressed(serialized, 0, p);
    hasher.update(serialized);
}
// Folds a contribution's public-key material into the hash, in the fixed
// order the protocol transcript expects (order matters for the digest).
function hashPubKey(hasher, curve, c) {
    for (const p of [c.deltaAfter, c.delta.g1_s, c.delta.g1_sx]) {
        hashG1(hasher, curve, p);
    }
    hashG2(hasher, curve, c.delta.g2_spx);
    hasher.update(c.transcript);
}
// Exports a zkey into the external "MPC params" file format so contributions
// can be made with compatible phase2 tooling and later re-imported with
// phase2importMPCParams. Points are written uncompressed big-endian and
// counts as big-endian uint32 — this byte layout must stay in sync with the
// reader in phase2importMPCParams.
async function phase2exportMPCParams(zkeyName, mpcparamsName, logger) {
    const { fd: fdZKey, sections: sectionsZKey } = await readBinFile$1(zkeyName, "zkey", 2);
    const zkey = await readHeader(fdZKey, sectionsZKey, "groth16");
    const curve = await getCurveFromQ(zkey.q);
    const sG1 = curve.G1.F.n8 * 2;
    const sG2 = curve.G2.F.n8 * 2;
    const mpcParams = await readMPCParams(fdZKey, curve, sectionsZKey);
    const fdMPCParams = await createOverride(mpcparamsName);
    /////////////////////
    // Verification Key Section
    /////////////////////
    // NOTE: these call the LOCAL writeG1/writeG2 helpers defined below
    // (uncompressed form), which shadow the module-level LEM writers.
    await writeG1(zkey.vk_alpha_1);
    await writeG1(zkey.vk_beta_1);
    await writeG2(zkey.vk_beta_2);
    await writeG2(zkey.vk_gamma_2);
    await writeG1(zkey.vk_delta_1);
    await writeG2(zkey.vk_delta_2);
    // IC: converted from the zkey's LEM encoding to uncompressed form.
    let buffBasesIC;
    buffBasesIC = await readFullSection(fdZKey, sectionsZKey, 3);
    buffBasesIC = await curve.G1.batchLEMtoU(buffBasesIC);
    await writePointArray("G1", buffBasesIC);
    /////////////////////
    // h Section
    /////////////////////
    // Transform the H bases from the zkey's coefficient form into the form
    // the params format expects (FFT, then an applyKey with -2 and
    // w[power+1] — presumably the shifted-evaluation form; confirm against
    // the inverse transform in phase2importMPCParams).
    const buffBasesH_Lodd = await readFullSection(fdZKey, sectionsZKey, 9);
    let buffBasesH_Tau;
    buffBasesH_Tau = await curve.G1.fft(buffBasesH_Lodd, "affine", "jacobian", logger);
    buffBasesH_Tau = await curve.G1.batchApplyKey(buffBasesH_Tau, curve.Fr.neg(curve.Fr.e(2)), curve.Fr.w[zkey.power + 1], "jacobian", "affine", logger);
    // Remove last element. (The degree of H will be always m-2)
    buffBasesH_Tau = buffBasesH_Tau.slice(0, buffBasesH_Tau.byteLength - sG1);
    buffBasesH_Tau = await curve.G1.batchLEMtoU(buffBasesH_Tau);
    await writePointArray("G1", buffBasesH_Tau);
    /////////////////////
    // L section
    /////////////////////
    let buffBasesC;
    buffBasesC = await readFullSection(fdZKey, sectionsZKey, 8);
    buffBasesC = await curve.G1.batchLEMtoU(buffBasesC);
    await writePointArray("G1", buffBasesC);
    /////////////////////
    // A Section (C section)
    /////////////////////
    let buffBasesA;
    buffBasesA = await readFullSection(fdZKey, sectionsZKey, 5);
    buffBasesA = await curve.G1.batchLEMtoU(buffBasesA);
    await writePointArray("G1", buffBasesA);
    /////////////////////
    // B1 Section
    /////////////////////
    let buffBasesB1;
    buffBasesB1 = await readFullSection(fdZKey, sectionsZKey, 6);
    buffBasesB1 = await curve.G1.batchLEMtoU(buffBasesB1);
    await writePointArray("G1", buffBasesB1);
    /////////////////////
    // B2 Section
    /////////////////////
    let buffBasesB2;
    buffBasesB2 = await readFullSection(fdZKey, sectionsZKey, 7);
    buffBasesB2 = await curve.G2.batchLEMtoU(buffBasesB2);
    await writePointArray("G2", buffBasesB2);
    // Trailer: circuit hash plus every recorded contribution.
    await fdMPCParams.write(mpcParams.csHash);
    await writeU32(mpcParams.contributions.length);
    for (let i = 0; i < mpcParams.contributions.length; i++) {
        const c = mpcParams.contributions[i];
        await writeG1(c.deltaAfter);
        await writeG1(c.delta.g1_s);
        await writeG1(c.delta.g1_sx);
        await writeG2(c.delta.g2_spx);
        await fdMPCParams.write(c.transcript);
    }
    await fdZKey.close();
    await fdMPCParams.close();

    // Writes one G1 point uncompressed (big-endian) to the params file.
    async function writeG1(P) {
        const buff = new Uint8Array(sG1);
        curve.G1.toRprUncompressed(buff, 0, P);
        await fdMPCParams.write(buff);
    }
    // Writes one G2 point uncompressed (big-endian) to the params file.
    async function writeG2(P) {
        const buff = new Uint8Array(sG2);
        curve.G2.toRprUncompressed(buff, 0, P);
        await fdMPCParams.write(buff);
    }
    // Writes a big-endian uint32 point count followed by the point bytes.
    async function writePointArray(groupName, buff) {
        let sG;
        if (groupName == "G1") {
            sG = sG1;
        } else {
            sG = sG2;
        }
        const buffSize = new Uint8Array(4);
        const buffSizeV = new DataView(buffSize.buffer, buffSize.byteOffset, buffSize.byteLength);
        buffSizeV.setUint32(0, buff.byteLength / sG, false);
        await fdMPCParams.write(buffSize);
        await fdMPCParams.write(buff);
    }
    // Writes a single big-endian uint32.
    async function writeU32(n) {
        const buffSize = new Uint8Array(4);
        const buffSizeV = new DataView(buffSize.buffer, buffSize.byteOffset, buffSize.byteLength);
        buffSizeV.setUint32(0, n, false);
        await fdMPCParams.write(buffSize);
    }
}
// Imports an external "MPC params" file (as produced by
// phase2exportMPCParams) back into a new zkey. It verifies the imported file
// matches the original circuit and extends — never rewrites — the recorded
// contributions, then rebuilds the zkey with the new deltas, L and H
// sections. Returns true on success, false (with a logged reason) on any
// mismatch.
// Fix: removed stray VCS timestamp lines ("2020-07-26 15:05:23 +03:00",
// "2020-07-11 11:31:52 +03:00") that had leaked into the source and made it
// syntactically invalid; corrected misspelled log messages.
async function phase2importMPCParams(zkeyNameOld, mpcparamsName, zkeyNameNew, name, logger) {
    const { fd: fdZKeyOld, sections: sectionsZKeyOld } = await readBinFile$1(zkeyNameOld, "zkey", 2);
    const zkeyHeader = await readHeader(fdZKeyOld, sectionsZKeyOld, "groth16");
    const curve = await getCurveFromQ(zkeyHeader.q);
    const sG1 = curve.G1.F.n8 * 2;
    const sG2 = curve.G2.F.n8 * 2;
    const oldMPCParams = await readMPCParams(fdZKeyOld, curve, sectionsZKeyOld);
    const newMPCParams = {};

    const fdMPCParams = await readExisting$3(mpcparamsName);

    // Skip all the point sections to reach the contribution trailer.
    fdMPCParams.pos =
        sG1 * 3 + sG2 * 3 +                     // vKey
        8 + sG1 * zkeyHeader.nVars +            // IC + C
        4 + sG1 * (zkeyHeader.domainSize - 1) + // H
        4 + sG1 * zkeyHeader.nVars +            // A
        4 + sG1 * zkeyHeader.nVars +            // B1
        4 + sG2 * zkeyHeader.nVars;             // B2

    // csHash
    newMPCParams.csHash = await fdMPCParams.read(64);

    const nContributions = await fdMPCParams.readUBE32();
    newMPCParams.contributions = [];
    for (let i = 0; i < nContributions; i++) {
        const c = { delta: {} };
        c.deltaAfter = await readG1(fdMPCParams);
        c.delta.g1_s = await readG1(fdMPCParams);
        c.delta.g1_sx = await readG1(fdMPCParams);
        c.delta.g2_spx = await readG2(fdMPCParams);
        c.transcript = await fdMPCParams.read(64);
        // Carry over metadata (type/beacon/name) from contributions we
        // already knew about; the params file itself does not store it.
        if (i < oldMPCParams.contributions.length) {
            c.type = oldMPCParams.contributions[i].type;
            if (c.type == 1) {
                c.beaconHash = oldMPCParams.contributions[i].beaconHash;
                c.numIterationsExp = oldMPCParams.contributions[i].numIterationsExp;
            }
            if (oldMPCParams.contributions[i].name) {
                c.name = oldMPCParams.contributions[i].name;
            }
        }
        newMPCParams.contributions.push(c);
    }

    if (!hashIsEqual(newMPCParams.csHash, oldMPCParams.csHash)) {
        if (logger) logger.error("Hash of the original circuit does not match with the MPC one");
        return false;
    }
    if (oldMPCParams.contributions.length > newMPCParams.contributions.length) {
        if (logger) logger.error("The imported file does not include new contributions");
        return false;
    }
    for (let i = 0; i < oldMPCParams.contributions.length; i++) {
        if (!contributionIsEqual(oldMPCParams.contributions[i], newMPCParams.contributions[i])) {
            if (logger) logger.error(`Previous contribution ${i} does not match`);
            return false;
        }
    }

    // Set the same name to all new contributions
    if (name) {
        for (let i = oldMPCParams.contributions.length; i < newMPCParams.contributions.length; i++) {
            newMPCParams.contributions[i].name = name;
        }
    }

    const fdZKeyNew = await createBinFile(zkeyNameNew, "zkey", 1, 10);
    fdMPCParams.pos = 0;

    // Header: keep the original alpha/beta/gamma, take the new deltas.
    fdMPCParams.pos += sG1; // ignore alpha1 (keep original)
    fdMPCParams.pos += sG1; // ignore beta1
    fdMPCParams.pos += sG2; // ignore beta2
    fdMPCParams.pos += sG2; // ignore gamma2
    zkeyHeader.vk_delta_1 = await readG1(fdMPCParams);
    zkeyHeader.vk_delta_2 = await readG2(fdMPCParams);
    await writeHeader(fdZKeyNew, zkeyHeader);

    // IC (Keep original)
    const nIC = await fdMPCParams.readUBE32();
    if (nIC != zkeyHeader.nPublic + 1) {
        if (logger) logger.error("Invalid number of points in IC");
        await fdZKeyNew.discard();
        return false;
    }
    fdMPCParams.pos += sG1 * (zkeyHeader.nPublic + 1);
    await copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 3);

    // Coeffs (Keep original)
    await copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 4);

    // H Section: convert back from the exported form to the zkey's
    // coefficient form (inverse of the transform in phase2exportMPCParams).
    const nH = await fdMPCParams.readUBE32();
    if (nH != zkeyHeader.domainSize - 1) {
        if (logger) logger.error("Invalid number of points in H");
        await fdZKeyNew.discard();
        return false;
    }
    const buffTauU = await fdMPCParams.read(sG1 * (zkeyHeader.domainSize - 1));
    const buffTauLEM = await curve.G1.batchUtoLEM(buffTauU);
    let buffH = new Uint8Array(zkeyHeader.domainSize * sG1);
    buffH.set(buffTauLEM); // Let the last one to zero.
    const n2Inv = curve.Fr.neg(curve.Fr.inv(curve.Fr.e(2)));
    const wInv = curve.Fr.inv(curve.Fr.w[zkeyHeader.power + 1]);
    buffH = await curve.G1.batchApplyKey(buffH, n2Inv, wInv, "affine", "jacobian", logger);
    buffH = await curve.G1.ifft(buffH, "jacobian", "affine", logger);
    await startWriteSection(fdZKeyNew, 9);
    await fdZKeyNew.write(buffH);
    await endWriteSection(fdZKeyNew);

    // C Section (L section): take the points from the params file.
    const nL = await fdMPCParams.readUBE32();
    if (nL != (zkeyHeader.nVars - zkeyHeader.nPublic - 1)) {
        if (logger) logger.error("Invalid number of points in L");
        await fdZKeyNew.discard();
        return false;
    }
    let buffL = await fdMPCParams.read(sG1 * (zkeyHeader.nVars - zkeyHeader.nPublic - 1));
    buffL = await curve.G1.batchUtoLEM(buffL);
    await startWriteSection(fdZKeyNew, 8);
    await fdZKeyNew.write(buffL);
    await endWriteSection(fdZKeyNew);

    // A Section: validate the count, keep the original points.
    const nA = await fdMPCParams.readUBE32();
    if (nA != zkeyHeader.nVars) {
        if (logger) logger.error("Invalid number of points in A");
        await fdZKeyNew.discard();
        return false;
    }
    fdMPCParams.pos += sG1 * (zkeyHeader.nVars);
    await copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 5);

    // B1 Section
    const nB1 = await fdMPCParams.readUBE32();
    if (nB1 != zkeyHeader.nVars) {
        if (logger) logger.error("Invalid number of points in B1");
        await fdZKeyNew.discard();
        return false;
    }
    fdMPCParams.pos += sG1 * (zkeyHeader.nVars);
    await copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 6);

    // B2 Section
    const nB2 = await fdMPCParams.readUBE32();
    if (nB2 != zkeyHeader.nVars) {
        if (logger) logger.error("Invalid number of points in B2");
        await fdZKeyNew.discard();
        return false;
    }
    fdMPCParams.pos += sG2 * (zkeyHeader.nVars);
    await copySection(fdZKeyOld, sectionsZKeyOld, fdZKeyNew, 7);

    await writeMPCParams(fdZKeyNew, curve, newMPCParams);

    await fdMPCParams.close();
    await fdZKeyNew.close();
    await fdZKeyOld.close();
    return true;

    // Points in the params file are uncompressed big-endian, unlike the
    // zkey's LEM encoding, hence these local readers.
    async function readG1(fd) {
        const buff = await fd.read(curve.G1.F.n8 * 2);
        return curve.G1.fromRprUncompressed(buff, 0);
    }
    async function readG2(fd) {
        const buff = await fd.read(curve.G2.F.n8 * 2);
        return curve.G2.fromRprUncompressed(buff, 0);
    }
    // Field-by-field equality of two contribution records.
    function contributionIsEqual(c1, c2) {
        if (!curve.G1.eq(c1.deltaAfter, c2.deltaAfter)) return false;
        if (!curve.G1.eq(c1.delta.g1_s, c2.delta.g1_s)) return false;
        if (!curve.G1.eq(c1.delta.g1_sx, c2.delta.g1_sx)) return false;
        if (!curve.G2.eq(c1.delta.g2_spx, c2.delta.g2_spx)) return false;
        if (!hashIsEqual(c1.transcript, c2.transcript)) return false;
        return true;
    }
}
const sameRatio$2 = sameRatio ;
async function phase2verify ( r1csFileName , pTauFileName , zkeyFileName , logger ) {
let sr ;
await Blake2b . ready ( ) ;
const { fd , sections } = await readBinFile$1 ( zkeyFileName , "zkey" , 2 ) ;
const zkey = await readHeader ( fd , sections , "groth16" ) ;
const curve = await getCurveFromQ ( zkey . q ) ;
const sG1 = curve . G1 . F . n8 * 2 ;
const sG2 = curve . G2 . F . n8 * 2 ;
const mpcParams = await readMPCParams ( fd , curve , sections ) ;
const accumulatedHasher = Blake2b ( 64 ) ;
accumulatedHasher . update ( mpcParams . csHash ) ;
let curDelta = curve . G1 . g ;
for ( let i = 0 ; i < mpcParams . contributions . length ; i ++ ) {
const c = mpcParams . contributions [ i ] ;
const ourHasher = cloneHasher ( accumulatedHasher ) ;
hashG1 ( ourHasher , curve , c . delta . g1 _s ) ;
hashG1 ( ourHasher , curve , c . delta . g1 _sx ) ;
if ( ! hashIsEqual ( ourHasher . digest ( ) , c . transcript ) ) {
console . log ( ` INVALID( ${ i } ): Inconsistent transcript ` ) ;
return false ;
}
const delta _g2 _sp = hashToG2 ( curve , c . transcript ) ;
sr = await sameRatio$2 ( curve , c . delta . g1 _s , c . delta . g1 _sx , delta _g2 _sp , c . delta . g2 _spx ) ;
if ( sr !== true ) {
console . log ( ` INVALID( ${ i } ): public key G1 and G2 do not have the same ration ` ) ;
return false ;
}
sr = await sameRatio$2 ( curve , curDelta , c . deltaAfter , delta _g2 _sp , c . delta . g2 _spx ) ;
if ( sr !== true ) {
console . log ( ` INVALID( ${ i } ): deltaAfter does not fillow the public key ` ) ;
return false ;
}
if ( c . type == 1 ) {
const rng = rngFromBeaconParams ( c . beaconHash , c . numIterationsExp ) ;
const expected _prvKey = curve . Fr . fromRng ( rng ) ;
const expected _g1 _s = curve . G1 . toAffine ( curve . G1 . fromRng ( rng ) ) ;
const expected _g1 _sx = curve . G1 . toAffine ( curve . G1 . timesFr ( expected _g1 _s , expected _prvKey ) ) ;
if ( curve . G1 . eq ( expected _g1 _s , c . delta . g1 _s ) !== true ) {
console . log ( ` INVALID( ${ i } ): Key of the beacon does not match. g1_s ` ) ;
return false ;
}
if ( curve . G1 . eq ( expected _g1 _sx , c . delta . g1 _sx ) !== true ) {
console . log ( ` INVALID( ${ i } ): Key of the beacon does not match. g1_sx ` ) ;
return false ;
}
}
hashPubKey ( accumulatedHasher , curve , c ) ;
const contributionHasher = Blake2b ( 64 ) ;
hashPubKey ( contributionHasher , curve , c ) ;
c . contributionHash = contributionHasher . digest ( ) ;
curDelta = c . deltaAfter ;
}
// const initFileName = "~" + zkeyFileName + ".init";
const initFileName = { type: "mem" };
await newZKey ( r1csFileName , pTauFileName , initFileName ) ;
const { fd : fdInit , sections : sectionsInit } = await readBinFile$1 ( initFileName , "zkey" , 2 ) ;
const zkeyInit = await readHeader ( fdInit , sectionsInit , "groth16" ) ;
if ( ( ! ffjavascript . Scalar . eq ( zkeyInit . q , zkey . q ) )
|| ( ! ffjavascript . Scalar . eq ( zkeyInit . r , zkey . r ) )
|| ( zkeyInit . n8q != zkey . n8q )
|| ( zkeyInit . n8r != zkey . n8r ) )
{
if ( logger ) logger . error ( "INVALID: Different curves" ) ;
return false ;
}
if ( ( zkeyInit . nVars != zkey . nVars )
|| ( zkeyInit . nPublic != zkey . nPublic )
|| ( zkeyInit . domainSize != zkey . domainSize ) )
{
if ( logger ) logger . error ( "INVALID: Different circuit parameters" ) ;
return false ;
}
if ( ! curve . G1 . eq ( zkey . vk _alpha _1 , zkeyInit . vk _alpha _1 ) ) {
if ( logger ) logger . error ( "INVALID: Invalid alpha1" ) ;
return false ;
}
if ( ! curve . G1 . eq ( zkey . vk _beta _1 , zkeyInit . vk _beta _1 ) ) {
if ( logger ) logger . error ( "INVALID: Invalid beta1" ) ;
return false ;
}
if ( ! curve . G2 . eq ( zkey . vk _beta _2 , zkeyInit . vk _beta _2 ) ) {
if ( logger ) logger . error ( "INVALID: Invalid beta2" ) ;
return false ;
}
if ( ! curve . G2 . eq ( zkey . vk _gamma _2 , zkeyInit . vk _gamma _2 ) ) {
if ( logger ) logger . error ( "INVALID: Invalid gamma2" ) ;
return false ;
}
if ( ! curve . G1 . eq ( zkey . vk _delta _1 , curDelta ) ) {
if ( logger ) logger . error ( "INVALID: Invalud delta1" ) ;
return false ;
}
sr = await sameRatio$2 ( curve , curve . G1 . g , curDelta , curve . G2 . g , zkey . vk _delta _2 ) ;
if ( sr !== true ) {
if ( logger ) logger . error ( "INVALID: Invalud delta2" ) ;
return false ;
}
const mpcParamsInit = await readMPCParams ( fdInit , curve , sectionsInit ) ;
if ( ! hashIsEqual ( mpcParams . csHash , mpcParamsInit . csHash ) ) {
if ( logger ) logger . error ( "INVALID: Circuit does not match" ) ;
return false ;
}
// Check sizes of sections
if ( sections [ 8 ] [ 0 ] . size != sG1 * ( zkey . nVars - zkey . nPublic - 1 ) ) {
if ( logger ) logger . error ( "INVALID: Invalid L section size" ) ;
return false ;
}
if ( sections [ 9 ] [ 0 ] . size != sG1 * ( zkey . domainSize ) ) {
if ( logger ) logger . error ( "INVALID: Invalid H section size" ) ;
return false ;
}
let ss ;
ss = await sectionIsEqual ( fd , sections , fdInit , sectionsInit , 3 ) ;
if ( ! ss ) {
if ( logger ) logger . error ( "INVALID: IC section is not identical" ) ;
return false ;
}
ss = await sectionIsEqual ( fd , sections , fdInit , sectionsInit , 4 ) ;
if ( ! ss ) {
if ( logger ) logger . error ( "Coeffs section is not identical" ) ;
return false ;
}
ss = await sectionIsEqual ( fd , sections , fdInit , sectionsInit , 5 ) ;
if ( ! ss ) {
if ( logger ) logger . error ( "A section is not identical" ) ;
return false ;
}
ss = await sectionIsEqual ( fd , sections , fdInit , sectionsInit , 6 ) ;
if ( ! ss ) {
if ( logger ) logger . error ( "B1 section is not identical" ) ;
return false ;
}
ss = await sectionIsEqual ( fd , sections , fdInit , sectionsInit , 7 ) ;
if ( ! ss ) {
if ( logger ) logger . error ( "B2 section is not identical" ) ;
return false ;
}
// Check L
sr = await sectionHasSameRatio ( "G1" , fdInit , sectionsInit , fd , sections , 8 , zkey . vk _delta _2 , zkeyInit . vk _delta _2 , "L section" ) ;
if ( sr !== true ) {
if ( logger ) logger . error ( "L section does not match" ) ;
return false ;
}
// Check H
sr = await sameRatioH ( ) ;
if ( sr !== true ) {
if ( logger ) logger . error ( "H section does not match" ) ;
return false ;
}
if ( logger ) logger . info ( formatHash ( mpcParams . csHash , "Circuit Hash: " ) ) ;
await fd . close ( ) ;
await fdInit . close ( ) ;
for ( let i = mpcParams . contributions . length - 1 ; i >= 0 ; i -- ) {
const c = mpcParams . contributions [ i ] ;
if ( logger ) logger . info ( "-------------------------" ) ;
if ( logger ) logger . info ( formatHash ( c . contributionHash , ` contribution # ${ i + 1 } ${ c . name ? c . name : "" } : ` ) ) ;
if ( c . type == 1 ) {
if ( logger ) logger . info ( ` Beacon generator: ${ byteArray2hex ( c . beaconHash ) } ` ) ;
if ( logger ) logger . info ( ` Beacon iterations Exp: ${ c . numIterationsExp } ` ) ;
}
}
if ( logger ) logger . info ( "-------------------------" ) ;
if ( logger ) logger . info ( "ZKey Ok!" ) ;
return true ;
async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
    // Probabilistic equality-of-ratio check between two file sections of points:
    // fold each section into a random linear combination (same random scalars for
    // both) and compare the two accumulators with one pairing-based same-ratio test.
    const CHUNK = 1 << 20;
    const G = curve[groupName];
    const sG = G.F.n8 * 2;

    await startReadUniqueSection$1(fd1, sections1, idSection);
    await startReadUniqueSection$1(fd2, sections2, idSection);

    const nPoints = sections1[idSection][0].size / sG;
    let acc1 = G.zero;
    let acc2 = G.zero;
    for (let off = 0; off < nPoints; off += CHUNK) {
        if (logger) logger.debug(`Same ratio check ${sectionName}: ${off}/${nPoints}`);
        const n = Math.min(nPoints - off, CHUNK);
        const bases1 = await fd1.read(n * sG);
        const bases2 = await fd2.read(n * sG);
        // 32-bit random scalars suffice for a batched membership/ratio check.
        const scalars = new Uint8Array(4 * n);
        crypto.randomFillSync(scalars);
        acc1 = G.add(acc1, await G.multiExpAffine(bases1, scalars));
        acc2 = G.add(acc2, await G.multiExpAffine(bases2, scalars));
    }

    await endReadSection$1(fd1);
    await endReadSection$1(fd2);

    // NOTE: `sr` is the enclosing function's variable; the assignment is kept
    // intentionally to match the original's side effect.
    sr = await sameRatio$2(curve, acc1, acc2, g2sp, g2spx);
    return sr === true;
}
async function sameRatioH() {
    // Verifies the H section of the zkey against the powers-of-tau file:
    // builds a random polynomial r, pairs it with (tau^(m+i) - tau^i) points
    // from the ptau file (R1) and with the zkey's H section in the shifted
    // domain (R2), then checks both sides keep the delta2/delta2_init ratio.
    const MAX_CHUNK_SIZE = 1 << 20;
    const G = curve.G1;
    const sG = G.F.n8 * 2;

    const { fd: fdPTau, sections: sectionsPTau } = await readBinFile$1(pTauFileName, "ptau", 1);

    let buff_r = new Uint8Array(zkey.domainSize * zkey.n8r);

    const seed = new Array(8);
    for (let i = 0; i < 8; i++) {
        seed[i] = crypto.randomBytes(4).readUInt32BE(0, true);
    }
    const rng = new ffjavascript.ChaCha(seed);
    for (let i = 0; i < zkey.domainSize - 1; i++) {   // Note that last one is zero
        const e = curve.Fr.fromRng(rng);
        curve.Fr.toRprLE(buff_r, i * zkey.n8r, e);
    }

    let R1 = G.zero;
    for (let i = 0; i < zkey.domainSize; i += MAX_CHUNK_SIZE) {
        if (logger) logger.debug(`H Verification(tau): ${i}/${zkey.domainSize}`);
        const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);
        // BUGFIX: `i` already advances in element units (i += MAX_CHUNK_SIZE), so
        // the byte offset is i*sG. The original multiplied by MAX_CHUNK_SIZE a
        // second time (i*MAX_CHUNK_SIZE*sG), reading the wrong points whenever
        // domainSize exceeded one chunk.
        const buff1 = await fdPTau.read(sG * n, sectionsPTau[2][0].p + zkey.domainSize * sG + i * sG);
        const buff2 = await fdPTau.read(sG * n, sectionsPTau[2][0].p + i * sG);
        const buffB = await batchSubstract(buff1, buff2);
        // BUGFIX: same double-scaling removed from the scalar slice.
        const buffS = buff_r.slice(i * zkey.n8r, (i + n) * zkey.n8r);
        const r = await G.multiExpAffine(buffB, buffS);
        R1 = G.add(R1, r);
    }

    // Calculate odd coefficients in transformed domain
    buff_r = await curve.Fr.batchToMontgomery(buff_r);
    const first = curve.Fr.neg(curve.Fr.e(2));
    const inc = curve.Fr.w[zkey.power + 1];
    buff_r = await curve.Fr.batchApplyKey(buff_r, first, inc);
    buff_r = await curve.Fr.fft(buff_r);
    buff_r = await curve.Fr.batchFromMontgomery(buff_r);

    await startReadUniqueSection$1(fd, sections, 9);
    let R2 = G.zero;
    for (let i = 0; i < zkey.domainSize; i += MAX_CHUNK_SIZE) {
        if (logger) logger.debug(`H Verification(lagrange): ${i}/${zkey.domainSize}`);
        const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);
        const buff = await fd.read(sG * n);
        // BUGFIX: element-unit offset here as well.
        const buffS = buff_r.slice(i * zkey.n8r, (i + n) * zkey.n8r);
        const r = await G.multiExpAffine(buff, buffS);
        R2 = G.add(R2, r);
    }
    await endReadSection$1(fd);

    // BUGFIX: the ptau file descriptor was leaked by the original.
    await fdPTau.close();

    sr = await sameRatio$2(curve, R1, R2, zkey.vk_delta_2, zkeyInit.vk_delta_2);
    if (sr !== true) return false;

    return true;
}
async function batchSubstract(buff1, buff2) {
    // Pointwise G1 subtraction (buff1[i] - buff2[i]) over two equally-sized
    // buffers of affine points, fanned out across the worker-thread pool.
    const sG = curve.G1.F.n8 * 2;
    const nPoints = buff1.byteLength / sG;
    const concurrency = curve.tm.concurrency;
    const nPointsPerThread = Math.floor(nPoints / concurrency);
    const opPromises = [];
    for (let i = 0; i < concurrency; i++) {
        let n;
        if (i < concurrency - 1) {
            n = nPointsPerThread;
        } else {
            n = nPoints - i * nPointsPerThread; // last worker takes the remainder
        }
        if (n == 0) continue;

        // BUGFIX: slice using the local `sG`; the original referenced `sG1` from
        // the enclosing scope, which only worked because it happened to hold the
        // same value (curve.G1.F.n8 * 2).
        const subBuff1 = buff1.slice(i * nPointsPerThread * sG, (i * nPointsPerThread + n) * sG);
        const subBuff2 = buff2.slice(i * nPointsPerThread * sG, (i * nPointsPerThread + n) * sG);
        opPromises.push(batchSubstractThread(subBuff1, subBuff2));
    }

    const result = await Promise.all(opPromises);

    const fullBuffOut = new Uint8Array(nPoints * sG);
    let p = 0;
    for (let i = 0; i < result.length; i++) {
        fullBuffOut.set(result[i][0], p);
        p += result[i][0].byteLength;
    }

    return fullBuffOut;
}
async function batchSubstractThread(buff1, buff2) {
    // Worker task: computes out[i] = buff1[i] - buff2[i] in G1 via the wasm
    // g1m_subAffine routine, then batch-normalizes the results back to affine.
    const sG1 = curve.G1.F.n8 * 2;   // affine point size in bytes
    const sGmid = curve.G1.F.n8 * 3; // intermediate (non-affine) point size
    const nPoints = buff1.byteLength / sG1;

    const task = [
        { cmd: "ALLOCSET", var: 0, buff: buff1 },
        { cmd: "ALLOCSET", var: 1, buff: buff2 },
        { cmd: "ALLOC", var: 2, len: nPoints * sGmid },
    ];
    for (let idx = 0; idx < nPoints; idx++) {
        task.push({
            cmd: "CALL",
            fnName: "g1m_subAffine",
            params: [
                { var: 0, offset: idx * sG1 },
                { var: 1, offset: idx * sG1 },
                { var: 2, offset: idx * sGmid },
            ],
        });
    }
    task.push({
        cmd: "CALL",
        fnName: "g1m_batchToAffine",
        params: [{ var: 2 }, { val: nPoints }, { var: 2 }],
    });
    task.push({ cmd: "GET", out: 0, var: 2, len: nPoints * sG1 });

    return await curve.tm.queueAction(task);
}
}
async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, logger) {
    // Applies one phase-2 (circuit-specific) MPC contribution to a groth16 zkey:
    // samples a fresh delta from `entropy`, multiplies vk_delta_1/2 by it, and
    // rescales the L and H sections by its inverse so proofs stay consistent.
    // Returns the 64-byte Blake2b hash of the contribution.
    await Blake2b.ready();

    const { fd: fdOld, sections: sections } = await readBinFile$1(zkeyNameOld, "zkey", 2);
    const zkey = await readHeader(fdOld, sections, "groth16");
    const curve = await getCurveFromQ(zkey.q);
    const mpcParams = await readMPCParams(fdOld, curve, sections);

    const fdNew = await createBinFile(zkeyNameNew, "zkey", 1, 10);

    const rng = await getRandomRng(entropy);

    // The transcript commits to the circuit hash and every previous contribution
    // in order; this hashing sequence is protocol-critical and must not change.
    const transcriptHasher = Blake2b(64);
    transcriptHasher.update(mpcParams.csHash);
    for (let i = 0; i < mpcParams.contributions.length; i++) {
        hashPubKey(transcriptHasher, curve, mpcParams.contributions[i]);
    }

    const curContribution = {};
    curContribution.delta = {};
    curContribution.delta.prvKey = curve.Fr.fromRng(rng);
    curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
    curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesFr(curContribution.delta.g1_s, curContribution.delta.prvKey));
    hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
    hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
    curContribution.transcript = transcriptHasher.digest();
    // g2_sp is derived by hashing the transcript to G2; g2_spx = g2_sp * delta
    // ties the new delta to everything hashed above.
    curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
    curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesFr(curContribution.delta.g2_sp, curContribution.delta.prvKey));

    zkey.vk_delta_1 = curve.G1.timesFr(zkey.vk_delta_1, curContribution.delta.prvKey);
    zkey.vk_delta_2 = curve.G2.timesFr(zkey.vk_delta_2, curContribution.delta.prvKey);

    curContribution.deltaAfter = zkey.vk_delta_1;

    curContribution.type = 0; // 0 = regular contribution (1 = beacon, see beacon$1)
    if (name) curContribution.name = name;

    mpcParams.contributions.push(curContribution);

    await writeHeader(fdNew, zkey);

    // IC
    await copySection(fdOld, sections, fdNew, 3);

    // Coeffs (Keep original)
    await copySection(fdOld, sections, fdNew, 4);

    // A Section
    await copySection(fdOld, sections, fdNew, 5);

    // B1 Section
    await copySection(fdOld, sections, fdNew, 6);

    // B2 Section
    await copySection(fdOld, sections, fdNew, 7);

    // L and H are rescaled by delta^-1 so the key remains internally consistent
    // after vk_delta_1/2 were multiplied by delta above.
    const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
    await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
    await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);

    await writeMPCParams(fdNew, curve, mpcParams);

    await fdOld.close();
    await fdNew.close();

    const contributionHasher = Blake2b(64);
    hashPubKey(contributionHasher, curve, curContribution);

    const contribuionHash = contributionHasher.digest();

    if (logger) logger.info(formatHash(contribuionHash, "Contribution Hash: "));

    return contribuionHash;
}
async function beacon$1(zkeyNameOld, zkeyNameNew, name, beaconHashStr, numIterationsExp, logger) {
    // Applies a "random beacon" phase-2 contribution: delta is derived
    // deterministically from a public beacon hash via rngFromBeaconParams, so
    // anyone can reproduce and audit this contribution.
    // Returns the Blake2b contribution hash, or false on invalid arguments.
    await Blake2b.ready();

    const beaconHash = hex2ByteArray(beaconHashStr);
    // The decoded length must be non-zero and the string must be exactly two
    // hex digits per byte.
    if ((beaconHash.byteLength == 0)
        || (beaconHash.byteLength * 2 != beaconHashStr.length))
    {
        if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
        return false;
    }
    if (beaconHash.length >= 256) {
        if (logger) logger.error("Maximum lenght of beacon hash is 255 bytes");
        return false;
    }

    numIterationsExp = parseInt(numIterationsExp);
    if ((numIterationsExp < 10) || (numIterationsExp > 63)) {
        if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
        return false;
    }

    const { fd: fdOld, sections: sections } = await readBinFile$1(zkeyNameOld, "zkey", 2);
    const zkey = await readHeader(fdOld, sections, "groth16");
    const curve = await getCurveFromQ(zkey.q);
    const mpcParams = await readMPCParams(fdOld, curve, sections);

    const fdNew = await createBinFile(zkeyNameNew, "zkey", 1, 10);

    // Deterministic RNG seeded from the public beacon parameters.
    const rng = await rngFromBeaconParams(beaconHash, numIterationsExp);

    // Transcript commits to the circuit hash and all prior contributions in
    // order; the hashing sequence below is protocol-critical.
    const transcriptHasher = Blake2b(64);
    transcriptHasher.update(mpcParams.csHash);
    for (let i = 0; i < mpcParams.contributions.length; i++) {
        hashPubKey(transcriptHasher, curve, mpcParams.contributions[i]);
    }

    const curContribution = {};
    curContribution.delta = {};
    curContribution.delta.prvKey = curve.Fr.fromRng(rng);
    curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
    curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesFr(curContribution.delta.g1_s, curContribution.delta.prvKey));
    hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
    hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
    curContribution.transcript = transcriptHasher.digest();
    curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
    curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesFr(curContribution.delta.g2_sp, curContribution.delta.prvKey));

    zkey.vk_delta_1 = curve.G1.timesFr(zkey.vk_delta_1, curContribution.delta.prvKey);
    zkey.vk_delta_2 = curve.G2.timesFr(zkey.vk_delta_2, curContribution.delta.prvKey);

    curContribution.deltaAfter = zkey.vk_delta_1;

    curContribution.type = 1; // 1 = beacon contribution (printed specially by the verifier)
    curContribution.numIterationsExp = numIterationsExp;
    curContribution.beaconHash = beaconHash;

    if (name) curContribution.name = name;

    mpcParams.contributions.push(curContribution);

    await writeHeader(fdNew, zkey);

    // IC
    await copySection(fdOld, sections, fdNew, 3);

    // Coeffs (Keep original)
    await copySection(fdOld, sections, fdNew, 4);

    // A Section
    await copySection(fdOld, sections, fdNew, 5);

    // B1 Section
    await copySection(fdOld, sections, fdNew, 6);

    // B2 Section
    await copySection(fdOld, sections, fdNew, 7);

    // Rescale L and H by delta^-1 to keep the key internally consistent.
    const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
    await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
    await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);

    await writeMPCParams(fdNew, curve, mpcParams);

    await fdOld.close();
    await fdNew.close();

    const contributionHasher = Blake2b(64);
    hashPubKey(contributionHasher, curve, curContribution);

    const contribuionHash = contributionHasher.digest();

    if (logger) logger.info(formatHash(contribuionHash, "Contribution Hash: "));

    return contribuionHash;
}
async function zkeyExportJson(zkeyFileName, verbose) {
    // Loads a zkey file and returns it as a plain JavaScript object.
    // `verbose` is accepted for CLI-signature compatibility but is not used here.
    return await readZKey(zkeyFileName);
}
// Format of the output
// 2020-07-14 12:55:12 +03:00  (stray VCS timestamp — commented out; was a bare line breaking the syntax)
async function bellmanContribute(curve, challengeFilename, responesFileName, entropy, logger) {
    // Applies a phase-2 contribution in bellman "challenge/response" format:
    // streams the challenge file to the response file, multiplying delta1/delta2
    // by a fresh secret delta and rescaling the H and L sections by its inverse.
    // Returns the Blake2b hash of the new contribution.
    // (Stray VCS timestamp lines pasted inside this function were removed.)
    await Blake2b.ready();

    const rng = await getRandomRng(entropy);

    const delta = curve.Fr.fromRng(rng);
    const invDelta = curve.Fr.inv(delta);

    const sG1 = curve.G1.F.n8 * 2;
    const sG2 = curve.G2.F.n8 * 2;

    const fdFrom = await readExisting$3(challengeFilename);
    const fdTo = await createOverride(responesFileName);

    await copy(sG1); // alpha1
    await copy(sG1); // beta1
    await copy(sG2); // beta2
    await copy(sG2); // gamma2
    const oldDelta1 = await readG1();
    const delta1 = curve.G1.timesFr(oldDelta1, delta);
    await writeG1(delta1);
    const oldDelta2 = await readG2();
    const delta2 = curve.G2.timesFr(oldDelta2, delta);
    await writeG2(delta2);

    // IC
    const nIC = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nIC);
    await copy(nIC * sG1);

    // H (rescaled by delta^-1)
    const nH = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nH);
    await applyKeyToChallengeSection(fdFrom, fdTo, null, curve, "G1", nH, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "H", logger);

    // L (rescaled by delta^-1)
    const nL = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nL);
    await applyKeyToChallengeSection(fdFrom, fdTo, null, curve, "G1", nL, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "L", logger);

    // A
    const nA = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nA);
    await copy(nA * sG1);

    // B1
    const nB1 = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nB1);
    await copy(nB1 * sG1);

    // B2
    const nB2 = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nB2);
    await copy(nB2 * sG2);

    //////////
    /// Read contributions
    //////////
    const transcriptHasher = Blake2b(64);

    const mpcParams = {};
    // csHash
    mpcParams.csHash = await fdFrom.read(64);
    transcriptHasher.update(mpcParams.csHash);

    const nContributions = await fdFrom.readUBE32();
    mpcParams.contributions = [];
    for (let i = 0; i < nContributions; i++) {
        const c = { delta: {} };
        c.deltaAfter = await readG1();
        c.delta.g1_s = await readG1();
        c.delta.g1_sx = await readG1();
        c.delta.g2_spx = await readG2();
        c.transcript = await fdFrom.read(64);
        mpcParams.contributions.push(c);
        hashPubKey(transcriptHasher, curve, c);
    }

    const curContribution = {};
    curContribution.delta = {};
    curContribution.delta.prvKey = delta;
    curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
    curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesFr(curContribution.delta.g1_s, delta));
    hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
    hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
    curContribution.transcript = transcriptHasher.digest();
    curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
    curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesFr(curContribution.delta.g2_sp, delta));
    curContribution.deltaAfter = delta1;
    curContribution.type = 0;
    mpcParams.contributions.push(curContribution);

    //////////
    /// Write Contribution
    //////////
    await fdTo.write(mpcParams.csHash);
    await fdTo.writeUBE32(mpcParams.contributions.length);

    for (let i = 0; i < mpcParams.contributions.length; i++) {
        const c = mpcParams.contributions[i];
        await writeG1(c.deltaAfter);
        await writeG1(c.delta.g1_s);
        await writeG1(c.delta.g1_sx);
        await writeG2(c.delta.g2_spx);
        await fdTo.write(c.transcript);
    }

    const contributionHasher = Blake2b(64);
    hashPubKey(contributionHasher, curve, curContribution);
    const contributionHash = contributionHasher.digest();

    if (logger) logger.info(formatHash(contributionHash, "Contribution Hash: "));

    await fdTo.close();
    await fdFrom.close();

    return contributionHash;

    // Stream nBytes from the challenge file to the response file in chunks.
    async function copy(nBytes) {
        const CHUNK_SIZE = fdFrom.pageSize * 2;
        for (let i = 0; i < nBytes; i += CHUNK_SIZE) {
            const n = Math.min(nBytes - i, CHUNK_SIZE);
            const buff = await fdFrom.read(n);
            await fdTo.write(buff);
        }
    }

    // Read one uncompressed G1 point from the challenge file.
    async function readG1() {
        const buff = await fdFrom.read(curve.G1.F.n8 * 2);
        return curve.G1.fromRprUncompressed(buff, 0);
    }

    // Read one uncompressed G2 point from the challenge file.
    async function readG2() {
        const buff = await fdFrom.read(curve.G2.F.n8 * 2);
        return curve.G2.fromRprUncompressed(buff, 0);
    }

    // Write one uncompressed G1 point to the response file.
    async function writeG1(P) {
        const buff = new Uint8Array(sG1);
        curve.G1.toRprUncompressed(buff, 0, P);
        await fdTo.write(buff);
    }

    // Write one uncompressed G2 point to the response file.
    async function writeG2(P) {
        const buff = new Uint8Array(sG2);
        curve.G2.toRprUncompressed(buff, 0, P);
        await fdTo.write(buff);
    }
}
const { stringifyBigInts } = ffjavascript . utils ;
async function zkeyExportVerificationKey(zkeyName, logger) {
    // Extracts the groth16 verification key from a zkey file — including the
    // precomputed alpha*beta pairing and the IC points — with all big
    // integers stringified.
    const { fd, sections } = await readBinFile$1(zkeyName, "zkey", 2);
    const zkey = await readHeader(fd, sections, "groth16");
    const curve = await getCurveFromQ(zkey.q);
    const sG1 = curve.G1.F.n8 * 2;

    const alphaBeta = await curve.pairing(zkey.vk_alpha_1, zkey.vk_beta_2);

    let vKey = {
        protocol: zkey.protocol,
        curve: curve.name,
        nPublic: zkey.nPublic,
        vk_alpha_1: curve.G1.toObject(zkey.vk_alpha_1),
        vk_beta_2: curve.G2.toObject(zkey.vk_beta_2),
        vk_gamma_2: curve.G2.toObject(zkey.vk_gamma_2),
        vk_delta_2: curve.G2.toObject(zkey.vk_delta_2),
        vk_alphabeta_12: curve.Gt.toObject(alphaBeta)
    };

    // IC section: one G1 point per public input, plus one.
    await startReadUniqueSection$1(fd, sections, 3);
    vKey.IC = [];
    for (let idx = 0; idx <= zkey.nPublic; idx++) {
        const raw = await fd.read(sG1);
        vKey.IC.push(curve.G1.toObject(raw));
    }
    await endReadSection$1(fd);

    vKey = stringifyBigInts(vKey);

    await fd.close();

    return vKey;
}
// Not ready yet
// module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;
async function exportSolidityVerifier(zKeyName, templateName, logger) {
    // Renders a Solidity verifier contract by substituting the verification-key
    // values into the <%...%> placeholders of the given template file.
    const verificationKey = await zkeyExportVerificationKey(zKeyName);

    const fd = await readExisting$3(templateName);
    const buff = await fd.read(fd.totalSize);
    // BUGFIX: the template file descriptor was never closed in the original.
    await fd.close();

    let template = new TextDecoder("utf-8").decode(buff);

    const vkalpha1_str = `${verificationKey.vk_alpha_1[0].toString()},` +
                         `${verificationKey.vk_alpha_1[1].toString()}`;
    template = template.replace("<%vk_alpha1%>", vkalpha1_str);

    // G2 points are emitted with swapped coordinate order ([x1,x0],[y1,y0]).
    const vkbeta2_str = `[${verificationKey.vk_beta_2[0][1].toString()},` +
                        `${verificationKey.vk_beta_2[0][0].toString()}], ` +
                        `[${verificationKey.vk_beta_2[1][1].toString()},` +
                        `${verificationKey.vk_beta_2[1][0].toString()}]`;
    template = template.replace("<%vk_beta2%>", vkbeta2_str);

    const vkgamma2_str = `[${verificationKey.vk_gamma_2[0][1].toString()},` +
                         `${verificationKey.vk_gamma_2[0][0].toString()}], ` +
                         `[${verificationKey.vk_gamma_2[1][1].toString()},` +
                         `${verificationKey.vk_gamma_2[1][0].toString()}]`;
    template = template.replace("<%vk_gamma2%>", vkgamma2_str);

    const vkdelta2_str = `[${verificationKey.vk_delta_2[0][1].toString()},` +
                         `${verificationKey.vk_delta_2[0][0].toString()}], ` +
                         `[${verificationKey.vk_delta_2[1][1].toString()},` +
                         `${verificationKey.vk_delta_2[1][0].toString()}]`;
    template = template.replace("<%vk_delta2%>", vkdelta2_str);

    // The points
    template = template.replace("<%vk_input_length%>", (verificationKey.IC.length - 1).toString());
    template = template.replace("<%vk_ic_length%>", verificationKey.IC.length.toString());

    let vi = "";
    for (let i = 0; i < verificationKey.IC.length; i++) {
        if (vi != "") vi = vi + " ";
        vi = vi + `vk.IC[${i}] = Pairing.G1Point(${verificationKey.IC[i][0].toString()},` +
                  `${verificationKey.IC[i][1].toString()});\n`;
    }
    template = template.replace("<%vk_ic_pts%>", vi);

    return template;
}
async function write(fd, witness, prime) {
    // Writes a wtns file: section 1 holds the header (field-element size in
    // bytes, the prime, and the witness count); section 2 holds the values.
    await startWriteSection(fd, 1);

    // Field elements are stored in 64-bit words, rounded up.
    const n8 = (Math.floor((ffjavascript.Scalar.bitLength(prime) - 1) / 64) + 1) * 8;
    await fd.writeULE32(n8);
    await writeBigInt(fd, prime, n8);
    await fd.writeULE32(witness.length);
    await endWriteSection(fd);

    await startWriteSection(fd, 2);
    for (const w of witness) {
        await writeBigInt(fd, w, n8);
    }
    await endWriteSection(fd);
}
async function writeBin(fd, witnessBin, prime) {
    // Writes an already-serialized witness buffer as a wtns file.
    // Throws if the buffer is not a whole number of field elements.
    await startWriteSection(fd, 1);

    const n8 = (Math.floor((ffjavascript.Scalar.bitLength(prime) - 1) / 64) + 1) * 8;
    await fd.writeULE32(n8);
    await writeBigInt(fd, prime, n8);
    if (witnessBin.byteLength % n8 != 0) {
        throw new Error("Invalid witness length");
    }
    await fd.writeULE32(witnessBin.byteLength / n8);
    await endWriteSection(fd);

    await startWriteSection(fd, 2);
    await fd.write(witnessBin);
    await endWriteSection(fd);
}
async function readHeader$1(fd, sections) {
    // Reads the wtns header section: field-element size in bytes (n8),
    // the field prime (q), and the number of witness values.
    await startReadUniqueSection$1(fd, sections, 1);
    const n8 = await fd.readULE32();
    const q = await readBigInt$1(fd, n8);
    const nWitness = await fd.readULE32();
    await endReadSection$1(fd);

    return { n8, q, nWitness };
}
async function read(fileName) {
    // Loads every witness value from a wtns file into an array of big integers.
    const { fd, sections } = await readBinFile$1(fileName, "wtns", 2);
    const { n8, nWitness } = await readHeader$1(fd, sections);

    await startReadUniqueSection$1(fd, sections, 2);
    const res = [];
    for (let idx = 0; idx < nWitness; idx++) {
        res.push(await readBigInt$1(fd, n8));
    }
    await endReadSection$1(fd);

    await fd.close();

    return res;
}
const { stringifyBigInts : stringifyBigInts$1 } = ffjavascript . utils ;
// 2020-07-13 08:21:03 +03:00  (stray VCS timestamp — commented out; was a bare line breaking the syntax)
async function groth16Prove(zkeyFileName, witnessFileName, logger) {
    // Produces a groth16 proof from a proving key (zkey) and a witness (wtns).
    // Returns { proof, publicSignals } with all big integers stringified.
    // (A stray VCS timestamp line inside the original body was removed.)
    const { fd: fdWtns, sections: sectionsWtns } = await readBinFile$1(witnessFileName, "wtns", 2);

    const wtns = await readHeader$1(fdWtns, sectionsWtns);

    const { fd: fdZKey, sections: sectionsZKey } = await readBinFile$1(zkeyFileName, "zkey", 2);

    const zkey = await readHeader(fdZKey, sectionsZKey, "groth16");

    if (!ffjavascript.Scalar.eq(zkey.r, wtns.q)) {
        throw new Error("Curve of the witness does not match the curve of the proving key");
    }

    if (wtns.nWitness != zkey.nVars) {
        throw new Error(`Invalid witness length. Circuit: ${zkey.nVars}, witness: ${wtns.nWitness}`);
    }

    const curve = await getCurveFromQ(zkey.q);
    const Fr = curve.Fr;
    const G1 = curve.G1;
    const G2 = curve.G2;

    const power = log2(zkey.domainSize);

    const buffWitness = await readFullSection(fdWtns, sectionsWtns, 2);
    const buffCoeffs = await readFullSection(fdZKey, sectionsZKey, 4);
    const buffBasesA = await readFullSection(fdZKey, sectionsZKey, 5);
    const buffBasesB1 = await readFullSection(fdZKey, sectionsZKey, 6);
    const buffBasesB2 = await readFullSection(fdZKey, sectionsZKey, 7);
    const buffBasesC = await readFullSection(fdZKey, sectionsZKey, 8);
    const buffBasesH = await readFullSection(fdZKey, sectionsZKey, 9);

    // Evaluate A, B, C over the base domain, then shift each (batchApplyKey with
    // the 2m-th root of unity) to the odd/coset domain for the H computation.
    const [buffA_T, buffB_T, buffC_T] = await buldABC(curve, zkey, buffWitness, buffCoeffs);

    const buffA = await Fr.ifft(buffA_T);
    const buffAodd = await Fr.batchApplyKey(buffA, Fr.e(1), curve.Fr.w[power + 1]);
    const buffAodd_T = await Fr.fft(buffAodd);

    const buffB = await Fr.ifft(buffB_T);
    const buffBodd = await Fr.batchApplyKey(buffB, Fr.e(1), curve.Fr.w[power + 1]);
    const buffBodd_T = await Fr.fft(buffBodd);

    const buffC = await Fr.ifft(buffC_T);
    const buffCodd = await Fr.batchApplyKey(buffC, Fr.e(1), curve.Fr.w[power + 1]);
    const buffCodd_T = await Fr.fft(buffCodd);

    const buffPodd_T = await joinABC(curve, zkey, buffAodd_T, buffBodd_T, buffCodd_T);

    let proof = {};

    proof.pi_a = await curve.G1.multiExpAffine(buffBasesA, buffWitness);
    let pib1 = await curve.G1.multiExpAffine(buffBasesB1, buffWitness);
    proof.pi_b = await curve.G2.multiExpAffine(buffBasesB2, buffWitness);
    // C bases only cover the private part of the witness (skip ONE + publics).
    proof.pi_c = await curve.G1.multiExpAffine(buffBasesC, buffWitness.slice((zkey.nPublic + 1) * curve.Fr.n8));
    const resH = await curve.G1.multiExpAffine(buffBasesH, buffPodd_T);

    // r, s are the zero-knowledge blinding factors.
    const r = curve.Fr.random();
    const s = curve.Fr.random();

    proof.pi_a = G1.add(proof.pi_a, zkey.vk_alpha_1);
    proof.pi_a = G1.add(proof.pi_a, G1.timesFr(zkey.vk_delta_1, r));

    proof.pi_b = G2.add(proof.pi_b, zkey.vk_beta_2);
    proof.pi_b = G2.add(proof.pi_b, G2.timesFr(zkey.vk_delta_2, s));

    pib1 = G1.add(pib1, zkey.vk_beta_1);
    pib1 = G1.add(pib1, G1.timesFr(zkey.vk_delta_1, s));

    proof.pi_c = G1.add(proof.pi_c, resH);
    proof.pi_c = G1.add(proof.pi_c, G1.timesFr(proof.pi_a, s));
    proof.pi_c = G1.add(proof.pi_c, G1.timesFr(pib1, r));
    proof.pi_c = G1.add(proof.pi_c, G1.timesFr(zkey.vk_delta_1, Fr.neg(Fr.mul(r, s))));

    let publicSignals = [];

    for (let i = 1; i <= zkey.nPublic; i++) {
        const b = buffWitness.slice(i * Fr.n8, i * Fr.n8 + Fr.n8);
        publicSignals.push(ffjavascript.Scalar.fromRprLE(b));
    }

    proof.pi_a = G1.toObject(G1.toAffine(proof.pi_a));
    proof.pi_b = G2.toObject(G2.toAffine(proof.pi_b));
    proof.pi_c = G1.toObject(G1.toAffine(proof.pi_c));

    proof.protocol = "groth16";

    await fdZKey.close();
    await fdWtns.close();

    proof = stringifyBigInts$1(proof);
    publicSignals = stringifyBigInts$1(publicSignals);

    return { proof, publicSignals };
}
async function buldABC ( curve , zkey , witness , coeffs ) {
const concurrency = curve . tm . concurrency ;
const sCoef = 4 * 3 + zkey . n8r ;
const elementsPerChunk = Math . floor ( zkey . domainSize / concurrency ) ;
const coeffsDV = new DataView ( coeffs . buffer , coeffs . byteOffset , coeffs . byteLength ) ;
const promises = [ ] ;
const cutPoints = [ ] ;
for ( let i = 0 ; i < concurrency ; i ++ ) {
cutPoints . push ( getCutPoint ( Math . floor ( i * zkey . domainSize / concurrency ) ) ) ;
}
cutPoints . push ( coeffs . byteLength ) ;
for ( let i = 0 ; i < concurrency ; i ++ ) {
let n ;
if ( i < concurrency - 1 ) {
n = elementsPerChunk ;
} else {
n = zkey . domainSize - i * elementsPerChunk ;
}
if ( n == 0 ) continue ;
const task = [ ] ;
task . push ( { cmd : "ALLOCSET" , var : 0 , buff : coeffs . slice ( cutPoints [ i ] , cutPoints [ i + 1 ] ) } ) ;
task . push ( { cmd : "ALLOCSET" , var : 1 , buff : witness . slice ( ) } ) ;
task . push ( { cmd : "ALLOC" , var : 2 , len : n * curve . Fr . n8 } ) ;
task . push ( { cmd : "ALLOC" , var : 3 , len : n * curve . Fr . n8 } ) ;
task . push ( { cmd : "ALLOC" , var : 4 , len : n * curve . Fr . n8 } ) ;
task . push ( { cmd : "CALL" , fnName : "qap_buildABC" , params : [
{ var : 0 } ,
{ val : ( cutPoints [ i + 1 ] - cutPoints [ i ] ) / sCoef } ,
{ var : 1 } ,
{ var : 2 } ,
{ var : 3 } ,
{ var : 4 } ,
{ val : i * elementsPerChunk } ,
{ val : n }
] } ) ;
task . push ( { cmd : "GET" , out : 0 , var : 2 , len : n * curve . Fr . n8 } ) ;
task . push ( { cmd : "GET" , out : 1 , var : 3 , len : n * curve . Fr . n8 } ) ;
task . push ( { cmd : "GET" , out : 2 , var : 4 , len : n * curve . Fr . n8 } ) ;
promises . push ( curve . tm . queueAction ( task ) ) ;
}
const result = await Promise . all ( promises ) ;
const outBuffA = new Uint8Array ( zkey . domainSize * curve . Fr . n8 ) ;
const outBuffB = new Uint8Array ( zkey . domainSize * curve . Fr . n8 ) ;
const outBuffC = new Uint8Array ( zkey . domainSize * curve . Fr . n8 ) ;
let p = 0 ;
for ( let i = 0 ; i < result . length ; i ++ ) {
outBuffA . set ( result [ i ] [ 0 ] , p ) ;
outBuffB . set ( result [ i ] [ 1 ] , p ) ;
outBuffC . set ( result [ i ] [ 2 ] , p ) ;
p += result [ i ] [ 0 ] . byteLength ;
}
return [ outBuffA , outBuffB , outBuffC ] ;
function getCutPoint ( v ) {
let m = 0 ;
let n = coeffsDV . getUint32 ( 0 , true ) ;
while ( m < n ) {
var k = ( n + m ) >> 1 ;
const va = coeffsDV . getUint32 ( 4 + k * sCoef + 4 , true ) ;
if ( va > v ) {
n = k - 1 ;
} else if ( va < v ) {
m = k + 1 ;
} else {
n = k ;
}
}
return 4 + m * sCoef ;
}
}
async function joinABC(curve, zkey, a, b, c) {
    // Combines equally-sized A, B, C buffers of Fr elements chunk-by-chunk on
    // the worker pool (wasm qap_joinABC), then converts the result out of
    // Montgomery form. Returns a buffer the same size as `a`.
    const concurrency = curve.tm.concurrency;
    const n8 = curve.Fr.n8;
    const nElements = Math.floor(a.byteLength / curve.Fr.n8);
    const perChunk = Math.floor(nElements / concurrency);

    const promises = [];
    for (let t = 0; t < concurrency; t++) {
        const isLast = t === concurrency - 1;
        const n = isLast ? nElements - t * perChunk : perChunk;
        if (n == 0) continue;

        const start = t * perChunk * n8;
        const end = (t * perChunk + n) * n8;
        const task = [
            { cmd: "ALLOCSET", var: 0, buff: a.slice(start, end) },
            { cmd: "ALLOCSET", var: 1, buff: b.slice(start, end) },
            { cmd: "ALLOCSET", var: 2, buff: c.slice(start, end) },
            { cmd: "ALLOC", var: 3, len: n * n8 },
            { cmd: "CALL", fnName: "qap_joinABC", params: [
                { var: 0 },
                { var: 1 },
                { var: 2 },
                { val: n },
                { var: 3 },
            ] },
            { cmd: "CALL", fnName: "frm_batchFromMontgomery", params: [
                { var: 3 },
                { val: n },
                { var: 3 }
            ] },
            { cmd: "GET", out: 0, var: 3, len: n * n8 },
        ];
        promises.push(curve.tm.queueAction(task));
    }

    const result = await Promise.all(promises);

    // Reassemble the ordered chunks into one output buffer.
    const outBuff = new Uint8Array(a.byteLength);
    let p = 0;
    for (const chunk of result) {
        outBuff.set(chunk[0], p);
        p += chunk[0].byteLength;
    }
    return outBuff;
}
const { WitnessCalculatorBuilder } = circomRuntime;

/**
 * Computes the binary witness for `input` with the circuit's WASM witness
 * calculator and writes it to `wtnsFileName` as a wtns (version 2) file.
 */
async function wtnsCalculate(input, wasmFileName, wtnsFileName, options) {
    // Load the whole wasm module into memory.
    const fdWasm = await readExisting$3(wasmFileName);
    const wasm = await fdWasm.read(fdWasm.totalSize);
    await fdWasm.close();

    const wc = await WitnessCalculatorBuilder(wasm);
    const witness = await wc.calculateBinWitness(input);

    const fdWtns = await createBinFile(wtnsFileName, "wtns", 2, 2);
    await writeBin(fdWtns, witness, wc.prime);
    await fdWtns.close();
}
/**
 * End-to-end Groth16 proving: computes the witness in memory from `input`
 * and the circuit wasm, then generates the proof with the given zkey.
 * Returns the { proof, publicSignals } object produced by groth16Prove.
 */
async function groth16FullProve(input, wasmFile, zkeyFileName, logger) {
    const wtns = { type: "mem" };   // keep the witness in memory, no temp file
    await wtnsCalculate(input, wasmFile, wtns);
    return await groth16Prove(zkeyFileName, wtns);
}
/ *
Copyright 2018 0 kims association .
This file is part of snarkjs .
snarkjs is a free software : you can redistribute it and / or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation , either version 3 of the License , or ( at your option )
any later version .
snarkjs is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public License for
more details .
You should have received a copy of the GNU General Public License along with
snarkjs . If not , see < https : //www.gnu.org/licenses/>.
* /
const { unstringifyBigInts } = ffjavascript.utils;

/**
 * Verifies a Groth16 proof against a verification key and the public
 * signals. Returns true when the pairing equation
 *   e(-pi_a, pi_b) · e(cpub, gamma_2) · e(pi_c, delta_2) · e(alpha_1, beta_2) == 1
 * holds, false otherwise (logging the outcome when a logger is given).
 */
async function groth16Verify(vk_verifier, publicSignals, proof, logger) {
    // Inputs arrive JSON-decoded with bigints as strings; normalize first.
    vk_verifier = unstringifyBigInts(vk_verifier);
    proof = unstringifyBigInts(proof);
    publicSignals = unstringifyBigInts(publicSignals);

    const curve = await getCurveFromName(vk_verifier.curve);

    // Accumulate cpub = IC[0] + sum_i publicSignals[i]·IC[i+1] using a single
    // multi-exponentiation over packed affine points and LE scalars.
    const IC0 = curve.G1.fromObject(vk_verifier.IC[0]);
    const pointsBuff = new Uint8Array(curve.G1.F.n8 * 2 * publicSignals.length);
    const scalarsBuff = new Uint8Array(curve.Fr.n8 * publicSignals.length);
    for (let i = 0; i < publicSignals.length; i++) {
        const point = curve.G1.fromObject(vk_verifier.IC[i + 1]);
        pointsBuff.set(point, i * curve.G1.F.n8 * 2);
        ffjavascript.Scalar.toRprLE(scalarsBuff, curve.Fr.n8 * i, publicSignals[i], curve.Fr.n8);
    }
    let cpub = await curve.G1.multiExpAffine(pointsBuff, scalarsBuff);
    cpub = curve.G1.add(cpub, IC0);

    const pi_a = curve.G1.fromObject(proof.pi_a);
    const pi_b = curve.G2.fromObject(proof.pi_b);
    const pi_c = curve.G1.fromObject(proof.pi_c);

    const vk_gamma_2 = curve.G2.fromObject(vk_verifier.vk_gamma_2);
    const vk_delta_2 = curve.G2.fromObject(vk_verifier.vk_delta_2);
    const vk_alpha_1 = curve.G1.fromObject(vk_verifier.vk_alpha_1);
    const vk_beta_2 = curve.G2.fromObject(vk_verifier.vk_beta_2);

    const valid = await curve.pairingEq(
        curve.G1.neg(pi_a), pi_b,
        cpub, vk_gamma_2,
        pi_c, vk_delta_2,
        vk_alpha_1, vk_beta_2
    );

    if (!valid) {
        if (logger) logger.error("Invalid proof");
        return false;
    }
    if (logger) logger.info("OK!");
    return true;
}
const { WitnessCalculatorBuilder: WitnessCalculatorBuilder$1 } = circomRuntime;

/**
 * Calculates a witness while logging debug information according to
 * `options`: -s logs signal sets, -g logs signal gets, -t logs component
 * start/finish. Symbol names are resolved through the .sym file and the
 * resulting witness is written to `wtnsFileName`.
 */
async function wtnsDebug(input, wasmFileName, wtnsFileName, symName, options, logger) {
    const fdWasm = await readExisting$3(wasmFileName);
    const wasm = await fdWasm.read(fdWasm.totalSize);
    await fdWasm.close();

    const wcOps = {
        sanityCheck: true
    };
    // Symbols are loaded once up front; the old per-option
    // `if (!sym) sym = await loadSymbols(symName)` re-load guards were dead
    // code because `sym` was already assigned unconditionally here.
    const sym = await loadSymbols(symName);
    if (options.set) {
        wcOps.logSetSignal = function (labelIdx, value) {
            if (logger) logger.info("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
        };
    }
    if (options.get) {
        wcOps.logGetSignal = function (varIdx, value) {
            if (logger) logger.info("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
        };
    }
    if (options.trigger) {
        wcOps.logStartComponent = function (cIdx) {
            if (logger) logger.info("START: " + sym.componentIdx2Name[cIdx]);
        };
        wcOps.logFinishComponent = function (cIdx) {
            if (logger) logger.info("FINISH: " + sym.componentIdx2Name[cIdx]);
        };
    }

    const wc = await WitnessCalculatorBuilder$1(wasm, wcOps);
    const w = await wc.calculateWitness(input);

    const fdWtns = await createBinFile(wtnsFileName, "wtns", 2, 2);
    await write(fdWtns, w, wc.prime);
    await fdWtns.close();
}
/**
 * Reads a wtns file and returns its witness values (used by the
 * "wtns export json" command).
 */
async function wtnsExportJson(wtnsFileName) {
    return await read(wtnsFileName);
}
/ *
Copyright 2018 0 KIMS association .
This file is part of jaz ( Zero Knowledge Circuit Compiler ) .
jaz is a free software : you can redistribute it and / or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
jaz is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public
License for more details .
You should have received a copy of the GNU General Public License
along with jaz . If not , see < https : //www.gnu.org/licenses/>.
* /
const { stringifyBigInts: stringifyBigInts$2, unstringifyBigInts: unstringifyBigInts$1 } = ffjavascript.utils;

// Shared CLI logger; the per-command -v flag raises the level to DEBUG.
const logger = Logger.create("snarkJS", { showTimestamp: false });
Logger.setLogLevel("INFO");

// Command table consumed by clProcessor: usage string, help text, short
// aliases and the handler implementing each sub-command. Several help
// strings below fix typos and copy-pasted descriptions of the original
// ("Caclculate", "statistiscs", "polinomials", "llallange", "pkey",
// "witnes.json", a doubled "<<", and wrong descriptions for
// "wtns export json" and "zkey import bellman").
const commands = [
    {
        cmd: "powersoftau new <curve> <power> [powersoftau_0000.ptau]",
        description: "Starts a powers of tau ceremony",
        alias: ["ptn"],
        options: "-verbose|v",
        action: powersOfTawNew
    },
    {
        cmd: "powersoftau contribute <powersoftau.ptau> <new_powersoftau.ptau>",
        description: "creates a ptau file with a new contribution",
        alias: ["ptc"],
        options: "-verbose|v -name|n -entropy|e",
        action: powersOfTawContribute
    },
    {
        cmd: "powersoftau export challenge <powersoftau_0000.ptau> [challenge]",
        description: "Creates a challenge",
        alias: ["ptec"],
        options: "-verbose|v",
        action: powersOfTawExportChallenge
    },
    {
        cmd: "powersoftau challenge contribute <curve> <challenge> [response]",
        description: "Contribute to a challenge",
        alias: ["ptcc"],
        options: "-verbose|v -entropy|e",
        action: powersOfTawChallengeContribute
    },
    {
        cmd: "powersoftau import response <powersoftau_old.ptau> <response> <powersoftau_new.ptau>",
        description: "import a response to a ptau file",
        alias: ["ptir"],
        options: "-verbose|v -nopoints -nocheck -name|n",
        action: powersOfTawImport
    },
    {
        cmd: "powersoftau beacon <old_powersoftau.ptau> <new_powersoftau.ptau> <beaconHash(Hex)> <numIterationsExp>",
        description: "adds a beacon",
        alias: ["ptb"],
        options: "-verbose|v -name|n",
        action: powersOfTawBeacon
    },
    {
        cmd: "powersoftau prepare phase2 <powersoftau.ptau> <new_powersoftau.ptau>",
        description: "Prepares phase 2. ",
        longDescription: " This process calculates the evaluation of the Lagrange polynomials at tau for alpha*tau and beta tau",
        alias: ["pt2"],
        options: "-verbose|v",
        action: powersOfTawPreparePhase2
    },
    {
        cmd: "powersoftau verify <powersoftau.ptau>",
        description: "verifies a powers of tau file",
        alias: ["ptv"],
        options: "-verbose|v",
        action: powersOfTawVerify
    },
    {
        cmd: "powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
        description: "Exports a power of tau file to a JSON",
        alias: ["ptej"],
        options: "-verbose|v",
        action: powersOfTawExportJson
    },
    {
        cmd: "r1cs info [circuit.r1cs]",
        description: "Print statistics of a circuit",
        alias: ["ri", "info -r|r1cs:circuit.r1cs"],
        action: r1csInfo$1
    },
    {
        cmd: "r1cs print [circuit.r1cs] [circuit.sym]",
        description: "Print the constraints of a circuit",
        alias: ["rp", "print -r|r1cs:circuit.r1cs -s|sym"],
        action: r1csPrint$1
    },
    {
        cmd: "r1cs export json [circuit.r1cs] [circuit.json]",
        description: "Export r1cs to JSON file",
        alias: ["rej"],
        action: r1csExportJSON
    },
    {
        cmd: "wtns calculate [circuit.wasm] [input.json] [witness.wtns]",
        description: "Calculate specific witness of a circuit given an input",
        alias: ["wc", "calculatewitness -ws|wasm:circuit.wasm -i|input:input.json -wt|witness:witness.wtns"],
        action: wtnsCalculate$1
    },
    {
        cmd: "wtns debug [circuit.wasm] [input.json] [witness.wtns] [circuit.sym]",
        description: "Calculate the witness with debug info.",
        longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ",
        options: "-get|g -set|s -trigger|t",
        alias: ["wd"],
        action: wtnsDebug$1
    },
    {
        cmd: "wtns export json [witness.wtns] [witness.json]",
        description: "Export the witness file to JSON",
        options: "-verbose|v",
        alias: ["wej"],
        action: wtnsExportJson$1
    },
    {
        cmd: "zkey new [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]",
        description: "Creates an initial zkey file with zero contributions ",
        alias: ["zkn"],
        options: "-verbose|v",
        action: zkeyNew
    },
    {
        cmd: "zkey contribute <circuit_old.zkey> <circuit_new.zkey>",
        description: "creates a zkey file with a new contribution",
        alias: ["zkc"],
        options: "-verbose|v -entropy|e -name|n",
        action: zkeyContribute
    },
    {
        cmd: "zkey export bellman [circuit.zkey] [circuit.mpcparams]",
        description: "Export a zKey to a MPCParameters file compatible with kobi/phase2 (Bellman)",
        alias: ["zkeb"],
        options: "-verbose|v",
        action: zkeyExportBellman
    },
    {
        cmd: "zkey bellman contribute <curve> <circuit.mpcparams> <circuit_response.mpcparams>",
        description: "contributes to a challenge file in bellman format",
        alias: ["zkbc"],
        options: "-verbose|v -entropy|e",
        action: zkeyBellmanContribute
    },
    {
        cmd: "zkey import bellman <circuit_old.zkey> <circuit.mpcparams> <circuit_new.zkey>",
        description: "Import a MPCParameters file in kobi/phase2 (Bellman) format to a zKey",
        alias: ["zkib"],
        options: "-verbose|v -name|n",
        action: zkeyImportBellman
    },
    {
        cmd: "zkey beacon <circuit_old.zkey> <circuit_new.zkey> <beaconHash(Hex)> <numIterationsExp>",
        description: "adds a beacon",
        alias: ["zkb"],
        options: "-verbose|v -name|n",
        action: zkeyBeacon
    },
    {
        cmd: "zkey verify [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]",
        description: "Verify zkey file contributions and verify that matches with the original circuit.r1cs and ptau",
        alias: ["zkv"],
        options: "-verbose|v",
        action: zkeyVerify
    },
    {
        cmd: "zkey export verificationkey [circuit.zkey] [verification_key.json]",
        description: "Exports a verification key",
        alias: ["zkev"],
        action: zkeyExportVKey
    },
    {
        cmd: "zkey export json [circuit.zkey] [circuit.zkey.json]",
        description: "Exports a circuit key to a JSON file",
        alias: ["zkej"],
        options: "-verbose|v",
        action: zkeyExportJson$1
    },
    {
        cmd: "zkey export solidityverifier [circuit.zkey] [verifier.sol]",
        description: "Creates a verifier in solidity",
        alias: ["zkesv", "generateverifier -vk|verificationkey -v|verifier"],
        action: zkeyExportSolidityVerifier
    },
    {
        cmd: "zkey export soliditycalldata <public.json> <proof.json>",
        description: "Generates call parameters ready to be called.",
        alias: ["zkesc", "generatecall -pub|public -p|proof"],
        action: zkeyExportSolidityCalldata
    },
    {
        cmd: "groth16 prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]",
        description: "Generates a zk Proof from witness",
        alias: ["g16p", "zpw", "zksnark proof", "proof -pk|provingkey -wt|witness -p|proof -pub|public"],
        options: "-verbose|v -protocol",
        action: groth16Prove$1
    },
    {
        cmd: "groth16 fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]",
        description: "Generates a zk Proof from input",
        alias: ["g16f", "g16i"],
        options: "-verbose|v -protocol",
        action: groth16FullProve$1
    },
    {
        cmd: "groth16 verify [verification_key.json] [public.json] [proof.json]",
        description: "Verify a zk Proof",
        alias: ["g16v", "verify -vk|verificationkey -pub|public -p|proof"],
        action: groth16Verify$1
    },
];

// Dispatch the CLI; the handler's return value becomes the process exit
// code, and any rejection is logged and mapped to exit code 1.
clProcessor(commands).then((res) => {
    process.exit(res);
}, (err) => {
    logger.error(err);
    process.exit(1);
});
/ *
TODO COMMANDS
=== === === === =
{
2020-07-13 08:21:03 +03:00
cmd : "zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]" ,
description : "Run a simple setup for a circuit generating the proving key." ,
alias : [ "zs" , "setup -r1cs|r -provingkey|pk -verificationkey|vk" ] ,
options : "-verbose|v -protocol" ,
action : zksnarkSetup
2020-07-11 11:31:52 +03:00
} ,
{
cmd : "witness verify <circuit.r1cs> <witness.wtns>" ,
description : "Verify a witness agains a r1cs" ,
alias : [ "wv" ] ,
action : witnessVerify
} ,
2020-07-13 08:21:03 +03:00
{
cmd : "powersOfTau export response"
}
2020-07-11 11:31:52 +03:00
* /
/**
 * Formats a field element as a double-quoted, 0x-prefixed hex word padded
 * to 64 hex digits (the shape expected in Solidity calldata).
 * Accepts anything with a toString(16) (number or BigInt).
 */
function p256(n) {
    // padStart replaces the original manual while-loop zero padding;
    // values longer than 64 digits are left untouched, as before.
    const nstr = n.toString(16).padStart(64, "0");
    return `"0x${nstr}"`;
}
/**
 * Replaces the extension of `fileName` with `newExt`; when the name has no
 * dot at all, appends "." + newExt instead.
 * e.g. changeExt("circuit.r1cs", "sym") === "circuit.sym".
 */
function changeExt(fileName, newExt) {
    // lastIndexOf replaces the original O(n^2) char-by-char slicing loop;
    // the result is identical for every input, including leading dots.
    const dotPos = fileName.lastIndexOf(".");
    if (dotPos >= 0) {
        // Keep everything up to and including the last dot.
        return fileName.slice(0, dotPos + 1) + newExt;
    }
    return fileName + "." + newExt;
}
// r1cs info [circuit.r1cs]
// Prints circuit statistics for the given r1cs file.
async function r1csInfo$1(params, options) {
    if (options.verbose) Logger.setLogLevel("DEBUG");
    const fileName = params[0] || "circuit.r1cs";
    await r1csInfo(fileName, logger);
    return 0;
}
// r1cs print [circuit.r1cs] [circuit.sym]
// Pretty-prints the constraints of a circuit, resolving signal names
// through the companion .sym file.
async function r1csPrint$1(params, options) {
    const r1csName = params[0] || "circuit.r1cs";
    const symName = params[1] || changeExt(r1csName, "sym");
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const cir = await load(r1csName, true, true);
    const symbols = await loadSymbols(symName);
    await r1csPrint(cir, symbols, logger);
    return 0;
}
// r1cs export json [circuit.r1cs] [circuit.json]
// Serializes an r1cs file to JSON with bigints stringified.
async function r1csExportJSON(params, options) {
    const r1csName = params[0] || "circuit.r1cs";
    const jsonName = params[1] || changeExt(r1csName, "json");
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const r1csObj = await r1csExportJson(r1csName);
    const jsonText = JSON.stringify(ffjavascript.utils.stringifyBigInts(r1csObj), null, 1);
    await fs.promises.writeFile(jsonName, jsonText);
    return 0;
}
// wtns calculate <circuit.wasm> <input.json> <witness.wtns>
// CLI wrapper: reads the JSON input file and delegates to wtnsCalculate.
async function wtnsCalculate$1(params, options) {
    const wasmName = params[0] || "circuit.wasm";
    const inputName = params[1] || "input.json";
    const witnessName = params[2] || "witness.wtns";
    if (options.verbose) Logger.setLogLevel("DEBUG");

    // Inputs may contain bigints serialized as strings.
    const inputJson = await fs.promises.readFile(inputName, "utf8");
    const input = unstringifyBigInts$1(JSON.parse(inputJson));

    await wtnsCalculate(input, wasmName, witnessName);
    return 0;
}
// wtns debug <circuit.wasm> <input.json> <witness.wtns> <circuit.sym>
// -get|g -set|s -trigger|t
// CLI wrapper: reads the JSON input and delegates to wtnsDebug, which logs
// signal gets/sets and component triggers according to `options`.
async function wtnsDebug$1(params, options) {
    const wasmName = params[0] || "circuit.wasm";
    const inputName = params[1] || "input.json";
    const witnessName = params[2] || "witness.wtns";
    const symName = params[3] || changeExt(wasmName, "sym");
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const inputJson = await fs.promises.readFile(inputName, "utf8");
    const input = unstringifyBigInts$1(JSON.parse(inputJson));

    await wtnsDebug(input, wasmName, witnessName, symName, options, logger);
    return 0;
}
// wtns export json [witness.wtns] [witness.json]
// CLI wrapper: dumps the witness values of a wtns file to JSON.
async function wtnsExportJson$1(params, options) {
    const wtnsName = params[0] || "witness.wtns";
    const jsonName = params[1] || "witness.json";
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const witness = await wtnsExportJson(wtnsName);
    const jsonText = JSON.stringify(stringifyBigInts$2(witness), null, 1);
    await fs.promises.writeFile(jsonName, jsonText);
    return 0;
}
/ *
// zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]
async function zksnarkSetup ( params , options ) {
const r1csName = params [ 0 ] || "circuit.r1cs" ;
const zkeyName = params [ 1 ] || changeExt ( r1csName , "zkey" ) ;
const verificationKeyName = params [ 2 ] || "verification_key.json" ;
const protocol = options . protocol || "groth16" ;
const cir = await loadR1cs ( r1csName , true ) ;
if ( ! zkSnark [ protocol ] ) throw new Error ( "Invalid protocol" ) ;
const setup = zkSnark [ protocol ] . setup ( cir , options . verbose ) ;
await zkey . utils . write ( zkeyName , setup . vk _proof ) ;
// await fs.promises.writeFile(provingKeyName, JSON.stringify(stringifyBigInts(setup.vk_proof), null, 1), "utf-8");
await fs . promises . writeFile ( verificationKeyName , JSON . stringify ( stringifyBigInts ( setup . vk _verifier ) , null , 1 ) , "utf-8" ) ;
return 0 ;
}
* /
// groth16 prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
// CLI wrapper: proves from an existing witness file and writes the proof
// and the public signals as JSON.
async function groth16Prove$1(params, options) {
    const zkeyName = params[0] || "circuit.zkey";
    const witnessName = params[1] || "witness.wtns";
    const proofName = params[2] || "proof.json";
    const publicName = params[3] || "public.json";
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const { proof, publicSignals } = await groth16Prove(zkeyName, witnessName);

    const writeJson = (name, obj) =>
        fs.promises.writeFile(name, JSON.stringify(stringifyBigInts$2(obj), null, 1), "utf-8");
    await writeJson(proofName, proof);
    await writeJson(publicName, publicSignals);
    return 0;
}
// groth16 fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]
// CLI wrapper: computes the witness from the input and proves in one step,
// writing the proof and public signals as JSON.
async function groth16FullProve$1(params, options) {
    const inputName = params[0] || "input.json";
    const wasmName = params[1] || "circuit.wasm";
    const zkeyName = params[2] || "circuit.zkey";
    const proofName = params[3] || "proof.json";
    const publicName = params[4] || "public.json";
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const inputJson = await fs.promises.readFile(inputName, "utf8");
    const input = unstringifyBigInts$1(JSON.parse(inputJson));

    const { proof, publicSignals } = await groth16FullProve(input, wasmName, zkeyName);

    const writeJson = (name, obj) =>
        fs.promises.writeFile(name, JSON.stringify(stringifyBigInts$2(obj), null, 1), "utf-8");
    await writeJson(proofName, proof);
    await writeJson(publicName, publicSignals);
    return 0;
}
// groth16 verify [verification_key.json] [public.json] [proof.json]
// CLI wrapper: loads the three JSON inputs and verifies the proof.
// Returns 0 on a valid proof, 1 otherwise (process exit code).
async function groth16Verify$1(params, options) {
    const verificationKeyName = params[0] || "verification_key.json";
    const publicName = params[1] || "public.json";
    const proofName = params[2] || "proof.json";

    // Read asynchronously, consistent with the other command handlers
    // (the original used blocking fs.readFileSync inside an async function).
    const verificationKey = unstringifyBigInts$1(JSON.parse(await fs.promises.readFile(verificationKeyName, "utf8")));
    const pub = unstringifyBigInts$1(JSON.parse(await fs.promises.readFile(publicName, "utf8")));
    const proof = unstringifyBigInts$1(JSON.parse(await fs.promises.readFile(proofName, "utf8")));

    if (options.verbose) Logger.setLogLevel("DEBUG");

    const isValid = await groth16Verify(verificationKey, pub, proof, logger);
    return isValid ? 0 : 1;
}
// zkey export vkey [circuit.zkey] [verification_key.json]
// Extracts the verification key from a zkey and writes it as JSON.
async function zkeyExportVKey(params, options) {
    const zkeyName = params[0] || "circuit.zkey";
    // BUGFIX: the output name is the second positional argument; the old
    // code read params[2], so an explicitly given output name was ignored.
    const verificationKeyName = params[1] || "verification_key.json";
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const vKey = await zkeyExportVerificationKey(zkeyName);
    const S = JSON.stringify(ffjavascript.utils.stringifyBigInts(vKey), null, 1);
    await fs.promises.writeFile(verificationKeyName, S);
    return 0; // explicit success exit code, consistent with other handlers
}
// zkey export json [circuit.zkey] [circuit.zkey.json]
// Dumps the full zkey contents as JSON with bigints stringified.
async function zkeyExportJson$1(params, options) {
    const zkeyName = params[0] || "circuit.zkey";
    const zkeyJsonName = params[1] || "circuit.zkey.json";
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const zKey = await zkeyExportJson(zkeyName);
    const S = JSON.stringify(ffjavascript.utils.stringifyBigInts(zKey), null, 1);
    await fs.promises.writeFile(zkeyJsonName, S);
    return 0; // explicit success exit code, consistent with other handlers
}
// solidity genverifier [circuit.zkey] [verifier.sol]
// Renders the Solidity verifier template with the zkey's verification key.
async function zkeyExportSolidityVerifier(params, options) {
    const zkeyName = params.length < 1 ? "circuit.zkey" : params[0];
    const verifierName = params.length < 2 ? "verifier.sol" : params[1];
    if (options.verbose) Logger.setLogLevel("DEBUG");

    // The template sits next to the bundle in a packaged install, or one
    // directory up when running from the source tree: probe the first
    // location and fall back to the second when stat fails.
    let templateName = path.join(__dirname, "templates", "verifier_groth16.sol");
    try {
        await fs.promises.stat(templateName);
    } catch (err) {
        templateName = path.join(__dirname, "..", "templates", "verifier_groth16.sol");
    }

    const verifierCode = await exportSolidityVerifier(zkeyName, templateName);
    fs.writeFileSync(verifierName, verifierCode, "utf-8");
    return 0;
}
// solidity gencall <public.json> <proof.json>
// Prints the proof and public inputs formatted as Solidity call parameters.
// Supports the legacy "original" protocol layout as well as groth16/kimleeoh.
async function zkeyExportSolidityCalldata(params, options) {
    const publicName = params.length < 1 ? "public.json" : params[0];
    const proofName = params.length < 2 ? "proof.json" : params[1];
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const pub = unstringifyBigInts$1(JSON.parse(fs.readFileSync(publicName, "utf8")));
    const proof = unstringifyBigInts$1(JSON.parse(fs.readFileSync(proofName, "utf8")));

    // Public inputs as a comma-separated list of padded hex words.
    const inputs = pub.map((v) => p256(v)).join(",");

    let S;
    if ((typeof proof.protocol === "undefined") || (proof.protocol == "original")) {
        S = `[${p256(proof.pi_a[0])}, ${p256(proof.pi_a[1])}],` +
            `[${p256(proof.pi_ap[0])}, ${p256(proof.pi_ap[1])}],` +
            // G2 coordinates are emitted with their limbs swapped, as the
            // Solidity pairing precompile expects.
            `[[${p256(proof.pi_b[0][1])}, ${p256(proof.pi_b[0][0])}],[${p256(proof.pi_b[1][1])}, ${p256(proof.pi_b[1][0])}]],` +
            `[${p256(proof.pi_bp[0])}, ${p256(proof.pi_bp[1])}],` +
            `[${p256(proof.pi_c[0])}, ${p256(proof.pi_c[1])}],` +
            `[${p256(proof.pi_cp[0])}, ${p256(proof.pi_cp[1])}],` +
            `[${p256(proof.pi_h[0])}, ${p256(proof.pi_h[1])}],` +
            `[${p256(proof.pi_kp[0])}, ${p256(proof.pi_kp[1])}],` +
            `[${inputs}]`;
    } else if ((proof.protocol == "groth16") || (proof.protocol == "kimleeoh")) {
        S = `[${p256(proof.pi_a[0])}, ${p256(proof.pi_a[1])}],` +
            `[[${p256(proof.pi_b[0][1])}, ${p256(proof.pi_b[0][0])}],[${p256(proof.pi_b[1][1])}, ${p256(proof.pi_b[1][0])}]],` +
            `[${p256(proof.pi_c[0])}, ${p256(proof.pi_c[1])}],` +
            `[${inputs}]`;
    } else {
        throw new Error("InvalidProof");
    }

    console.log(S);
    return 0;
}
// powersoftau new <curve> <power> [powersoftau_0000.ptau]
// Starts a new powers-of-tau ceremony file for the given curve and power.
async function powersOfTawNew(params, options) {
    const curveName = params[0];
    const power = parseInt(params[1], 10); // explicit radix
    // parseInt can yield NaN (missing/garbled argument). NaN silently passed
    // the old `(power < 1) || (power > 28)` check because every comparison
    // with NaN is false, so validate integrality explicitly.
    if (!Number.isInteger(power) || (power < 1) || (power > 28)) {
        throw new Error("Power must be between 1 and 28");
    }
    const ptauName = params.length < 3 ? "powersOfTaw" + power + "_0000.ptau" : params[2];

    const curve = await getCurveFromName(curveName);
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return await newAccumulator(curve, power, ptauName, logger);
}
// powersoftau export challenge <powersoftau_0000.ptau> [challenge]
// Exports the current accumulator state as a challenge file.
async function powersOfTawExportChallenge(params, options) {
    const ptauName = params[0];
    const challengeName = params.length < 2 ? "challenge" : params[1];
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return await exportChallenge(ptauName, challengeName, logger);
}
// powersoftau challenge contribute <curve> <challenge> [response]
// Contributes entropy to a challenge file, producing a response file
// (default name: the challenge name with a ".response" extension).
async function powersOfTawChallengeContribute(params, options) {
    const curve = await getCurveFromName(params[0]);
    const challengeName = params[1];
    const responseName = params.length < 3 ? changeExt(challengeName, "response") : params[2];
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return await challengeContribute(curve, challengeName, responseName, options.entropy, logger);
}
// powersoftau import response <old.ptau> <response> <new.ptau>
// Imports a contribution response into a new ptau file. -nopoints skips
// importing the points; -nocheck skips the (not yet implemented) check.
async function powersOfTawImport(params, options) {
    const oldPtauName = params[0];
    const response = params[1];
    const newPtauName = params[2];
    const importPoints = !options.nopoints;
    const doCheck = !options.nocheck;
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const res = await importResponse(oldPtauName, response, newPtauName, options.name, importPoints, logger);
    if (res) return res;
    if (!doCheck) return;
    // TODO Verify
}
// powersoftau verify <powersoftau.ptau>
// Verifies a ptau file; exit code 0 only on a strict `true` result.
async function powersOfTawVerify(params, options) {
    const ptauName = params[0];
    if (options.verbose) Logger.setLogLevel("DEBUG");
    const ok = await verify(ptauName, logger);
    return ok === true ? 0 : 1;
}
// powersoftau beacon <old.ptau> <new.ptau> <beaconHash(Hex)> <numIterationsExp>
// Applies a random beacon as the final ceremony contribution.
async function powersOfTawBeacon(params, options) {
    const [oldPtauName, newPtauName, beaconHashStr, numIterationsExp] = params;
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return await beacon(oldPtauName, newPtauName, options.name, beaconHashStr, numIterationsExp, logger);
}
// powersoftau contribute <powersoftau.ptau> <new_powersoftau.ptau>
// Adds a named contribution (with optional user-supplied entropy).
async function powersOfTawContribute(params, options) {
    const [oldPtauName, newPtauName] = params;
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return await contribute(oldPtauName, newPtauName, options.name, options.entropy, logger);
}
// powersoftau prepare phase2 <powersoftau.ptau> <new_powersoftau.ptau>
// Precomputes the Lagrange evaluations needed for circuit-specific phase 2.
async function powersOfTawPreparePhase2(params, options) {
    const [oldPtauName, newPtauName] = params;
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return await preparePhase2(oldPtauName, newPtauName, logger);
}
// powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>
// Dumps a ptau file as JSON with bigints stringified.
async function powersOfTawExportJson(params, options) {
    const [ptauName, jsonName] = params;
    if (options.verbose) Logger.setLogLevel("DEBUG");

    const pTau = await exportJson(ptauName, logger);
    const jsonText = JSON.stringify(stringifyBigInts$2(pTau), null, 1);
    await fs.promises.writeFile(jsonName, jsonText);
}
// phase2 new <circuit.r1cs> <powersoftau.ptau> <circuit.zkey>
// Creates the initial (zero-contribution) zkey for a circuit.
async function zkeyNew(params, options) {
    const r1csName = params.length < 1 ? "circuit.r1cs" : params[0];
    const ptauName = params.length < 2 ? "powersoftau.ptau" : params[1];
    const zkeyName = params.length < 3 ? "circuit.zkey" : params[2];
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return newZKey(r1csName, ptauName, zkeyName, logger);
}
// zkey export bellman [circuit.zkey] [circuit.mpcparams]
// Exports a zkey as a Bellman-compatible MPCParameters file.
async function zkeyExportBellman(params, options) {
    const zkeyName = params.length < 1 ? "circuit.zkey" : params[0];
    const mpcparamsName = params.length < 2 ? "circuit.mpcparams" : params[1];
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return phase2exportMPCParams(zkeyName, mpcparamsName, logger);
}
// zkey import bellman <circuit_old.zkey> <circuit.mpcparams> <circuit_new.zkey>
// Imports a Bellman MPCParameters file back into a new zkey.
async function zkeyImportBellman(params, options) {
    const [zkeyNameOld, mpcParamsName, zkeyNameNew] = params;
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return phase2importMPCParams(zkeyNameOld, mpcParamsName, zkeyNameNew, options.name, logger);
}
// phase2 verify [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]
// Verifies a zkey against its circuit and ptau file.
// Missing positional arguments fall back to conventional default file names.
// Returns a process exit code: 0 on successful verification, 1 otherwise.
async function zkeyVerify(params, options) {
    const r1csName = params.length < 1 ? "circuit.r1cs" : params[0];
    const ptauName = params.length < 2 ? "powersoftau.ptau" : params[1];
    const zkeyName = params.length < 3 ? "circuit.zkey" : params[2];
    if (options.verbose) Logger.setLogLevel("DEBUG");
    const res = await phase2verify(r1csName, ptauName, zkeyName, logger);
    return res === true ? 0 : 1;
}
// zkey contribute <circuit_old.zkey> <circuit_new.zkey>
// Adds a phase-2 contribution to a zkey; options.name labels the contribution
// and options.entropy seeds the randomness.
async function zkeyContribute(params, options) {
    const [zkeyOldName, zkeyNewName] = params;
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return phase2contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, logger);
}
// zkey beacon <circuit_old.zkey> <circuit_new.zkey> <beaconHash(Hex)> <numIterationsExp>
// Applies a random-beacon contribution to a zkey, derived from a public
// beacon hash iterated 2^numIterationsExp times.
async function zkeyBeacon(params, options) {
    const [zkeyOldName, zkeyNewName, beaconHashStr, numIterationsExp] = params;
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return await beacon$1(zkeyOldName, zkeyNewName, options.name, beaconHashStr, numIterationsExp, logger);
}
// zkey challenge contribute <curve> <challenge> [response]
// Bellman-style contribution: reads a challenge file, applies a contribution
// seeded from options.entropy, and writes the response file. If no response
// file name is given, it is derived from the challenge name by swapping the
// extension to "response".
// NOTE: stray git-blame timestamp lines that were interleaved in this
// function's body (breaking the file's syntax) have been removed.
async function zkeyBellmanContribute(params, options) {
    const curve = await getCurveFromName(params[0]);
    const challengeName = params[1];
    let responseName;
    if (params.length < 3) {
        responseName = changeExt(challengeName, "response");
    } else {
        responseName = params[2];
    }
    if (options.verbose) Logger.setLogLevel("DEBUG");
    return bellmanContribute(curve, challengeName, responseName, options.entropy, logger);
}