#! /usr/bin/env node
'use strict';

var fs = require('fs');
var r1csfile = require('r1csfile');
var fastFile = require('fastfile');
var ffjavascript = require('ffjavascript');
var path = require('path');
var Blake2b = require('blake2b-wasm');
var readline = require('readline');
var crypto = require('crypto');
var binFileUtils = require('@iden3/binfileutils');
var ejs = require('ejs');
var circom_runtime = require('circom_runtime');
var jsSha3 = require('js-sha3');
var Logger = require('logplease');

function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

function _interopNamespace(e) {
    if (e && e.__esModule) return e;
    var n = Object.create(null);
    if (e) {
        Object.keys(e).forEach(function (k) {
            if (k !== 'default') {
                var d = Object.getOwnPropertyDescriptor(e, k);
                Object.defineProperty(n, k, d.get ? d : {
                    enumerable: true,
                    get: function () { return e[k]; }
                });
            }
        });
    }
    n["default"] = e;
    return Object.freeze(n);
}

var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
var fastFile__namespace = /*#__PURE__*/_interopNamespace(fastFile);
var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
var Blake2b__default = /*#__PURE__*/_interopDefaultLegacy(Blake2b);
var readline__default = /*#__PURE__*/_interopDefaultLegacy(readline);
var crypto__default = /*#__PURE__*/_interopDefaultLegacy(crypto);
var binFileUtils__namespace = /*#__PURE__*/_interopNamespace(binFileUtils);
var ejs__default = /*#__PURE__*/_interopDefaultLegacy(ejs);
var jsSha3__default = /*#__PURE__*/_interopDefaultLegacy(jsSha3);
var Logger__default = /*#__PURE__*/_interopDefaultLegacy(Logger);
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
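// Parses a circom .sym file into lookup tables. Each line is expected to
// carry four comma-separated fields, "labelIdx,varIdx,componentIdx,fullName";
// lines of any other shape are skipped. When several labels map to the same
// wire, the names are joined with "|".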
async function loadSymbols(symFileName) {
    const sym = {
        labelIdx2Name: [ "one" ],
        varIdx2Name: [ "one" ],
        componentIdx2Name: []
    };
    const fd = await fastFile__namespace.readExisting(symFileName);
    const buff = await fd.read(fd.totalSize);
    const symsStr = new TextDecoder("utf-8").decode(buff);
    const lines = symsStr.split("\n");
    for (let i=0; i<lines.length; i++) {
        const arr = lines[i].split(",");
        if (arr.length != 4) continue;
        if (sym.varIdx2Name[arr[1]]) {
            sym.varIdx2Name[arr[1]] += "|" + arr[3];
        } else {
            sym.varIdx2Name[arr[1]] = arr[3];
        }
        sym.labelIdx2Name[arr[0]] = arr[3];
        if (!sym.componentIdx2Name[arr[2]]) {
            sym.componentIdx2Name[arr[2]] = extractComponent(arr[3]);
        }
    }

    await fd.close();

    return sym;

    function extractComponent(name) {
        const arr = name.split(".");
        arr.pop(); // Remove the last element (the signal name) to keep the component path
        return arr.join(".");
    }
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
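// Pretty-prints every R1CS constraint as "[A] * [B] - [C] = 0", resolving
// wire indexes to signal names through the symbol table loaded above.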
function r1csPrint$1(r1cs, syms, logger) {
    for (let i=0; i<r1cs.constraints.length; i++) {
        printConstraint(r1cs.constraints[i]);
    }

    function printConstraint(c) {
        const lc2str = (lc) => {
            let S = "";
            const keys = Object.keys(lc);
            keys.forEach( (k) => {
                let name = syms.varIdx2Name[k];
                if (name == "one") name = "";

                let vs = r1cs.curve.Fr.toString(lc[k]);
                if (vs == "1") vs = "";   // Do not show ones
                if (vs == "-1") vs = "-"; // Do not show ones
                if ((S != "") && (vs[0] != "-")) vs = "+" + vs;
                if (S != "") vs = " " + vs;
                S = S + vs + name;
            });
            return S;
        };
        const S = `[ ${lc2str(c[0])} ] * [ ${lc2str(c[1])} ] - [ ${lc2str(c[2])} ] = 0`;
        if (logger) logger.info(S);
    }
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
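// Scalar-field primes (r) of the two supported curves, used below to
// identify which curve an .r1cs file was compiled for.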
const bls12381r = ffjavascript.Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
const bn128r = ffjavascript.Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");

async function r1csInfo$1(r1csName, logger) {

    const cir = await r1csfile.readR1cs(r1csName);

    if (ffjavascript.Scalar.eq(cir.prime, bn128r)) {
        if (logger) logger.info("Curve: bn-128");
    } else if (ffjavascript.Scalar.eq(cir.prime, bls12381r)) {
        if (logger) logger.info("Curve: bls12-381");
    } else {
        if (logger) logger.info(`Unknown Curve. Prime: ${ffjavascript.Scalar.toString(cir.prime)}`);
    }
    if (logger) logger.info(`# of Wires: ${cir.nVars}`);
    if (logger) logger.info(`# of Constraints: ${cir.nConstraints}`);
    if (logger) logger.info(`# of Private Inputs: ${cir.nPrvInputs}`);
    if (logger) logger.info(`# of Public Inputs: ${cir.nPubInputs}`);
    if (logger) logger.info(`# of Labels: ${cir.nLabels}`);
    if (logger) logger.info(`# of Outputs: ${cir.nOutputs}`);

    return cir;
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
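// Recursively converts field elements (Uint8Array), bigints and anything
// exposing .eq() into decimal strings so the structure can be serialized
// as JSON; arrays and plain objects are walked element by element.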
function stringifyBigInts$4(Fr, o) {
    if (o instanceof Uint8Array) {
        return Fr.toString(o);
    } else if (Array.isArray(o)) {
        return o.map(stringifyBigInts$4.bind(null, Fr));
    } else if (typeof o == "object") {
        const res = {};
        const keys = Object.keys(o);
        keys.forEach( (k) => {
            res[k] = stringifyBigInts$4(Fr, o[k]);
        });
        return res;
    } else if ((typeof(o) == "bigint") || o.eq !== undefined) {
        return o.toString(10);
    } else {
        return o;
    }
}

async function r1csExportJson(r1csFileName, logger) {

    const cir = await r1csfile.readR1cs(r1csFileName, true, true, true, logger);
    const Fr = cir.curve.Fr;
    delete cir.curve;

    return stringifyBigInts$4(Fr, cir);
}
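// A minimal usage sketch (the file names here are hypothetical):
//   const json = await r1csExportJson("circuit.r1cs", logger);
//   fs.writeFileSync("circuit.r1cs.json", JSON.stringify(json, null, 1), "utf-8");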
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
const __dirname$2 = path__default["default"].dirname(new URL((typeof document === 'undefined' ? new (require('u' + 'rl').URL)('file:' + __filename).href : (document.currentScript && document.currentScript.src || new URL('cli.cjs', document.baseURI).href))).pathname);

let pkgS;
try {
    pkgS = fs__default["default"].readFileSync(path__default["default"].join(__dirname$2, "package.json"));
} catch (err) {
    pkgS = fs__default["default"].readFileSync(path__default["default"].join(__dirname$2, "..", "package.json"));
}

const pkg = JSON.parse(pkgS);
const version = pkg.version;

let selectedCommand = null;
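// Minimal command-line dispatcher used by the CLI. Tokens starting with "-"
// become options ("--key=value" is split into argv), the rest are matched
// against the declared commands and their aliases; for example, with
// snarkjs's own command table one can write either form:
//   snarkjs r1cs info circuit.r1cs
//   snarkjs ri circuit.r1cs
// Returns 99 on an invalid command or parameter count.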
async function clProcessor(commands) {
    const cl = [];
    const argv = {};
    for (let i=2; i<process.argv.length; i++) {
        if (process.argv[i][0] == "-") {
            let S = process.argv[i];
            while (S[0] == "-") S = S.slice(1);
            const arr = S.split("=");
            if (arr.length > 1) {
                argv[arr[0]] = arr.slice(1).join("=");
            } else {
                argv[arr[0]] = true;
            }
        } else {
            cl.push(process.argv[i]);
        }
    }
    for (let i=0; i<commands.length; i++) {
        const cmd = commands[i];
        const m = calculateMatch(commands[i], cl);
        if (m) {
            if ((argv.h) || (argv.help)) {
                helpCmd(cmd);
                return;
            }
            if (areParamsValid(cmd.cmd, m)) {
                if (cmd.options) {
                    const options = getOptions(cmd.options);
                    await cmd.action(m, options);
                } else {
                    await cmd.action(m, {});
                }
            } else {
                if (m.length > 0) console.log("Invalid number of parameters");
                helpCmd(cmd);
                return 99;
            }
            return;
        }
    }
    if (cl.length > 0) console.log("Invalid command");
    helpAll();
    return 99;
    function calculateMatch(cmd, cl) {
        const alias = [];
        const m = parseLine(cmd.cmd);
        alias.push(m);
        if (cmd.alias) {
            if (Array.isArray(cmd.alias)) {
                for (let i=0; i<cmd.alias.length; i++) {
                    const a = parseLine(cmd.alias[i]);
                    alias.push({
                        cmd: a.cmd,
                        params: m.params
                    });
                }
            } else {
                const a = parseLine(cmd.alias);
                alias.push({
                    cmd: a.cmd,
                    params: m.params
                });
            }
        }
        for (let i=0; i<cl.length; i++) {
            for (let j=0; j<alias.length; j++) {
                const w = alias[j].cmd.shift();
                if (cl[i].toUpperCase() == w.toUpperCase()) {
                    if (alias[j].cmd.length == 0) {
                        return buildRemaining(alias[j].params, cl.slice(i+1));
                    }
                } else {
                    alias.splice(j, 1);
                    j--;
                }
            }
        }
        return null;

        function buildRemaining(defParams, cl) {
            const res = [];
            let p = 0;
            for (let i=0; i<defParams.length; i++) {
                if (defParams[i][0] == "-") {
                    res.push(getOption(defParams[i]).val);
                } else {
                    if (p < cl.length) {
                        res.push(cl[p++]);
                    } else {
                        res.push(null);
                    }
                }
            }
            while (p < cl.length) {
                res.push(cl[p++]);
            }
            return res;
        }
    }

    function parseLine(l) {
        const words = l.match(/(\S+)/g);
        for (let i=0; i<words.length; i++) {
            if (   (words[i][0] == "<")
                || (words[i][0] == "[")
                || (words[i][0] == "-"))
            {
                return {
                    cmd: words.slice(0, i),
                    params: words.slice(i)
                };
            }
        }
        return {
            cmd: words,
            params: []
        };
    }

    function getOption(o) {
        const arr1 = o.slice(1).split(":");
        const arr2 = arr1[0].split("|");
        for (let i=0; i<arr2.length; i++) {
            if (argv[arr2[i]]) return {
                key: arr2[0],
                val: argv[arr2[i]]
            };
        }
        return {
            key: arr2[0],
            val: (arr1.length > 1) ? arr1[1] : null
        };
    }

    function areParamsValid(cmd, params) {
        while ((params.length) && (!params[params.length-1])) params.pop();
        const pl = parseLine(cmd);
        if (params.length > pl.params.length) return false;
        let minParams = pl.params.length;
        while ((minParams > 0) && (pl.params[minParams-1][0] == "[")) minParams--;
        if (params.length < minParams) return false;
        for (let i=0; (i < pl.params.length) && (pl.params[i][0] == "<"); i++) {
            if (typeof params[i] == "undefined") return false;
        }
        return true;
    }

    function getOptions(options) {
        const res = {};
        const opts = options.match(/(\S+)/g);
        for (let i=0; i<opts.length; i++) {
            const o = getOption(opts[i]);
            res[o.key] = o.val;
        }
        return res;
    }
    function printVersion() {
        console.log("snarkjs@" + version);
    }

    function epilog() {
        console.log(`        Copyright (C) 2018  0kims association
        This program comes with ABSOLUTELY NO WARRANTY;
        This is free software, and you are welcome to redistribute it
        under certain conditions; see the COPYING file in the official
        repo directory at  https://github.com/iden3/snarkjs `);
    }
    function helpAll() {
        printVersion();
        epilog();
        console.log("");
        console.log("Usage:");
        console.log("        snarkjs <full command> ...  <options>");
        console.log("   or   snarkjs <shortcut> ...  <options>");
        console.log("");
        console.log("Type snarkjs <command> --help to get more information for that command");
        console.log("");
        console.log("Full Command                  Description");
        console.log("============                  =================");
        for (let i=0; i<commands.length; i++) {
            const cmd = commands[i];
            let S = "";
            const pl = parseLine(cmd.cmd);
            S += pl.cmd.join(" ");
            while (S.length < 30) S = S + " ";
            S += cmd.description;
            console.log(S);
            S = "     Usage:  snarkjs ";
            if (cmd.alias) {
                if (Array.isArray(cmd.alias)) {
                    S += cmd.alias[0];
                } else {
                    S += cmd.alias;
                }
            } else {
                S += pl.cmd.join(" ");
            }
            S += " " + pl.params.join(" ");
            console.log(S);
        }
    }

    function helpCmd(cmd) {
        if (typeof cmd == "undefined") cmd = selectedCommand;
        if (typeof cmd == "undefined") return helpAll();
        printVersion();
        epilog();
        console.log("");
        if (cmd.longDescription) {
            console.log(cmd.longDescription);
        } else {
            console.log(cmd.description);
        }
        console.log("Usage: ");
        console.log("        snarkjs " + cmd.cmd);
        const pl = parseLine(cmd.cmd);
        let S = "   or   snarkjs ";
        if (cmd.alias) {
            if (Array.isArray(cmd.alias)) {
                S += cmd.alias[0];
            } else {
                S += cmd.alias;
            }
        } else {
            S += pl.cmd.join(" ");
        }
        S += " " + pl.params.join(" ");
        console.log(S);
        console.log("");
    }
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
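// Powers-of-tau key derivation. A 64-byte hash is expanded into a ChaCha
// seed to derive a deterministic G2 point; getG2sp binds that point to a
// personalization byte (0 = tau, 1 = alpha, 2 = beta), the current
// challenge and the pair (g1_s, g1_sx), so every contribution key carries
// a proof of knowledge of its private exponent.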
function hashToG2(curve, hash) {
    const hashV = new DataView(hash.buffer, hash.byteOffset, hash.byteLength);
    const seed = [];
    for (let i=0; i<8; i++) {
        seed[i] = hashV.getUint32(i*4);
    }

    const rng = new ffjavascript.ChaCha(seed);

    const g2_sp = curve.G2.fromRng(rng);

    return g2_sp;
}

function getG2sp(curve, personalization, challenge, g1s, g1sx) {

    const h = Blake2b__default["default"](64);
    const b1 = new Uint8Array([personalization]);
    h.update(b1);
    h.update(challenge);
    const b3 = curve.G1.toUncompressed(g1s);
    h.update(b3);
    const b4 = curve.G1.toUncompressed(g1sx);
    h.update(b4);
    const hash = h.digest();

    return hashToG2(curve, hash);
}

function calculatePubKey(k, curve, personalization, challengeHash, rng) {
    k.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
    k.g1_sx = curve.G1.toAffine(curve.G1.timesFr(k.g1_s, k.prvKey));
    k.g2_sp = curve.G2.toAffine(getG2sp(curve, personalization, challengeHash, k.g1_s, k.g1_sx));
    k.g2_spx = curve.G2.toAffine(curve.G2.timesFr(k.g2_sp, k.prvKey));
    return k;
}

function createPTauKey(curve, challengeHash, rng) {
    const key = {
        tau: {},
        alpha: {},
        beta: {}
    };
    key.tau.prvKey = curve.Fr.fromRng(rng);
    key.alpha.prvKey = curve.Fr.fromRng(rng);
    key.beta.prvKey = curve.Fr.fromRng(rng);
    calculatePubKey(key.tau, curve, 0, challengeHash, rng);
    calculatePubKey(key.alpha, curve, 1, challengeHash, rng);
    calculatePubKey(key.beta, curve, 2, challengeHash, rng);
    return key;
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
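// Assorted helpers: a byte-level bit-reversal table, an integer log2,
// hash pretty-printing, byte-wise hash comparison, and hasher cloning via
// Blake2b's partial-hash state.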
const _revTable = [];
for (let i=0; i<256; i++) {
    _revTable[i] = _revSlow(i, 8);
}

function _revSlow(idx, bits) {
    let res = 0;
    let a = idx;
    for (let i=0; i<bits; i++) {
        res <<= 1;
        res = res | (a & 1);
        a >>= 1;
    }
    return res;
}

function log2( V )
{
    return( ( ( V & 0xFFFF0000 ) !== 0 ? ( V &= 0xFFFF0000, 16 ) : 0 ) | ( ( V & 0xFF00FF00 ) !== 0 ? ( V &= 0xFF00FF00, 8 ) : 0 ) | ( ( V & 0xF0F0F0F0 ) !== 0 ? ( V &= 0xF0F0F0F0, 4 ) : 0 ) | ( ( V & 0xCCCCCCCC ) !== 0 ? ( V &= 0xCCCCCCCC, 2 ) : 0 ) | ( ( V & 0xAAAAAAAA ) !== 0 ) );
}

function formatHash(b, title) {
    const a = new DataView(b.buffer, b.byteOffset, b.byteLength);
    let S = "";
    for (let i=0; i<4; i++) {
        if (i>0) S += "\n";
        S += "\t\t";
        for (let j=0; j<4; j++) {
            if (j>0) S += " ";
            S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0");
        }
    }
    if (title) S = title + "\n" + S;
    return S;
}

function hashIsEqual(h1, h2) {
    if (h1.byteLength != h2.byteLength) return false;
    var dv1 = new Int8Array(h1);
    var dv2 = new Int8Array(h2);
    for (var i = 0; i != h1.byteLength; i++)
    {
        if (dv1[i] != dv2[i]) return false;
    }
    return true;
}

function cloneHasher(h) {
    const ph = h.getPartialHash();
    const res = Blake2b__default["default"](64);
    res.setPartialHash(ph);
    return res;
}
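// Checks that two pairs share the same discrete-log ratio, i.e. that
// e(g1s, g2sx) == e(g1sx, g2s), via a single multi-pairing
// e(g1s, g2sx) * e(-g1sx, g2s) == 1. Zero points are rejected outright.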
async function sameRatio$2(curve, g1s, g1sx, g2s, g2sx) {
    if (curve.G1.isZero(g1s)) return false;
    if (curve.G1.isZero(g1sx)) return false;
    if (curve.G2.isZero(g2s)) return false;
    if (curve.G2.isZero(g2sx)) return false;
    // return curve.F12.eq(curve.pairing(g1s, g2sx), curve.pairing(g1sx, g2s));
    const res = await curve.pairingEq(g1s, g2sx, curve.G1.neg(g1sx), g2s);
    return res;
}

function askEntropy() {
    if (process.browser) {
        return window.prompt("Enter a random text. (Entropy): ", "");
    } else {
        const rl = readline__default["default"].createInterface({
            input: process.stdin,
            output: process.stdout
        });

        return new Promise((resolve) => {
            rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
        });
    }
}

async function getRandomRng(entropy) {
    // Generate a random Rng
    while (!entropy) {
        entropy = await askEntropy();
    }
    const hasher = Blake2b__default["default"](64);
    hasher.update(crypto__default["default"].randomBytes(64));
    const enc = new TextEncoder(); // always utf-8
    hasher.update(enc.encode(entropy));
    const hash = Buffer.from(hasher.digest());

    const seed = [];
    for (let i=0; i<8; i++) {
        seed[i] = hash.readUInt32BE(i*4);
    }
    const rng = new ffjavascript.ChaCha(seed);
    return rng;
}
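// Derives a deterministic RNG from a public beacon: the beacon hash is
// iterated through SHA-256 2^numIterationsExp times and the final digest
// seeds ChaCha. The inner/outer split exists because JS bitwise shifts
// only work on 32-bit values.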
function rngFromBeaconParams(beaconHash, numIterationsExp) {
    let nIterationsInner;
    let nIterationsOuter;
    if (numIterationsExp < 32) {
        nIterationsInner = (1 << numIterationsExp) >>> 0;
        nIterationsOuter = 1;
    } else {
        nIterationsInner = 0x100000000;
        nIterationsOuter = (1 << (numIterationsExp-32)) >>> 0;
    }

    let curHash = beaconHash;
    for (let i=0; i<nIterationsOuter; i++) {
        for (let j=0; j<nIterationsInner; j++) {
            curHash = crypto__default["default"].createHash("sha256").update(curHash).digest();
        }
    }

    const curHashV = new DataView(curHash.buffer, curHash.byteOffset, curHash.byteLength);
    const seed = [];
    for (let i=0; i<8; i++) {
        seed[i] = curHashV.getUint32(i*4, false);
    }

    const rng = new ffjavascript.ChaCha(seed);

    return rng;
}

function hex2ByteArray(s) {
    if (s instanceof Uint8Array) return s;
    if (s.slice(0,2) == "0x") s = s.slice(2);
    return new Uint8Array(s.match(/[\da-f]{2}/gi).map(function (h) {
        return parseInt(h, 16);
    }));
}

function byteArray2hex(byteArray) {
    return Array.prototype.map.call(byteArray, function(byte) {
        return ("0" + (byte & 0xFF).toString(16)).slice(-2);
    }).join("");
}
ffjavascript.Scalar.e("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16);
ffjavascript.Scalar.e("21888242871839275222246405745257275088548364400416034343698204186575808495617");

const bls12381q = ffjavascript.Scalar.e("1a0111ea397fe69a4b1ba7b6434bacd764774b84f38512bf6730d2a0f6b0f6241eabfffeb153ffffb9feffffffffaaab", 16);
const bn128q = ffjavascript.Scalar.e("21888242871839275222246405745257275088696311157297823662689037894645226208583");

async function getCurveFromQ(q) {
    let curve;
    if (ffjavascript.Scalar.eq(q, bn128q)) {
        curve = await ffjavascript.buildBn128();
    } else if (ffjavascript.Scalar.eq(q, bls12381q)) {
        curve = await ffjavascript.buildBls12381();
    } else {
        throw new Error(`Curve not supported: ${ffjavascript.Scalar.toString(q)}`);
    }
    return curve;
}
async function getCurveFromName(name) {
    let curve;
    const normName = normalizeName(name);
    if (["BN128", "BN254", "ALTBN128"].indexOf(normName) >= 0) {
        curve = await ffjavascript.buildBn128();
    } else if (["BLS12381"].indexOf(normName) >= 0) {
        curve = await ffjavascript.buildBls12381();
    } else {
        throw new Error(`Curve not supported: ${name}`);
    }
    return curve;

    function normalizeName(n) {
        return n.toUpperCase().match(/[A-Za-z0-9]+/g).join("");
    }
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
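// .ptau binary layout, as written by the functions below: section 1 is the
// header (field size, prime q, power, ceremony power); sections 2-6 hold
// the points tauG1, tauG2, alphaTauG1, betaTauG1 and betaG2; section 7
// lists the contributions.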
async function writePTauHeader(fd, curve, power, ceremonyPower) {
    // Write the header
    ///////////

    if (!ceremonyPower) ceremonyPower = power;
    await fd.writeULE32(1); // Header type
    const pHeaderSize = fd.pos;
    await fd.writeULE64(0); // Temporarily set to 0 length

    await fd.writeULE32(curve.F1.n64*8);

    const buff = new Uint8Array(curve.F1.n8);
    ffjavascript.Scalar.toRprLE(buff, 0, curve.q, curve.F1.n8);
    await fd.write(buff);
    await fd.writeULE32(power);          // power
    await fd.writeULE32(ceremonyPower);  // ceremony power

    const headerSize = fd.pos - pHeaderSize - 8;

    const oldPos = fd.pos;

    await fd.writeULE64(headerSize, pHeaderSize);

    fd.pos = oldPos;
}

async function readPTauHeader(fd, sections) {
    if (!sections[1]) throw new Error(fd.fileName + ": File has no header");
    if (sections[1].length > 1) throw new Error(fd.fileName + ": File has more than one header");

    fd.pos = sections[1][0].p;
    const n8 = await fd.readULE32();
    const buff = await fd.read(n8);
    const q = ffjavascript.Scalar.fromRprLE(buff);

    const curve = await getCurveFromQ(q);

    if (curve.F1.n64*8 != n8) throw new Error(fd.fileName + ": Invalid size");

    const power = await fd.readULE32();
    const ceremonyPower = await fd.readULE32();

    if (fd.pos-sections[1][0].p != sections[1][0].size) throw new Error("Invalid PTau header size");

    return { curve, power, ceremonyPower };
}
async function readPtauPubKey(fd, curve, montgomery) {
    const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3);
    return fromPtauPubKeyRpr(buff, 0, curve, montgomery);
}

function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {

    const key = {
        tau: {},
        alpha: {},
        beta: {}
    };

    key.tau.g1_s = readG1();
    key.tau.g1_sx = readG1();
    key.alpha.g1_s = readG1();
    key.alpha.g1_sx = readG1();
    key.beta.g1_s = readG1();
    key.beta.g1_sx = readG1();
    key.tau.g2_spx = readG2();
    key.alpha.g2_spx = readG2();
    key.beta.g2_spx = readG2();

    return key;

    function readG1() {
        let p;
        if (montgomery) {
            p = curve.G1.fromRprLEM(buff, pos);
        } else {
            p = curve.G1.fromRprUncompressed(buff, pos);
        }
        pos += curve.G1.F.n8*2;
        return p;
    }

    function readG2() {
        let p;
        if (montgomery) {
            p = curve.G2.fromRprLEM(buff, pos);
        } else {
            p = curve.G2.fromRprUncompressed(buff, pos);
        }
        pos += curve.G2.F.n8*2;
        return p;
    }
}

function toPtauPubKeyRpr(buff, pos, curve, key, montgomery) {

    writeG1(key.tau.g1_s);
    writeG1(key.tau.g1_sx);
    writeG1(key.alpha.g1_s);
    writeG1(key.alpha.g1_sx);
    writeG1(key.beta.g1_s);
    writeG1(key.beta.g1_sx);
    writeG2(key.tau.g2_spx);
    writeG2(key.alpha.g2_spx);
    writeG2(key.beta.g2_spx);

    async function writeG1(p) {
        if (montgomery) {
            curve.G1.toRprLEM(buff, pos, p);
        } else {
            curve.G1.toRprUncompressed(buff, pos, p);
        }
        pos += curve.F1.n8*2;
    }

    async function writeG2(p) {
        if (montgomery) {
            curve.G2.toRprLEM(buff, pos, p);
        } else {
            curve.G2.toRprUncompressed(buff, pos, p);
        }
        pos += curve.F2.n8*2;
    }

    return buff;
}

async function writePtauPubKey(fd, curve, key, montgomery) {
    const buff = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);
    toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
    await fd.write(buff);
}
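// A contribution record stores the resulting points (tauG1, tauG2, alphaG1,
// betaG1, betaG2), the contributor's public key, the partial Blake2b state
// of the response, the next challenge hash and a list of typed, sorted
// parameters: 1 = name, 2 = numIterationsExp, 3 = beaconHash.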
async function readContribution$1(fd, curve) {
    const c = {};

    c.tauG1 = await readG1();
    c.tauG2 = await readG2();
    c.alphaG1 = await readG1();
    c.betaG1 = await readG1();
    c.betaG2 = await readG2();
    c.key = await readPtauPubKey(fd, curve, true);
    c.partialHash = await fd.read(216);
    c.nextChallenge = await fd.read(64);
    c.type = await fd.readULE32();

    const buffV = new Uint8Array(curve.G1.F.n8*2*6 + curve.G2.F.n8*2*3);
    toPtauPubKeyRpr(buffV, 0, curve, c.key, false);

    const responseHasher = Blake2b__default["default"](64);
    responseHasher.setPartialHash(c.partialHash);
    responseHasher.update(buffV);
    c.responseHash = responseHasher.digest();

    const paramLength = await fd.readULE32();
    const curPos = fd.pos;
    let lastType = 0;
    while (fd.pos-curPos < paramLength) {
        const buffType = await readDV(1);
        if (buffType[0] <= lastType) throw new Error("Parameters in the contribution must be sorted");
        lastType = buffType[0];
        if (buffType[0] == 1) {     // Name
            const buffLen = await readDV(1);
            const buffStr = await readDV(buffLen[0]);
            c.name = new TextDecoder().decode(buffStr);
        } else if (buffType[0] == 2) {
            const buffExp = await readDV(1);
            c.numIterationsExp = buffExp[0];
        } else if (buffType[0] == 3) {
            const buffLen = await readDV(1);
            c.beaconHash = await readDV(buffLen[0]);
        } else {
            throw new Error("Parameter not recognized");
        }
    }
    if (fd.pos != curPos + paramLength) {
        throw new Error("Parameters do not match");
    }

    return c;

    async function readG1() {
        const pBuff = await fd.read(curve.G1.F.n8*2);
        return curve.G1.fromRprLEM(pBuff);
    }

    async function readG2() {
        const pBuff = await fd.read(curve.G2.F.n8*2);
        return curve.G2.fromRprLEM(pBuff);
    }

    async function readDV(n) {
        const b = await fd.read(n);
        return new Uint8Array(b);
    }
}
async function readContributions(fd, curve, sections) {
    if (!sections[7]) throw new Error(fd.fileName + ": File has no contributions");
    if (sections[7][0].length > 1) throw new Error(fd.fileName + ": File has more than one contributions section");

    fd.pos = sections[7][0].p;
    const nContributions = await fd.readULE32();
    const contributions = [];
    for (let i=0; i<nContributions; i++) {
        const c = await readContribution$1(fd, curve);
        c.id = i+1;
        contributions.push(c);
    }

    if (fd.pos-sections[7][0].p != sections[7][0].size) throw new Error("Invalid contribution section size");

    return contributions;
}

async function writeContribution$1(fd, curve, contribution) {

    const buffG1 = new Uint8Array(curve.F1.n8*2);
    const buffG2 = new Uint8Array(curve.F2.n8*2);
    await writeG1(contribution.tauG1);
    await writeG2(contribution.tauG2);
    await writeG1(contribution.alphaG1);
    await writeG1(contribution.betaG1);
    await writeG2(contribution.betaG2);
    await writePtauPubKey(fd, curve, contribution.key, true);
    await fd.write(contribution.partialHash);
    await fd.write(contribution.nextChallenge);
    await fd.writeULE32(contribution.type || 0);

    const params = [];
    if (contribution.name) {
        params.push(1);      // Param Name
        const nameData = new TextEncoder("utf-8").encode(contribution.name.substring(0, 64));
        params.push(nameData.byteLength);
        for (let i=0; i<nameData.byteLength; i++) params.push(nameData[i]);
    }
    if (contribution.type == 1) {
        params.push(2);      // Param numIterationsExp
        params.push(contribution.numIterationsExp);

        params.push(3);      // Beacon Hash
        params.push(contribution.beaconHash.byteLength);
        for (let i=0; i<contribution.beaconHash.byteLength; i++) params.push(contribution.beaconHash[i]);
    }
    if (params.length > 0) {
        const paramsBuff = new Uint8Array(params);
        await fd.writeULE32(paramsBuff.byteLength);
        await fd.write(paramsBuff);
    } else {
        await fd.writeULE32(0);
    }

    async function writeG1(p) {
        curve.G1.toRprLEM(buffG1, 0, p);
        await fd.write(buffG1);
    }

    async function writeG2(p) {
        curve.G2.toRprLEM(buffG2, 0, p);
        await fd.write(buffG2);
    }
}

async function writeContributions(fd, curve, contributions) {

    await fd.writeULE32(7); // Header type
    const pContributionsSize = fd.pos;
    await fd.writeULE64(0); // Temporarily set to 0 length

    await fd.writeULE32(contributions.length);
    for (let i=0; i<contributions.length; i++) {
        await writeContribution$1(fd, curve, contributions[i]);
    }
    const contributionsSize = fd.pos - pContributionsSize - 8;

    const oldPos = fd.pos;

    await fd.writeULE64(contributionsSize, pContributionsSize);
    fd.pos = oldPos;
}
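// The first challenge is the hash of the freshly initialized accumulator:
// an empty Blake2b digest followed by 2^power*2-1 copies of the G1
// generator (tauG1), then 2^power copies each for tauG2, alphaTauG1 and
// betaTauG1, and one betaG2 generator. hashBlock amortizes the hashing by
// building a 500000-point buffer once and feeding it repeatedly.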
function calculateFirstChallengeHash(curve, power, logger) {
    if (logger) logger.debug("Calculating First Challenge Hash");

    const hasher = new Blake2b__default["default"](64);

    const vG1 = new Uint8Array(curve.G1.F.n8*2);
    const vG2 = new Uint8Array(curve.G2.F.n8*2);
    curve.G1.toRprUncompressed(vG1, 0, curve.G1.g);
    curve.G2.toRprUncompressed(vG2, 0, curve.G2.g);

    hasher.update(Blake2b__default["default"](64).digest());

    let n;

    n = (2 ** power)*2-1;
    if (logger) logger.debug("Calculate Initial Hash: tauG1");
    hashBlock(vG1, n);
    n = 2 ** power;
    if (logger) logger.debug("Calculate Initial Hash: tauG2");
    hashBlock(vG2, n);
    if (logger) logger.debug("Calculate Initial Hash: alphaTauG1");
    hashBlock(vG1, n);
    if (logger) logger.debug("Calculate Initial Hash: betaTauG1");
    hashBlock(vG1, n);
    hasher.update(vG2);

    return hasher.digest();

    function hashBlock(buff, n) {
        const blockSize = 500000;
        const nBlocks = Math.floor(n / blockSize);
        const rem = n % blockSize;
        const bigBuff = new Uint8Array(blockSize * buff.byteLength);
        for (let i=0; i<blockSize; i++) {
            bigBuff.set(buff, i*buff.byteLength);
        }
        for (let i=0; i<nBlocks; i++) {
            hasher.update(bigBuff);
            if (logger) logger.debug("Initial hash: " + i*blockSize);
        }
        for (let i=0; i<rem; i++) {
            hasher.update(buff);
        }
    }
}

function keyFromBeacon(curve, challengeHash, beaconHash, numIterationsExp) {

    const rng = rngFromBeaconParams(beaconHash, numIterationsExp);

    const key = createPTauKey(curve, challengeHash, rng);

    return key;
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
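// Creates a brand-new, all-generator .ptau accumulator of the given power
// and returns the first challenge hash the first contributor must respond to.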
async function newAccumulator(curve, power, fileName, logger) {

    await Blake2b__default["default"].ready();

    const fd = await binFileUtils__namespace.createBinFile(fileName, "ptau", 1, 7);

    await writePTauHeader(fd, curve, power, 0);

    const buffG1 = curve.G1.oneAffine;
    const buffG2 = curve.G2.oneAffine;

    // Write tauG1
    ///////////
    await binFileUtils__namespace.startWriteSection(fd, 2);
    const nTauG1 = (2 ** power) * 2 - 1;
    for (let i=0; i<nTauG1; i++) {
        await fd.write(buffG1);
        if ((logger)&&((i%100000) == 0)&&i) logger.log("tauG1: " + i);
    }
    await binFileUtils__namespace.endWriteSection(fd);

    // Write tauG2
    ///////////
    await binFileUtils__namespace.startWriteSection(fd, 3);
    const nTauG2 = (2 ** power);
    for (let i=0; i<nTauG2; i++) {
        await fd.write(buffG2);
        if ((logger)&&((i%100000) == 0)&&i) logger.log("tauG2: " + i);
    }
    await binFileUtils__namespace.endWriteSection(fd);

    // Write alphaTauG1
    ///////////
    await binFileUtils__namespace.startWriteSection(fd, 4);
    const nAlphaTauG1 = (2 ** power);
    for (let i=0; i<nAlphaTauG1; i++) {
        await fd.write(buffG1);
        if ((logger)&&((i%100000) == 0)&&i) logger.log("alphaTauG1: " + i);
    }
    await binFileUtils__namespace.endWriteSection(fd);

    // Write betaTauG1
    ///////////
    await binFileUtils__namespace.startWriteSection(fd, 5);
    const nBetaTauG1 = (2 ** power);
    for (let i=0; i<nBetaTauG1; i++) {
        await fd.write(buffG1);
        if ((logger)&&((i%100000) == 0)&&i) logger.log("betaTauG1: " + i);
    }
    await binFileUtils__namespace.endWriteSection(fd);

    // Write betaG2
    ///////////
    await binFileUtils__namespace.startWriteSection(fd, 6);
    await fd.write(buffG2);
    await binFileUtils__namespace.endWriteSection(fd);

    // Contributions
    ///////////
    await binFileUtils__namespace.startWriteSection(fd, 7);
    await fd.writeULE32(0); // 0 Contributions
    await binFileUtils__namespace.endWriteSection(fd);

    await fd.close();

    const firstChallengeHash = calculateFirstChallengeHash(curve, power, logger);

    if (logger) logger.debug(formatHash(Blake2b__default["default"](64).digest(), "Blank Contribution Hash:"));

    if (logger) logger.info(formatHash(firstChallengeHash, "First Contribution Hash:"));

    return firstChallengeHash;
}
// Format of the output
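// Re-serializes the accumulator into the uncompressed "challenge" format
// consumed by the response tooling: the previous response hash first, then
// every point section converted from Montgomery LE to uncompressed form,
// while recomputing and checking the declared challenge hash.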
async function exportChallenge(pTauFilename, challengeFilename, logger) {
    await Blake2b__default["default"].ready();
    const { fd: fdFrom, sections } = await binFileUtils__namespace.readBinFile(pTauFilename, "ptau", 1);

    const { curve, power } = await readPTauHeader(fdFrom, sections);

    const contributions = await readContributions(fdFrom, curve, sections);
    let lastResponseHash, curChallengeHash;
    if (contributions.length == 0) {
        lastResponseHash = Blake2b__default["default"](64).digest();
        curChallengeHash = calculateFirstChallengeHash(curve, power);
    } else {
        lastResponseHash = contributions[contributions.length-1].responseHash;
        curChallengeHash = contributions[contributions.length-1].nextChallenge;
    }

    if (logger) logger.info(formatHash(lastResponseHash, "Last Response Hash: "));

    if (logger) logger.info(formatHash(curChallengeHash, "New Challenge Hash: "));

    const fdTo = await fastFile__namespace.createOverride(challengeFilename);

    const toHash = Blake2b__default["default"](64);
    await fdTo.write(lastResponseHash);
    toHash.update(lastResponseHash);

    await exportSection(2, "G1", (2 ** power) * 2 - 1, "tauG1");
    await exportSection(3, "G2", (2 ** power), "tauG2");
    await exportSection(4, "G1", (2 ** power), "alphaTauG1");
    await exportSection(5, "G1", (2 ** power), "betaTauG1");
    await exportSection(6, "G2", 1, "betaG2");

    await fdFrom.close();
    await fdTo.close();

    const calcCurChallengeHash = toHash.digest();

    if (!hashIsEqual(curChallengeHash, calcCurChallengeHash)) {
        if (logger) logger.info(formatHash(calcCurChallengeHash, "Calc Current Challenge Hash: "));

        if (logger) logger.error("PTau file is corrupted. Calculated new challenge hash does not match with the declared one");
        throw new Error("PTau file is corrupted. Calculated new challenge hash does not match with the declared one");
    }

    return curChallengeHash;

    async function exportSection(sectionId, groupName, nPoints, sectionName) {
        const G = curve[groupName];
        const sG = G.F.n8*2;
        const nPointsChunk = Math.floor((1<<24)/sG);

        await binFileUtils__namespace.startReadUniqueSection(fdFrom, sections, sectionId);
        for (let i=0; i<nPoints; i += nPointsChunk) {
            if (logger) logger.debug(`Exporting ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints-i, nPointsChunk);
            let buff;
            buff = await fdFrom.read(n*sG);
            buff = await G.batchLEMtoU(buff);
            await fdTo.write(buff);
            toHash.update(buff);
        }
        await binFileUtils__namespace.endReadSection(fdFrom);
    }
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
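// Imports a contributor's compressed "response" file on top of the previous
// .ptau: the file size is validated, the previous-challenge hash is checked,
// the points are converted back to Montgomery LE (or only hashed, when
// importPoints is false) and the new contribution record is appended.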
async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, logger) {

    await Blake2b__default["default"].ready();

    const noHash = new Uint8Array(64);
    for (let i=0; i<64; i++) noHash[i] = 0xFF;

    const { fd: fdOld, sections } = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
    const { curve, power } = await readPTauHeader(fdOld, sections);
    const contributions = await readContributions(fdOld, curve, sections);
    const currentContribution = {};

    if (name) currentContribution.name = name;

    const sG1 = curve.F1.n8*2;
    const scG1 = curve.F1.n8; // Compressed size
    const sG2 = curve.F2.n8*2;
    const scG2 = curve.F2.n8; // Compressed size

    const fdResponse = await fastFile__namespace.readExisting(contributionFilename);

    if (fdResponse.totalSize !=
        64 +                            // Old Hash
        ((2 ** power)*2-1)*scG1 +
        (2 ** power)*scG2 +
        (2 ** power)*scG1 +
        (2 ** power)*scG1 +
        scG2 +
        sG1*6 + sG2*3)
        throw new Error("Size of the contribution is invalid");

    let lastChallengeHash;

    if (contributions.length>0) {
        lastChallengeHash = contributions[contributions.length-1].nextChallenge;
    } else {
        lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
    }

    const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, importPoints ? 7 : 2);
    await writePTauHeader(fdNew, curve, power);

    const contributionPreviousHash = await fdResponse.read(64);

    if (hashIsEqual(noHash, lastChallengeHash)) {
        lastChallengeHash = contributionPreviousHash;
        contributions[contributions.length-1].nextChallenge = lastChallengeHash;
    }

    if (!hashIsEqual(contributionPreviousHash, lastChallengeHash))
        throw new Error("Wrong contribution. This contribution is not based on the previous hash");

    const hasherResponse = new Blake2b__default["default"](64);
    hasherResponse.update(contributionPreviousHash);

    const startSections = [];
    let res;
    res = await processSection(fdResponse, fdNew, "G1", 2, (2 ** power) * 2 - 1, [1], "tauG1");
    currentContribution.tauG1 = res[0];
    res = await processSection(fdResponse, fdNew, "G2", 3, (2 ** power), [1], "tauG2");
    currentContribution.tauG2 = res[0];
    res = await processSection(fdResponse, fdNew, "G1", 4, (2 ** power), [0], "alphaG1");
    currentContribution.alphaG1 = res[0];
    res = await processSection(fdResponse, fdNew, "G1", 5, (2 ** power), [0], "betaG1");
    currentContribution.betaG1 = res[0];
    res = await processSection(fdResponse, fdNew, "G2", 6, 1, [0], "betaG2");
    currentContribution.betaG2 = res[0];

    currentContribution.partialHash = hasherResponse.getPartialHash();

    const buffKey = await fdResponse.read(curve.F1.n8*2*6 + curve.F2.n8*2*3);

    currentContribution.key = fromPtauPubKeyRpr(buffKey, 0, curve, false);

    hasherResponse.update(new Uint8Array(buffKey));
    const hashResponse = hasherResponse.digest();

    if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));

    if (importPoints) {
        const nextChallengeHasher = new Blake2b__default["default"](64);
        nextChallengeHasher.update(hashResponse);

        await hashSection(nextChallengeHasher, fdNew, "G1", 2, (2 ** power) * 2 - 1, "tauG1", logger);
        await hashSection(nextChallengeHasher, fdNew, "G2", 3, (2 ** power), "tauG2", logger);
        await hashSection(nextChallengeHasher, fdNew, "G1", 4, (2 ** power), "alphaTauG1", logger);
        await hashSection(nextChallengeHasher, fdNew, "G1", 5, (2 ** power), "betaTauG1", logger);
        await hashSection(nextChallengeHasher, fdNew, "G2", 6, 1, "betaG2", logger);

        currentContribution.nextChallenge = nextChallengeHasher.digest();

        if (logger) logger.info(formatHash(currentContribution.nextChallenge, "Next Challenge Hash: "));
    } else {
        currentContribution.nextChallenge = noHash;
    }

    contributions.push(currentContribution);

    await writeContributions(fdNew, curve, contributions);

    await fdResponse.close();
    await fdNew.close();
    await fdOld.close();

    return currentContribution.nextChallenge;
    async function processSection(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {
        if (importPoints) {
            return await processSectionImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName);
        } else {
            return await processSectionNoImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName);
        }
    }

    async function processSectionImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {

        const G = curve[groupName];
        const scG = G.F.n8;
        const sG = G.F.n8*2;

        const singularPoints = [];

        await binFileUtils__namespace.startWriteSection(fdTo, sectionId);
        const nPointsChunk = Math.floor((1<<24)/sG);

        startSections[sectionId] = fdTo.pos;

        for (let i=0; i<nPoints; i += nPointsChunk) {
            if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints-i, nPointsChunk);

            const buffC = await fdFrom.read(n * scG);

            hasherResponse.update(buffC);

            const buffLEM = await G.batchCtoLEM(buffC);

            await fdTo.write(buffLEM);

            for (let j=0; j<singularPointIndexes.length; j++) {
                const sp = singularPointIndexes[j];
                if ((sp >= i) && (sp < i+n)) {
                    const P = G.fromRprLEM(buffLEM, (sp-i)*sG);
                    singularPoints.push(P);
                }
            }
        }

        await binFileUtils__namespace.endWriteSection(fdTo);

        return singularPoints;
    }

    async function processSectionNoImportPoints(fdFrom, fdTo, groupName, sectionId, nPoints, singularPointIndexes, sectionName) {

        const G = curve[groupName];
        const scG = G.F.n8;

        const singularPoints = [];

        const nPointsChunk = Math.floor((1<<24)/scG);

        for (let i=0; i<nPoints; i += nPointsChunk) {
            if (logger) logger.debug(`Importing ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints-i, nPointsChunk);

            const buffC = await fdFrom.read(n * scG);

            hasherResponse.update(buffC);

            for (let j=0; j<singularPointIndexes.length; j++) {
                const sp = singularPointIndexes[j];
                if ((sp >= i) && (sp < i+n)) {
                    const P = G.fromRprCompressed(buffC, (sp-i)*scG);
                    singularPoints.push(P);
                }
            }
        }

        return singularPoints;
    }

    async function hashSection(nextChallengeHasher, fdTo, groupName, sectionId, nPoints, sectionName, logger) {

        const G = curve[groupName];
        const sG = G.F.n8*2;
        const nPointsChunk = Math.floor((1<<24)/sG);

        const oldPos = fdTo.pos;
        fdTo.pos = startSections[sectionId];

        for (let i=0; i<nPoints; i += nPointsChunk) {
            if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints-i, nPointsChunk);

            const buffLEM = await fdTo.read(n * sG);

            const buffU = await G.batchLEMtoU(buffLEM);

            nextChallengeHasher.update(buffU);
        }

        fdTo.pos = oldPos;
    }
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
const sameRatio$1 = sameRatio$2 ;
2020-07-11 11:31:52 +03:00
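/*
    verifyContribution checks one contribution against its predecessor. For a
    beacon contribution (type 1) it first re-derives the beacon key from the
    previous challenge and compares every component. It then runs
    pairing-based sameRatio checks: each key pair (tau, alpha, beta) must be
    internally consistent, and the new tauG1/tauG2/alphaG1/betaG1/betaG2
    values must equal the previous values scaled by the contributor's secrets.
*/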
async function verifyContribution(curve, cur, prev, logger) {
    let sr;
    if (cur.type == 1) {    // Verify the beacon.
        const beaconKey = keyFromBeacon(curve, prev.nextChallenge, cur.beaconHash, cur.numIterationsExp);

        if (!curve.G1.eq(cur.key.tau.g1_s, beaconKey.tau.g1_s)) {
            if (logger) logger.error(`BEACON key (tauG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}`);
            return false;
        }
        if (!curve.G1.eq(cur.key.tau.g1_sx, beaconKey.tau.g1_sx)) {
            if (logger) logger.error(`BEACON key (tauG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}`);
            return false;
        }
        if (!curve.G2.eq(cur.key.tau.g2_spx, beaconKey.tau.g2_spx)) {
            if (logger) logger.error(`BEACON key (tauG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}`);
            return false;
        }
        if (!curve.G1.eq(cur.key.alpha.g1_s, beaconKey.alpha.g1_s)) {
            if (logger) logger.error(`BEACON key (alphaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}`);
            return false;
        }
        if (!curve.G1.eq(cur.key.alpha.g1_sx, beaconKey.alpha.g1_sx)) {
            if (logger) logger.error(`BEACON key (alphaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}`);
            return false;
        }
        if (!curve.G2.eq(cur.key.alpha.g2_spx, beaconKey.alpha.g2_spx)) {
            if (logger) logger.error(`BEACON key (alphaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}`);
            return false;
        }
        if (!curve.G1.eq(cur.key.beta.g1_s, beaconKey.beta.g1_s)) {
            if (logger) logger.error(`BEACON key (betaG1_s) is not generated correctly in challenge #${cur.id} ${cur.name || ""}`);
            return false;
        }
        if (!curve.G1.eq(cur.key.beta.g1_sx, beaconKey.beta.g1_sx)) {
            if (logger) logger.error(`BEACON key (betaG1_sx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}`);
            return false;
        }
        if (!curve.G2.eq(cur.key.beta.g2_spx, beaconKey.beta.g2_spx)) {
            if (logger) logger.error(`BEACON key (betaG2_spx) is not generated correctly in challenge #${cur.id} ${cur.name || ""}`);
            return false;
        }
    }

    cur.key.tau.g2_sp = curve.G2.toAffine(getG2sp(curve, 0, prev.nextChallenge, cur.key.tau.g1_s, cur.key.tau.g1_sx));
    cur.key.alpha.g2_sp = curve.G2.toAffine(getG2sp(curve, 1, prev.nextChallenge, cur.key.alpha.g1_s, cur.key.alpha.g1_sx));
    cur.key.beta.g2_sp = curve.G2.toAffine(getG2sp(curve, 2, prev.nextChallenge, cur.key.beta.g1_s, cur.key.beta.g1_sx));

    sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID key (tau) in challenge #" + cur.id);
        return false;
    }

    sr = await sameRatio$1(curve, cur.key.alpha.g1_s, cur.key.alpha.g1_sx, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID key (alpha) in challenge #" + cur.id);
        return false;
    }

    sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID key (beta) in challenge #" + cur.id);
        return false;
    }

    sr = await sameRatio$1(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID tau*G1. challenge #" + cur.id + " It does not follow the previous contribution");
        return false;
    }

    sr = await sameRatio$1(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2);
    if (sr !== true) {
        if (logger) logger.error("INVALID tau*G2. challenge #" + cur.id + " It does not follow the previous contribution");
        return false;
    }

    sr = await sameRatio$1(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID alpha*G1. challenge #" + cur.id + " It does not follow the previous contribution");
        return false;
    }

    sr = await sameRatio$1(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx);
    if (sr !== true) {
        if (logger) logger.error("INVALID beta*G1. challenge #" + cur.id + " It does not follow the previous contribution");
        return false;
    }

    sr = await sameRatio$1(curve, cur.key.beta.g1_s, cur.key.beta.g1_sx, prev.betaG2, cur.betaG2);
    if (sr !== true) {
        if (logger) logger.error("INVALID beta*G2. challenge #" + cur.id + " It does not follow the previous contribution");
        return false;
    }

    if (logger) logger.info("Powers of tau file OK!");
    return true;
}
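/*
    verify checks a complete ptau file: it validates the most recent
    contribution with verifyContribution, recomputes the challenge hash over
    every section (tau*G1, tau*G2, alpha*tau*G1, beta*tau*G1, betaG2) while
    checking that consecutive powers share the same tau ratio, then replays
    all earlier contributions and, when sections 12-15 are present, checks
    the precalculated phase2 (Lagrange basis) values against the powers.
*/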
async function verify(tauFilename, logger) {
    let sr;
    await Blake2b__default["default"].ready();

    const {fd, sections} = await binFileUtils__namespace.readBinFile(tauFilename, "ptau", 1);
    const {curve, power, ceremonyPower} = await readPTauHeader(fd, sections);
    const contrs = await readContributions(fd, curve, sections);

    if (logger) logger.debug("power: 2**" + power);

    // Verify the last contribution
    if (logger) logger.debug("Computing initial contribution hash");
    const initialContribution = {
        tauG1: curve.G1.g,
        tauG2: curve.G2.g,
        alphaG1: curve.G1.g,
        betaG1: curve.G1.g,
        betaG2: curve.G2.g,
        nextChallenge: calculateFirstChallengeHash(curve, ceremonyPower, logger),
        responseHash: Blake2b__default["default"](64).digest()
    };

    if (contrs.length == 0) {
        if (logger) logger.error("This file has no contributions! It cannot be used in production.");
        return false;
    }

    let prevContr;
    if (contrs.length > 1) {
        prevContr = contrs[contrs.length-2];
    } else {
        prevContr = initialContribution;
    }
    const curContr = contrs[contrs.length-1];
    if (logger) logger.debug("Validating contribution #" + contrs[contrs.length-1].id);
    const res = await verifyContribution(curve, curContr, prevContr, logger);
    if (!res) return false;

    const nextContributionHasher = Blake2b__default["default"](64);
    nextContributionHasher.update(curContr.responseHash);

    // Verify powers and compute nextChallengeHash

    // Verify Section tau*G1
    if (logger) logger.debug("Verifying powers in tau*G1 section");
    const rTau1 = await processSection(2, "G1", "tauG1", (2 ** power)*2-1, [0, 1], logger);
    sr = await sameRatio$1(curve, rTau1.R1, rTau1.R2, curve.G2.g, curContr.tauG2);
    if (sr !== true) {
        if (logger) logger.error("tauG1 section. Powers do not match");
        return false;
    }
    if (!curve.G1.eq(curve.G1.g, rTau1.singularPoints[0])) {
        if (logger) logger.error("First element of tau*G1 section must be the generator");
        return false;
    }
    if (!curve.G1.eq(curContr.tauG1, rTau1.singularPoints[1])) {
        if (logger) logger.error("Second element of tau*G1 section does not match the one in the contribution section");
        return false;
    }

    // Verify Section tau*G2
    if (logger) logger.debug("Verifying powers in tau*G2 section");
    const rTau2 = await processSection(3, "G2", "tauG2", 2 ** power, [0, 1], logger);
    sr = await sameRatio$1(curve, curve.G1.g, curContr.tauG1, rTau2.R1, rTau2.R2);
    if (sr !== true) {
        if (logger) logger.error("tauG2 section. Powers do not match");
        return false;
    }
    if (!curve.G2.eq(curve.G2.g, rTau2.singularPoints[0])) {
        if (logger) logger.error("First element of tau*G2 section must be the generator");
        return false;
    }
    if (!curve.G2.eq(curContr.tauG2, rTau2.singularPoints[1])) {
        if (logger) logger.error("Second element of tau*G2 section does not match the one in the contribution section");
        return false;
    }

    // Verify Section alpha*tau*G1
    if (logger) logger.debug("Verifying powers in alpha*tau*G1 section");
    const rAlphaTauG1 = await processSection(4, "G1", "alphatauG1", 2 ** power, [0], logger);
    sr = await sameRatio$1(curve, rAlphaTauG1.R1, rAlphaTauG1.R2, curve.G2.g, curContr.tauG2);
    if (sr !== true) {
        if (logger) logger.error("alphaTauG1 section. Powers do not match");
        return false;
    }
    if (!curve.G1.eq(curContr.alphaG1, rAlphaTauG1.singularPoints[0])) {
        if (logger) logger.error("First element of alpha*tau*G1 section (alpha*G1) does not match the one in the contribution section");
        return false;
    }

    // Verify Section beta*tau*G1
    if (logger) logger.debug("Verifying powers in beta*tau*G1 section");
    const rBetaTauG1 = await processSection(5, "G1", "betatauG1", 2 ** power, [0], logger);
    sr = await sameRatio$1(curve, rBetaTauG1.R1, rBetaTauG1.R2, curve.G2.g, curContr.tauG2);
    if (sr !== true) {
        if (logger) logger.error("betaTauG1 section. Powers do not match");
        return false;
    }
    if (!curve.G1.eq(curContr.betaG1, rBetaTauG1.singularPoints[0])) {
        if (logger) logger.error("First element of beta*tau*G1 section (beta*G1) does not match the one in the contribution section");
        return false;
    }

    // Verify Beta G2
    const betaG2 = await processSectionBetaG2(logger);
    if (!curve.G2.eq(curContr.betaG2, betaG2)) {
        if (logger) logger.error("betaG2 element in betaG2 section does not match the one in the contribution section");
        return false;
    }

    const nextContributionHash = nextContributionHasher.digest();

    // Check the nextChallengeHash
    if (power == ceremonyPower) {
        if (!hashIsEqual(nextContributionHash, curContr.nextChallenge)) {
            if (logger) logger.error("Hash of the values does not match the next challenge of the last contributor in the contributions section");
            return false;
        }
    }

    if (logger) logger.info(formatHash(nextContributionHash, "Next challenge hash: "));

    // Verify Previous contributions
    printContribution(curContr, prevContr);
    for (let i = contrs.length-2; i>=0; i--) {
        const curContr = contrs[i];
        const prevContr = (i>0) ? contrs[i-1] : initialContribution;
        const res = await verifyContribution(curve, curContr, prevContr, logger);
        if (!res) return false;
        printContribution(curContr, prevContr);
    }
    if (logger) logger.info("-----------------------------------------------------");

    if ((!sections[12]) || (!sections[13]) || (!sections[14]) || (!sections[15])) {
        if (logger) logger.warn(
            "This file does not contain phase2 precalculated values. Please run: \n" +
            "   snarkjs \"powersoftau preparephase2\" to prepare this file to be used in the phase2 ceremony."
        );
    } else {
        let res;
        res = await verifyLagrangeEvaluations("G1", 2, 12, "tauG1", logger);
        if (!res) return false;
        res = await verifyLagrangeEvaluations("G2", 3, 13, "tauG2", logger);
        if (!res) return false;
        res = await verifyLagrangeEvaluations("G1", 4, 14, "alphaTauG1", logger);
        if (!res) return false;
        res = await verifyLagrangeEvaluations("G1", 5, 15, "betaTauG1", logger);
        if (!res) return false;
    }

    await fd.close();

    if (logger) logger.info("Powers of Tau OK!");

    return true;

    function printContribution(curContr, prevContr) {
        if (!logger) return;
        logger.info("-----------------------------------------------------");
        logger.info(`Contribution #${curContr.id}: ${curContr.name || ""}`);
        logger.info(formatHash(curContr.nextChallenge, "Next Challenge: "));

        const buffV = new Uint8Array(curve.G1.F.n8*2*6 + curve.G2.F.n8*2*3);
        toPtauPubKeyRpr(buffV, 0, curve, curContr.key, false);

        const responseHasher = Blake2b__default["default"](64);
        responseHasher.setPartialHash(curContr.partialHash);
        responseHasher.update(buffV);
        const responseHash = responseHasher.digest();

        logger.info(formatHash(responseHash, "Response Hash:"));
        logger.info(formatHash(prevContr.nextChallenge, "Previous Challenge Hash:"));

        if (curContr.type == 1) {
            logger.info(`Beacon generator: ${byteArray2hex(curContr.beaconHash)}`);
            logger.info(`Beacon iterations Exp: ${curContr.numIterationsExp}`);
        }
    }

    async function processSectionBetaG2(logger) {
        const G = curve.G2;
        const sG = G.F.n8*2;
        const buffUv = new Uint8Array(sG);

        if (!sections[6]) {
            logger.error("File has no BetaG2 section");
            throw new Error("File has no BetaG2 section");
        }
        if (sections[6].length > 1) {
            logger.error("File has more than one BetaG2 section");
            throw new Error("File has more than one BetaG2 section");
        }

        fd.pos = sections[6][0].p;
        const buff = await fd.read(sG);
        const P = G.fromRprLEM(buff);

        G.toRprUncompressed(buffUv, 0, P);
        nextContributionHasher.update(buffUv);

        return P;
    }

    async function processSection(idSection, groupName, sectionName, nPoints, singularPointIndexes, logger) {
        const MAX_CHUNK_SIZE = 1 << 16;
        const G = curve[groupName];
        const sG = G.F.n8*2;
        await binFileUtils__namespace.startReadUniqueSection(fd, sections, idSection);

        const singularPoints = [];

        let R1 = G.zero;
        let R2 = G.zero;

        let lastBase = G.zero;

        for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
            if (logger) logger.debug(`points relations: ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
            const bases = await fd.read(n*sG);

            const basesU = await G.batchLEMtoU(bases);
            nextContributionHasher.update(basesU);

            const scalars = new Uint8Array(4*(n-1));
            crypto__default["default"].randomFillSync(scalars);

            if (i>0) {
                const firstBase = G.fromRprLEM(bases, 0);
                const r = crypto__default["default"].randomBytes(4).readUInt32BE(0, true);

                R1 = G.add(R1, G.timesScalar(lastBase, r));
                R2 = G.add(R2, G.timesScalar(firstBase, r));
            }

            const r1 = await G.multiExpAffine(bases.slice(0, (n-1)*sG), scalars);
            const r2 = await G.multiExpAffine(bases.slice(sG), scalars);

            R1 = G.add(R1, r1);
            R2 = G.add(R2, r2);

            lastBase = G.fromRprLEM(bases, (n-1)*sG);

            for (let j=0; j<singularPointIndexes.length; j++) {
                const sp = singularPointIndexes[j];
                if ((sp >= i) && (sp < i+n)) {
                    const P = G.fromRprLEM(bases, (sp-i)*sG);
                    singularPoints.push(P);
                }
            }
        }
        await binFileUtils__namespace.endReadSection(fd);

        return {
            R1: R1,
            R2: R2,
            singularPoints: singularPoints
        };
    }

    async function verifyLagrangeEvaluations(gName, tauSection, lagrangeSection, sectionName, logger) {

        if (logger) logger.debug(`Verifying phase2 calculated values ${sectionName}...`);
        const G = curve[gName];
        const sG = G.F.n8*2;

        const seed = new Array(8);
        for (let i=0; i<8; i++) {
            seed[i] = crypto__default["default"].randomBytes(4).readUInt32BE(0, true);
        }

        for (let p=0; p<=power; p++) {
            const res = await verifyPower(p);
            if (!res) return false;
        }

        if (tauSection == 2) {
            const res = await verifyPower(power+1);
            if (!res) return false;
        }

        return true;

        async function verifyPower(p) {
            if (logger) logger.debug(`Power ${p}...`);
            const n8r = curve.Fr.n8;
            const nPoints = 2 ** p;
            let buff_r = new Uint32Array(nPoints);
            let buffG;

            let rng = new ffjavascript.ChaCha(seed);

            if (logger) logger.debug(`Creating random numbers Powers ${p}...`);
            for (let i=0; i<nPoints; i++) {
                if ((p == power+1) && (i == nPoints-1)) {
                    buff_r[i] = 0;
                } else {
                    buff_r[i] = rng.nextU32();
                }
            }

            buff_r = new Uint8Array(buff_r.buffer, buff_r.byteOffset, buff_r.byteLength);

            if (logger) logger.debug(`reading points Powers ${p}...`);
            await binFileUtils__namespace.startReadUniqueSection(fd, sections, tauSection);
            buffG = new ffjavascript.BigBuffer(nPoints*sG);
            if (p == power+1) {
                await fd.readToBuffer(buffG, 0, (nPoints-1)*sG);
                buffG.set(curve.G1.zeroAffine, (nPoints-1)*sG);
            } else {
                await fd.readToBuffer(buffG, 0, nPoints*sG);
            }
            await binFileUtils__namespace.endReadSection(fd, true);

            const resTau = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p);

            buff_r = new ffjavascript.BigBuffer(nPoints * n8r);

            rng = new ffjavascript.ChaCha(seed);
            const buff4 = new Uint8Array(4);
            const buff4V = new DataView(buff4.buffer);

            if (logger) logger.debug(`Creating random numbers Powers ${p}...`);
            for (let i=0; i<nPoints; i++) {
                if ((i != nPoints-1) || (p != power+1)) {
                    buff4V.setUint32(0, rng.nextU32(), true);
                    buff_r.set(buff4, i*n8r);
                }
            }

            if (logger) logger.debug(`batchToMontgomery ${p}...`);
            buff_r = await curve.Fr.batchToMontgomery(buff_r);
            if (logger) logger.debug(`fft ${p}...`);
            buff_r = await curve.Fr.fft(buff_r);
            if (logger) logger.debug(`batchFromMontgomery ${p}...`);
            buff_r = await curve.Fr.batchFromMontgomery(buff_r);

            if (logger) logger.debug(`reading points Lagrange ${p}...`);
            await binFileUtils__namespace.startReadUniqueSection(fd, sections, lagrangeSection);
            fd.pos += sG*((2 ** p)-1);
            await fd.readToBuffer(buffG, 0, nPoints*sG);
            await binFileUtils__namespace.endReadSection(fd, true);

            const resLagrange = await G.multiExpAffine(buffG, buff_r, logger, sectionName + "_" + p + "_transformed");

            if (!G.eq(resTau, resLagrange)) {
                if (logger) logger.error("Phase2 calculation does not match the powers of tau");
                return false;
            }

            return true;
        }
    }
}
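/*
    The processSection helper inside verify avoids one pairing per point by
    batching: it draws random 32-bit scalars r_i and accumulates
    R1 = sum_i r_i * P_i and R2 = sum_i r_i * P_(i+1), so the single
    sameRatio(R1, R2, g2, tau*g2) check passes, with high probability, only
    if every consecutive pair satisfies P_(i+1) = tau * P_i. The cross-chunk
    terms (lastBase/firstBase) stitch the chunks together so the relation is
    also checked across chunk boundaries.
*/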
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
/*
    This function creates a new section in the fdNew file with id idSection.
    It multiplies the points in fdOld by first, first*inc, first*inc^2, ...,
    nPoints times.
*/
async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, logger) {
    const MAX_CHUNK_SIZE = 1 << 16;
    const G = curve[groupName];
    const sG = G.F.n8*2;
    const nPoints = sections[idSection][0].size / sG;

    await binFileUtils__namespace.startReadUniqueSection(fdOld, sections, idSection);
    await binFileUtils__namespace.startWriteSection(fdNew, idSection);

    let t = first;
    for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
        if (logger) logger.debug(`Applying key: ${sectionName}: ${i}/${nPoints}`);
        const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
        let buff;
        buff = await fdOld.read(n*sG);
        buff = await G.batchApplyKey(buff, t, inc);
        await fdNew.write(buff);
        t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
    }

    await binFileUtils__namespace.endWriteSection(fdNew);
    await binFileUtils__namespace.endReadSection(fdOld);
}
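/*
    A minimal sketch of the per-point scalar schedule that batchApplyKey
    implements in bulk (illustrative only; G.timesFr, Fr.mul and Fr.exp are
    the operations already used in this file):

        let t = first;
        for (let i = 0; i < nPoints; i++) {
            out[i] = G.timesFr(points[i], t);   // point i gets first * inc^i
            t = curve.Fr.mul(t, inc);
        }

    Advancing t by curve.Fr.exp(inc, n) after each chunk, as the loop above
    does, keeps the chunked batchApplyKey calls consistent with this
    per-point recurrence.
*/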
async function applyKeyToChallengeSection(fdOld, fdNew, responseHasher, curve, groupName, nPoints, first, inc, formatOut, sectionName, logger) {
    const G = curve[groupName];
    const sG = G.F.n8*2;
    const chunkSize = Math.floor((1 << 20) / sG);   // ~1MiB read chunks
    let t = first;
    for (let i=0; i<nPoints; i += chunkSize) {
        if (logger) logger.debug(`Applying key ${sectionName}: ${i}/${nPoints}`);
        const n = Math.min(nPoints-i, chunkSize);
        const buffInU = await fdOld.read(n*sG);
        const buffInLEM = await G.batchUtoLEM(buffInU);
        const buffOutLEM = await G.batchApplyKey(buffInLEM, t, inc);
        let buffOut;
        if (formatOut == "COMPRESSED") {
            buffOut = await G.batchLEMtoC(buffOutLEM);
        } else {
            buffOut = await G.batchLEMtoU(buffOutLEM);
        }

        if (responseHasher) responseHasher.update(buffOut);
        await fdNew.write(buffOut);
        t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
    }
}
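/*
    applyKeyToChallengeSection differs from applyKeyToSection only in its I/O
    formats: challenge files store points uncompressed (U), so each chunk is
    converted U -> LEM before batchApplyKey, and the result is written back
    either compressed (batchLEMtoC) or uncompressed (batchLEMtoU) depending
    on formatOut. (Reading the names of the batch* helpers: C is compressed,
    U is uncompressed, and LEM is the internal little-endian Montgomery
    layout.)
*/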
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function challengeContribute(curve, challengeFilename, responseFileName, entropy, logger) {
    await Blake2b__default["default"].ready();

    const fdFrom = await fastFile__namespace.readExisting(challengeFilename);

    const sG1 = curve.F1.n64*8*2;
    const sG2 = curve.F2.n64*8*2;
    const domainSize = (fdFrom.totalSize + sG1 - 64 - sG2) / (4*sG1 + sG2);
    let e = domainSize;
    let power = 0;
    while (e > 1) {
        e = e / 2;
        power += 1;
    }

    if (2 ** power != domainSize) throw new Error("Invalid file size");
    if (logger) logger.debug("Powers of tau size: " + power);

    const rng = await getRandomRng(entropy);

    const fdTo = await fastFile__namespace.createOverride(responseFileName);

    // Calculate the hash
    const challengeHasher = Blake2b__default["default"](64);
    for (let i=0; i<fdFrom.totalSize; i += fdFrom.pageSize) {
        if (logger) logger.debug(`Hashing challenge ${i}/${fdFrom.totalSize}`);
        const s = Math.min(fdFrom.totalSize - i, fdFrom.pageSize);
        const buff = await fdFrom.read(s);
        challengeHasher.update(buff);
    }

    const claimedHash = await fdFrom.read(64, 0);
    if (logger) logger.info(formatHash(claimedHash, "Claimed Previous Response Hash: "));

    const challengeHash = challengeHasher.digest();
    if (logger) logger.info(formatHash(challengeHash, "Current Challenge Hash: "));

    const key = createPTauKey(curve, challengeHash, rng);

    if (logger) {
        ["tau", "alpha", "beta"].forEach((k) => {
            logger.debug(k + ".g1_s: " + curve.G1.toString(key[k].g1_s, 16));
            logger.debug(k + ".g1_sx: " + curve.G1.toString(key[k].g1_sx, 16));
            logger.debug(k + ".g2_sp: " + curve.G2.toString(key[k].g2_sp, 16));
            logger.debug(k + ".g2_spx: " + curve.G2.toString(key[k].g2_spx, 16));
            logger.debug("");
        });
    }

    const responseHasher = Blake2b__default["default"](64);

    await fdTo.write(challengeHash);
    responseHasher.update(challengeHash);

    await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power)*2-1, curve.Fr.one, key.tau.prvKey, "COMPRESSED", "tauG1", logger);
    await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G2", (2 ** power), curve.Fr.one, key.tau.prvKey, "COMPRESSED", "tauG2", logger);
    await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power), key.alpha.prvKey, key.tau.prvKey, "COMPRESSED", "alphaTauG1", logger);
    await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G1", (2 ** power), key.beta.prvKey, key.tau.prvKey, "COMPRESSED", "betaTauG1", logger);
    await applyKeyToChallengeSection(fdFrom, fdTo, responseHasher, curve, "G2", 1, key.beta.prvKey, key.tau.prvKey, "COMPRESSED", "betaTauG2", logger);

    // Write and hash key
    const buffKey = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);
    toPtauPubKeyRpr(buffKey, 0, curve, key, false);
    await fdTo.write(buffKey);
    responseHasher.update(buffKey);
    const responseHash = responseHasher.digest();
    if (logger) logger.info(formatHash(responseHash, "Contribution Response Hash: "));

    await fdTo.close();
    await fdFrom.close();
}
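/*
    Why (fdFrom.totalSize + sG1 - 64 - sG2) / (4*sG1 + sG2) recovers the
    domain size: an uncompressed challenge file holds a 64-byte previous
    response hash followed by (2*2^p - 1) tauG1 points, 2^p tauG2 points,
    2^p alphaTauG1 points, 2^p betaTauG1 points and one betaG2 point, so

        totalSize = 64 + (4*2^p - 1)*sG1 + (2^p + 1)*sG2
                  = 64 - sG1 + sG2 + 2^p * (4*sG1 + sG2)

    and solving for 2^p gives the expression used above.
*/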
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function beacon$1(oldPtauFilename, newPTauFilename, name, beaconHashStr, numIterationsExp, logger) {
    const beaconHash = hex2ByteArray(beaconHashStr);
    if (   (beaconHash.byteLength == 0)
        || (beaconHash.byteLength*2 != beaconHashStr.length))
    {
        if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
        return false;
    }
    if (beaconHash.length >= 256) {
        if (logger) logger.error("Maximum length of beacon hash is 255 bytes");
        return false;
    }

    numIterationsExp = parseInt(numIterationsExp);
    if ((numIterationsExp < 10) || (numIterationsExp > 63)) {
        if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
        return false;
    }

    await Blake2b__default["default"].ready();

    const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
    const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
    if (power != ceremonyPower) {
        if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
        return false;
    }
    if (sections[12]) {
        if (logger) logger.warn("Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
    }
    const contributions = await readContributions(fdOld, curve, sections);
    const curContribution = {
        name: name,
        type: 1, // Beacon
        numIterationsExp: numIterationsExp,
        beaconHash: beaconHash
    };

    let lastChallengeHash;

    if (contributions.length > 0) {
        lastChallengeHash = contributions[contributions.length-1].nextChallenge;
    } else {
        lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
    }

    curContribution.key = keyFromBeacon(curve, lastChallengeHash, beaconHash, numIterationsExp);

    const responseHasher = new Blake2b__default["default"](64);
    responseHasher.update(lastChallengeHash);

    const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 7);
    await writePTauHeader(fdNew, curve, power);

    const startSections = [];

    let firstPoints;
    firstPoints = await processSection(2, "G1", (2 ** power) * 2 - 1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1", logger);
    curContribution.tauG1 = firstPoints[1];
    firstPoints = await processSection(3, "G2", (2 ** power), curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2", logger);
    curContribution.tauG2 = firstPoints[1];
    firstPoints = await processSection(4, "G1", (2 ** power), curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1", logger);
    curContribution.alphaG1 = firstPoints[0];
    firstPoints = await processSection(5, "G1", (2 ** power), curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1", logger);
    curContribution.betaG1 = firstPoints[0];
    firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2", logger);
    curContribution.betaG2 = firstPoints[0];

    curContribution.partialHash = responseHasher.getPartialHash();

    const buffKey = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);
    toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);

    responseHasher.update(new Uint8Array(buffKey));
    const hashResponse = responseHasher.digest();

    if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));

    const nextChallengeHasher = new Blake2b__default["default"](64);
    nextChallengeHasher.update(hashResponse);

    await hashSection(fdNew, "G1", 2, (2 ** power) * 2 - 1, "tauG1", logger);
    await hashSection(fdNew, "G2", 3, (2 ** power), "tauG2", logger);
    await hashSection(fdNew, "G1", 4, (2 ** power), "alphaTauG1", logger);
    await hashSection(fdNew, "G1", 5, (2 ** power), "betaTauG1", logger);
    await hashSection(fdNew, "G2", 6, 1, "betaG2", logger);

    curContribution.nextChallenge = nextChallengeHasher.digest();

    if (logger) logger.info(formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));

    contributions.push(curContribution);

    await writeContributions(fdNew, curve, contributions);

    await fdOld.close();
    await fdNew.close();

    return hashResponse;

    async function processSection(sectionId, groupName, NPoints, first, inc, sectionName, logger) {
        const res = [];
        fdOld.pos = sections[sectionId][0].p;

        await binFileUtils__namespace.startWriteSection(fdNew, sectionId);

        startSections[sectionId] = fdNew.pos;

        const G = curve[groupName];
        const sG = G.F.n8*2;
        const chunkSize = Math.floor((1 << 20) / sG);   // ~1MiB read chunks
        let t = first;
        for (let i=0; i<NPoints; i += chunkSize) {
            if (logger) logger.debug(`applying key ${sectionName}: ${i}/${NPoints}`);
            const n = Math.min(NPoints-i, chunkSize);
            const buffIn = await fdOld.read(n * sG);
            const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
            /* Code to test the case where we don't have the 2^m-2 component
            if (sectionName == "tauG1") {
                const bz = new Uint8Array(64);
                buffOutLEM.set(bz, 64*((2 ** power) - 1));
            }
            */
            const promiseWrite = fdNew.write(buffOutLEM);
            const buffOutC = await G.batchLEMtoC(buffOutLEM);

            responseHasher.update(buffOutC);
            await promiseWrite;

            if (i == 0)   // Return the first 2 points.
                for (let j=0; j<Math.min(2, NPoints); j++)
                    res.push(G.fromRprLEM(buffOutLEM, j*sG));
            t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
        }

        await binFileUtils__namespace.endWriteSection(fdNew);

        return res;
    }

    async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName, logger) {
        const G = curve[groupName];
        const sG = G.F.n8*2;
        const nPointsChunk = Math.floor((1<<24)/sG);

        const oldPos = fdTo.pos;
        fdTo.pos = startSections[sectionId];

        for (let i=0; i<nPoints; i += nPointsChunk) {
            if (logger) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints-i, nPointsChunk);

            const buffLEM = await fdTo.read(n * sG);

            const buffU = await G.batchLEMtoU(buffLEM);

            nextChallengeHasher.update(buffU);
        }

        fdTo.pos = oldPos;
    }
}
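/*
    Unlike a regular contribution, a beacon contribution is fully
    deterministic: keyFromBeacon derives the key from the previous challenge
    hash, the public beaconHash and numIterationsExp, which is why
    verifyContribution can re-derive the beacon key and compare every
    component instead of trusting the contributor's randomness.
*/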
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function contribute(oldPtauFilename, newPTauFilename, name, entropy, logger) {
    await Blake2b__default["default"].ready();

    const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
    const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);
    if (power != ceremonyPower) {
        if (logger) logger.error("This file has been reduced. You cannot contribute into a reduced file.");
        throw new Error("This file has been reduced. You cannot contribute into a reduced file.");
    }
    if (sections[12]) {
        if (logger) logger.warn("WARNING: Contributing into a file that has phase2 calculated. You will have to prepare phase2 again.");
    }
    const contributions = await readContributions(fdOld, curve, sections);
    const curContribution = {
        name: name,
        type: 0, // Regular contribution (type 1 is a beacon)
    };

    let lastChallengeHash;

    const rng = await getRandomRng(entropy);

    if (contributions.length > 0) {
        lastChallengeHash = contributions[contributions.length-1].nextChallenge;
    } else {
        lastChallengeHash = calculateFirstChallengeHash(curve, power, logger);
    }

    // Generate a random key
    curContribution.key = createPTauKey(curve, lastChallengeHash, rng);

    const responseHasher = new Blake2b__default["default"](64);
    responseHasher.update(lastChallengeHash);

    const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 7);
    await writePTauHeader(fdNew, curve, power);

    const startSections = [];

    let firstPoints;
    firstPoints = await processSection(2, "G1", (2 ** power) * 2 - 1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1");
    curContribution.tauG1 = firstPoints[1];
    firstPoints = await processSection(3, "G2", (2 ** power), curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2");
    curContribution.tauG2 = firstPoints[1];
    firstPoints = await processSection(4, "G1", (2 ** power), curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1");
    curContribution.alphaG1 = firstPoints[0];
    firstPoints = await processSection(5, "G1", (2 ** power), curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1");
    curContribution.betaG1 = firstPoints[0];
    firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2");
    curContribution.betaG2 = firstPoints[0];

    curContribution.partialHash = responseHasher.getPartialHash();

    const buffKey = new Uint8Array(curve.F1.n8*2*6 + curve.F2.n8*2*3);
    toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);

    responseHasher.update(new Uint8Array(buffKey));
    const hashResponse = responseHasher.digest();

    if (logger) logger.info(formatHash(hashResponse, "Contribution Response Hash imported: "));

    const nextChallengeHasher = new Blake2b__default["default"](64);
    nextChallengeHasher.update(hashResponse);

    await hashSection(fdNew, "G1", 2, (2 ** power) * 2 - 1, "tauG1");
    await hashSection(fdNew, "G2", 3, (2 ** power), "tauG2");
    await hashSection(fdNew, "G1", 4, (2 ** power), "alphaTauG1");
    await hashSection(fdNew, "G1", 5, (2 ** power), "betaTauG1");
    await hashSection(fdNew, "G2", 6, 1, "betaG2");

    curContribution.nextChallenge = nextChallengeHasher.digest();

    if (logger) logger.info(formatHash(curContribution.nextChallenge, "Next Challenge Hash: "));

    contributions.push(curContribution);

    await writeContributions(fdNew, curve, contributions);

    await fdOld.close();
    await fdNew.close();

    return hashResponse;

    async function processSection(sectionId, groupName, NPoints, first, inc, sectionName) {
        const res = [];
        fdOld.pos = sections[sectionId][0].p;

        await binFileUtils__namespace.startWriteSection(fdNew, sectionId);

        startSections[sectionId] = fdNew.pos;

        const G = curve[groupName];
        const sG = G.F.n8*2;
        const chunkSize = Math.floor((1 << 20) / sG);   // ~1MiB read chunks
        let t = first;
        for (let i=0; i<NPoints; i += chunkSize) {
            if (logger) logger.debug(`processing: ${sectionName}: ${i}/${NPoints}`);
            const n = Math.min(NPoints-i, chunkSize);
            const buffIn = await fdOld.read(n * sG);
            const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
            /* Code to test the case where we don't have the 2^m-2 component
            if (sectionName == "tauG1") {
                const bz = new Uint8Array(64);
                buffOutLEM.set(bz, 64*((2 ** power) - 1));
            }
            */
            const promiseWrite = fdNew.write(buffOutLEM);
            const buffOutC = await G.batchLEMtoC(buffOutLEM);

            responseHasher.update(buffOutC);
            await promiseWrite;

            if (i == 0)   // Return the first 2 points.
                for (let j=0; j<Math.min(2, NPoints); j++)
                    res.push(G.fromRprLEM(buffOutLEM, j*sG));
            t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
        }

        await binFileUtils__namespace.endWriteSection(fdNew);

        return res;
    }

    async function hashSection(fdTo, groupName, sectionId, nPoints, sectionName) {
        const G = curve[groupName];
        const sG = G.F.n8*2;
        const nPointsChunk = Math.floor((1<<24)/sG);

        const oldPos = fdTo.pos;
        fdTo.pos = startSections[sectionId];

        for (let i=0; i<nPoints; i += nPointsChunk) {
            if ((logger) && i) logger.debug(`Hashing ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints-i, nPointsChunk);

            const buffLEM = await fdTo.read(n * sG);

            const buffU = await G.batchLEMtoU(buffLEM);

            nextChallengeHasher.update(buffU);
        }

        fdTo.pos = oldPos;
    }
}
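/*
    A minimal usage sketch (file names and entropy string are illustrative):

        const responseHash = await contribute(
            "pot14_0000.ptau",          // previous ptau file
            "pot14_0001.ptau",          // new ptau file to create
            "First contribution",       // contributor name
            "some user-typed entropy"   // extra entropy mixed into the RNG
        );

    The returned value is the Blake2b response hash of the new contribution.
*/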
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function preparePhase2(oldPtauFilename, newPTauFilename, logger) {
    const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(oldPtauFilename, "ptau", 1);
    const {curve, power} = await readPTauHeader(fdOld, sections);

    const fdNew = await binFileUtils__namespace.createBinFile(newPTauFilename, "ptau", 1, 11);
    await writePTauHeader(fdNew, curve, power);

    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 2);
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3);
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4);
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5);
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6);
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7);

    await processSection(2, 12, "G1", "tauG1");
    await processSection(3, 13, "G2", "tauG2");
    await processSection(4, 14, "G1", "alphaTauG1");
    await processSection(5, 15, "G1", "betaTauG1");

    await fdOld.close();
    await fdNew.close();

    // await fs.promises.unlink(newPTauFilename+ ".tmp");

    return;

    async function processSection(oldSectionId, newSectionId, Gstr, sectionName) {
        if (logger) logger.debug("Starting section: " + sectionName);

        await binFileUtils__namespace.startWriteSection(fdNew, newSectionId);

        for (let p=0; p<=power; p++) {
            await processSectionPower(p);
        }

        if (oldSectionId == 2) {
            await processSectionPower(power+1);
        }

        await binFileUtils__namespace.endWriteSection(fdNew);

        async function processSectionPower(p) {
            const nPoints = 2 ** p;
            const G = curve[Gstr];
            const sGin = G.F.n8*2;

            let buff;
            buff = new ffjavascript.BigBuffer(nPoints*sGin);

            await binFileUtils__namespace.startReadUniqueSection(fdOld, sections, oldSectionId);
            if ((oldSectionId == 2) && (p == power+1)) {
                await fdOld.readToBuffer(buff, 0, (nPoints-1)*sGin);
                buff.set(curve.G1.zeroAffine, (nPoints-1)*sGin);
            } else {
                await fdOld.readToBuffer(buff, 0, nPoints*sGin);
            }
            await binFileUtils__namespace.endReadSection(fdOld, true);

            buff = await G.lagrangeEvaluations(buff, "affine", "affine", logger, sectionName);
            await fdNew.write(buff);

            /*
            if (p <= curve.Fr.s) {
                buff = await G.ifft(buff, "affine", "affine", logger, sectionName);
                await fdNew.write(buff);
            } else if (p == curve.Fr.s+1) {
                const smallM = 1 << curve.Fr.s;
                let t0 = new BigBuffer(smallM*sGmid);
                let t1 = new BigBuffer(smallM*sGmid);

                const shift_to_small_m = Fr.exp(Fr.shift, smallM);
                const one_over_denom = Fr.inv(Fr.sub(shift_to_small_m, Fr.one));

                let sInvAcc = Fr.one;
                for (let i=0; i<smallM; i++) {
                    const ti = buff.slice(i*sGin, (i+1)*sGin);
                    const tmi = buff.slice((i+smallM)*sGin, (i+smallM+1)*sGin);

                    t0.set(
                        G.timesFr(
                            G.sub(
                                G.timesFr(ti, shift_to_small_m),
                                tmi
                            ),
                            one_over_denom
                        ),
                        i*sGmid
                    );
                    t1.set(
                        G.timesFr(
                            G.sub(tmi, ti),
                            Fr.mul(sInvAcc, one_over_denom)
                        ),
                        i*sGmid
                    );

                    sInvAcc = Fr.mul(sInvAcc, Fr.shiftInv);
                }
                t0 = await G.ifft(t0, "jacobian", "affine", logger, sectionName + " t0");
                await fdNew.write(t0);
                t0 = null;
                t1 = await G.ifft(t1, "jacobian", "affine", logger, sectionName + " t1");
                await fdNew.write(t1);
            } else {
                if (logger) logger.error("Power too big");
                throw new Error("Power too big");
            }
            */
        }
    }
}
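/*
    preparePhase2 copies sections 2-7 unchanged and adds sections 12-15
    holding the same points re-expressed by lagrangeEvaluations, one
    sub-array per power p = 0..power (plus power+1 for tauG1). This is the
    layout that verifyLagrangeEvaluations above walks when it seeks
    sG*((2**p)-1) bytes into the section before reading each power.
*/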
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function truncate(ptauFilename, template, logger) {
    const {fd: fdOld, sections} = await binFileUtils__namespace.readBinFile(ptauFilename, "ptau", 1);
    const {curve, power, ceremonyPower} = await readPTauHeader(fdOld, sections);

    const sG1 = curve.G1.F.n8*2;
    const sG2 = curve.G2.F.n8*2;

    for (let p=1; p<power; p++) {
        await generateTruncate(p);
    }

    await fdOld.close();

    return true;

    async function generateTruncate(p) {
        let sP = p.toString();
        while (sP.length < 2) sP = "0" + sP;

        if (logger) logger.debug("Writing Power: " + sP);

        const fdNew = await binFileUtils__namespace.createBinFile(template + sP + ".ptau", "ptau", 1, 11);
        await writePTauHeader(fdNew, curve, p, ceremonyPower);

        await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 2, ((2 ** p)*2-1) * sG1); // tauG1
        await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3, (2 ** p) * sG2); // tauG2
        await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4, (2 ** p) * sG1); // alphaTauG1
        await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5, (2 ** p) * sG1); // betaTauG1
        await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6, sG2); // betaTauG2
        await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7); // contributions
        await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 12, ((2 ** (p+1))*2-1) * sG1); // L_tauG1
        await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 13, ((2 ** p)*2-1) * sG2); // L_tauG2
        await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 14, ((2 ** p)*2-1) * sG1); // L_alphaTauG1
        await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 15, ((2 ** p)*2-1) * sG1); // L_betaTauG1

        await fdNew.close();
    }
}
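/*
    For example (illustrative names), truncate("pot12.ptau", "pot12_", logger)
    on a power-12 file writes pot12_01.ptau ... pot12_11.ptau, each a valid
    ptau file of the smaller power that keeps the original ceremonyPower in
    its header.
*/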
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function convert ( oldPtauFilename , newPTauFilename , logger ) {
2022-01-19 23:59:11 +03:00
const { fd : fdOld , sections } = await binFileUtils _ _namespace . readBinFile ( oldPtauFilename , "ptau" , 1 ) ;
2021-05-31 14:21:07 +03:00
const { curve , power } = await readPTauHeader ( fdOld , sections ) ;
2022-01-19 23:59:11 +03:00
const fdNew = await binFileUtils _ _namespace . createBinFile ( newPTauFilename , "ptau" , 1 , 11 ) ;
2021-05-31 14:21:07 +03:00
await writePTauHeader ( fdNew , curve , power ) ;
// const fdTmp = await fastFile.createOverride(newPTauFilename+ ".tmp");
2020-09-02 13:06:20 +03:00
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . copySection ( fdOld , sections , fdNew , 2 ) ;
await binFileUtils _ _namespace . copySection ( fdOld , sections , fdNew , 3 ) ;
await binFileUtils _ _namespace . copySection ( fdOld , sections , fdNew , 4 ) ;
await binFileUtils _ _namespace . copySection ( fdOld , sections , fdNew , 5 ) ;
await binFileUtils _ _namespace . copySection ( fdOld , sections , fdNew , 6 ) ;
await binFileUtils _ _namespace . copySection ( fdOld , sections , fdNew , 7 ) ;
2020-09-02 13:06:20 +03:00
await processSection ( 2 , 12 , "G1" , "tauG1" ) ;
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . copySection ( fdOld , sections , fdNew , 13 ) ;
await binFileUtils _ _namespace . copySection ( fdOld , sections , fdNew , 14 ) ;
await binFileUtils _ _namespace . copySection ( fdOld , sections , fdNew , 15 ) ;
2020-09-02 13:06:20 +03:00
await fdOld . close ( ) ;
await fdNew . close ( ) ;
// await fs.promises.unlink(newPTauFilename+ ".tmp");
return ;
async function processSection ( oldSectionId , newSectionId , Gstr , sectionName ) {
if ( logger ) logger . debug ( "Starting section: " + sectionName ) ;
await binFileUtils _ _namespace . startWriteSection ( fdNew , newSectionId ) ;
const size = sections [ newSectionId ] [ 0 ] . size ;
const chunkSize = fdOld . pageSize ;
await binFileUtils _ _namespace . startReadUniqueSection ( fdOld , sections , newSectionId ) ;
for ( let p = 0 ; p < size ; p += chunkSize ) {
const l = Math . min ( size - p , chunkSize ) ;
const buff = await fdOld . read ( l ) ;
await fdNew . write ( buff ) ;
}
await binFileUtils _ _namespace . endReadSection ( fdOld ) ;
if ( oldSectionId == 2 ) {
await processSectionPower ( power + 1 ) ;
}
await binFileUtils _ _namespace . endWriteSection ( fdNew ) ;
async function processSectionPower ( p ) {
const nPoints = 2 * * p ;
const G = curve [ Gstr ] ;
const sGin = G . F . n8 * 2 ;
let buff ;
buff = new ffjavascript . BigBuffer ( nPoints * sGin ) ;
await binFileUtils _ _namespace . startReadUniqueSection ( fdOld , sections , oldSectionId ) ;
if ( ( oldSectionId == 2 ) && ( p == power + 1 ) ) {
await fdOld . readToBuffer ( buff , 0 , ( nPoints - 1 ) * sGin ) ;
buff . set ( curve . G1 . zeroAffine , ( nPoints - 1 ) * sGin ) ;
} else {
await fdOld . readToBuffer ( buff , 0 , nPoints * sGin ) ;
}
await binFileUtils _ _namespace . endReadSection ( fdOld , true ) ;
buff = await G . lagrangeEvaluations ( buff , "affine" , "affine" , logger , sectionName ) ;
await fdNew . write ( buff ) ;
/ *
if ( p <= curve . Fr . s ) {
buff = await G . ifft ( buff , "affine" , "affine" , logger , sectionName ) ;
await fdNew . write ( buff ) ;
} else if ( p == curve . Fr . s + 1 ) {
const smallM = 1 << curve . Fr . s ;
let t0 = new BigBuffer ( smallM * sGmid ) ;
let t1 = new BigBuffer ( smallM * sGmid ) ;
const shift _to _small _m = Fr . exp ( Fr . shift , smallM ) ;
const one _over _denom = Fr . inv ( Fr . sub ( shift _to _small _m , Fr . one ) ) ;
let sInvAcc = Fr . one ;
for ( let i = 0 ; i < smallM ; i ++ ) {
if ( i % 10000 == 0 ) logger . debug ( ` sectionName prepare L calc: ${ sectionName } , ${ i } / ${ smallM } ` ) ;
const ti = buff . slice ( i * sGin , ( i + 1 ) * sGin ) ;
const tmi = buff . slice ( ( i + smallM ) * sGin , ( i + smallM + 1 ) * sGin ) ;
t0 . set (
G . timesFr (
G . sub (
G . timesFr ( ti , shift _to _small _m ) ,
tmi
) ,
one _over _denom
) ,
i * sGmid
) ;
t1 . set (
G . timesFr (
G . sub ( tmi , ti ) ,
Fr . mul ( sInvAcc , one _over _denom )
) ,
i * sGmid
) ;
sInvAcc = Fr . mul ( sInvAcc , Fr . shiftInv ) ;
}
t0 = await G . ifft ( t0 , "jacobian" , "affine" , logger , sectionName + " t0" ) ;
await fdNew . write ( t0 ) ;
t0 = null ;
t1 = await G . ifft ( t1 , "jacobian" , "affine" , logger , sectionName + " t1" ) ;
await fdNew . write ( t1 ) ;
} else {
if ( logger ) logger . error ( "Power too big" ) ;
throw new Error ( "Power too big" ) ;
}
* /
}
}
}
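// Usage sketch (illustrative only, not part of the bundle). `convert` recomputes the
// Lagrange-basis tauG1 section (12) at power+1 from the monomial tau powers in section 2
// and copies the remaining Lagrange sections (13-15) unchanged. Assuming the public
// snarkjs API exposes this as `powersOfTau.convert`, a consumer script might look like:
//
//     const snarkjs = require("snarkjs");
//     const Logger = require("logplease");
//     const logger = Logger.create("snarkJS", { showTimestamp: false });
//     await snarkjs.powersOfTau.convert("old_prepared.ptau", "new_prepared.ptau", logger);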
/ *
Copyright 2018 0 KIMS association .
This file is part of snarkJS .
snarkJS is a free software : you can redistribute it and / or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
snarkJS is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public
License for more details .
You should have received a copy of the GNU General Public License
along with snarkJS . If not , see < https : //www.gnu.org/licenses/>.
* /
async function exportJson ( pTauFilename , verbose ) {
const { fd , sections } = await binFileUtils _ _namespace . readBinFile ( pTauFilename , "ptau" , 1 ) ;
const { curve , power } = await readPTauHeader ( fd , sections ) ;
const pTau = { } ;
pTau . q = curve . q ;
pTau . power = power ;
pTau . contributions = await readContributions ( fd , curve , sections ) ;
pTau . tauG1 = await exportSection ( 2 , "G1" , ( 2 * * power ) * 2 - 1 , "tauG1" ) ;
pTau . tauG2 = await exportSection ( 3 , "G2" , ( 2 * * power ) , "tauG2" ) ;
pTau . alphaTauG1 = await exportSection ( 4 , "G1" , ( 2 * * power ) , "alphaTauG1" ) ;
pTau . betaTauG1 = await exportSection ( 5 , "G1" , ( 2 * * power ) , "betaTauG1" ) ;
pTau . betaG2 = await exportSection ( 6 , "G2" , 1 , "betaG2" ) ;
pTau . lTauG1 = await exportLagrange ( 12 , "G1" , "lTauG1" ) ;
pTau . lTauG2 = await exportLagrange ( 13 , "G2" , "lTauG2" ) ;
pTau . lAlphaTauG1 = await exportLagrange ( 14 , "G1" , "lAlphaTauG1" ) ;
pTau . lBetaTauG1 = await exportLagrange ( 15 , "G1" , "lBetaTauG1" ) ;
await fd . close ( ) ;
return pTau ;
async function exportSection ( sectionId , groupName , nPoints , sectionName ) {
const G = curve [ groupName ] ;
const sG = G . F . n8 * 2 ;
const res = [ ] ;
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , sectionId ) ;
for ( let i = 0 ; i < nPoints ; i ++ ) {
if ( ( verbose ) && i && ( i % 10000 == 0 ) ) console . log ( ` ${ sectionName } : ` + i ) ;
const buff = await fd . read ( sG ) ;
res . push ( G . fromRprLEM ( buff , 0 ) ) ;
}
await binFileUtils _ _namespace . endReadSection ( fd ) ;
return res ;
}
async function exportLagrange ( sectionId , groupName , sectionName ) {
const G = curve [ groupName ] ;
const sG = G . F . n8 * 2 ;
const res = [ ] ;
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , sectionId ) ;
for ( let p = 0 ; p <= power ; p ++ ) {
if ( verbose ) console . log ( ` ${ sectionName } : Power: ${ p } ` ) ;
res [ p ] = [ ] ;
const nPoints = ( 2 * * p ) ;
for ( let i = 0 ; i < nPoints ; i ++ ) {
if ( ( verbose ) && i && ( i % 10000 == 0 ) ) console . log ( ` ${ sectionName } : ${ i } / ${ nPoints } ` ) ;
const buff = await fd . read ( sG ) ;
res [ p ] . push ( G . fromRprLEM ( buff , 0 ) ) ;
}
}
await binFileUtils _ _namespace . endReadSection ( fd ) ;
return res ;
}
}
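// Usage sketch (illustrative only): `exportJson` decodes every section of a ptau file
// into plain JS arrays of curve points, so the whole file ends up in memory; it is
// practical for small ceremonies only. Assuming the `powersOfTau.exportJson` API name:
//
//     const pTau = await snarkjs.powersOfTau.exportJson("pot10_final.ptau", true);
//     console.log(pTau.power, pTau.contributions.length, pTau.tauG1.length);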
/ *
Copyright 2018 0 KIMS association .
This file is part of snarkJS .
snarkJS is a free software : you can redistribute it and / or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
snarkJS is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public
License for more details .
You should have received a copy of the GNU General Public License
along with snarkJS . If not , see < https : //www.gnu.org/licenses/>.
* /
const SUBARRAY _SIZE = 0x40000 ;
const BigArrayHandler = {
get : function ( obj , prop ) {
if ( ! isNaN ( prop ) ) {
return obj . getElement ( prop ) ;
} else return obj [ prop ] ;
} ,
set : function ( obj , prop , value ) {
if ( ! isNaN ( prop ) ) {
return obj . setElement ( prop , value ) ;
} else {
obj [ prop ] = value ;
return true ;
}
}
} ;
class _BigArray {
constructor ( initSize ) {
this . length = initSize || 0 ;
this . arr = new Array ( SUBARRAY _SIZE ) ;
for ( let i = 0 ; i < initSize ; i += SUBARRAY _SIZE ) {
this . arr [ i / SUBARRAY _SIZE ] = new Array ( Math . min ( SUBARRAY _SIZE , initSize - i ) ) ;
}
return this ;
}
push ( ) {
for ( let i = 0 ; i < arguments . length ; i ++ ) {
this . setElement ( this . length , arguments [ i ] ) ;
}
}
slice ( f , t ) {
const arr = new Array ( t - f ) ;
for ( let i = f ; i < t ; i ++ ) arr [ i - f ] = this . getElement ( i ) ;
return arr ;
}
getElement ( idx ) {
idx = parseInt ( idx ) ;
const idx1 = Math . floor ( idx / SUBARRAY _SIZE ) ;
const idx2 = idx % SUBARRAY _SIZE ;
return this . arr [ idx1 ] ? this . arr [ idx1 ] [ idx2 ] : undefined ;
}
setElement ( idx , value ) {
idx = parseInt ( idx ) ;
const idx1 = Math . floor ( idx / SUBARRAY _SIZE ) ;
if ( ! this . arr [ idx1 ] ) {
this . arr [ idx1 ] = new Array ( SUBARRAY _SIZE ) ;
}
const idx2 = idx % SUBARRAY _SIZE ;
this . arr [ idx1 ] [ idx2 ] = value ;
if ( idx >= this . length ) this . length = idx + 1 ;
return true ;
}
getKeys ( ) {
const newA = new BigArray ( ) ;
for ( let i = 0 ; i < this . arr . length ; i ++ ) {
if ( this . arr [ i ] ) {
for ( let j = 0 ; j < this . arr [ i ] . length ; j ++ ) {
if ( typeof this . arr [ i ] [ j ] !== "undefined" ) {
newA . push ( i * SUBARRAY _SIZE + j ) ;
}
}
}
}
return newA ;
}
}
class BigArray {
constructor ( initSize ) {
const obj = new _BigArray ( initSize ) ;
const extObj = new Proxy ( obj , BigArrayHandler ) ;
return extObj ;
}
}
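// BigArray stores its elements in 0x40000-entry subarrays and wraps them in a Proxy so
// that plain numeric indexing lands in the right chunk; this keeps the huge, sparse
// per-signal tables built below (A, B1, B2, C) from stressing a single flat JS array.
// A minimal sketch of the observable behaviour:
//
//     const a = new BigArray();
//     a[0x40001] = "x";                        // transparently stored in the 2nd subarray
//     console.log(a[0x40001]);                 // "x"
//     console.log(a.length);                   // 0x40002
//     console.log(a.slice(0x40000, 0x40002));  // [ undefined, "x" ]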
/ *
Copyright 2018 0 KIMS association .
This file is part of snarkJS .
snarkJS is a free software : you can redistribute it and / or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
snarkJS is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public
License for more details .
You should have received a copy of the GNU General Public License
along with snarkJS . If not , see < https : //www.gnu.org/licenses/>.
* /
async function newZKey ( r1csName , ptauName , zkeyName , logger ) {
const TAU _G1 = 0 ;
const TAU _G2 = 1 ;
const ALPHATAU _G1 = 2 ;
const BETATAU _G1 = 3 ;
await Blake2b _ _default [ "default" ] . ready ( ) ;
const csHasher = Blake2b _ _default [ "default" ] ( 64 ) ;
const { fd : fdPTau , sections : sectionsPTau } = await binFileUtils . readBinFile ( ptauName , "ptau" , 1 , 1 << 22 , 1 << 24 ) ;
const { curve , power } = await readPTauHeader ( fdPTau , sectionsPTau ) ;
const { fd : fdR1cs , sections : sectionsR1cs } = await binFileUtils . readBinFile ( r1csName , "r1cs" , 1 , 1 << 22 , 1 << 24 ) ;
const r1cs = await r1csfile . readR1csHeader ( fdR1cs , sectionsR1cs , false ) ;
const fdZKey = await binFileUtils . createBinFile ( zkeyName , "zkey" , 1 , 10 , 1 << 22 , 1 << 24 ) ;
const sG1 = curve . G1 . F . n8 * 2 ;
const sG2 = curve . G2 . F . n8 * 2 ;
if ( r1cs . prime != curve . r ) {
if ( logger ) logger . error ( "r1cs curve does not match powers of tau ceremony curve" ) ;
return - 1 ;
}
const cirPower = log2 ( r1cs . nConstraints + r1cs . nPubInputs + r1cs . nOutputs + 1 - 1 ) + 1 ;
if ( cirPower > power ) {
if ( logger ) logger . error ( ` circuit too big for this power of tau ceremony. ${ r1cs . nConstraints } *2 > 2** ${ power } ` ) ;
return - 1 ;
}
if ( ! sectionsPTau [ 12 ] ) {
if ( logger ) logger . error ( "Powers of tau is not prepared." ) ;
return - 1 ;
}
const nPublic = r1cs . nOutputs + r1cs . nPubInputs ;
const domainSize = 2 * * cirPower ;
// Write the header
///////////
await binFileUtils . startWriteSection ( fdZKey , 1 ) ;
await fdZKey . writeULE32 ( 1 ) ; // Groth
await binFileUtils . endWriteSection ( fdZKey ) ;
// Write the Groth header section
///////////
await binFileUtils . startWriteSection ( fdZKey , 2 ) ;
const primeQ = curve . q ;
const n8q = ( Math . floor ( ( ffjavascript . Scalar . bitLength ( primeQ ) - 1 ) / 64 ) + 1 ) * 8 ;
const primeR = curve . r ;
const n8r = ( Math . floor ( ( ffjavascript . Scalar . bitLength ( primeR ) - 1 ) / 64 ) + 1 ) * 8 ;
const Rr = ffjavascript . Scalar . mod ( ffjavascript . Scalar . shl ( 1 , n8r * 8 ) , primeR ) ;
const R2r = curve . Fr . e ( ffjavascript . Scalar . mod ( ffjavascript . Scalar . mul ( Rr , Rr ) , primeR ) ) ;
await fdZKey . writeULE32 ( n8q ) ;
await binFileUtils . writeBigInt ( fdZKey , primeQ , n8q ) ;
await fdZKey . writeULE32 ( n8r ) ;
await binFileUtils . writeBigInt ( fdZKey , primeR , n8r ) ;
await fdZKey . writeULE32 ( r1cs . nVars ) ; // Total number of vars
await fdZKey . writeULE32 ( nPublic ) ; // Total number of public vars (not including ONE)
await fdZKey . writeULE32 ( domainSize ) ; // domainSize
let bAlpha1 ;
bAlpha1 = await fdPTau . read ( sG1 , sectionsPTau [ 4 ] [ 0 ] . p ) ;
await fdZKey . write ( bAlpha1 ) ;
bAlpha1 = await curve . G1 . batchLEMtoU ( bAlpha1 ) ;
csHasher . update ( bAlpha1 ) ;
let bBeta1 ;
bBeta1 = await fdPTau . read ( sG1 , sectionsPTau [ 5 ] [ 0 ] . p ) ;
await fdZKey . write ( bBeta1 ) ;
bBeta1 = await curve . G1 . batchLEMtoU ( bBeta1 ) ;
csHasher . update ( bBeta1 ) ;
let bBeta2 ;
bBeta2 = await fdPTau . read ( sG2 , sectionsPTau [ 6 ] [ 0 ] . p ) ;
await fdZKey . write ( bBeta2 ) ;
bBeta2 = await curve . G2 . batchLEMtoU ( bBeta2 ) ;
csHasher . update ( bBeta2 ) ;
const bg1 = new Uint8Array ( sG1 ) ;
curve . G1 . toRprLEM ( bg1 , 0 , curve . G1 . g ) ;
const bg2 = new Uint8Array ( sG2 ) ;
curve . G2 . toRprLEM ( bg2 , 0 , curve . G2 . g ) ;
const bg1U = new Uint8Array ( sG1 ) ;
curve . G1 . toRprUncompressed ( bg1U , 0 , curve . G1 . g ) ;
const bg2U = new Uint8Array ( sG2 ) ;
curve . G2 . toRprUncompressed ( bg2U , 0 , curve . G2 . g ) ;
await fdZKey . write ( bg2 ) ; // gamma2
await fdZKey . write ( bg1 ) ; // delta1
await fdZKey . write ( bg2 ) ; // delta2
csHasher . update ( bg2U ) ; // gamma2
csHasher . update ( bg1U ) ; // delta1
csHasher . update ( bg2U ) ; // delta2
await binFileUtils . endWriteSection ( fdZKey ) ;
if ( logger ) logger . info ( "Reading r1cs" ) ;
let sR1cs = await binFileUtils . readSection ( fdR1cs , sectionsR1cs , 2 ) ;
const A = new BigArray ( r1cs . nVars ) ;
const B1 = new BigArray ( r1cs . nVars ) ;
const B2 = new BigArray ( r1cs . nVars ) ;
const C = new BigArray ( r1cs . nVars - nPublic - 1 ) ;
const IC = new Array ( nPublic + 1 ) ;
if ( logger ) logger . info ( "Reading tauG1" ) ;
let sTauG1 = await binFileUtils . readSection ( fdPTau , sectionsPTau , 12 , ( domainSize - 1 ) * sG1 , domainSize * sG1 ) ;
if ( logger ) logger . info ( "Reading tauG2" ) ;
let sTauG2 = await binFileUtils . readSection ( fdPTau , sectionsPTau , 13 , ( domainSize - 1 ) * sG2 , domainSize * sG2 ) ;
if ( logger ) logger . info ( "Reading alphatauG1" ) ;
let sAlphaTauG1 = await binFileUtils . readSection ( fdPTau , sectionsPTau , 14 , ( domainSize - 1 ) * sG1 , domainSize * sG1 ) ;
if ( logger ) logger . info ( "Reading betatauG1" ) ;
let sBetaTauG1 = await binFileUtils . readSection ( fdPTau , sectionsPTau , 15 , ( domainSize - 1 ) * sG1 , domainSize * sG1 ) ;
await processConstraints ( ) ;
await composeAndWritePoints ( 3 , "G1" , IC , "IC" ) ;
await writeHs ( ) ;
await hashHPoints ( ) ;
await composeAndWritePoints ( 8 , "G1" , C , "C" ) ;
await composeAndWritePoints ( 5 , "G1" , A , "A" ) ;
await composeAndWritePoints ( 6 , "G1" , B1 , "B1" ) ;
await composeAndWritePoints ( 7 , "G2" , B2 , "B2" ) ;
const csHash = csHasher . digest ( ) ;
// Contributions section
await binFileUtils . startWriteSection ( fdZKey , 10 ) ;
await fdZKey . write ( csHash ) ;
await fdZKey . writeULE32 ( 0 ) ;
await binFileUtils . endWriteSection ( fdZKey ) ;
if ( logger ) logger . info ( formatHash ( csHash , "Circuit hash: " ) ) ;
await fdZKey . close ( ) ;
await fdR1cs . close ( ) ;
await fdPTau . close ( ) ;
return csHash ;
async function writeHs ( ) {
await binFileUtils . startWriteSection ( fdZKey , 9 ) ;
const buffOut = new ffjavascript . BigBuffer ( domainSize * sG1 ) ;
if ( cirPower < curve . Fr . s ) {
let sTauG1 = await binFileUtils . readSection ( fdPTau , sectionsPTau , 12 , ( domainSize * 2 - 1 ) * sG1 , domainSize * 2 * sG1 ) ;
for ( let i = 0 ; i < domainSize ; i ++ ) {
if ( ( logger ) && ( i % 10000 == 0 ) ) logger . debug ( ` splitting buffer: ${ i } / ${ domainSize } ` ) ;
const buff = sTauG1 . slice ( ( i * 2 + 1 ) * sG1 , ( i * 2 + 1 ) * sG1 + sG1 ) ;
buffOut . set ( buff , i * sG1 ) ;
}
} else if ( cirPower == curve . Fr . s ) {
const o = sectionsPTau [ 12 ] [ 0 ] . p + ( ( 2 * * ( cirPower + 1 ) ) - 1 ) * sG1 ;
await fdPTau . readToBuffer ( buffOut , 0 , domainSize * sG1 , o + domainSize * sG1 ) ;
} else {
if ( logger ) logger . error ( "Circuit too big" ) ;
throw new Error ( "Circuit too big for this curve" ) ;
}
await fdZKey . write ( buffOut ) ;
await binFileUtils . endWriteSection ( fdZKey ) ;
}
async function processConstraints ( ) {
const buffCoeff = new Uint8Array ( 12 + curve . Fr . n8 ) ;
const buffCoeffV = new DataView ( buffCoeff . buffer ) ;
const bOne = new Uint8Array ( curve . Fr . n8 ) ;
curve . Fr . toRprLE ( bOne , 0 , curve . Fr . e ( 1 ) ) ;
let r1csPos = 0 ;
function r1cs _readULE32 ( ) {
const buff = sR1cs . slice ( r1csPos , r1csPos + 4 ) ;
r1csPos += 4 ;
const buffV = new DataView ( buff . buffer ) ;
return buffV . getUint32 ( 0 , true ) ;
}
const coefs = new BigArray ( ) ;
for ( let c = 0 ; c < r1cs . nConstraints ; c ++ ) {
if ( ( logger ) && ( c % 10000 == 0 ) ) logger . debug ( ` processing constraints: ${ c } / ${ r1cs . nConstraints } ` ) ;
const nA = r1cs _readULE32 ( ) ;
for ( let i = 0 ; i < nA ; i ++ ) {
const s = r1cs _readULE32 ( ) ;
const coefp = r1csPos ;
r1csPos += curve . Fr . n8 ;
const l1t = TAU _G1 ;
const l1 = sG1 * c ;
const l2t = BETATAU _G1 ;
const l2 = sG1 * c ;
if ( typeof A [ s ] === "undefined" ) A [ s ] = [ ] ;
A [ s ] . push ( [ l1t , l1 , coefp ] ) ;
if ( s <= nPublic ) {
if ( typeof IC [ s ] === "undefined" ) IC [ s ] = [ ] ;
IC [ s ] . push ( [ l2t , l2 , coefp ] ) ;
} else {
if ( typeof C [ s - nPublic - 1 ] === "undefined" ) C [ s - nPublic - 1 ] = [ ] ;
C [ s - nPublic - 1 ] . push ( [ l2t , l2 , coefp ] ) ;
}
coefs . push ( [ 0 , c , s , coefp ] ) ;
}
const nB = r1cs _readULE32 ( ) ;
for ( let i = 0 ; i < nB ; i ++ ) {
const s = r1cs _readULE32 ( ) ;
const coefp = r1csPos ;
r1csPos += curve . Fr . n8 ;
const l1t = TAU _G1 ;
const l1 = sG1 * c ;
const l2t = TAU _G2 ;
const l2 = sG2 * c ;
const l3t = ALPHATAU _G1 ;
const l3 = sG1 * c ;
if ( typeof B1 [ s ] === "undefined" ) B1 [ s ] = [ ] ;
B1 [ s ] . push ( [ l1t , l1 , coefp ] ) ;
if ( typeof B2 [ s ] === "undefined" ) B2 [ s ] = [ ] ;
B2 [ s ] . push ( [ l2t , l2 , coefp ] ) ;
if ( s <= nPublic ) {
if ( typeof IC [ s ] === "undefined" ) IC [ s ] = [ ] ;
IC [ s ] . push ( [ l3t , l3 , coefp ] ) ;
} else {
if ( typeof C [ s - nPublic - 1 ] === "undefined" ) C [ s - nPublic - 1 ] = [ ] ;
C [ s - nPublic - 1 ] . push ( [ l3t , l3 , coefp ] ) ;
}
coefs . push ( [ 1 , c , s , coefp ] ) ;
}
const nC = r1cs _readULE32 ( ) ;
for ( let i = 0 ; i < nC ; i ++ ) {
const s = r1cs _readULE32 ( ) ;
const coefp = r1csPos ;
r1csPos += curve . Fr . n8 ;
const l1t = TAU _G1 ;
const l1 = sG1 * c ;
if ( s <= nPublic ) {
if ( typeof IC [ s ] === "undefined" ) IC [ s ] = [ ] ;
IC [ s ] . push ( [ l1t , l1 , coefp ] ) ;
} else {
if ( typeof C [ s - nPublic - 1 ] === "undefined" ) C [ s - nPublic - 1 ] = [ ] ;
C [ s - nPublic - 1 ] . push ( [ l1t , l1 , coefp ] ) ;
}
}
}
for ( let s = 0 ; s <= nPublic ; s ++ ) {
const l1t = TAU _G1 ;
const l1 = sG1 * ( r1cs . nConstraints + s ) ;
const l2t = BETATAU _G1 ;
const l2 = sG1 * ( r1cs . nConstraints + s ) ;
if ( typeof A [ s ] === "undefined" ) A [ s ] = [ ] ;
A [ s ] . push ( [ l1t , l1 , - 1 ] ) ;
if ( typeof IC [ s ] === "undefined" ) IC [ s ] = [ ] ;
IC [ s ] . push ( [ l2t , l2 , - 1 ] ) ;
coefs . push ( [ 0 , r1cs . nConstraints + s , s , - 1 ] ) ;
}
await binFileUtils . startWriteSection ( fdZKey , 4 ) ;
const buffSection = new ffjavascript . BigBuffer ( coefs . length * ( 12 + curve . Fr . n8 ) + 4 ) ;
const buff4 = new Uint8Array ( 4 ) ;
const buff4V = new DataView ( buff4 . buffer ) ;
buff4V . setUint32 ( 0 , coefs . length , true ) ;
buffSection . set ( buff4 ) ;
let coefsPos = 4 ;
for ( let i = 0 ; i < coefs . length ; i ++ ) {
if ( ( logger ) && ( i % 100000 == 0 ) ) logger . debug ( ` writing coeffs: ${ i } / ${ coefs . length } ` ) ;
writeCoef ( coefs [ i ] ) ;
}
await fdZKey . write ( buffSection ) ;
await binFileUtils . endWriteSection ( fdZKey ) ;
function writeCoef ( c ) {
buffCoeffV . setUint32 ( 0 , c [ 0 ] , true ) ;
buffCoeffV . setUint32 ( 4 , c [ 1 ] , true ) ;
buffCoeffV . setUint32 ( 8 , c [ 2 ] , true ) ;
let n ;
if ( c [ 3 ] >= 0 ) {
n = curve . Fr . fromRprLE ( sR1cs . slice ( c [ 3 ] , c [ 3 ] + curve . Fr . n8 ) , 0 ) ;
} else {
n = curve . Fr . fromRprLE ( bOne , 0 ) ;
}
const nR2 = curve . Fr . mul ( n , R2r ) ;
curve . Fr . toRprLE ( buffCoeff , 12 , nR2 ) ;
buffSection . set ( buffCoeff , coefsPos ) ;
coefsPos += buffCoeff . length ;
}
}
async function composeAndWritePoints ( idSection , groupName , arr , sectionName ) {
const CHUNK _SIZE = 1 << 15 ;
const G = curve [ groupName ] ;
hashU32 ( arr . length ) ;
await binFileUtils . startWriteSection ( fdZKey , idSection ) ;
let opPromises = [ ] ;
let i = 0 ;
while ( i < arr . length ) {
let t = 0 ;
while ( ( i < arr . length ) && ( t < curve . tm . concurrency ) ) {
if ( logger ) logger . debug ( ` Writing points start ${ sectionName } : ${ i } / ${ arr . length } ` ) ;
let n = 1 ;
let nP = ( arr [ i ] ? arr [ i ] . length : 0 ) ;
while ( ( i + n < arr . length ) && ( nP + ( arr [ i + n ] ? arr [ i + n ] . length : 0 ) < CHUNK _SIZE ) && ( n < CHUNK _SIZE ) ) {
nP += ( arr [ i + n ] ? arr [ i + n ] . length : 0 ) ;
n ++ ;
}
const subArr = arr . slice ( i , i + n ) ;
const _i = i ;
opPromises . push ( composeAndWritePointsThread ( groupName , subArr , logger , sectionName ) . then ( ( r ) => {
if ( logger ) logger . debug ( ` Writing points end ${ sectionName } : ${ _i } / ${ arr . length } ` ) ;
return r ;
} ) ) ;
i += n ;
t ++ ;
}
const result = await Promise . all ( opPromises ) ;
for ( let k = 0 ; k < result . length ; k ++ ) {
await fdZKey . write ( result [ k ] [ 0 ] ) ;
const buff = await G . batchLEMtoU ( result [ k ] [ 0 ] ) ;
csHasher . update ( buff ) ;
}
opPromises = [ ] ;
}
await binFileUtils . endWriteSection ( fdZKey ) ;
}
async function composeAndWritePointsThread ( groupName , arr , logger , sectionName ) {
const G = curve [ groupName ] ;
const sGin = G . F . n8 * 2 ;
const sGmid = G . F . n8 * 3 ;
const sGout = G . F . n8 * 2 ;
let fnExp , fnMultiExp , fnBatchToAffine , fnZero ;
if ( groupName == "G1" ) {
fnExp = "g1m_timesScalarAffine" ;
fnMultiExp = "g1m_multiexpAffine" ;
fnBatchToAffine = "g1m_batchToAffine" ;
fnZero = "g1m_zero" ;
} else if ( groupName == "G2" ) {
fnExp = "g2m_timesScalarAffine" ;
fnMultiExp = "g2m_multiexpAffine" ;
fnBatchToAffine = "g2m_batchToAffine" ;
fnZero = "g2m_zero" ;
} else {
throw new Error ( "Invalid group" ) ;
}
let acc = 0 ;
for ( let i = 0 ; i < arr . length ; i ++ ) acc += arr [ i ] ? arr [ i ] . length : 0 ;
let bBases , bScalars ;
if ( acc > 2 << 14 ) {
bBases = new ffjavascript . BigBuffer ( acc * sGin ) ;
bScalars = new ffjavascript . BigBuffer ( acc * curve . Fr . n8 ) ;
} else {
bBases = new Uint8Array ( acc * sGin ) ;
bScalars = new Uint8Array ( acc * curve . Fr . n8 ) ;
}
2020-07-11 11:31:52 +03:00
let pB = 0 ;
let pS = 0 ;
const sBuffs = [
sTauG1 ,
sTauG2 ,
sAlphaTauG1 ,
sBetaTauG1
] ;
const bOne = new Uint8Array ( curve . Fr . n8 ) ;
curve . Fr . toRprLE ( bOne , 0 , curve . Fr . e ( 1 ) ) ;
let offset = 0 ;
for ( let i = 0 ; i < arr . length ; i ++ ) {
if ( ! arr [ i ] ) continue ;
for ( let j = 0 ; j < arr [ i ] . length ; j ++ ) {
if ( ( logger ) && ( j ) && ( j % 10000 == 0 ) ) logger . debug ( ` Configuring big array ${ sectionName } : ${ j } / ${ arr [ i ] . length } ` ) ;
bBases . set (
sBuffs [ arr [ i ] [ j ] [ 0 ] ] . slice (
arr [ i ] [ j ] [ 1 ] ,
arr [ i ] [ j ] [ 1 ] + sGin
) , offset * sGin
) ;
if ( arr [ i ] [ j ] [ 2 ] >= 0 ) {
bScalars . set (
sR1cs . slice (
arr [ i ] [ j ] [ 2 ] ,
arr [ i ] [ j ] [ 2 ] + curve . Fr . n8
) ,
offset * curve . Fr . n8
) ;
} else {
bScalars . set ( bOne , offset * curve . Fr . n8 ) ;
}
offset ++ ;
}
}
if ( arr . length > 1 ) {
const task = [ ] ;
task . push ( { cmd : "ALLOCSET" , var : 0 , buff : bBases } ) ;
task . push ( { cmd : "ALLOCSET" , var : 1 , buff : bScalars } ) ;
task . push ( { cmd : "ALLOC" , var : 2 , len : arr . length * sGmid } ) ;
pB = 0 ;
pS = 0 ;
let pD = 0 ;
for ( let i = 0 ; i < arr . length ; i ++ ) {
if ( ! arr [ i ] ) {
task . push ( { cmd : "CALL" , fnName : fnZero , params : [
{ var : 2 , offset : pD }
] } ) ;
pD += sGmid ;
continue ;
}
if ( arr [ i ] . length == 1 ) {
task . push ( { cmd : "CALL" , fnName : fnExp , params : [
{ var : 0 , offset : pB } ,
{ var : 1 , offset : pS } ,
{ val : curve . Fr . n8 } ,
{ var : 2 , offset : pD }
] } ) ;
} else {
task . push ( { cmd : "CALL" , fnName : fnMultiExp , params : [
{ var : 0 , offset : pB } ,
{ var : 1 , offset : pS } ,
{ val : curve . Fr . n8 } ,
{ val : arr [ i ] . length } ,
{ var : 2 , offset : pD }
] } ) ;
}
pB += sGin * arr [ i ] . length ;
pS += curve . Fr . n8 * arr [ i ] . length ;
pD += sGmid ;
}
task . push ( { cmd : "CALL" , fnName : fnBatchToAffine , params : [
{ var : 2 } ,
{ val : arr . length } ,
{ var : 2 } ,
] } ) ;
task . push ( { cmd : "GET" , out : 0 , var : 2 , len : arr . length * sGout } ) ;
const res = await curve . tm . queueAction ( task ) ;
return res ;
} else {
let res = await G . multiExpAffine ( bBases , bScalars , logger , sectionName ) ;
res = [ G . toAffine ( res ) ] ;
return res ;
}
}
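// The task lists built above follow ffjavascript's thread-manager protocol: ALLOCSET and
// ALLOC stage input/scratch buffers in a worker's wasm memory, CALL runs a named curve
// primitive (scalar multiplication, multiexp, batch-to-affine) on them, and GET copies
// the result back; curve.tm.queueAction ships one whole command list to a single worker.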
async function hashHPoints ( ) {
const CHUNK _SIZE = 1 << 14 ;
hashU32 ( domainSize - 1 ) ;
for ( let i = 0 ; i < domainSize - 1 ; i += CHUNK _SIZE ) {
if ( logger ) logger . debug ( ` HashingHPoints: ${ i } / ${ domainSize } ` ) ;
const n = Math . min ( domainSize - 1 - i , CHUNK _SIZE ) ;
await hashHPointsChunk ( i , n ) ;
}
}
async function hashHPointsChunk ( offset , nPoints ) {
const buff1 = await fdPTau . read ( nPoints * sG1 , sectionsPTau [ 2 ] [ 0 ] . p + ( offset + domainSize ) * sG1 ) ;
const buff2 = await fdPTau . read ( nPoints * sG1 , sectionsPTau [ 2 ] [ 0 ] . p + offset * sG1 ) ;
const concurrency = curve . tm . concurrency ;
const nPointsPerThread = Math . floor ( nPoints / concurrency ) ;
const opPromises = [ ] ;
for ( let i = 0 ; i < concurrency ; i ++ ) {
let n ;
if ( i < concurrency - 1 ) {
n = nPointsPerThread ;
} else {
n = nPoints - i * nPointsPerThread ;
}
if ( n == 0 ) continue ;
const subBuff1 = buff1 . slice ( i * nPointsPerThread * sG1 , ( i * nPointsPerThread + n ) * sG1 ) ;
const subBuff2 = buff2 . slice ( i * nPointsPerThread * sG1 , ( i * nPointsPerThread + n ) * sG1 ) ;
opPromises . push ( hashHPointsThread ( subBuff1 , subBuff2 ) ) ;
}
const result = await Promise . all ( opPromises ) ;
for ( let i = 0 ; i < result . length ; i ++ ) {
csHasher . update ( result [ i ] [ 0 ] ) ;
}
}
async function hashHPointsThread ( buff1 , buff2 ) {
const nPoints = buff1 . byteLength / sG1 ;
const sGmid = curve . G1 . F . n8 * 3 ;
const task = [ ] ;
task . push ( { cmd : "ALLOCSET" , var : 0 , buff : buff1 } ) ;
task . push ( { cmd : "ALLOCSET" , var : 1 , buff : buff2 } ) ;
task . push ( { cmd : "ALLOC" , var : 2 , len : nPoints * sGmid } ) ;
for ( let i = 0 ; i < nPoints ; i ++ ) {
task . push ( {
cmd : "CALL" ,
fnName : "g1m_subAffine" ,
params : [
{ var : 0 , offset : i * sG1 } ,
{ var : 1 , offset : i * sG1 } ,
{ var : 2 , offset : i * sGmid } ,
]
} ) ;
}
task . push ( { cmd : "CALL" , fnName : "g1m_batchToAffine" , params : [
{ var : 2 } ,
{ val : nPoints } ,
{ var : 2 } ,
] } ) ;
task . push ( { cmd : "CALL" , fnName : "g1m_batchLEMtoU" , params : [
{ var : 2 } ,
{ val : nPoints } ,
{ var : 2 } ,
] } ) ;
task . push ( { cmd : "GET" , out : 0 , var : 2 , len : nPoints * sG1 } ) ;
const res = await curve . tm . queueAction ( task ) ;
return res ;
}
function hashU32 ( n ) {
const buff = new Uint8Array ( 4 ) ;
const buffV = new DataView ( buff . buffer , buff . byteOffset , buff . byteLength ) ;
buffV . setUint32 ( 0 , n , false ) ;
csHasher . update ( buff ) ;
}
}
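// Usage sketch (illustrative only): `newZKey` combines a compiled circuit (.r1cs) with a
// phase-2-prepared ceremony file (.ptau) and writes the initial Groth16 proving key.
// Assuming the public API name `zKey.newZKey`:
//
//     const csHash = await snarkjs.zKey.newZKey("circuit.r1cs", "pot14_final.ptau",
//                                               "circuit_0000.zkey", logger);
//     // csHash is the 64-byte Blake2b circuit hash every later contribution must extend.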
/ *
Copyright 2018 0 KIMS association .
This file is part of snarkJS .
snarkJS is a free software : you can redistribute it and / or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
snarkJS is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public
License for more details .
You should have received a copy of the GNU General Public License
along with snarkJS . If not , see < https : //www.gnu.org/licenses/>.
* /
async function writeHeader ( fd , zkey ) {
// Write the header
///////////
await binFileUtils _ _namespace . startWriteSection ( fd , 1 ) ;
await fd . writeULE32 ( 1 ) ; // Groth
await binFileUtils _ _namespace . endWriteSection ( fd ) ;
// Write the Groth header section
///////////
const curve = await getCurveFromQ ( zkey . q ) ;
await binFileUtils _ _namespace . startWriteSection ( fd , 2 ) ;
const primeQ = curve . q ;
const n8q = ( Math . floor ( ( ffjavascript . Scalar . bitLength ( primeQ ) - 1 ) / 64 ) + 1 ) * 8 ;
const primeR = curve . r ;
const n8r = ( Math . floor ( ( ffjavascript . Scalar . bitLength ( primeR ) - 1 ) / 64 ) + 1 ) * 8 ;
await fd . writeULE32 ( n8q ) ;
await binFileUtils _ _namespace . writeBigInt ( fd , primeQ , n8q ) ;
await fd . writeULE32 ( n8r ) ;
await binFileUtils _ _namespace . writeBigInt ( fd , primeR , n8r ) ;
await fd . writeULE32 ( zkey . nVars ) ; // Total number of vars
await fd . writeULE32 ( zkey . nPublic ) ; // Total number of public vars (not including ONE)
await fd . writeULE32 ( zkey . domainSize ) ; // domainSize
await writeG1 ( fd , curve , zkey . vk _alpha _1 ) ;
await writeG1 ( fd , curve , zkey . vk _beta _1 ) ;
await writeG2 ( fd , curve , zkey . vk _beta _2 ) ;
await writeG2 ( fd , curve , zkey . vk _gamma _2 ) ;
await writeG1 ( fd , curve , zkey . vk _delta _1 ) ;
await writeG2 ( fd , curve , zkey . vk _delta _2 ) ;
await binFileUtils _ _namespace . endWriteSection ( fd ) ;
}
async function writeG1 ( fd , curve , p ) {
const buff = new Uint8Array ( curve . G1 . F . n8 * 2 ) ;
curve . G1 . toRprLEM ( buff , 0 , p ) ;
await fd . write ( buff ) ;
}
async function writeG2 ( fd , curve , p ) {
const buff = new Uint8Array ( curve . G2 . F . n8 * 2 ) ;
curve . G2 . toRprLEM ( buff , 0 , p ) ;
await fd . write ( buff ) ;
}
async function readG1 ( fd , curve , toObject ) {
const buff = await fd . read ( curve . G1 . F . n8 * 2 ) ;
const res = curve . G1 . fromRprLEM ( buff , 0 ) ;
return toObject ? curve . G1 . toObject ( res ) : res ;
}
async function readG2 ( fd , curve , toObject ) {
const buff = await fd . read ( curve . G2 . F . n8 * 2 ) ;
const res = curve . G2 . fromRprLEM ( buff , 0 ) ;
return toObject ? curve . G2 . toObject ( res ) : res ;
}
async function readHeader$1 ( fd , sections , toObject ) {
// Read Header
/////////////////////
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 1 ) ;
const protocolId = await fd . readULE32 ( ) ;
await binFileUtils _ _namespace . endReadSection ( fd ) ;
if ( protocolId == 1 ) {
return await readHeaderGroth16 ( fd , sections , toObject ) ;
} else if ( protocolId == 2 ) {
return await readHeaderPlonk ( fd , sections ) ;
} else {
throw new Error ( "Protocol not supported: " + protocolId ) ;
}
}
async function readHeaderGroth16 ( fd , sections , toObject ) {
const zkey = { } ;
zkey . protocol = "groth16" ;
// Read Groth Header
/////////////////////
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 2 ) ;
const n8q = await fd . readULE32 ( ) ;
zkey . n8q = n8q ;
zkey . q = await binFileUtils _ _namespace . readBigInt ( fd , n8q ) ;
const n8r = await fd . readULE32 ( ) ;
zkey . n8r = n8r ;
zkey . r = await binFileUtils _ _namespace . readBigInt ( fd , n8r ) ;
let curve = await getCurveFromQ ( zkey . q ) ;
zkey . nVars = await fd . readULE32 ( ) ;
zkey . nPublic = await fd . readULE32 ( ) ;
zkey . domainSize = await fd . readULE32 ( ) ;
zkey . power = log2 ( zkey . domainSize ) ;
zkey . vk _alpha _1 = await readG1 ( fd , curve , toObject ) ;
zkey . vk _beta _1 = await readG1 ( fd , curve , toObject ) ;
zkey . vk _beta _2 = await readG2 ( fd , curve , toObject ) ;
zkey . vk _gamma _2 = await readG2 ( fd , curve , toObject ) ;
zkey . vk _delta _1 = await readG1 ( fd , curve , toObject ) ;
zkey . vk _delta _2 = await readG2 ( fd , curve , toObject ) ;
await binFileUtils _ _namespace . endReadSection ( fd ) ;
return zkey ;
}
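// For reference, the Groth16 header (section 2) read above is laid out as: n8q (ULE32),
// q (n8q bytes), n8r (ULE32), r (n8r bytes), nVars, nPublic, domainSize (ULE32 each),
// then vk_alpha_1, vk_beta_1 (G1), vk_beta_2, vk_gamma_2 (G2), vk_delta_1 (G1) and
// vk_delta_2 (G2), all points stored in Montgomery little-endian (LEM) form.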
async function readHeaderPlonk ( fd , sections , protocol , toObject ) {
const zkey = { } ;
zkey . protocol = "plonk" ;
// Read Plonk Header
/////////////////////
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 2 ) ;
const n8q = await fd . readULE32 ( ) ;
zkey . n8q = n8q ;
zkey . q = await binFileUtils _ _namespace . readBigInt ( fd , n8q ) ;
const n8r = await fd . readULE32 ( ) ;
zkey . n8r = n8r ;
zkey . r = await binFileUtils _ _namespace . readBigInt ( fd , n8r ) ;
let curve = await getCurveFromQ ( zkey . q ) ;
zkey . nVars = await fd . readULE32 ( ) ;
zkey . nPublic = await fd . readULE32 ( ) ;
zkey . domainSize = await fd . readULE32 ( ) ;
zkey . power = log2 ( zkey . domainSize ) ;
zkey . nAdditions = await fd . readULE32 ( ) ;
zkey . nConstrains = await fd . readULE32 ( ) ;
zkey . k1 = await fd . read ( n8r ) ;
zkey . k2 = await fd . read ( n8r ) ;
zkey . Qm = await readG1 ( fd , curve , toObject ) ;
zkey . Ql = await readG1 ( fd , curve , toObject ) ;
zkey . Qr = await readG1 ( fd , curve , toObject ) ;
zkey . Qo = await readG1 ( fd , curve , toObject ) ;
zkey . Qc = await readG1 ( fd , curve , toObject ) ;
zkey . S1 = await readG1 ( fd , curve , toObject ) ;
zkey . S2 = await readG1 ( fd , curve , toObject ) ;
zkey . S3 = await readG1 ( fd , curve , toObject ) ;
zkey . X _2 = await readG2 ( fd , curve , toObject ) ;
await binFileUtils _ _namespace . endReadSection ( fd ) ;
return zkey ;
}
async function readZKey ( fileName , toObject ) {
const { fd , sections } = await binFileUtils _ _namespace . readBinFile ( fileName , "zkey" , 1 ) ;
const zkey = await readHeader$1 ( fd , sections , "groth16" ) ;
const Fr = new ffjavascript . F1Field ( zkey . r ) ;
const Rr = ffjavascript . Scalar . mod ( ffjavascript . Scalar . shl ( 1 , zkey . n8r * 8 ) , zkey . r ) ;
const Rri = Fr . inv ( Rr ) ;
const Rri2 = Fr . mul ( Rri , Rri ) ;
let curve = await getCurveFromQ ( zkey . q ) ;
// Read IC Section
///////////
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 3 ) ;
zkey . IC = [ ] ;
for ( let i = 0 ; i <= zkey . nPublic ; i ++ ) {
const P = await readG1 ( fd , curve , toObject ) ;
zkey . IC . push ( P ) ;
}
await binFileUtils _ _namespace . endReadSection ( fd ) ;
// Read Coefs
///////////
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 4 ) ;
const nCCoefs = await fd . readULE32 ( ) ;
zkey . ccoefs = [ ] ;
for ( let i = 0 ; i < nCCoefs ; i ++ ) {
const m = await fd . readULE32 ( ) ;
const c = await fd . readULE32 ( ) ;
const s = await fd . readULE32 ( ) ;
const v = await readFr2 ( ) ;
zkey . ccoefs . push ( {
matrix : m ,
constraint : c ,
signal : s ,
value : v
} ) ;
}
await binFileUtils _ _namespace . endReadSection ( fd ) ;
// Read A points
///////////
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 5 ) ;
zkey . A = [ ] ;
for ( let i = 0 ; i < zkey . nVars ; i ++ ) {
const A = await readG1 ( fd , curve , toObject ) ;
zkey . A [ i ] = A ;
}
await binFileUtils _ _namespace . endReadSection ( fd ) ;
// Read B1
///////////
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 6 ) ;
zkey . B1 = [ ] ;
for ( let i = 0 ; i < zkey . nVars ; i ++ ) {
const B1 = await readG1 ( fd , curve , toObject ) ;
zkey . B1 [ i ] = B1 ;
}
await binFileUtils _ _namespace . endReadSection ( fd ) ;
// Read B2 points
///////////
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 7 ) ;
zkey . B2 = [ ] ;
for ( let i = 0 ; i < zkey . nVars ; i ++ ) {
const B2 = await readG2 ( fd , curve , toObject ) ;
zkey . B2 [ i ] = B2 ;
}
await binFileUtils _ _namespace . endReadSection ( fd ) ;
// Read C points
///////////
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 8 ) ;
zkey . C = [ ] ;
for ( let i = zkey . nPublic + 1 ; i < zkey . nVars ; i ++ ) {
const C = await readG1 ( fd , curve , toObject ) ;
zkey . C [ i ] = C ;
}
await binFileUtils _ _namespace . endReadSection ( fd ) ;
// Read H points
///////////
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 9 ) ;
zkey . hExps = [ ] ;
for ( let i = 0 ; i < zkey . domainSize ; i ++ ) {
const H = await readG1 ( fd , curve , toObject ) ;
zkey . hExps . push ( H ) ;
}
await binFileUtils _ _namespace . endReadSection ( fd ) ;
await fd . close ( ) ;
return zkey ;
async function readFr2 ( /* toObject */ ) {
const n = await binFileUtils _ _namespace . readBigInt ( fd , zkey . n8r ) ;
return Fr . mul ( n , Rri2 ) ;
}
}
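// Usage sketch (illustrative only): within this bundle, readZKey loads an entire proving
// key into memory; passing toObject = true makes the section readers return bigint-based
// point objects instead of raw buffers:
//
//     const zkey = await readZKey("circuit_0000.zkey", true);
//     console.log(zkey.protocol, zkey.nVars, zkey.nPublic, zkey.IC.length);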
async function readContribution ( fd , curve , toObject ) {
const c = { delta : { } } ;
c . deltaAfter = await readG1 ( fd , curve , toObject ) ;
c . delta . g1 _s = await readG1 ( fd , curve , toObject ) ;
c . delta . g1 _sx = await readG1 ( fd , curve , toObject ) ;
c . delta . g2 _spx = await readG2 ( fd , curve , toObject ) ;
c . transcript = await fd . read ( 64 ) ;
c . type = await fd . readULE32 ( ) ;
const paramLength = await fd . readULE32 ( ) ;
const curPos = fd . pos ;
let lastType = 0 ;
while ( fd . pos - curPos < paramLength ) {
const buffType = await fd . read ( 1 ) ;
if ( buffType [ 0 ] <= lastType ) throw new Error ( "Parameters in the contribution must be sorted" ) ;
lastType = buffType [ 0 ] ;
if ( buffType [ 0 ] == 1 ) { // Name
const buffLen = await fd . read ( 1 ) ;
const buffStr = await fd . read ( buffLen [ 0 ] ) ;
c . name = new TextDecoder ( ) . decode ( buffStr ) ;
} else if ( buffType [ 0 ] == 2 ) {
const buffExp = await fd . read ( 1 ) ;
c . numIterationsExp = buffExp [ 0 ] ;
} else if ( buffType [ 0 ] == 3 ) {
const buffLen = await fd . read ( 1 ) ;
c . beaconHash = await fd . read ( buffLen [ 0 ] ) ;
} else {
throw new Error ( "Parameter not recognized" ) ;
}
}
if ( fd . pos != curPos + paramLength ) {
throw new Error ( "Parameters do not match" ) ;
}
return c ;
}
async function readMPCParams ( fd , curve , sections ) {
await binFileUtils _ _namespace . startReadUniqueSection ( fd , sections , 10 ) ;
const res = { contributions : [ ] } ;
res . csHash = await fd . read ( 64 ) ;
const n = await fd . readULE32 ( ) ;
for ( let i = 0 ; i < n ; i ++ ) {
const c = await readContribution ( fd , curve ) ;
res . contributions . push ( c ) ;
}
await binFileUtils _ _namespace . endReadSection ( fd ) ;
return res ;
}
async function writeContribution ( fd , curve , c ) {
await writeG1 ( fd , curve , c . deltaAfter ) ;
await writeG1 ( fd , curve , c . delta . g1 _s ) ;
await writeG1 ( fd , curve , c . delta . g1 _sx ) ;
await writeG2 ( fd , curve , c . delta . g2 _spx ) ;
await fd . write ( c . transcript ) ;
await fd . writeULE32 ( c . type || 0 ) ;
const params = [ ] ;
if ( c . name ) {
params . push ( 1 ) ; // Param Name
const nameData = new TextEncoder ( "utf-8" ) . encode ( c . name . substring ( 0 , 64 ) ) ;
params . push ( nameData . byteLength ) ;
for ( let i = 0 ; i < nameData . byteLength ; i ++ ) params . push ( nameData [ i ] ) ;
}
if ( c . type == 1 ) {
params . push ( 2 ) ; // Param numIterationsExp
params . push ( c . numIterationsExp ) ;
params . push ( 3 ) ; // Beacon Hash
params . push ( c . beaconHash . byteLength ) ;
for ( let i = 0 ; i < c . beaconHash . byteLength ; i ++ ) params . push ( c . beaconHash [ i ] ) ;
}
if ( params . length > 0 ) {
const paramsBuff = new Uint8Array ( params ) ;
await fd . writeULE32 ( paramsBuff . byteLength ) ;
await fd . write ( paramsBuff ) ;
} else {
await fd . writeULE32 ( 0 ) ;
}
}
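// The optional parameters written above use a tiny sorted type-length-value encoding:
// type 1 = contributor name (length-prefixed UTF-8, truncated to the first 64 characters),
// type 2 = numIterationsExp (one byte), type 3 = beaconHash (length-prefixed).
// readContribution enforces strictly ascending types and an exact total length.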
async function writeMPCParams ( fd , curve , mpcParams ) {
await binFileUtils _ _namespace . startWriteSection ( fd , 10 ) ;
await fd . write ( mpcParams . csHash ) ;
await fd . writeULE32 ( mpcParams . contributions . length ) ;
for ( let i = 0 ; i < mpcParams . contributions . length ; i ++ ) {
await writeContribution ( fd , curve , mpcParams . contributions [ i ] ) ;
}
await binFileUtils _ _namespace . endWriteSection ( fd ) ;
}
function hashG1 ( hasher , curve , p ) {
const buff = new Uint8Array ( curve . G1 . F . n8 * 2 ) ;
curve . G1 . toRprUncompressed ( buff , 0 , p ) ;
hasher . update ( buff ) ;
}
function hashG2 ( hasher , curve , p ) {
const buff = new Uint8Array ( curve . G2 . F . n8 * 2 ) ;
curve . G2 . toRprUncompressed ( buff , 0 , p ) ;
hasher . update ( buff ) ;
}
function hashPubKey ( hasher , curve , c ) {
hashG1 ( hasher , curve , c . deltaAfter ) ;
hashG1 ( hasher , curve , c . delta . g1 _s ) ;
hashG1 ( hasher , curve , c . delta . g1 _sx ) ;
hashG2 ( hasher , curve , c . delta . g2 _spx ) ;
hasher . update ( c . transcript ) ;
}
async function phase2exportMPCParams ( zkeyName , mpcparamsName , logger ) {
const { fd : fdZKey , sections : sectionsZKey } = await binFileUtils _ _namespace . readBinFile ( zkeyName , "zkey" , 2 ) ;
const zkey = await readHeader$1 ( fdZKey , sectionsZKey ) ;
if ( zkey . protocol != "groth16" ) {
throw new Error ( "zkey file is not groth16" ) ;
}
const curve = await getCurveFromQ ( zkey . q ) ;
const sG1 = curve . G1 . F . n8 * 2 ;
const sG2 = curve . G2 . F . n8 * 2 ;
const mpcParams = await readMPCParams ( fdZKey , curve , sectionsZKey ) ;
const fdMPCParams = await fastFile _ _namespace . createOverride ( mpcparamsName ) ;
/////////////////////
// Verification Key Section
/////////////////////
await writeG1 ( zkey . vk _alpha _1 ) ;
await writeG1 ( zkey . vk _beta _1 ) ;
await writeG2 ( zkey . vk _beta _2 ) ;
await writeG2 ( zkey . vk _gamma _2 ) ;
await writeG1 ( zkey . vk _delta _1 ) ;
await writeG2 ( zkey . vk _delta _2 ) ;
// IC
let buffBasesIC ;
buffBasesIC = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 3 ) ;
buffBasesIC = await curve . G1 . batchLEMtoU ( buffBasesIC ) ;
await writePointArray ( "G1" , buffBasesIC ) ;
/////////////////////
// h Section
/////////////////////
const buffBasesH _Lodd = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 9 ) ;
let buffBasesH _Tau ;
buffBasesH _Tau = await curve . G1 . fft ( buffBasesH _Lodd , "affine" , "jacobian" , logger ) ;
buffBasesH _Tau = await curve . G1 . batchApplyKey ( buffBasesH _Tau , curve . Fr . neg ( curve . Fr . e ( 2 ) ) , curve . Fr . w [ zkey . power + 1 ] , "jacobian" , "affine" , logger ) ;
// Remove last element. (The degree of H will be always m-2)
buffBasesH _Tau = buffBasesH _Tau . slice ( 0 , buffBasesH _Tau . byteLength - sG1 ) ;
buffBasesH _Tau = await curve . G1 . batchLEMtoU ( buffBasesH _Tau ) ;
await writePointArray ( "G1" , buffBasesH _Tau ) ;
/////////////////////
// L section
/////////////////////
let buffBasesC ;
buffBasesC = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 8 ) ;
buffBasesC = await curve . G1 . batchLEMtoU ( buffBasesC ) ;
await writePointArray ( "G1" , buffBasesC ) ;
/////////////////////
// A Section (C section)
/////////////////////
let buffBasesA ;
buffBasesA = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 5 ) ;
buffBasesA = await curve . G1 . batchLEMtoU ( buffBasesA ) ;
await writePointArray ( "G1" , buffBasesA ) ;
/////////////////////
// B1 Section
/////////////////////
let buffBasesB1 ;
buffBasesB1 = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 6 ) ;
buffBasesB1 = await curve . G1 . batchLEMtoU ( buffBasesB1 ) ;
await writePointArray ( "G1" , buffBasesB1 ) ;
/////////////////////
// B2 Section
/////////////////////
let buffBasesB2 ;
buffBasesB2 = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 7 ) ;
buffBasesB2 = await curve . G2 . batchLEMtoU ( buffBasesB2 ) ;
await writePointArray ( "G2" , buffBasesB2 ) ;
await fdMPCParams . write ( mpcParams . csHash ) ;
await writeU32 ( mpcParams . contributions . length ) ;
for ( let i = 0 ; i < mpcParams . contributions . length ; i ++ ) {
const c = mpcParams . contributions [ i ] ;
await writeG1 ( c . deltaAfter ) ;
await writeG1 ( c . delta . g1 _s ) ;
await writeG1 ( c . delta . g1 _sx ) ;
await writeG2 ( c . delta . g2 _spx ) ;
await fdMPCParams . write ( c . transcript ) ;
}
await fdZKey . close ( ) ;
await fdMPCParams . close ( ) ;
async function writeG1 ( P ) {
const buff = new Uint8Array ( sG1 ) ;
curve . G1 . toRprUncompressed ( buff , 0 , P ) ;
await fdMPCParams . write ( buff ) ;
}
async function writeG2 ( P ) {
const buff = new Uint8Array ( sG2 ) ;
curve . G2 . toRprUncompressed ( buff , 0 , P ) ;
await fdMPCParams . write ( buff ) ;
}
async function writePointArray ( groupName , buff ) {
let sG ;
if ( groupName == "G1" ) {
sG = sG1 ;
} else {
sG = sG2 ;
}
const buffSize = new Uint8Array ( 4 ) ;
const buffSizeV = new DataView ( buffSize . buffer , buffSize . byteOffset , buffSize . byteLength ) ;
buffSizeV . setUint32 ( 0 , buff . byteLength / sG , false ) ;
await fdMPCParams . write ( buffSize ) ;
await fdMPCParams . write ( buff ) ;
}
async function writeU32 ( n ) {
const buffSize = new Uint8Array ( 4 ) ;
const buffSizeV = new DataView ( buffSize . buffer , buffSize . byteOffset , buffSize . byteLength ) ;
buffSizeV . setUint32 ( 0 , n , false ) ;
await fdMPCParams . write ( buffSize ) ;
}
}
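// Usage sketch (illustrative only): this exporter writes the zkey in the bellman-style
// MPCParameters layout (vkey, then the IC, H, L, A, B1 and B2 point arrays in uncompressed
// big-endian form, then csHash and the contribution list). Assuming snarkjs exposes it
// publicly as `zKey.exportBellman`:
//
//     await snarkjs.zKey.exportBellman("circuit_0000.zkey", "circuit.mpcparams", logger);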
/ *
Copyright 2018 0 KIMS association .
This file is part of snarkJS .
snarkJS is a free software : you can redistribute it and / or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
snarkJS is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public
License for more details .
You should have received a copy of the GNU General Public License
along with snarkJS . If not , see < https : //www.gnu.org/licenses/>.
* /
async function phase2importMPCParams ( zkeyNameOld , mpcparamsName , zkeyNameNew , name , logger ) {
const { fd : fdZKeyOld , sections : sectionsZKeyOld } = await binFileUtils _ _namespace . readBinFile ( zkeyNameOld , "zkey" , 2 ) ;
const zkeyHeader = await readHeader$1 ( fdZKeyOld , sectionsZKeyOld , false ) ;
if ( zkeyHeader . protocol != "groth16" ) {
throw new Error ( "zkey file is not groth16" ) ;
}
const curve = await getCurveFromQ ( zkeyHeader . q ) ;
const sG1 = curve . G1 . F . n8 * 2 ;
const sG2 = curve . G2 . F . n8 * 2 ;
const oldMPCParams = await readMPCParams ( fdZKeyOld , curve , sectionsZKeyOld ) ;
const newMPCParams = { } ;
const fdMPCParams = await fastFile _ _namespace . readExisting ( mpcparamsName ) ;
2020-07-11 11:31:52 +03:00
fdMPCParams . pos =
sG1 * 3 + sG2 * 3 + // vKey
8 + sG1 * zkeyHeader . nVars + // IC + C
4 + sG1 * ( zkeyHeader . domainSize - 1 ) + // H
4 + sG1 * zkeyHeader . nVars + // A
4 + sG1 * zkeyHeader . nVars + // B1
4 + sG2 * zkeyHeader . nVars ; // B2
// csHash
newMPCParams . csHash = await fdMPCParams . read ( 64 ) ;
const nConttributions = await fdMPCParams . readUBE32 ( ) ;
newMPCParams . contributions = [ ] ;
for ( let i = 0 ; i < nConttributions ; i ++ ) {
const c = { delta : { } } ;
c . deltaAfter = await readG1 ( fdMPCParams ) ;
c . delta . g1 _s = await readG1 ( fdMPCParams ) ;
c . delta . g1 _sx = await readG1 ( fdMPCParams ) ;
c . delta . g2 _spx = await readG2 ( fdMPCParams ) ;
c . transcript = await fdMPCParams . read ( 64 ) ;
if ( i < oldMPCParams . contributions . length ) {
c . type = oldMPCParams . contributions [ i ] . type ;
if ( c . type == 1 ) {
c . beaconHash = oldMPCParams . contributions [ i ] . beaconHash ;
c . numIterationsExp = oldMPCParams . contributions [ i ] . numIterationsExp ;
}
if ( oldMPCParams . contributions [ i ] . name ) {
c . name = oldMPCParams . contributions [ i ] . name ;
}
}
newMPCParams . contributions . push ( c ) ;
}
if ( ! hashIsEqual ( newMPCParams . csHash , oldMPCParams . csHash ) ) {
if ( logger ) logger . error ( "Hash of the original circuit does not match with the MPC one" ) ;
return false ;
}
if ( oldMPCParams . contributions . length > newMPCParams . contributions . length ) {
if ( logger ) logger . error ( "The impoerted file does not include new contributions" ) ;
return false ;
}
for ( let i = 0 ; i < oldMPCParams . contributions . length ; i ++ ) {
if ( ! contributionIsEqual ( oldMPCParams . contributions [ i ] , newMPCParams . contributions [ i ] ) ) {
if ( logger ) logger . error ( ` Previos contribution ${ i } does not match ` ) ;
return false ;
}
}
// Set the same name to all new controbutions
if ( name ) {
for ( let i = oldMPCParams . contributions . length ; i < newMPCParams . contributions . length ; i ++ ) {
newMPCParams . contributions [ i ] . name = name ;
}
}
2022-01-19 23:59:11 +03:00
const fdZKeyNew = await binFileUtils _ _namespace . createBinFile ( zkeyNameNew , "zkey" , 1 , 10 ) ;
2020-07-11 11:31:52 +03:00
fdMPCParams . pos = 0 ;
// Header
fdMPCParams . pos += sG1 ; // ignore alpha1 (keep original)
fdMPCParams . pos += sG1 ; // ignore beta1
fdMPCParams . pos += sG2 ; // ignore beta2
fdMPCParams . pos += sG2 ; // ignore gamma2
zkeyHeader . vk _delta _1 = await readG1 ( fdMPCParams ) ;
zkeyHeader . vk _delta _2 = await readG2 ( fdMPCParams ) ;
await writeHeader ( fdZKeyNew , zkeyHeader ) ;
// IC (Keep original)
const nIC = await fdMPCParams . readUBE32 ( ) ;
if ( nIC != zkeyHeader . nPublic + 1 ) {
if ( logger ) logger . error ( "Invalid number of points in IC" ) ;
await fdZKeyNew . discard ( ) ;
return false ;
}
fdMPCParams . pos += sG1 * ( zkeyHeader . nPublic + 1 ) ;
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . copySection ( fdZKeyOld , sectionsZKeyOld , fdZKeyNew , 3 ) ;
2020-07-11 11:31:52 +03:00
// Coeffs (Keep original)
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . copySection ( fdZKeyOld , sectionsZKeyOld , fdZKeyNew , 4 ) ;
2020-07-11 11:31:52 +03:00
// H Section
const nH = await fdMPCParams . readUBE32 ( ) ;
if ( nH != zkeyHeader . domainSize - 1 ) {
if ( logger ) logger . error ( "Invalid number of points in H" ) ;
await fdZKeyNew . discard ( ) ;
return false ;
}
let buffH ;
const buffTauU = await fdMPCParams . read ( sG1 * ( zkeyHeader . domainSize - 1 ) ) ;
const buffTauLEM = await curve . G1 . batchUtoLEM ( buffTauU ) ;
buffH = new Uint8Array ( zkeyHeader . domainSize * sG1 ) ;
buffH . set ( buffTauLEM ) ; // Let the last one to zero.
2020-08-12 02:33:08 +03:00
curve . G1 . toRprLEM ( buffH , sG1 * ( zkeyHeader . domainSize - 1 ) , curve . G1 . zeroAffine ) ;
2020-07-11 11:31:52 +03:00
const n2Inv = curve . Fr . neg ( curve . Fr . inv ( curve . Fr . e ( 2 ) ) ) ;
const wInv = curve . Fr . inv ( curve . Fr . w [ zkeyHeader . power + 1 ] ) ;
buffH = await curve . G1 . batchApplyKey ( buffH , n2Inv , wInv , "affine" , "jacobian" , logger ) ;
buffH = await curve . G1 . ifft ( buffH , "jacobian" , "affine" , logger ) ;
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . startWriteSection ( fdZKeyNew , 9 ) ;
2020-07-11 11:31:52 +03:00
await fdZKeyNew . write ( buffH ) ;
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . endWriteSection ( fdZKeyNew ) ;
2020-07-11 11:31:52 +03:00
// C Secion (L section)
const nL = await fdMPCParams . readUBE32 ( ) ;
if ( nL != ( zkeyHeader . nVars - zkeyHeader . nPublic - 1 ) ) {
if ( logger ) logger . error ( "Invalid number of points in L" ) ;
await fdZKeyNew . discard ( ) ;
return false ;
}
let buffL ;
buffL = await fdMPCParams . read ( sG1 * ( zkeyHeader . nVars - zkeyHeader . nPublic - 1 ) ) ;
buffL = await curve . G1 . batchUtoLEM ( buffL ) ;
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . startWriteSection ( fdZKeyNew , 8 ) ;
2020-07-11 11:31:52 +03:00
await fdZKeyNew . write ( buffL ) ;
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . endWriteSection ( fdZKeyNew ) ;
2020-07-11 11:31:52 +03:00
// A Section
const nA = await fdMPCParams . readUBE32 ( ) ;
if ( nA != zkeyHeader . nVars ) {
if ( logger ) logger . error ( "Invalid number of points in A" ) ;
await fdZKeyNew . discard ( ) ;
return false ;
}
fdMPCParams . pos += sG1 * ( zkeyHeader . nVars ) ;
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . copySection ( fdZKeyOld , sectionsZKeyOld , fdZKeyNew , 5 ) ;
2020-07-11 11:31:52 +03:00
// B1 Section
const nB1 = await fdMPCParams . readUBE32 ( ) ;
if ( nB1 != zkeyHeader . nVars ) {
if ( logger ) logger . error ( "Invalid number of points in B1" ) ;
await fdZKeyNew . discard ( ) ;
return false ;
}
fdMPCParams . pos += sG1 * ( zkeyHeader . nVars ) ;
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . copySection ( fdZKeyOld , sectionsZKeyOld , fdZKeyNew , 6 ) ;
2020-07-11 11:31:52 +03:00
// B2 Section
const nB2 = await fdMPCParams . readUBE32 ( ) ;
if ( nB2 != zkeyHeader . nVars ) {
if ( logger ) logger . error ( "Invalid number of points in B2" ) ;
await fdZKeyNew . discard ( ) ;
return false ;
}
fdMPCParams . pos += sG2 * ( zkeyHeader . nVars ) ;
2022-01-19 23:59:11 +03:00
await binFileUtils _ _namespace . copySection ( fdZKeyOld , sectionsZKeyOld , fdZKeyNew , 7 ) ;
2020-07-11 11:31:52 +03:00
await writeMPCParams ( fdZKeyNew , curve , newMPCParams ) ;
await fdMPCParams . close ( ) ;
await fdZKeyNew . close ( ) ;
await fdZKeyOld . close ( ) ;
return true ;
async function readG1 ( fd ) {
const buff = await fd . read ( curve . G1 . F . n8 * 2 ) ;
return curve . G1 . fromRprUncompressed ( buff , 0 ) ;
}
async function readG2 ( fd ) {
const buff = await fd . read ( curve . G2 . F . n8 * 2 ) ;
return curve . G2 . fromRprUncompressed ( buff , 0 ) ;
}
function contributionIsEqual ( c1 , c2 ) {
if ( ! curve . G1 . eq ( c1 . deltaAfter , c2 . deltaAfter ) ) return false ;
if ( ! curve . G1 . eq ( c1 . delta . g1 _s , c2 . delta . g1 _s ) ) return false ;
if ( ! curve . G1 . eq ( c1 . delta . g1 _sx , c2 . delta . g1 _sx ) ) return false ;
if ( ! curve . G2 . eq ( c1 . delta . g2 _spx , c2 . delta . g2 _spx ) ) return false ;
if ( ! hashIsEqual ( c1 . transcript , c2 . transcript ) ) return false ;
return true ;
}
}
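/*
    Verifies a phase-2 zkey against its "init" zkey (the deterministic zkey
    derived from the circuit and the powers of tau). It replays the
    contribution chain: for each contribution it recomputes the transcript,
    checks via a pairing "same ratio" test that (g1_s, g1_sx) and
    (g2_sp, g2_spx) encode the same secret, and checks that deltaAfter is the
    previous delta scaled by that secret. A usage sketch with hypothetical
    file names:

        const ok = await phase2verifyFromInit(
            "circuit_0000.zkey", "pot_final.ptau", "circuit_0003.zkey", logger
        );
*/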
const sameRatio = sameRatio$2;

async function phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, logger) {

    let sr;
    await Blake2b__default["default"].ready();

    const { fd, sections } = await binFileUtils__namespace.readBinFile(zkeyFileName, "zkey", 2);
    const zkey = await readHeader$1(fd, sections, false);
    if (zkey.protocol != "groth16") {
        throw new Error("zkey file is not groth16");
    }

    const curve = await getCurveFromQ(zkey.q);
    const sG1 = curve.G1.F.n8 * 2;

    const mpcParams = await readMPCParams(fd, curve, sections);

    const accumulatedHasher = Blake2b__default["default"](64);
    accumulatedHasher.update(mpcParams.csHash);
    let curDelta = curve.G1.g;
    for (let i = 0; i < mpcParams.contributions.length; i++) {
        const c = mpcParams.contributions[i];
        const ourHasher = cloneHasher(accumulatedHasher);

        hashG1(ourHasher, curve, c.delta.g1_s);
        hashG1(ourHasher, curve, c.delta.g1_sx);

        if (!hashIsEqual(ourHasher.digest(), c.transcript)) {
            console.log(`INVALID(${i}): Inconsistent transcript`);
            return false;
        }

        const delta_g2_sp = hashToG2(curve, c.transcript);

        sr = await sameRatio(curve, c.delta.g1_s, c.delta.g1_sx, delta_g2_sp, c.delta.g2_spx);
        if (sr !== true) {
            console.log(`INVALID(${i}): public key G1 and G2 do not have the same ratio`);
            return false;
        }

        sr = await sameRatio(curve, curDelta, c.deltaAfter, delta_g2_sp, c.delta.g2_spx);
        if (sr !== true) {
            console.log(`INVALID(${i}): deltaAfter does not follow the public key`);
            return false;
        }

        if (c.type == 1) {
            const rng = rngFromBeaconParams(c.beaconHash, c.numIterationsExp);
            const expected_prvKey = curve.Fr.fromRng(rng);
            const expected_g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
            const expected_g1_sx = curve.G1.toAffine(curve.G1.timesFr(expected_g1_s, expected_prvKey));
            if (curve.G1.eq(expected_g1_s, c.delta.g1_s) !== true) {
                console.log(`INVALID(${i}): Key of the beacon does not match. g1_s`);
                return false;
            }
            if (curve.G1.eq(expected_g1_sx, c.delta.g1_sx) !== true) {
                console.log(`INVALID(${i}): Key of the beacon does not match. g1_sx`);
                return false;
            }
        }

        hashPubKey(accumulatedHasher, curve, c);

        const contributionHasher = Blake2b__default["default"](64);
        hashPubKey(contributionHasher, curve, c);
        c.contributionHash = contributionHasher.digest();

        curDelta = c.deltaAfter;
    }

    const { fd: fdInit, sections: sectionsInit } = await binFileUtils__namespace.readBinFile(initFileName, "zkey", 2);
    const zkeyInit = await readHeader$1(fdInit, sectionsInit, false);
    if (zkeyInit.protocol != "groth16") {
        throw new Error("zkeyinit file is not groth16");
    }

    if ((!ffjavascript.Scalar.eq(zkeyInit.q, zkey.q))
        || (!ffjavascript.Scalar.eq(zkeyInit.r, zkey.r))
        || (zkeyInit.n8q != zkey.n8q)
        || (zkeyInit.n8r != zkey.n8r))
    {
        if (logger) logger.error("INVALID: Different curves");
        return false;
    }

    if ((zkeyInit.nVars != zkey.nVars)
        || (zkeyInit.nPublic != zkey.nPublic)
        || (zkeyInit.domainSize != zkey.domainSize))
    {
        if (logger) logger.error("INVALID: Different circuit parameters");
        return false;
    }

    if (!curve.G1.eq(zkey.vk_alpha_1, zkeyInit.vk_alpha_1)) {
        if (logger) logger.error("INVALID: Invalid alpha1");
        return false;
    }
    if (!curve.G1.eq(zkey.vk_beta_1, zkeyInit.vk_beta_1)) {
        if (logger) logger.error("INVALID: Invalid beta1");
        return false;
    }
    if (!curve.G2.eq(zkey.vk_beta_2, zkeyInit.vk_beta_2)) {
        if (logger) logger.error("INVALID: Invalid beta2");
        return false;
    }
    if (!curve.G2.eq(zkey.vk_gamma_2, zkeyInit.vk_gamma_2)) {
        if (logger) logger.error("INVALID: Invalid gamma2");
        return false;
    }
    if (!curve.G1.eq(zkey.vk_delta_1, curDelta)) {
        if (logger) logger.error("INVALID: Invalid delta1");
        return false;
    }
    sr = await sameRatio(curve, curve.G1.g, curDelta, curve.G2.g, zkey.vk_delta_2);
    if (sr !== true) {
        if (logger) logger.error("INVALID: Invalid delta2");
        return false;
    }

    const mpcParamsInit = await readMPCParams(fdInit, curve, sectionsInit);
    if (!hashIsEqual(mpcParams.csHash, mpcParamsInit.csHash)) {
        if (logger) logger.error("INVALID: Circuit does not match");
        return false;
    }

    // Check sizes of sections
    if (sections[8][0].size != sG1 * (zkey.nVars - zkey.nPublic - 1)) {
        if (logger) logger.error("INVALID: Invalid L section size");
        return false;
    }

    if (sections[9][0].size != sG1 * (zkey.domainSize)) {
        if (logger) logger.error("INVALID: Invalid H section size");
        return false;
    }

    let ss;
    ss = await binFileUtils__namespace.sectionIsEqual(fd, sections, fdInit, sectionsInit, 3);
    if (!ss) {
        if (logger) logger.error("INVALID: IC section is not identical");
        return false;
    }

    ss = await binFileUtils__namespace.sectionIsEqual(fd, sections, fdInit, sectionsInit, 4);
    if (!ss) {
        if (logger) logger.error("Coeffs section is not identical");
        return false;
    }

    ss = await binFileUtils__namespace.sectionIsEqual(fd, sections, fdInit, sectionsInit, 5);
    if (!ss) {
        if (logger) logger.error("A section is not identical");
        return false;
    }

    ss = await binFileUtils__namespace.sectionIsEqual(fd, sections, fdInit, sectionsInit, 6);
    if (!ss) {
        if (logger) logger.error("B1 section is not identical");
        return false;
    }

    ss = await binFileUtils__namespace.sectionIsEqual(fd, sections, fdInit, sectionsInit, 7);
    if (!ss) {
        if (logger) logger.error("B2 section is not identical");
        return false;
    }

    // Check L
    sr = await sectionHasSameRatio("G1", fdInit, sectionsInit, fd, sections, 8, zkey.vk_delta_2, zkeyInit.vk_delta_2, "L section");
    if (sr !== true) {
        if (logger) logger.error("L section does not match");
        return false;
    }

    // Check H
    sr = await sameRatioH();
    if (sr !== true) {
        if (logger) logger.error("H section does not match");
        return false;
    }

    if (logger) logger.info(formatHash(mpcParams.csHash, "Circuit Hash: "));

    await fd.close();
    await fdInit.close();

    for (let i = mpcParams.contributions.length - 1; i >= 0; i--) {
        const c = mpcParams.contributions[i];
        if (logger) logger.info("-------------------------");
        if (logger) logger.info(formatHash(c.contributionHash, `contribution #${i + 1} ${c.name ? c.name : ""}:`));
        if (c.type == 1) {
            if (logger) logger.info(`Beacon generator: ${byteArray2hex(c.beaconHash)}`);
            if (logger) logger.info(`Beacon iterations Exp: ${c.numIterationsExp}`);
        }
    }
    if (logger) logger.info("-------------------------");
    if (logger) logger.info("ZKey Ok!");

    return true;
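
    /*
        sectionHasSameRatio (below) is a probabilistic batch check: instead of
        pairing every point pair individually, it draws a random 32-bit scalar
        per point, multi-exponentiates both sections with the same scalars,
        and runs a single sameRatio pairing check on the two aggregates. If
        any pair deviated from the claimed ratio, the aggregates would still
        pass only with negligible probability over the random scalars.
    */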
    async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
        const MAX_CHUNK_SIZE = 1 << 20;
        const G = curve[groupName];
        const sG = G.F.n8 * 2;
        await binFileUtils__namespace.startReadUniqueSection(fd1, sections1, idSection);
        await binFileUtils__namespace.startReadUniqueSection(fd2, sections2, idSection);

        let R1 = G.zero;
        let R2 = G.zero;

        const nPoints = sections1[idSection][0].size / sG;

        for (let i = 0; i < nPoints; i += MAX_CHUNK_SIZE) {
            if (logger) logger.debug(`Same ratio check ${sectionName}: ${i}/${nPoints}`);
            const n = Math.min(nPoints - i, MAX_CHUNK_SIZE);
            const bases1 = await fd1.read(n * sG);
            const bases2 = await fd2.read(n * sG);

            const scalars = new Uint8Array(4 * n);
            crypto__default["default"].randomFillSync(scalars);

            const r1 = await G.multiExpAffine(bases1, scalars);
            const r2 = await G.multiExpAffine(bases2, scalars);

            R1 = G.add(R1, r1);
            R2 = G.add(R2, r2);
        }
        await binFileUtils__namespace.endReadSection(fd1);
        await binFileUtils__namespace.endReadSection(fd2);

        if (nPoints == 0) return true;

        sr = await sameRatio(curve, R1, R2, g2sp, g2spx);
        if (sr !== true) return false;

        return true;
    }
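
    /*
        sameRatioH (below) checks the H section against the powers of tau
        without reconstructing it: it samples a random polynomial over the
        evaluation domain, aggregates it once against the
        (tau^{domainSize+i} - tau^i) point differences read from the ptau file
        and once against the Lagrange-basis H points of the zkey (after
        shifting its coefficients to the odd coset), then verifies that the
        two aggregates stand in the same ratio as the zkey's delta2 to the
        init zkey's delta2. Only a random linear combination is computed, so
        the check is probabilistic but cheap.
    */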
    async function sameRatioH() {
        const MAX_CHUNK_SIZE = 1 << 20;
        const G = curve.G1;
        const Fr = curve.Fr;
        const sG = G.F.n8 * 2;

        const { fd: fdPTau, sections: sectionsPTau } = await binFileUtils__namespace.readBinFile(pTauFileName, "ptau", 1);

        let buff_r = new ffjavascript.BigBuffer(zkey.domainSize * zkey.n8r);

        const seed = new Array(8);
        for (let i = 0; i < 8; i++) {
            seed[i] = crypto__default["default"].randomBytes(4).readUInt32BE(0, true);
        }
        const rng = new ffjavascript.ChaCha(seed);
        for (let i = 0; i < zkey.domainSize - 1; i++) { // Note that the last one is zero
            const e = Fr.fromRng(rng);
            Fr.toRprLE(buff_r, i * zkey.n8r, e);
        }
        Fr.toRprLE(buff_r, (zkey.domainSize - 1) * zkey.n8r, Fr.zero);

        let R1 = G.zero;
        for (let i = 0; i < zkey.domainSize; i += MAX_CHUNK_SIZE) {
            if (logger) logger.debug(`H Verification(tau): ${i}/${zkey.domainSize}`);
            const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);

            const buff1 = await fdPTau.read(sG * n, sectionsPTau[2][0].p + zkey.domainSize * sG + i * sG);
            const buff2 = await fdPTau.read(sG * n, sectionsPTau[2][0].p + i * sG);

            const buffB = await batchSubtract(buff1, buff2);
            const buffS = buff_r.slice(i * zkey.n8r, (i + n) * zkey.n8r);
            const r = await G.multiExpAffine(buffB, buffS);

            R1 = G.add(R1, r);
        }

        // Calculate the odd coefficients in the transformed domain
        buff_r = await Fr.batchToMontgomery(buff_r);
        // const first = curve.Fr.neg(curve.Fr.inv(curve.Fr.e(2)));
        // Works*2   const first = curve.Fr.neg(curve.Fr.e(2));

        let first;
        if (zkey.power < Fr.s) {
            first = Fr.neg(Fr.e(2));
        } else {
            const small_m = 2 ** Fr.s;
            const shift_to_small_m = Fr.exp(Fr.shift, small_m);
            first = Fr.sub(shift_to_small_m, Fr.one);
        }

        // const inc = curve.Fr.inv(curve.PFr.w[zkey.power+1]);
        const inc = zkey.power < Fr.s ? Fr.w[zkey.power + 1] : Fr.shift;
        buff_r = await Fr.batchApplyKey(buff_r, first, inc);
        buff_r = await Fr.fft(buff_r);
        buff_r = await Fr.batchFromMontgomery(buff_r);

        await binFileUtils__namespace.startReadUniqueSection(fd, sections, 9);
        let R2 = G.zero;
        for (let i = 0; i < zkey.domainSize; i += MAX_CHUNK_SIZE) {
            if (logger) logger.debug(`H Verification(lagrange): ${i}/${zkey.domainSize}`);
            const n = Math.min(zkey.domainSize - i, MAX_CHUNK_SIZE);

            const buff = await fd.read(sG * n);
            const buffS = buff_r.slice(i * zkey.n8r, (i + n) * zkey.n8r);
            const r = await G.multiExpAffine(buff, buffS);

            R2 = G.add(R2, r);
        }
        await binFileUtils__namespace.endReadSection(fd);

        sr = await sameRatio(curve, R1, R2, zkey.vk_delta_2, zkeyInit.vk_delta_2);
        if (sr !== true) return false;

        return true;
    }
    async function batchSubtract(buff1, buff2) {
        const sG = curve.G1.F.n8 * 2;
        const nPoints = buff1.byteLength / sG;
        const concurrency = curve.tm.concurrency;
        const nPointsPerThread = Math.floor(nPoints / concurrency);
        const opPromises = [];
        for (let i = 0; i < concurrency; i++) {
            let n;
            if (i < concurrency - 1) {
                n = nPointsPerThread;
            } else {
                n = nPoints - i * nPointsPerThread;
            }
            if (n == 0) continue;

            const subBuff1 = buff1.slice(i * nPointsPerThread * sG, (i * nPointsPerThread + n) * sG);
            const subBuff2 = buff2.slice(i * nPointsPerThread * sG, (i * nPointsPerThread + n) * sG);
            opPromises.push(batchSubtractThread(subBuff1, subBuff2));
        }

        const result = await Promise.all(opPromises);

        const fullBuffOut = new Uint8Array(nPoints * sG);
        let p = 0;
        for (let i = 0; i < result.length; i++) {
            fullBuffOut.set(result[i][0], p);
            p += result[i][0].byteLength;
        }

        return fullBuffOut;
    }

    async function batchSubtractThread(buff1, buff2) {
        const sG1 = curve.G1.F.n8 * 2;
        const sGmid = curve.G1.F.n8 * 3;
        const nPoints = buff1.byteLength / sG1;
        const task = [];
        task.push({ cmd: "ALLOCSET", var: 0, buff: buff1 });
        task.push({ cmd: "ALLOCSET", var: 1, buff: buff2 });
        task.push({ cmd: "ALLOC", var: 2, len: nPoints * sGmid });
        for (let i = 0; i < nPoints; i++) {
            task.push({
                cmd: "CALL",
                fnName: "g1m_subAffine",
                params: [
                    { var: 0, offset: i * sG1 },
                    { var: 1, offset: i * sG1 },
                    { var: 2, offset: i * sGmid },
                ]
            });
        }
        task.push({ cmd: "CALL", fnName: "g1m_batchToAffine", params: [
            { var: 2 },
            { val: nPoints },
            { var: 2 },
        ] });
        task.push({ cmd: "GET", out: 0, var: 2, len: nPoints * sG1 });

        const res = await curve.tm.queueAction(task);

        return res;
    }
}
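/*
    Convenience wrapper: rebuilds the deterministic "init" zkey in memory from
    the r1cs and ptau files and then delegates to phase2verifyFromInit, so the
    caller does not need to keep the original initial zkey around.
*/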
async function phase2verifyFromR1cs(r1csFileName, pTauFileName, zkeyFileName, logger) {

    // const initFileName = "~" + zkeyFileName + ".init";
    const initFileName = { type: "bigMem" };
    await newZKey(r1csFileName, pTauFileName, initFileName, logger);

    return await phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, logger);
}
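/*
    Adds a regular (type 0) phase-2 contribution: a fresh secret is drawn from
    the supplied entropy, vk_delta_1/vk_delta_2 are multiplied by it, and the
    L and H sections are rescaled by its inverse so the proving key stays
    consistent. A usage sketch with hypothetical file names:

        const hash = await phase2contribute(
            "circuit_0001.zkey", "circuit_0002.zkey",
            "second contribution", "some random entropy text", logger
        );
*/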
async function phase2contribute(zkeyNameOld, zkeyNameNew, name, entropy, logger) {
    await Blake2b__default["default"].ready();

    const { fd: fdOld, sections: sections } = await binFileUtils__namespace.readBinFile(zkeyNameOld, "zkey", 2);
    const zkey = await readHeader$1(fdOld, sections);
    if (zkey.protocol != "groth16") {
        throw new Error("zkey file is not groth16");
    }

    const curve = await getCurveFromQ(zkey.q);

    const mpcParams = await readMPCParams(fdOld, curve, sections);

    const fdNew = await binFileUtils__namespace.createBinFile(zkeyNameNew, "zkey", 1, 10);

    const rng = await getRandomRng(entropy);

    const transcriptHasher = Blake2b__default["default"](64);
    transcriptHasher.update(mpcParams.csHash);
    for (let i = 0; i < mpcParams.contributions.length; i++) {
        hashPubKey(transcriptHasher, curve, mpcParams.contributions[i]);
    }

    const curContribution = {};
    curContribution.delta = {};
    curContribution.delta.prvKey = curve.Fr.fromRng(rng);
    curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
    curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesFr(curContribution.delta.g1_s, curContribution.delta.prvKey));
    hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
    hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
    curContribution.transcript = transcriptHasher.digest();
    curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
    curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesFr(curContribution.delta.g2_sp, curContribution.delta.prvKey));

    zkey.vk_delta_1 = curve.G1.timesFr(zkey.vk_delta_1, curContribution.delta.prvKey);
    zkey.vk_delta_2 = curve.G2.timesFr(zkey.vk_delta_2, curContribution.delta.prvKey);

    curContribution.deltaAfter = zkey.vk_delta_1;

    curContribution.type = 0;
    if (name) curContribution.name = name;

    mpcParams.contributions.push(curContribution);

    await writeHeader(fdNew, zkey);

    // IC
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3);

    // Coeffs (Keep original)
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4);

    // A Section
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5);

    // B1 Section
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6);

    // B2 Section
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7);

    const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
    await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
    await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);

    await writeMPCParams(fdNew, curve, mpcParams);

    await fdOld.close();
    await fdNew.close();

    const contributionHasher = Blake2b__default["default"](64);
    hashPubKey(contributionHasher, curve, curContribution);

    const contributionHash = contributionHasher.digest();

    if (logger) logger.info(formatHash(mpcParams.csHash, "Circuit Hash: "));
    if (logger) logger.info(formatHash(contributionHash, "Contribution Hash: "));

    return contributionHash;
}
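/*
    Adds a beacon (type 1) contribution: the secret is derived
    deterministically from a public beacon value via rngFromBeaconParams
    (repeated hashing, 2^numIterationsExp iterations, as implemented elsewhere
    in this bundle), so anyone can recompute it and check that the final
    contribution was not chosen adversarially. Usage sketch with hypothetical
    values:

        const hash = await beacon(
            "circuit_0002.zkey", "circuit_final.zkey", "final beacon",
            "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20",
            10, logger
        );
*/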
async function beacon(zkeyNameOld, zkeyNameNew, name, beaconHashStr, numIterationsExp, logger) {
    await Blake2b__default["default"].ready();

    const beaconHash = hex2ByteArray(beaconHashStr);
    if ((beaconHash.byteLength == 0)
        || (beaconHash.byteLength * 2 != beaconHashStr.length))
    {
        if (logger) logger.error("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
        return false;
    }
    if (beaconHash.length >= 256) {
        if (logger) logger.error("Maximum length of beacon hash is 255 bytes");
        return false;
    }

    numIterationsExp = parseInt(numIterationsExp);
    if ((numIterationsExp < 10) || (numIterationsExp > 63)) {
        if (logger) logger.error("Invalid numIterationsExp. (Must be between 10 and 63)");
        return false;
    }

    const { fd: fdOld, sections: sections } = await binFileUtils__namespace.readBinFile(zkeyNameOld, "zkey", 2);
    const zkey = await readHeader$1(fdOld, sections);
    if (zkey.protocol != "groth16") {
        throw new Error("zkey file is not groth16");
    }

    const curve = await getCurveFromQ(zkey.q);

    const mpcParams = await readMPCParams(fdOld, curve, sections);

    const fdNew = await binFileUtils__namespace.createBinFile(zkeyNameNew, "zkey", 1, 10);

    const rng = await rngFromBeaconParams(beaconHash, numIterationsExp);

    const transcriptHasher = Blake2b__default["default"](64);
    transcriptHasher.update(mpcParams.csHash);
    for (let i = 0; i < mpcParams.contributions.length; i++) {
        hashPubKey(transcriptHasher, curve, mpcParams.contributions[i]);
    }

    const curContribution = {};
    curContribution.delta = {};
    curContribution.delta.prvKey = curve.Fr.fromRng(rng);
    curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
    curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesFr(curContribution.delta.g1_s, curContribution.delta.prvKey));
    hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
    hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
    curContribution.transcript = transcriptHasher.digest();
    curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
    curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesFr(curContribution.delta.g2_sp, curContribution.delta.prvKey));

    zkey.vk_delta_1 = curve.G1.timesFr(zkey.vk_delta_1, curContribution.delta.prvKey);
    zkey.vk_delta_2 = curve.G2.timesFr(zkey.vk_delta_2, curContribution.delta.prvKey);

    curContribution.deltaAfter = zkey.vk_delta_1;

    curContribution.type = 1;
    curContribution.numIterationsExp = numIterationsExp;
    curContribution.beaconHash = beaconHash;

    if (name) curContribution.name = name;

    mpcParams.contributions.push(curContribution);

    await writeHeader(fdNew, zkey);

    // IC
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 3);

    // Coeffs (Keep original)
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 4);

    // A Section
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 5);

    // B1 Section
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 6);

    // B2 Section
    await binFileUtils__namespace.copySection(fdOld, sections, fdNew, 7);

    const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
    await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
    await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);

    await writeMPCParams(fdNew, curve, mpcParams);

    await fdOld.close();
    await fdNew.close();

    const contributionHasher = Blake2b__default["default"](64);
    hashPubKey(contributionHasher, curve, curContribution);

    const contributionHash = contributionHasher.digest();

    if (logger) logger.info(formatHash(contributionHash, "Contribution Hash: "));

    return contributionHash;
}
async function zkeyExportJson$1(zkeyFileName) {

    const zKey = await readZKey(zkeyFileName, true);

    return zKey;
}
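/*
    Contributes to a bellman-format challenge file and writes the matching
    response file (the format used by ceremonies run with bellman tooling).
    Usage sketch — file names are hypothetical and `curve` is an ffjavascript
    curve object obtained elsewhere:

        const hash = await bellmanContribute(
            curve, "challenge_0003", "response_0003", "more entropy", logger
        );
*/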
async function bellmanContribute(curve, challengeFilename, responseFileName, entropy, logger) {
    await Blake2b__default["default"].ready();

    const rng = await getRandomRng(entropy);

    const delta = curve.Fr.fromRng(rng);
    const invDelta = curve.Fr.inv(delta);

    const sG1 = curve.G1.F.n8 * 2;
    const sG2 = curve.G2.F.n8 * 2;

    const fdFrom = await fastFile__namespace.readExisting(challengeFilename);
    const fdTo = await fastFile__namespace.createOverride(responseFileName);

    await copy(sG1); // alpha1
    await copy(sG1); // beta1
    await copy(sG2); // beta2
    await copy(sG2); // gamma2
    const oldDelta1 = await readG1();
    const delta1 = curve.G1.timesFr(oldDelta1, delta);
    await writeG1(delta1);
    const oldDelta2 = await readG2();
    const delta2 = curve.G2.timesFr(oldDelta2, delta);
    await writeG2(delta2);

    // IC
    const nIC = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nIC);
    await copy(nIC * sG1);

    // H
    const nH = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nH);
    await applyKeyToChallengeSection(fdFrom, fdTo, null, curve, "G1", nH, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "H", logger);

    // L
    const nL = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nL);
    await applyKeyToChallengeSection(fdFrom, fdTo, null, curve, "G1", nL, invDelta, curve.Fr.e(1), "UNCOMPRESSED", "L", logger);

    // A
    const nA = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nA);
    await copy(nA * sG1);

    // B1
    const nB1 = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nB1);
    await copy(nB1 * sG1);

    // B2
    const nB2 = await fdFrom.readUBE32();
    await fdTo.writeUBE32(nB2);
    await copy(nB2 * sG2);

    //////////
    /// Read contributions
    //////////
    const transcriptHasher = Blake2b__default["default"](64);

    const mpcParams = {};
    // csHash
    mpcParams.csHash = await fdFrom.read(64);
    transcriptHasher.update(mpcParams.csHash);

    const nContributions = await fdFrom.readUBE32();
    mpcParams.contributions = [];
    for (let i = 0; i < nContributions; i++) {
        const c = { delta: {} };
        c.deltaAfter = await readG1();
        c.delta.g1_s = await readG1();
        c.delta.g1_sx = await readG1();
        c.delta.g2_spx = await readG2();
        c.transcript = await fdFrom.read(64);
        mpcParams.contributions.push(c);
        hashPubKey(transcriptHasher, curve, c);
    }

    const curContribution = {};
    curContribution.delta = {};
    curContribution.delta.prvKey = delta;
    curContribution.delta.g1_s = curve.G1.toAffine(curve.G1.fromRng(rng));
    curContribution.delta.g1_sx = curve.G1.toAffine(curve.G1.timesFr(curContribution.delta.g1_s, delta));
    hashG1(transcriptHasher, curve, curContribution.delta.g1_s);
    hashG1(transcriptHasher, curve, curContribution.delta.g1_sx);
    curContribution.transcript = transcriptHasher.digest();
    curContribution.delta.g2_sp = hashToG2(curve, curContribution.transcript);
    curContribution.delta.g2_spx = curve.G2.toAffine(curve.G2.timesFr(curContribution.delta.g2_sp, delta));
    curContribution.deltaAfter = delta1;
    curContribution.type = 0;
    mpcParams.contributions.push(curContribution);

    //////////
    /// Write contributions
    //////////
    await fdTo.write(mpcParams.csHash);
    await fdTo.writeUBE32(mpcParams.contributions.length);

    for (let i = 0; i < mpcParams.contributions.length; i++) {
        const c = mpcParams.contributions[i];
        await writeG1(c.deltaAfter);
        await writeG1(c.delta.g1_s);
        await writeG1(c.delta.g1_sx);
        await writeG2(c.delta.g2_spx);
        await fdTo.write(c.transcript);
    }

    const contributionHasher = Blake2b__default["default"](64);
    hashPubKey(contributionHasher, curve, curContribution);

    const contributionHash = contributionHasher.digest();

    if (logger) logger.info(formatHash(contributionHash, "Contribution Hash: "));

    await fdTo.close();
    await fdFrom.close();

    return contributionHash;

    async function copy(nBytes) {
        const CHUNK_SIZE = fdFrom.pageSize * 2;
        for (let i = 0; i < nBytes; i += CHUNK_SIZE) {
            const n = Math.min(nBytes - i, CHUNK_SIZE);
            const buff = await fdFrom.read(n);
            await fdTo.write(buff);
        }
    }

    async function readG1() {
        const buff = await fdFrom.read(curve.G1.F.n8 * 2);
        return curve.G1.fromRprUncompressed(buff, 0);
    }

    async function readG2() {
        const buff = await fdFrom.read(curve.G2.F.n8 * 2);
        return curve.G2.fromRprUncompressed(buff, 0);
    }

    async function writeG1(P) {
        const buff = new Uint8Array(sG1);
        curve.G1.toRprUncompressed(buff, 0, P);
        await fdTo.write(buff);
    }

    async function writeG2(P) {
        const buff = new Uint8Array(sG2);
        curve.G2.toRprUncompressed(buff, 0, P);
        await fdTo.write(buff);
    }
}
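/*
    Extracts the verification key from a zkey as a plain JSON-friendly object
    (big integers stringified). Works for both groth16 and plonk zkeys.
    Usage sketch (the output file name is a hypothetical example):

        const vKey = await zkeyExportVerificationKey("circuit_final.zkey");
        fs__default["default"].writeFileSync("verification_key.json", JSON.stringify(vKey, null, 1));
*/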
const { stringifyBigInts: stringifyBigInts$3 } = ffjavascript.utils;

async function zkeyExportVerificationKey(zkeyName, /* logger */) {

    const { fd, sections } = await binFileUtils__namespace.readBinFile(zkeyName, "zkey", 2);
    const zkey = await readHeader$1(fd, sections);

    let res;
    if (zkey.protocol == "groth16") {
        res = await groth16Vk(zkey, fd, sections);
    } else if (zkey.protocol == "plonk") {
        res = await plonkVk(zkey);
    } else {
        throw new Error("zkey file protocol is neither groth16 nor plonk");
    }

    await fd.close();

    return res;
}
async function groth16Vk(zkey, fd, sections) {
    const curve = await getCurveFromQ(zkey.q);
    const sG1 = curve.G1.F.n8 * 2;

    const alphaBeta = await curve.pairing(zkey.vk_alpha_1, zkey.vk_beta_2);

    let vKey = {
        protocol: zkey.protocol,
        curve: curve.name,
        nPublic: zkey.nPublic,

        vk_alpha_1: curve.G1.toObject(zkey.vk_alpha_1),
        vk_beta_2: curve.G2.toObject(zkey.vk_beta_2),
        vk_gamma_2: curve.G2.toObject(zkey.vk_gamma_2),
        vk_delta_2: curve.G2.toObject(zkey.vk_delta_2),

        vk_alphabeta_12: curve.Gt.toObject(alphaBeta)
    };

    // Read IC Section
    ///////////
    await binFileUtils__namespace.startReadUniqueSection(fd, sections, 3);
    vKey.IC = [];
    for (let i = 0; i <= zkey.nPublic; i++) {
        const buff = await fd.read(sG1);
        const P = curve.G1.toObject(buff);
        vKey.IC.push(P);
    }
    await binFileUtils__namespace.endReadSection(fd);

    vKey = stringifyBigInts$3(vKey);

    return vKey;
}
async function plonkVk(zkey) {
    const curve = await getCurveFromQ(zkey.q);

    let vKey = {
        protocol: zkey.protocol,
        curve: curve.name,
        nPublic: zkey.nPublic,
        power: zkey.power,

        k1: curve.Fr.toObject(zkey.k1),
        k2: curve.Fr.toObject(zkey.k2),

        Qm: curve.G1.toObject(zkey.Qm),
        Ql: curve.G1.toObject(zkey.Ql),
        Qr: curve.G1.toObject(zkey.Qr),
        Qo: curve.G1.toObject(zkey.Qo),
        Qc: curve.G1.toObject(zkey.Qc),
        S1: curve.G1.toObject(zkey.S1),
        S2: curve.G1.toObject(zkey.S2),
        S3: curve.G1.toObject(zkey.S3),

        X_2: curve.G2.toObject(zkey.X_2),

        w: curve.Fr.toObject(curve.Fr.w[zkey.power])
    };

    vKey = stringifyBigInts$3(vKey);

    return vKey;
}
// Not ready yet
// module.exports.generateVerifier_kimleeoh = generateVerifier_kimleeoh;

async function exportSolidityVerifier(zKeyName, templates, logger) {

    const verificationKey = await zkeyExportVerificationKey(zKeyName);

    let template = templates[verificationKey.protocol];

    return ejs__default["default"].render(template, verificationKey);
}
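
/*
    Usage sketch: `templates` maps protocol names to EJS template strings, so
    the caller loads them first (the template paths below are hypothetical):

        const templates = {
            groth16: fs__default["default"].readFileSync("templates/verifier_groth16.sol.ejs", "utf8"),
            plonk: fs__default["default"].readFileSync("templates/verifier_plonk.sol.ejs", "utf8"),
        };
        const solidityCode = await exportSolidityVerifier("circuit_final.zkey", templates);
*/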
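/*
    Binary .wtns layout handled by the helpers below:

        Section 1 (header): ULE32 n8 (bytes per field element),
                            the prime (n8 bytes), ULE32 witness count
        Section 2 (data):   the witness values, each n8 bytes little-endian

    n8 is the prime's bit length rounded up to a whole number of 64-bit words.
*/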
async function write(fd, witness, prime) {

    await binFileUtils__namespace.startWriteSection(fd, 1);
    const n8 = (Math.floor((ffjavascript.Scalar.bitLength(prime) - 1) / 64) + 1) * 8;
    await fd.writeULE32(n8);
    await binFileUtils__namespace.writeBigInt(fd, prime, n8);
    await fd.writeULE32(witness.length);
    await binFileUtils__namespace.endWriteSection(fd);

    await binFileUtils__namespace.startWriteSection(fd, 2);
    for (let i = 0; i < witness.length; i++) {
        await binFileUtils__namespace.writeBigInt(fd, witness[i], n8);
    }
    await binFileUtils__namespace.endWriteSection(fd);
}

async function writeBin(fd, witnessBin, prime) {

    await binFileUtils__namespace.startWriteSection(fd, 1);
    const n8 = (Math.floor((ffjavascript.Scalar.bitLength(prime) - 1) / 64) + 1) * 8;
    await fd.writeULE32(n8);
    await binFileUtils__namespace.writeBigInt(fd, prime, n8);
    if (witnessBin.byteLength % n8 != 0) {
        throw new Error("Invalid witness length");
    }
    await fd.writeULE32(witnessBin.byteLength / n8);
    await binFileUtils__namespace.endWriteSection(fd);

    await binFileUtils__namespace.startWriteSection(fd, 2);
    await fd.write(witnessBin);
    await binFileUtils__namespace.endWriteSection(fd);
}

async function readHeader(fd, sections) {

    await binFileUtils__namespace.startReadUniqueSection(fd, sections, 1);
    const n8 = await fd.readULE32();
    const q = await binFileUtils__namespace.readBigInt(fd, n8);
    const nWitness = await fd.readULE32();
    await binFileUtils__namespace.endReadSection(fd);

    return { n8, q, nWitness };
}

async function read(fileName) {

    const { fd, sections } = await binFileUtils__namespace.readBinFile(fileName, "wtns", 2);

    const { n8, nWitness } = await readHeader(fd, sections);

    await binFileUtils__namespace.startReadUniqueSection(fd, sections, 2);
    const res = [];
    for (let i = 0; i < nWitness; i++) {
        const v = await binFileUtils__namespace.readBigInt(fd, n8);
        res.push(v);
    }
    await binFileUtils__namespace.endReadSection(fd);

    await fd.close();

    return res;
}
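/*
    Groth16 prover. Conceptually the flow below is: read the witness and the
    proving key, evaluate the QAP polynomials A, B and C over the domain,
    re-evaluate them on a shifted (odd) coset so that A*B - C can be divided
    by the vanishing polynomial implicitly (the H bases in the zkey absorb
    that division), and finally compute the proof points as multi-
    exponentiations of the zkey bases, randomized with fresh r and s to make
    the proof zero-knowledge.
*/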
const { stringifyBigInts : stringifyBigInts$2 } = ffjavascript . utils ;
2020-07-11 11:31:52 +03:00
2022-01-19 23:59:11 +03:00
async function groth16Prove$1 ( zkeyFileName , witnessFileName , logger ) {
const { fd : fdWtns , sections : sectionsWtns } = await binFileUtils _ _namespace . readBinFile ( witnessFileName , "wtns" , 2 , 1 << 25 , 1 << 23 ) ;
2020-07-11 11:31:52 +03:00
2022-01-19 23:59:11 +03:00
const wtns = await readHeader ( fdWtns , sectionsWtns ) ;
2020-07-11 11:31:52 +03:00
2022-01-19 23:59:11 +03:00
const { fd : fdZKey , sections : sectionsZKey } = await binFileUtils _ _namespace . readBinFile ( zkeyFileName , "zkey" , 2 , 1 << 25 , 1 << 23 ) ;
2020-07-11 11:31:52 +03:00
2022-01-19 23:59:11 +03:00
const zkey = await readHeader$1 ( fdZKey , sectionsZKey ) ;
2021-05-31 14:21:07 +03:00
if ( zkey . protocol != "groth16" ) {
throw new Error ( "zkey file is not groth16" ) ;
}
2020-07-11 11:31:52 +03:00
if ( ! ffjavascript . Scalar . eq ( zkey . r , wtns . q ) ) {
throw new Error ( "Curve of the witness does not match the curve of the proving key" ) ;
}
if ( wtns . nWitness != zkey . nVars ) {
throw new Error ( ` Invalid witness length. Circuit: ${ zkey . nVars } , witness: ${ wtns . nWitness } ` ) ;
}
const curve = await getCurveFromQ ( zkey . q ) ;
const Fr = curve . Fr ;
const G1 = curve . G1 ;
const G2 = curve . G2 ;
const power = log2 ( zkey . domainSize ) ;
2020-10-22 15:58:12 +03:00
if ( logger ) logger . debug ( "Reading Wtns" ) ;
2022-01-19 23:59:11 +03:00
const buffWitness = await binFileUtils _ _namespace . readSection ( fdWtns , sectionsWtns , 2 ) ;
2020-10-22 15:58:12 +03:00
if ( logger ) logger . debug ( "Reading Coeffs" ) ;
2022-01-19 23:59:11 +03:00
const buffCoeffs = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 4 ) ;
2020-10-23 12:34:00 +03:00
if ( logger ) logger . debug ( "Building ABC" ) ;
2022-01-22 14:37:53 +03:00
const [ buffA _T , buffB _T , buffC _T ] = await buildABC1 ( curve , zkey , buffWitness , buffCoeffs , logger ) ;
2020-10-23 12:34:00 +03:00
2020-09-07 13:43:50 +03:00
const inc = power == Fr . s ? curve . Fr . shift : curve . Fr . w [ power + 1 ] ;
2020-10-25 13:12:57 +03:00
const buffA = await Fr . ifft ( buffA _T , "" , "" , logger , "IFFT_A" ) ;
2020-09-07 13:43:50 +03:00
const buffAodd = await Fr . batchApplyKey ( buffA , Fr . e ( 1 ) , inc ) ;
2020-10-25 13:12:57 +03:00
const buffAodd _T = await Fr . fft ( buffAodd , "" , "" , logger , "FFT_A" ) ;
2020-07-11 11:31:52 +03:00
2020-10-25 13:12:57 +03:00
const buffB = await Fr . ifft ( buffB _T , "" , "" , logger , "IFFT_B" ) ;
2020-09-07 13:43:50 +03:00
const buffBodd = await Fr . batchApplyKey ( buffB , Fr . e ( 1 ) , inc ) ;
2020-10-25 13:12:57 +03:00
const buffBodd _T = await Fr . fft ( buffBodd , "" , "" , logger , "FFT_B" ) ;
2020-07-11 11:31:52 +03:00
2020-10-25 13:12:57 +03:00
const buffC = await Fr . ifft ( buffC _T , "" , "" , logger , "IFFT_C" ) ;
2020-09-07 13:43:50 +03:00
const buffCodd = await Fr . batchApplyKey ( buffC , Fr . e ( 1 ) , inc ) ;
2020-10-25 13:12:57 +03:00
const buffCodd _T = await Fr . fft ( buffCodd , "" , "" , logger , "FFT_C" ) ;
2020-07-11 11:31:52 +03:00
2020-10-25 13:12:57 +03:00
if ( logger ) logger . debug ( "Join ABC" ) ;
const buffPodd _T = await joinABC ( curve , zkey , buffAodd _T , buffBodd _T , buffCodd _T , logger ) ;
2020-07-11 11:31:52 +03:00
let proof = { } ;
2020-10-25 13:12:57 +03:00
if ( logger ) logger . debug ( "Reading A Points" ) ;
2022-01-19 23:59:11 +03:00
const buffBasesA = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 5 ) ;
2020-10-25 13:12:57 +03:00
proof . pi _a = await curve . G1 . multiExpAffine ( buffBasesA , buffWitness , logger , "multiexp A" ) ;
if ( logger ) logger . debug ( "Reading B1 Points" ) ;
2022-01-19 23:59:11 +03:00
const buffBasesB1 = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 6 ) ;
2020-10-25 13:12:57 +03:00
let pib1 = await curve . G1 . multiExpAffine ( buffBasesB1 , buffWitness , logger , "multiexp B1" ) ;
if ( logger ) logger . debug ( "Reading B2 Points" ) ;
2022-01-19 23:59:11 +03:00
const buffBasesB2 = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 7 ) ;
2020-10-25 13:12:57 +03:00
proof . pi _b = await curve . G2 . multiExpAffine ( buffBasesB2 , buffWitness , logger , "multiexp B2" ) ;
if ( logger ) logger . debug ( "Reading C Points" ) ;
2022-01-19 23:59:11 +03:00
const buffBasesC = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 8 ) ;
2020-10-25 13:12:57 +03:00
proof . pi _c = await curve . G1 . multiExpAffine ( buffBasesC , buffWitness . slice ( ( zkey . nPublic + 1 ) * curve . Fr . n8 ) , logger , "multiexp C" ) ;
if ( logger ) logger . debug ( "Reading H Points" ) ;
2022-01-19 23:59:11 +03:00
const buffBasesH = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 9 ) ;
2020-10-25 13:12:57 +03:00
const resH = await curve . G1 . multiExpAffine ( buffBasesH , buffPodd _T , logger , "multiexp H" ) ;
2020-07-11 11:31:52 +03:00
const r = curve . Fr . random ( ) ;
const s = curve . Fr . random ( ) ;
proof . pi _a = G1 . add ( proof . pi _a , zkey . vk _alpha _1 ) ;
proof . pi _a = G1 . add ( proof . pi _a , G1 . timesFr ( zkey . vk _delta _1 , r ) ) ;
proof . pi _b = G2 . add ( proof . pi _b , zkey . vk _beta _2 ) ;
proof . pi _b = G2 . add ( proof . pi _b , G2 . timesFr ( zkey . vk _delta _2 , s ) ) ;
pib1 = G1 . add ( pib1 , zkey . vk _beta _1 ) ;
pib1 = G1 . add ( pib1 , G1 . timesFr ( zkey . vk _delta _1 , s ) ) ;
proof . pi _c = G1 . add ( proof . pi _c , resH ) ;
proof . pi _c = G1 . add ( proof . pi _c , G1 . timesFr ( proof . pi _a , s ) ) ;
proof . pi _c = G1 . add ( proof . pi _c , G1 . timesFr ( pib1 , r ) ) ;
proof . pi _c = G1 . add ( proof . pi _c , G1 . timesFr ( zkey . vk _delta _1 , Fr . neg ( Fr . mul ( r , s ) ) ) ) ;
let publicSignals = [ ] ;
for ( let i = 1 ; i <= zkey . nPublic ; i ++ ) {
const b = buffWitness . slice ( i * Fr . n8 , i * Fr . n8 + Fr . n8 ) ;
publicSignals . push ( ffjavascript . Scalar . fromRprLE ( b ) ) ;
}
proof . pi _a = G1 . toObject ( G1 . toAffine ( proof . pi _a ) ) ;
proof . pi _b = G2 . toObject ( G2 . toAffine ( proof . pi _b ) ) ;
proof . pi _c = G1 . toObject ( G1 . toAffine ( proof . pi _c ) ) ;
proof . protocol = "groth16" ;
proof . curve = curve . name ;
await fdZKey . close ( ) ;
await fdWtns . close ( ) ;
proof = stringifyBigInts$2 ( proof ) ;
publicSignals = stringifyBigInts$2 ( publicSignals ) ;
return { proof , publicSignals } ;
}
async function buildABC1 ( curve , zkey , witness , coeffs , logger ) {
const n8 = curve . Fr . n8 ;
const sCoef = 4 * 3 + zkey . n8r ;
const nCoef = ( coeffs . byteLength - 4 ) / sCoef ;
const outBuffA = new ffjavascript . BigBuffer ( zkey . domainSize * n8 ) ;
const outBuffB = new ffjavascript . BigBuffer ( zkey . domainSize * n8 ) ;
const outBuffC = new ffjavascript . BigBuffer ( zkey . domainSize * n8 ) ;
const outBuf = [ outBuffA , outBuffB ] ;
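    // The coefficients buffer starts with a 4-byte count; each record is three
    // little-endian uint32 values -- the matrix m (0 = A, 1 = B), the constraint
    // index c and the signal index s -- followed by an n8-byte field element.
    // The loop below accumulates coef * witness[s] into position c of the selected matrix.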
for ( let i = 0 ; i < nCoef ; i ++ ) {
if ( ( logger ) && ( i % 1000000 == 0 ) ) logger . debug ( ` QAP AB: ${ i } / ${ nCoef } ` ) ;
const buffCoef = coeffs . slice ( 4 + i * sCoef , 4 + i * sCoef + sCoef ) ;
const buffCoefV = new DataView ( buffCoef . buffer ) ;
const m = buffCoefV . getUint32 ( 0 , true ) ;
const c = buffCoefV . getUint32 ( 4 , true ) ;
const s = buffCoefV . getUint32 ( 8 , true ) ;
const coef = buffCoef . slice ( 12 , 12 + n8 ) ;
outBuf [ m ] . set (
curve . Fr . add (
outBuf [ m ] . slice ( c * n8 , c * n8 + n8 ) ,
curve . Fr . mul ( coef , witness . slice ( s * n8 , s * n8 + n8 ) )
) ,
c * n8
) ;
}
for ( let i = 0 ; i < zkey . domainSize ; i ++ ) {
if ( ( logger ) && ( i % 1000000 == 0 ) ) logger . debug ( ` QAP C: ${ i } / ${ zkey . domainSize } ` ) ;
outBuffC . set (
curve . Fr . mul (
outBuffA . slice ( i * n8 , i * n8 + n8 ) ,
outBuffB . slice ( i * n8 , i * n8 + n8 ) ,
) ,
i * n8
) ;
}
return [ outBuffA , outBuffB , outBuffC ] ;
}
/ *
async function buldABC ( curve , zkey , witness , coeffs , logger ) {
const concurrency = curve . tm . concurrency ;
const sCoef = 4 * 3 + zkey . n8r ;
let getUint32 ;
if ( coeffs instanceof BigBuffer ) {
const coeffsDV = [ ] ;
const PAGE _LEN = coeffs . buffers [ 0 ] . length ;
for ( let i = 0 ; i < coeffs . buffers . length ; i ++ ) {
coeffsDV . push ( new DataView ( coeffs . buffers [ i ] . buffer ) ) ;
}
getUint32 = function ( pos ) {
return coeffsDV [ Math . floor ( pos / PAGE _LEN ) ] . getUint32 ( pos % PAGE _LEN , true ) ;
} ;
} else {
const coeffsDV = new DataView ( coeffs . buffer , coeffs . byteOffset , coeffs . byteLength ) ;
getUint32 = function ( pos ) {
return coeffsDV . getUint32 ( pos , true ) ;
} ;
}
const elementsPerChunk = Math . floor ( zkey . domainSize / concurrency ) ;
const promises = [ ] ;
const cutPoints = [ ] ;
for ( let i = 0 ; i < concurrency ; i ++ ) {
cutPoints . push ( getCutPoint ( Math . floor ( i * elementsPerChunk ) ) ) ;
}
cutPoints . push ( coeffs . byteLength ) ;
const chunkSize = 2 * * 26 ;
for ( let s = 0 ; s < zkey . nVars ; s += chunkSize ) {
if ( logger ) logger . debug ( ` QAP ${ s } : ${ s } / ${ zkey . nVars } ` ) ;
const ns = Math . min ( zkey . nVars - s , chunkSize ) ;
for ( let i = 0 ; i < concurrency ; i ++ ) {
let n ;
if ( i < concurrency - 1 ) {
n = elementsPerChunk ;
} else {
n = zkey . domainSize - i * elementsPerChunk ;
}
if ( n == 0 ) continue ;
const task = [ ] ;
task . push ( { cmd : "ALLOCSET" , var : 0 , buff : coeffs . slice ( cutPoints [ i ] , cutPoints [ i + 1 ] ) } ) ;
task . push ( { cmd : "ALLOCSET" , var : 1 , buff : witness . slice ( s * curve . Fr . n8 , ( s + ns ) * curve . Fr . n8 ) } ) ;
task . push ( { cmd : "ALLOC" , var : 2 , len : n * curve . Fr . n8 } ) ;
task . push ( { cmd : "ALLOC" , var : 3 , len : n * curve . Fr . n8 } ) ;
task . push ( { cmd : "ALLOC" , var : 4 , len : n * curve . Fr . n8 } ) ;
task . push ( { cmd : "CALL" , fnName : "qap_buildABC" , params : [
{ var : 0 } ,
{ val : ( cutPoints [ i + 1 ] - cutPoints [ i ] ) / sCoef } ,
{ var : 1 } ,
{ var : 2 } ,
{ var : 3 } ,
{ var : 4 } ,
{ val : i * elementsPerChunk } ,
{ val : n } ,
{ val : s } ,
{ val : ns }
] } ) ;
task . push ( { cmd : "GET" , out : 0 , var : 2 , len : n * curve . Fr . n8 } ) ;
task . push ( { cmd : "GET" , out : 1 , var : 3 , len : n * curve . Fr . n8 } ) ;
task . push ( { cmd : "GET" , out : 2 , var : 4 , len : n * curve . Fr . n8 } ) ;
promises . push ( curve . tm . queueAction ( task ) ) ;
}
}
let result = await Promise . all ( promises ) ;
const nGroups = result . length / concurrency ;
if ( nGroups > 1 ) {
const promises2 = [ ] ;
for ( let i = 0 ; i < concurrency ; i ++ ) {
const task = [ ] ;
task . push ( { cmd : "ALLOC" , var : 0 , len : result [ i ] [ 0 ] . byteLength } ) ;
task . push ( { cmd : "ALLOC" , var : 1 , len : result [ i ] [ 0 ] . byteLength } ) ;
for ( let m = 0 ; m < 3 ; m ++ ) {
task . push ( { cmd : "SET" , var : 0 , buff : result [ i ] [ m ] } ) ;
for ( let s = 1 ; s < nGroups ; s ++ ) {
task . push ( { cmd : "SET" , var : 1 , buff : result [ s * concurrency + i ] [ m ] } ) ;
task . push ( { cmd : "CALL" , fnName : "qap_batchAdd" , params : [
{ var : 0 } ,
{ var : 1 } ,
{ val : result [ i ] [ m ] . length / curve . Fr . n8 } ,
{ var : 0 }
] } ) ;
}
task . push ( { cmd : "GET" , out : m , var : 0 , len : result [ i ] [ m ] . length } ) ;
}
promises2 . push ( curve . tm . queueAction ( task ) ) ;
}
result = await Promise . all ( promises2 ) ;
}
const outBuffA = new BigBuffer ( zkey . domainSize * curve . Fr . n8 ) ;
const outBuffB = new BigBuffer ( zkey . domainSize * curve . Fr . n8 ) ;
const outBuffC = new BigBuffer ( zkey . domainSize * curve . Fr . n8 ) ;
let p = 0 ;
for ( let i = 0 ; i < result . length ; i ++ ) {
outBuffA . set ( result [ i ] [ 0 ] , p ) ;
outBuffB . set ( result [ i ] [ 1 ] , p ) ;
outBuffC . set ( result [ i ] [ 2 ] , p ) ;
p += result [ i ] [ 0 ] . byteLength ;
}
return [ outBuffA , outBuffB , outBuffC ] ;
function getCutPoint ( v ) {
let m = 0 ;
let n = getUint32 ( 0 ) ;
while ( m < n ) {
var k = Math . floor ( ( n + m ) / 2 ) ;
const va = getUint32 ( 4 + k * sCoef + 4 ) ;
if ( va > v ) {
n = k - 1 ;
} else if ( va < v ) {
m = k + 1 ;
} else {
n = k ;
}
}
return 4 + m * sCoef ;
}
}
* /
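// joinABC computes the evaluations of H = (A*B - C)/Z pointwise from the coset
// evaluations of A, B and C, splitting the work into chunks that are queued on the
// curve's thread manager. qap_joinABC does the field arithmetic and
// frm_batchFromMontgomery takes the result out of Montgomery form so that it can be
// fed directly to the multiexp.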
async function joinABC ( curve , zkey , a , b , c , logger ) {
const MAX _CHUNK _SIZE = 1 << 22 ;
const n8 = curve . Fr . n8 ;
const nElements = Math . floor ( a . byteLength / curve . Fr . n8 ) ;
const promises = [ ] ;
for ( let i = 0 ; i < nElements ; i += MAX _CHUNK _SIZE ) {
if ( logger ) logger . debug ( ` JoinABC: ${ i } / ${ nElements } ` ) ;
const n = Math . min ( nElements - i , MAX _CHUNK _SIZE ) ;
const task = [ ] ;
const aChunk = a . slice ( i * n8 , ( i + n ) * n8 ) ;
const bChunk = b . slice ( i * n8 , ( i + n ) * n8 ) ;
const cChunk = c . slice ( i * n8 , ( i + n ) * n8 ) ;
task . push ( { cmd : "ALLOCSET" , var : 0 , buff : aChunk } ) ;
task . push ( { cmd : "ALLOCSET" , var : 1 , buff : bChunk } ) ;
task . push ( { cmd : "ALLOCSET" , var : 2 , buff : cChunk } ) ;
task . push ( { cmd : "ALLOC" , var : 3 , len : n * n8 } ) ;
task . push ( { cmd : "CALL" , fnName : "qap_joinABC" , params : [
{ var : 0 } ,
{ var : 1 } ,
{ var : 2 } ,
{ val : n } ,
{ var : 3 } ,
] } ) ;
task . push ( { cmd : "CALL" , fnName : "frm_batchFromMontgomery" , params : [
{ var : 3 } ,
{ val : n } ,
{ var : 3 }
] } ) ;
task . push ( { cmd : "GET" , out : 0 , var : 3 , len : n * n8 } ) ;
promises . push ( curve . tm . queueAction ( task ) ) ;
}
const result = await Promise . all ( promises ) ;
let outBuff ;
if ( a instanceof ffjavascript . BigBuffer ) {
outBuff = new ffjavascript . BigBuffer ( a . byteLength ) ;
} else {
outBuff = new Uint8Array ( a . byteLength ) ;
}
let p = 0 ;
for ( let i = 0 ; i < result . length ; i ++ ) {
outBuff . set ( result [ i ] [ 0 ] , p ) ;
p += result [ i ] [ 0 ] . byteLength ;
}
return outBuff ;
}
/ *
Copyright 2018 0 KIMS association .
This file is part of snarkJS .
snarkJS is a free software : you can redistribute it and / or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
snarkJS is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public
License for more details .
You should have received a copy of the GNU General Public License
along with snarkJS . If not , see < https : //www.gnu.org/licenses/>.
* /
async function wtnsCalculate$1 ( input , wasmFileName , wtnsFileName , options ) {
const fdWasm = await fastFile _ _namespace . readExisting ( wasmFileName ) ;
const wasm = await fdWasm . read ( fdWasm . totalSize ) ;
await fdWasm . close ( ) ;
const wc = await circom _runtime . WitnessCalculatorBuilder ( wasm ) ;
if ( wc . circom _version ( ) == 1 ) {
const w = await wc . calculateBinWitness ( input ) ;
const fdWtns = await binFileUtils _ _namespace . createBinFile ( wtnsFileName , "wtns" , 2 , 2 ) ;
await writeBin ( fdWtns , w , wc . prime ) ;
await fdWtns . close ( ) ;
} else {
const fdWtns = await fastFile _ _namespace . createOverride ( wtnsFileName ) ;
const w = await wc . calculateWTNSBin ( input ) ;
await fdWtns . write ( w ) ;
await fdWtns . close ( ) ;
}
}
/ *
Copyright 2018 0 KIMS association .
This file is part of snarkJS .
snarkJS is a free software : you can redistribute it and / or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
snarkJS is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public
License for more details .
You should have received a copy of the GNU General Public License
along with snarkJS . If not , see < https : //www.gnu.org/licenses/>.
* /
async function groth16FullProve$1 ( input , wasmFile , zkeyFileName , logger ) {
const wtns = {
type : "mem"
} ;
await wtnsCalculate$1 ( input , wasmFile , wtns ) ;
return await groth16Prove$1 ( zkeyFileName , wtns , logger ) ;
}
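/*
Usage sketch for the function above (the input values and file names are hypothetical):

    const { proof , publicSignals } = await groth16FullProve$1 (
        { a : 3 , b : 11 } ,       // circuit inputs
        "circuit.wasm" ,           // compiled witness generator
        "circuit_final.zkey"       // proving key
    ) ;
*/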
/ *
Copyright 2018 0 kims association .
This file is part of snarkjs .
snarkjs is a free software : you can redistribute it and / or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation , either version 3 of the License , or ( at your option )
any later version .
snarkjs is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public License for
more details .
You should have received a copy of the GNU General Public License along with
snarkjs . If not , see < https : //www.gnu.org/licenses/>.
* /
const { unstringifyBigInts : unstringifyBigInts$2 } = ffjavascript . utils ;
async function groth16Verify$1 ( vk _verifier , publicSignals , proof , logger ) {
/ *
let cpub = vk _verifier . IC [ 0 ] ;
for ( let s = 0 ; s < vk _verifier . nPublic ; s ++ ) {
cpub = G1 . add ( cpub , G1 . timesScalar ( vk _verifier . IC [ s + 1 ] , publicSignals [ s ] ) ) ;
}
* /
vk _verifier = unstringifyBigInts$2 ( vk _verifier ) ;
proof = unstringifyBigInts$2 ( proof ) ;
publicSignals = unstringifyBigInts$2 ( publicSignals ) ;
const curve = await getCurveFromName ( vk _verifier . curve ) ;
const IC0 = curve . G1 . fromObject ( vk _verifier . IC [ 0 ] ) ;
const IC = new Uint8Array ( curve . G1 . F . n8 * 2 * publicSignals . length ) ;
const w = new Uint8Array ( curve . Fr . n8 * publicSignals . length ) ;
for ( let i = 0 ; i < publicSignals . length ; i ++ ) {
const buffP = curve . G1 . fromObject ( vk _verifier . IC [ i + 1 ] ) ;
IC . set ( buffP , i * curve . G1 . F . n8 * 2 ) ;
ffjavascript . Scalar . toRprLE ( w , curve . Fr . n8 * i , publicSignals [ i ] , curve . Fr . n8 ) ;
}
let cpub = await curve . G1 . multiExpAffine ( IC , w ) ;
cpub = curve . G1 . add ( cpub , IC0 ) ;
const pi _a = curve . G1 . fromObject ( proof . pi _a ) ;
const pi _b = curve . G2 . fromObject ( proof . pi _b ) ;
const pi _c = curve . G1 . fromObject ( proof . pi _c ) ;
const vk _gamma _2 = curve . G2 . fromObject ( vk _verifier . vk _gamma _2 ) ;
const vk _delta _2 = curve . G2 . fromObject ( vk _verifier . vk _delta _2 ) ;
const vk _alpha _1 = curve . G1 . fromObject ( vk _verifier . vk _alpha _1 ) ;
const vk _beta _2 = curve . G2 . fromObject ( vk _verifier . vk _beta _2 ) ;
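    // Groth16 verification checks that the product of the four pairings is one:
    //   e(-pi_a, pi_b) * e(cpub, gamma_2) * e(pi_c, delta_2) * e(alpha_1, beta_2) == 1
    // which is equivalent to e(pi_a, pi_b) == e(alpha_1, beta_2) * e(cpub, gamma_2) * e(pi_c, delta_2).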
const res = await curve . pairingEq (
curve . G1 . neg ( pi _a ) , pi _b ,
cpub , vk _gamma _2 ,
pi _c , vk _delta _2 ,
vk _alpha _1 , vk _beta _2
) ;
if ( ! res ) {
if ( logger ) logger . error ( "Invalid proof" ) ;
return false ;
}
if ( logger ) logger . info ( "OK!" ) ;
return true ;
}
/ *
Copyright 2018 0 KIMS association .
This file is part of snarkJS .
snarkJS is a free software : you can redistribute it and / or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
snarkJS is distributed in the hope that it will be useful , but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public
License for more details .
You should have received a copy of the GNU General Public License
along with snarkJS . If not , see < https : //www.gnu.org/licenses/>.
* /
function p256$1 ( n ) {
let nstr = n . toString ( 16 ) ;
while ( nstr . length < 64 ) nstr = "0" + nstr ;
nstr = ` "0x ${ nstr } " ` ;
return nstr ;
}
async function groth16ExportSolidityCallData ( proof , pub ) {
let inputs = "" ;
for ( let i = 0 ; i < pub . length ; i ++ ) {
if ( inputs != "" ) inputs = inputs + "," ;
inputs = inputs + p256$1 ( pub [ i ] ) ;
}
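    // Note that the two Fp2 coordinates of each pi_b point are swapped ([0][1]
    // before [0][0]): the EVM pairing precompile expects G2 coordinates with the
    // imaginary component first.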
let S ;
S = ` [ ${ p256$1 ( proof . pi _a [ 0 ] ) } , ${ p256$1 ( proof . pi _a [ 1 ] ) } ], ` +
` [[ ${ p256$1 ( proof . pi _b [ 0 ] [ 1 ] ) } , ${ p256$1 ( proof . pi _b [ 0 ] [ 0 ] ) } ],[ ${ p256$1 ( proof . pi _b [ 1 ] [ 1 ] ) } , ${ p256$1 ( proof . pi _b [ 1 ] [ 0 ] ) } ]], ` +
` [ ${ p256$1 ( proof . pi _c [ 0 ] ) } , ${ p256$1 ( proof . pi _c [ 1 ] ) } ], ` +
` [ ${ inputs } ] ` ;
return S ;
}
/ *
Copyright 2021 0 kims association .
This file is part of snarkjs .
snarkjs is a free software : you can redistribute it and / or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation , either version 3 of the License , or ( at your option )
any later version .
snarkjs is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public License for
more details .
You should have received a copy of the GNU General Public License along with
snarkjs . If not , see < https : //www.gnu.org/licenses/>.
* /
async function plonkSetup$1 ( r1csName , ptauName , zkeyName , logger ) {
await Blake2b _ _default [ "default" ] . ready ( ) ;
const { fd : fdPTau , sections : sectionsPTau } = await binFileUtils . readBinFile ( ptauName , "ptau" , 1 , 1 << 22 , 1 << 24 ) ;
const { curve , power } = await readPTauHeader ( fdPTau , sectionsPTau ) ;
const { fd : fdR1cs , sections : sectionsR1cs } = await binFileUtils . readBinFile ( r1csName , "r1cs" , 1 , 1 << 22 , 1 << 24 ) ;
const r1cs = await r1csfile . readR1csHeader ( fdR1cs , sectionsR1cs , false ) ;
const sG1 = curve . G1 . F . n8 * 2 ;
const G1 = curve . G1 ;
const sG2 = curve . G2 . F . n8 * 2 ;
const Fr = curve . Fr ;
const n8r = curve . Fr . n8 ;
if ( logger ) logger . info ( "Reading r1cs" ) ;
let sR1cs = await binFileUtils . readSection ( fdR1cs , sectionsR1cs , 2 ) ;
const plonkConstraints = new BigArray ( ) ;
const plonkAdditions = new BigArray ( ) ;
let plonkNVars = r1cs . nVars ;
const nPublic = r1cs . nOutputs + r1cs . nPubInputs ;
await processConstraints ( ) ;
const fdZKey = await binFileUtils . createBinFile ( zkeyName , "zkey" , 1 , 14 , 1 << 22 , 1 << 24 ) ;
if ( r1cs . prime != curve . r ) {
if ( logger ) logger . error ( "r1cs curve does not match powers of tau ceremony curve" ) ;
return - 1 ;
}
let cirPower = log2 ( plonkConstraints . length - 1 ) + 1 ;
    if ( cirPower < 3 ) cirPower = 3 ; // As the t polynomial is n+5 we need at least a power of 4
const domainSize = 2 * * cirPower ;
if ( logger ) logger . info ( "Plonk constraints: " + plonkConstraints . length ) ;
if ( cirPower > power ) {
if ( logger ) logger . error ( ` circuit too big for this power of tau ceremony. ${ plonkConstraints . length } > 2** ${ power } ` ) ;
return - 1 ;
}
if ( ! sectionsPTau [ 12 ] ) {
if ( logger ) logger . error ( "Powers of tau is not prepared." ) ;
return - 1 ;
}
const LPoints = new ffjavascript . BigBuffer ( domainSize * sG1 ) ;
const o = sectionsPTau [ 12 ] [ 0 ] . p + ( ( 2 * * ( cirPower ) ) - 1 ) * sG1 ;
await fdPTau . readToBuffer ( LPoints , 0 , domainSize * sG1 , o ) ;
const [ k1 , k2 ] = getK1K2 ( ) ;
const vk = { } ;
await writeAdditions ( 3 , "Additions" ) ;
await writeWitnessMap ( 4 , 0 , "Amap" ) ;
await writeWitnessMap ( 5 , 1 , "Bmap" ) ;
await writeWitnessMap ( 6 , 2 , "Cmap" ) ;
await writeQMap ( 7 , 3 , "Qm" ) ;
await writeQMap ( 8 , 4 , "Ql" ) ;
await writeQMap ( 9 , 5 , "Qr" ) ;
await writeQMap ( 10 , 6 , "Qo" ) ;
await writeQMap ( 11 , 7 , "Qc" ) ;
await writeSigma ( 12 , "sigma" ) ;
await writeLs ( 13 , "lagrange polynomials" ) ;
// Write PTau points
////////////
await binFileUtils . startWriteSection ( fdZKey , 14 ) ;
const buffOut = new ffjavascript . BigBuffer ( ( domainSize + 6 ) * sG1 ) ;
await fdPTau . readToBuffer ( buffOut , 0 , ( domainSize + 6 ) * sG1 , sectionsPTau [ 2 ] [ 0 ] . p ) ;
await fdZKey . write ( buffOut ) ;
await binFileUtils . endWriteSection ( fdZKey ) ;
await writeHeaders ( ) ;
await fdZKey . close ( ) ;
await fdR1cs . close ( ) ;
await fdPTau . close ( ) ;
if ( logger ) logger . info ( "Setup Finished" ) ;
return ;
async function processConstraints ( ) {
let r1csPos = 0 ;
function r1cs _readULE32 ( ) {
const buff = sR1cs . slice ( r1csPos , r1csPos + 4 ) ;
r1csPos += 4 ;
const buffV = new DataView ( buff . buffer ) ;
return buffV . getUint32 ( 0 , true ) ;
}
function r1cs _readCoef ( ) {
const res = Fr . fromRprLE ( sR1cs . slice ( r1csPos , r1csPos + curve . Fr . n8 ) ) ;
r1csPos += curve . Fr . n8 ;
return res ;
}
function r1cs _readCoefs ( ) {
const coefs = [ ] ;
const res = {
k : curve . Fr . zero
} ;
const nA = r1cs _readULE32 ( ) ;
for ( let i = 0 ; i < nA ; i ++ ) {
const s = r1cs _readULE32 ( ) ;
const coefp = r1cs _readCoef ( ) ;
if ( s == 0 ) {
res . k = coefp ;
} else {
coefs . push ( [ s , coefp ] ) ;
}
}
const resCoef = reduceCoef ( coefs ) ;
res . s = resCoef [ 0 ] ;
res . coef = resCoef [ 1 ] ;
return res ;
}
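    // reduceCoef collapses a linear combination [signal, coef] into a single pair by
    // recursively splitting it in half and emitting one addition gate per merge:
    //   -c1*left - c2*right + out = 0   (ql = -c1, qr = -c2, qo = 1)
    // Each merge creates a fresh intermediate signal, recorded in plonkAdditions so
    // the prover can later compute its witness value as c1*left + c2*right.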
function reduceCoef ( coefs ) {
if ( coefs . length == 0 ) {
return [ 0 , curve . Fr . zero ] ;
}
if ( coefs . length == 1 ) {
return coefs [ 0 ] ;
}
const arr1 = coefs . slice ( 0 , coefs . length >> 1 ) ;
const arr2 = coefs . slice ( coefs . length >> 1 ) ;
const coef1 = reduceCoef ( arr1 ) ;
const coef2 = reduceCoef ( arr2 ) ;
const sl = coef1 [ 0 ] ;
const sr = coef2 [ 0 ] ;
const so = plonkNVars ++ ;
const qm = curve . Fr . zero ;
const ql = Fr . neg ( coef1 [ 1 ] ) ;
const qr = Fr . neg ( coef2 [ 1 ] ) ;
const qo = curve . Fr . one ;
const qc = curve . Fr . zero ;
plonkConstraints . push ( [ sl , sr , so , qm , ql , qr , qo , qc ] ) ;
plonkAdditions . push ( [ sl , sr , coef1 [ 1 ] , coef2 [ 1 ] ] ) ;
return [ so , curve . Fr . one ] ;
}
for ( let s = 1 ; s <= nPublic ; s ++ ) {
const sl = s ;
const sr = 0 ;
const so = 0 ;
const qm = curve . Fr . zero ;
const ql = curve . Fr . one ;
const qr = curve . Fr . zero ;
const qo = curve . Fr . zero ;
const qc = curve . Fr . zero ;
plonkConstraints . push ( [ sl , sr , so , qm , ql , qr , qo , qc ] ) ;
}
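    // After reduceCoef every R1CS constraint has the form
    //   (qA*a + kA) * (qB*b + kB) = qC*c + kC
    // which expands into a single plonk gate qm*a*b + ql*a + qr*b + qo*c + qc = 0 with
    //   qm = qA*qB, ql = qA*kB, qr = kA*qB, qo = -qC, qc = kA*kB - kC.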
for ( let c = 0 ; c < r1cs . nConstraints ; c ++ ) {
if ( ( logger ) && ( c % 10000 == 0 ) ) logger . debug ( ` processing constraints: ${ c } / ${ r1cs . nConstraints } ` ) ;
const A = r1cs _readCoefs ( ) ;
const B = r1cs _readCoefs ( ) ;
const C = r1cs _readCoefs ( ) ;
const sl = A . s ;
const sr = B . s ;
const so = C . s ;
const qm = curve . Fr . mul ( A . coef , B . coef ) ;
const ql = curve . Fr . mul ( A . coef , B . k ) ;
const qr = curve . Fr . mul ( A . k , B . coef ) ;
const qo = curve . Fr . neg ( C . coef ) ;
const qc = curve . Fr . sub ( curve . Fr . mul ( A . k , B . k ) , C . k ) ;
plonkConstraints . push ( [ sl , sr , so , qm , ql , qr , qo , qc ] ) ;
}
}
async function writeWitnessMap ( sectionNum , posConstraint , name ) {
await binFileUtils . startWriteSection ( fdZKey , sectionNum ) ;
for ( let i = 0 ; i < plonkConstraints . length ; i ++ ) {
await fdZKey . writeULE32 ( plonkConstraints [ i ] [ posConstraint ] ) ;
if ( ( logger ) && ( i % 1000000 == 0 ) ) logger . debug ( ` writing ${ name } : ${ i } / ${ plonkConstraints . length } ` ) ;
}
await binFileUtils . endWriteSection ( fdZKey ) ;
}
async function writeQMap ( sectionNum , posConstraint , name ) {
let Q = new ffjavascript . BigBuffer ( domainSize * n8r ) ;
for ( let i = 0 ; i < plonkConstraints . length ; i ++ ) {
Q . set ( plonkConstraints [ i ] [ posConstraint ] , i * n8r ) ;
if ( ( logger ) && ( i % 1000000 == 0 ) ) logger . debug ( ` writing ${ name } : ${ i } / ${ plonkConstraints . length } ` ) ;
}
await binFileUtils . startWriteSection ( fdZKey , sectionNum ) ;
await writeP4 ( Q ) ;
await binFileUtils . endWriteSection ( fdZKey ) ;
Q = await Fr . batchFromMontgomery ( Q ) ;
vk [ name ] = await curve . G1 . multiExpAffine ( LPoints , Q , logger , "multiexp " + name ) ;
}
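    // writeP4 stores a polynomial twice: its coefficients (obtained by an inverse
    // FFT of the evaluations) followed by its evaluations over the 4x extended
    // domain, so the prover can read back whichever representation it needs.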
async function writeP4 ( buff ) {
const q = await Fr . ifft ( buff ) ;
const q4 = new ffjavascript . BigBuffer ( domainSize * n8r * 4 ) ;
q4 . set ( q , 0 ) ;
const Q4 = await Fr . fft ( q4 ) ;
await fdZKey . write ( q ) ;
await fdZKey . write ( Q4 ) ;
}
async function writeAdditions ( sectionNum , name ) {
await binFileUtils . startWriteSection ( fdZKey , sectionNum ) ;
const buffOut = new Uint8Array ( ( 2 * 4 + 2 * n8r ) ) ;
const buffOutV = new DataView ( buffOut . buffer ) ;
for ( let i = 0 ; i < plonkAdditions . length ; i ++ ) {
const addition = plonkAdditions [ i ] ;
let o = 0 ;
buffOutV . setUint32 ( o , addition [ 0 ] , true ) ; o += 4 ;
buffOutV . setUint32 ( o , addition [ 1 ] , true ) ; o += 4 ;
    // The value is stored in Montgomery form: stored = v*R,
    // so a Montgomery multiplication by the witness gives v*R*w/R = v*w
buffOut . set ( addition [ 2 ] , o ) ; o += n8r ;
buffOut . set ( addition [ 3 ] , o ) ; o += n8r ;
await fdZKey . write ( buffOut ) ;
if ( ( logger ) && ( i % 1000000 == 0 ) ) logger . debug ( ` writing ${ name } : ${ i } / ${ plonkAdditions . length } ` ) ;
}
await binFileUtils . endWriteSection ( fdZKey ) ;
}
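    // writeSigma builds the copy-permutation polynomials S1, S2 and S3. All
    // positions holding the same variable are linked into a cycle: at each new
    // appearance of a variable, buildSigma writes the domain element recorded at its
    // previous appearance, and the final pass closes every cycle by mapping the
    // variable's first appearance to its last.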
async function writeSigma ( sectionNum , name ) {
const sigma = new ffjavascript . BigBuffer ( n8r * domainSize * 3 ) ;
const lastAparence = new BigArray ( plonkNVars ) ;
const firstPos = new BigArray ( plonkNVars ) ;
let w = Fr . one ;
for ( let i = 0 ; i < domainSize ; i ++ ) {
if ( i < plonkConstraints . length ) {
buildSigma ( plonkConstraints [ i ] [ 0 ] , i ) ;
buildSigma ( plonkConstraints [ i ] [ 1 ] , domainSize + i ) ;
buildSigma ( plonkConstraints [ i ] [ 2 ] , domainSize * 2 + i ) ;
} else {
buildSigma ( 0 , i ) ;
buildSigma ( 0 , domainSize + i ) ;
buildSigma ( 0 , domainSize * 2 + i ) ;
}
w = Fr . mul ( w , Fr . w [ cirPower ] ) ;
    if ( ( logger ) && ( i % 1000000 == 0 ) ) logger . debug ( ` writing ${ name } phase1: ${ i } / ${ domainSize } ` ) ;
}
for ( let s = 0 ; s < plonkNVars ; s ++ ) {
if ( typeof firstPos [ s ] !== "undefined" ) {
sigma . set ( lastAparence [ s ] , firstPos [ s ] * n8r ) ;
} else {
throw new Error ( "Variable not used" ) ;
}
if ( ( logger ) && ( s % 1000000 == 0 ) ) logger . debug ( ` writing ${ name } phase2: ${ s } / ${ plonkNVars } ` ) ;
}
await binFileUtils . startWriteSection ( fdZKey , sectionNum ) ;
let S1 = sigma . slice ( 0 , domainSize * n8r ) ;
await writeP4 ( S1 ) ;
let S2 = sigma . slice ( domainSize * n8r , domainSize * n8r * 2 ) ;
await writeP4 ( S2 ) ;
let S3 = sigma . slice ( domainSize * n8r * 2 , domainSize * n8r * 3 ) ;
await writeP4 ( S3 ) ;
await binFileUtils . endWriteSection ( fdZKey ) ;
S1 = await Fr . batchFromMontgomery ( S1 ) ;
S2 = await Fr . batchFromMontgomery ( S2 ) ;
S3 = await Fr . batchFromMontgomery ( S3 ) ;
vk . S1 = await curve . G1 . multiExpAffine ( LPoints , S1 , logger , "multiexp S1" ) ;
vk . S2 = await curve . G1 . multiExpAffine ( LPoints , S2 , logger , "multiexp S2" ) ;
vk . S3 = await curve . G1 . multiExpAffine ( LPoints , S3 , logger , "multiexp S3" ) ;
function buildSigma ( s , p ) {
if ( typeof lastAparence [ s ] === "undefined" ) {
firstPos [ s ] = p ;
} else {
sigma . set ( lastAparence [ s ] , p * n8r ) ;
}
let v ;
if ( p < domainSize ) {
v = w ;
} else if ( p < 2 * domainSize ) {
v = Fr . mul ( w , k1 ) ;
} else {
v = Fr . mul ( w , k2 ) ;
}
lastAparence [ s ] = v ;
}
}
async function writeLs ( sectionNum , name ) {
await binFileUtils . startWriteSection ( fdZKey , sectionNum ) ;
const l = Math . max ( nPublic , 1 ) ;
for ( let i = 0 ; i < l ; i ++ ) {
let buff = new ffjavascript . BigBuffer ( domainSize * n8r ) ;
buff . set ( Fr . one , i * n8r ) ;
await writeP4 ( buff ) ;
if ( logger ) logger . debug ( ` writing ${ name } ${ i } / ${ l } ` ) ;
}
await binFileUtils . endWriteSection ( fdZKey ) ;
}
async function writeHeaders ( ) {
// Write the header
///////////
await binFileUtils . startWriteSection ( fdZKey , 1 ) ;
await fdZKey . writeULE32 ( 2 ) ; // Plonk
await binFileUtils . endWriteSection ( fdZKey ) ;
// Write the Plonk header section
///////////
await binFileUtils . startWriteSection ( fdZKey , 2 ) ;
const primeQ = curve . q ;
const n8q = ( Math . floor ( ( ffjavascript . Scalar . bitLength ( primeQ ) - 1 ) / 64 ) + 1 ) * 8 ;
const primeR = curve . r ;
const n8r = ( Math . floor ( ( ffjavascript . Scalar . bitLength ( primeR ) - 1 ) / 64 ) + 1 ) * 8 ;
await fdZKey . writeULE32 ( n8q ) ;
await binFileUtils . writeBigInt ( fdZKey , primeQ , n8q ) ;
await fdZKey . writeULE32 ( n8r ) ;
await binFileUtils . writeBigInt ( fdZKey , primeR , n8r ) ;
    await fdZKey . writeULE32 ( plonkNVars ) ; // Total number of vars
await fdZKey . writeULE32 ( nPublic ) ; // Total number of public vars (not including ONE)
await fdZKey . writeULE32 ( domainSize ) ; // domainSize
    await fdZKey . writeULE32 ( plonkAdditions . length ) ; // Number of additions
await fdZKey . writeULE32 ( plonkConstraints . length ) ;
await fdZKey . write ( k1 ) ;
await fdZKey . write ( k2 ) ;
await fdZKey . write ( G1 . toAffine ( vk . Qm ) ) ;
await fdZKey . write ( G1 . toAffine ( vk . Ql ) ) ;
await fdZKey . write ( G1 . toAffine ( vk . Qr ) ) ;
await fdZKey . write ( G1 . toAffine ( vk . Qo ) ) ;
await fdZKey . write ( G1 . toAffine ( vk . Qc ) ) ;
await fdZKey . write ( G1 . toAffine ( vk . S1 ) ) ;
await fdZKey . write ( G1 . toAffine ( vk . S2 ) ) ;
await fdZKey . write ( G1 . toAffine ( vk . S3 ) ) ;
let bX _2 ;
bX _2 = await fdPTau . read ( sG2 , sectionsPTau [ 3 ] [ 0 ] . p + sG2 ) ;
await fdZKey . write ( bX _2 ) ;
await binFileUtils . endWriteSection ( fdZKey ) ;
}
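    // k1 and k2 must be coset representatives: k1 outside the multiplicative
    // subgroup H, and k2 outside both H and k1*H, so that H, k1*H and k2*H are
    // pairwise disjoint. isIncluded checks this by brute force over the whole domain.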
function getK1K2 ( ) {
    let k1 = Fr . two ;
    while ( isIncluded ( k1 , [ ] , cirPower ) ) k1 = Fr . add ( k1 , Fr . one ) ; // assign the result, or the loop never advances
    let k2 = Fr . add ( k1 , Fr . one ) ;
    while ( isIncluded ( k2 , [ k1 ] , cirPower ) ) k2 = Fr . add ( k2 , Fr . one ) ;
return [ k1 , k2 ] ;
function isIncluded ( k , kArr , pow ) {
const domainSize = 2 * * pow ;
let w = Fr . one ;
for ( let i = 0 ; i < domainSize ; i ++ ) {
if ( Fr . eq ( k , w ) ) return true ;
for ( let j = 0 ; j < kArr . length ; j ++ ) {
if ( Fr . eq ( k , Fr . mul ( kArr [ j ] , w ) ) ) return true ;
}
w = Fr . mul ( w , Fr . w [ pow ] ) ;
}
return false ;
}
}
}
/ *
Copyright 2021 0 kims association .
This file is part of snarkjs .
snarkjs is a free software : you can redistribute it and / or
modify it under the terms of the GNU General Public License as published by the
Free Software Foundation , either version 3 of the License , or ( at your option )
any later version .
snarkjs is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE . See the GNU General Public License for
more details .
You should have received a copy of the GNU General Public License along with
snarkjs . If not , see < https : //www.gnu.org/licenses/>.
* /
const { stringifyBigInts : stringifyBigInts$1 } = ffjavascript . utils ;
const { keccak256 : keccak256$1 } = jsSha3 _ _default [ "default" ] ;
async function plonk16Prove ( zkeyFileName , witnessFileName , logger ) {
const { fd : fdWtns , sections : sectionsWtns } = await binFileUtils _ _namespace . readBinFile ( witnessFileName , "wtns" , 2 , 1 << 25 , 1 << 23 ) ;
const wtns = await readHeader ( fdWtns , sectionsWtns ) ;
const { fd : fdZKey , sections : sectionsZKey } = await binFileUtils _ _namespace . readBinFile ( zkeyFileName , "zkey" , 2 , 1 << 25 , 1 << 23 ) ;
const zkey = await readHeader$1 ( fdZKey , sectionsZKey ) ;
if ( zkey . protocol != "plonk" ) {
throw new Error ( "zkey file is not groth16" ) ;
}
if ( ! ffjavascript . Scalar . eq ( zkey . r , wtns . q ) ) {
throw new Error ( "Curve of the witness does not match the curve of the proving key" ) ;
}
if ( wtns . nWitness != zkey . nVars - zkey . nAdditions ) {
    throw new Error ( ` Invalid witness length. Circuit: ${ zkey . nVars } vars ( ${ zkey . nAdditions } additions), witness: ${ wtns . nWitness } ` ) ;
}
const curve = await getCurveFromQ ( zkey . q ) ;
const Fr = curve . Fr ;
const G1 = curve . G1 ;
const n8r = curve . Fr . n8 ;
if ( logger ) logger . debug ( "Reading Wtns" ) ;
const buffWitness = await binFileUtils _ _namespace . readSection ( fdWtns , sectionsWtns , 2 ) ;
    // The first element in plonk is not used and can be any value (but must always be the same).
    // We set it to zero to speed up the exponentiations.
buffWitness . set ( Fr . zero , 0 ) ;
const buffInternalWitness = new ffjavascript . BigBuffer ( n8r * zkey . nAdditions ) ;
await calculateAdditions ( ) ;
let A , B , C , Z ;
let A4 , B4 , C4 , Z4 ;
let pol _a , pol _b , pol _c , pol _z , pol _t , pol _r ;
let proof = { } ;
const sigmaBuff = new ffjavascript . BigBuffer ( zkey . domainSize * n8r * 4 * 3 ) ;
let o = sectionsZKey [ 12 ] [ 0 ] . p + zkey . domainSize * n8r ;
await fdZKey . readToBuffer ( sigmaBuff , 0 , zkey . domainSize * n8r * 4 , o ) ;
o += zkey . domainSize * n8r * 5 ;
await fdZKey . readToBuffer ( sigmaBuff , zkey . domainSize * n8r * 4 , zkey . domainSize * n8r * 4 , o ) ;
o += zkey . domainSize * n8r * 5 ;
await fdZKey . readToBuffer ( sigmaBuff , zkey . domainSize * n8r * 8 , zkey . domainSize * n8r * 4 , o ) ;
const pol _s1 = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
await fdZKey . readToBuffer ( pol _s1 , 0 , zkey . domainSize * n8r , sectionsZKey [ 12 ] [ 0 ] . p ) ;
const pol _s2 = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
await fdZKey . readToBuffer ( pol _s2 , 0 , zkey . domainSize * n8r , sectionsZKey [ 12 ] [ 0 ] . p + 5 * zkey . domainSize * n8r ) ;
const PTau = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 14 ) ;
const ch = { } ;
await round1 ( ) ;
await round2 ( ) ;
await round3 ( ) ;
await round4 ( ) ;
await round5 ( ) ;
///////////////////////
// Final adjustments //
///////////////////////
proof . protocol = "plonk" ;
proof . curve = curve . name ;
await fdZKey . close ( ) ;
await fdWtns . close ( ) ;
let publicSignals = [ ] ;
for ( let i = 1 ; i <= zkey . nPublic ; i ++ ) {
const pub = buffWitness . slice ( i * Fr . n8 , i * Fr . n8 + Fr . n8 ) ;
publicSignals . push ( ffjavascript . Scalar . fromRprLE ( pub ) ) ;
}
proof . A = G1 . toObject ( proof . A ) ;
proof . B = G1 . toObject ( proof . B ) ;
proof . C = G1 . toObject ( proof . C ) ;
proof . Z = G1 . toObject ( proof . Z ) ;
proof . T1 = G1 . toObject ( proof . T1 ) ;
proof . T2 = G1 . toObject ( proof . T2 ) ;
proof . T3 = G1 . toObject ( proof . T3 ) ;
proof . eval _a = Fr . toObject ( proof . eval _a ) ;
proof . eval _b = Fr . toObject ( proof . eval _b ) ;
proof . eval _c = Fr . toObject ( proof . eval _c ) ;
proof . eval _s1 = Fr . toObject ( proof . eval _s1 ) ;
proof . eval _s2 = Fr . toObject ( proof . eval _s2 ) ;
proof . eval _zw = Fr . toObject ( proof . eval _zw ) ;
proof . eval _t = Fr . toObject ( proof . eval _t ) ;
proof . eval _r = Fr . toObject ( proof . eval _r ) ;
proof . Wxi = G1 . toObject ( proof . Wxi ) ;
proof . Wxiw = G1 . toObject ( proof . Wxiw ) ;
delete proof . eval _t ;
proof = stringifyBigInts$1 ( proof ) ;
publicSignals = stringifyBigInts$1 ( publicSignals ) ;
return { proof , publicSignals } ;
async function calculateAdditions ( ) {
const additionsBuff = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 3 ) ;
const sSum = 8 + curve . Fr . n8 * 2 ;
for ( let i = 0 ; i < zkey . nAdditions ; i ++ ) {
const ai = readUInt32 ( additionsBuff , i * sSum ) ;
const bi = readUInt32 ( additionsBuff , i * sSum + 4 ) ;
const ac = additionsBuff . slice ( i * sSum + 8 , i * sSum + 8 + n8r ) ;
const bc = additionsBuff . slice ( i * sSum + 8 + n8r , i * sSum + 8 + n8r * 2 ) ;
const aw = getWitness ( ai ) ;
const bw = getWitness ( bi ) ;
const r = curve . Fr . add (
curve . Fr . mul ( ac , aw ) ,
curve . Fr . mul ( bc , bw )
) ;
buffInternalWitness . set ( r , n8r * i ) ;
}
}
async function buildABC ( ) {
let A = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
let B = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
let C = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
const aMap = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 4 ) ;
const bMap = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 5 ) ;
const cMap = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 6 ) ;
for ( let i = 0 ; i < zkey . nConstrains ; i ++ ) {
const iA = readUInt32 ( aMap , i * 4 ) ;
A . set ( getWitness ( iA ) , i * n8r ) ;
const iB = readUInt32 ( bMap , i * 4 ) ;
B . set ( getWitness ( iB ) , i * n8r ) ;
const iC = readUInt32 ( cMap , i * 4 ) ;
C . set ( getWitness ( iC ) , i * n8r ) ;
}
A = await Fr . batchToMontgomery ( A ) ;
B = await Fr . batchToMontgomery ( B ) ;
C = await Fr . batchToMontgomery ( C ) ;
return [ A , B , C ] ;
}
function readUInt32 ( b , o ) {
const buff = b . slice ( o , o + 4 ) ;
const buffV = new DataView ( buff . buffer , buff . byteOffset , buff . byteLength ) ;
return buffV . getUint32 ( 0 , true ) ;
}
function getWitness ( idx ) {
if ( idx < zkey . nVars - zkey . nAdditions ) {
return buffWitness . slice ( idx * n8r , idx * n8r + n8r ) ;
} else if ( idx < zkey . nVars ) {
return buffInternalWitness . slice ( ( idx - ( zkey . nVars - zkey . nAdditions ) ) * n8r , ( idx - ( zkey . nVars - zkey . nAdditions ) ) * n8r + n8r ) ;
} else {
return curve . Fr . zero ;
}
}
async function round1 ( ) {
ch . b = [ ] ;
for ( let i = 1 ; i <= 9 ; i ++ ) {
ch . b [ i ] = curve . Fr . random ( ) ;
}
[ A , B , C ] = await buildABC ( ) ;
[ pol _a , A4 ] = await to4T ( A , [ ch . b [ 2 ] , ch . b [ 1 ] ] ) ;
[ pol _b , B4 ] = await to4T ( B , [ ch . b [ 4 ] , ch . b [ 3 ] ] ) ;
[ pol _c , C4 ] = await to4T ( C , [ ch . b [ 6 ] , ch . b [ 5 ] ] ) ;
proof . A = await expTau ( pol _a , "multiexp A" ) ;
proof . B = await expTau ( pol _b , "multiexp B" ) ;
proof . C = await expTau ( pol _c , "multiexp C" ) ;
}
async function round2 ( ) {
const transcript1 = new Uint8Array ( G1 . F . n8 * 2 * 3 ) ;
G1 . toRprUncompressed ( transcript1 , 0 , proof . A ) ;
G1 . toRprUncompressed ( transcript1 , G1 . F . n8 * 2 , proof . B ) ;
G1 . toRprUncompressed ( transcript1 , G1 . F . n8 * 4 , proof . C ) ;
ch . beta = hashToFr ( transcript1 ) ;
if ( logger ) logger . debug ( "beta: " + Fr . toString ( ch . beta ) ) ;
const transcript2 = new Uint8Array ( n8r ) ;
Fr . toRprBE ( transcript2 , 0 , ch . beta ) ;
ch . gamma = hashToFr ( transcript2 ) ;
if ( logger ) logger . debug ( "gamma: " + Fr . toString ( ch . gamma ) ) ;
let numArr = new ffjavascript . BigBuffer ( Fr . n8 * zkey . domainSize ) ;
let denArr = new ffjavascript . BigBuffer ( Fr . n8 * zkey . domainSize ) ;
numArr . set ( Fr . one , 0 ) ;
denArr . set ( Fr . one , 0 ) ;
let w = Fr . one ;
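    // Build the grand product Z of the permutation argument:
    //   Z(w^0) = 1
    //   Z(w^(i+1)) = Z(w^i) * prod_j (wire_j + beta*k_j*w^i + gamma) / (wire_j + beta*sigma_j(w^i) + gamma)
    // Numerators and denominators are accumulated separately; the denominators are
    // then inverted in a single batch below.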
for ( let i = 0 ; i < zkey . domainSize ; i ++ ) {
let n1 = A . slice ( i * n8r , ( i + 1 ) * n8r ) ;
n1 = Fr . add ( n1 , Fr . mul ( ch . beta , w ) ) ;
n1 = Fr . add ( n1 , ch . gamma ) ;
let n2 = B . slice ( i * n8r , ( i + 1 ) * n8r ) ;
n2 = Fr . add ( n2 , Fr . mul ( zkey . k1 , Fr . mul ( ch . beta , w ) ) ) ;
n2 = Fr . add ( n2 , ch . gamma ) ;
let n3 = C . slice ( i * n8r , ( i + 1 ) * n8r ) ;
n3 = Fr . add ( n3 , Fr . mul ( zkey . k2 , Fr . mul ( ch . beta , w ) ) ) ;
n3 = Fr . add ( n3 , ch . gamma ) ;
const num = Fr . mul ( n1 , Fr . mul ( n2 , n3 ) ) ;
let d1 = A . slice ( i * n8r , ( i + 1 ) * n8r ) ;
d1 = Fr . add ( d1 , Fr . mul ( sigmaBuff . slice ( i * n8r * 4 , i * n8r * 4 + n8r ) , ch . beta ) ) ;
d1 = Fr . add ( d1 , ch . gamma ) ;
let d2 = B . slice ( i * n8r , ( i + 1 ) * n8r ) ;
d2 = Fr . add ( d2 , Fr . mul ( sigmaBuff . slice ( ( zkey . domainSize + i ) * 4 * n8r , ( zkey . domainSize + i ) * 4 * n8r + n8r ) , ch . beta ) ) ;
d2 = Fr . add ( d2 , ch . gamma ) ;
let d3 = C . slice ( i * n8r , ( i + 1 ) * n8r ) ;
d3 = Fr . add ( d3 , Fr . mul ( sigmaBuff . slice ( ( zkey . domainSize * 2 + i ) * 4 * n8r , ( zkey . domainSize * 2 + i ) * 4 * n8r + n8r ) , ch . beta ) ) ;
d3 = Fr . add ( d3 , ch . gamma ) ;
const den = Fr . mul ( d1 , Fr . mul ( d2 , d3 ) ) ;
numArr . set (
Fr . mul (
numArr . slice ( i * n8r , ( i + 1 ) * n8r ) ,
num
) ,
( ( i + 1 ) % zkey . domainSize ) * n8r
) ;
denArr . set (
Fr . mul (
denArr . slice ( i * n8r , ( i + 1 ) * n8r ) ,
den
) ,
( ( i + 1 ) % zkey . domainSize ) * n8r
) ;
w = Fr . mul ( w , Fr . w [ zkey . power ] ) ;
}
denArr = await Fr . batchInverse ( denArr ) ;
// TODO: Do it in assembly and in parallel
for ( let i = 0 ; i < zkey . domainSize ; i ++ ) {
numArr . set ( Fr . mul ( numArr . slice ( i * n8r , ( i + 1 ) * n8r ) , denArr . slice ( i * n8r , ( i + 1 ) * n8r ) ) , i * n8r ) ;
}
if ( ! Fr . eq ( numArr . slice ( 0 , n8r ) , Fr . one ) ) {
throw new Error ( "Copy constraints does not match" ) ;
}
Z = numArr ;
[ pol _z , Z4 ] = await to4T ( Z , [ ch . b [ 9 ] , ch . b [ 8 ] , ch . b [ 7 ] ] ) ;
proof . Z = await expTau ( pol _z , "multiexp Z" ) ;
}
async function round3 ( ) {
/ *
async function checkDegree ( P ) {
const p = await curve . Fr . ifft ( P ) ;
let deg = ( P . byteLength / n8r ) - 1 ;
while ( ( deg > 0 ) && ( Fr . isZero ( p . slice ( deg * n8r , deg * n8r + n8r ) ) ) ) deg -- ;
return deg ;
}
function printPol ( P ) {
const n = ( P . byteLength / n8r ) ;
console . log ( "[" ) ;
for ( let i = 0 ; i < n ; i ++ ) {
console . log ( Fr . toString ( P . slice ( i * n8r , i * n8r + n8r ) ) ) ;
}
console . log ( "]" ) ;
}
* /
    if ( logger ) logger . debug ( "phase3: Reading QM4" ) ;
const QM4 = new ffjavascript . BigBuffer ( zkey . domainSize * 4 * n8r ) ;
await fdZKey . readToBuffer ( QM4 , 0 , zkey . domainSize * n8r * 4 , sectionsZKey [ 7 ] [ 0 ] . p + zkey . domainSize * n8r ) ;
    if ( logger ) logger . debug ( "phase3: Reading QL4" ) ;
const QL4 = new ffjavascript . BigBuffer ( zkey . domainSize * 4 * n8r ) ;
await fdZKey . readToBuffer ( QL4 , 0 , zkey . domainSize * n8r * 4 , sectionsZKey [ 8 ] [ 0 ] . p + zkey . domainSize * n8r ) ;
    if ( logger ) logger . debug ( "phase3: Reading QR4" ) ;
const QR4 = new ffjavascript . BigBuffer ( zkey . domainSize * 4 * n8r ) ;
await fdZKey . readToBuffer ( QR4 , 0 , zkey . domainSize * n8r * 4 , sectionsZKey [ 9 ] [ 0 ] . p + zkey . domainSize * n8r ) ;
    if ( logger ) logger . debug ( "phase3: Reading QO4" ) ;
const QO4 = new ffjavascript . BigBuffer ( zkey . domainSize * 4 * n8r ) ;
await fdZKey . readToBuffer ( QO4 , 0 , zkey . domainSize * n8r * 4 , sectionsZKey [ 10 ] [ 0 ] . p + zkey . domainSize * n8r ) ;
    if ( logger ) logger . debug ( "phase3: Reading QC4" ) ;
const QC4 = new ffjavascript . BigBuffer ( zkey . domainSize * 4 * n8r ) ;
await fdZKey . readToBuffer ( QC4 , 0 , zkey . domainSize * n8r * 4 , sectionsZKey [ 11 ] [ 0 ] . p + zkey . domainSize * n8r ) ;
const lPols = await binFileUtils _ _namespace . readSection ( fdZKey , sectionsZKey , 13 ) ;
const transcript3 = new Uint8Array ( G1 . F . n8 * 2 ) ;
G1 . toRprUncompressed ( transcript3 , 0 , proof . Z ) ;
ch . alpha = hashToFr ( transcript3 ) ;
if ( logger ) logger . debug ( "alpha: " + Fr . toString ( ch . alpha ) ) ;
const Z1 = [
Fr . zero ,
Fr . add ( Fr . e ( - 1 ) , Fr . w [ 2 ] ) ,
Fr . e ( - 2 ) ,
Fr . sub ( Fr . e ( - 1 ) , Fr . w [ 2 ] ) ,
] ;
const Z2 = [
Fr . zero ,
Fr . add ( Fr . zero , Fr . mul ( Fr . e ( - 2 ) , Fr . w [ 2 ] ) ) ,
Fr . e ( 4 ) ,
Fr . sub ( Fr . zero , Fr . mul ( Fr . e ( - 2 ) , Fr . w [ 2 ] ) ) ,
] ;
const Z3 = [
Fr . zero ,
Fr . add ( Fr . e ( 2 ) , Fr . mul ( Fr . e ( 2 ) , Fr . w [ 2 ] ) ) ,
Fr . e ( - 8 ) ,
Fr . sub ( Fr . e ( 2 ) , Fr . mul ( Fr . e ( 2 ) , Fr . w [ 2 ] ) ) ,
] ;
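    // Z1[p], Z2[p] and Z3[p] are the values of ZH(X) = X^n - 1 and of ZH^2 and ZH^3
    // at the points of the extended domain whose index is congruent to p mod 4 (for
    // p = 0 the point lies in H, where ZH vanishes). They account for the blinding
    // terms, which all carry a factor of ZH, when evaluating the products below.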
const T = new ffjavascript . BigBuffer ( zkey . domainSize * 4 * n8r ) ;
const Tz = new ffjavascript . BigBuffer ( zkey . domainSize * 4 * n8r ) ;
let w = Fr . one ;
for ( let i = 0 ; i < zkey . domainSize * 4 ; i ++ ) {
if ( ( i % 4096 == 0 ) && ( logger ) ) logger . debug ( ` calculating t ${ i } / ${ zkey . domainSize * 4 } ` ) ;
const a = A4 . slice ( i * n8r , i * n8r + n8r ) ;
const b = B4 . slice ( i * n8r , i * n8r + n8r ) ;
const c = C4 . slice ( i * n8r , i * n8r + n8r ) ;
const z = Z4 . slice ( i * n8r , i * n8r + n8r ) ;
const zw = Z4 . slice ( ( ( i + zkey . domainSize * 4 + 4 ) % ( zkey . domainSize * 4 ) ) * n8r , ( ( i + zkey . domainSize * 4 + 4 ) % ( zkey . domainSize * 4 ) ) * n8r + n8r ) ;
const qm = QM4 . slice ( i * n8r , i * n8r + n8r ) ;
const ql = QL4 . slice ( i * n8r , i * n8r + n8r ) ;
const qr = QR4 . slice ( i * n8r , i * n8r + n8r ) ;
const qo = QO4 . slice ( i * n8r , i * n8r + n8r ) ;
const qc = QC4 . slice ( i * n8r , i * n8r + n8r ) ;
const s1 = sigmaBuff . slice ( i * n8r , i * n8r + n8r ) ;
const s2 = sigmaBuff . slice ( ( i + zkey . domainSize * 4 ) * n8r , ( i + zkey . domainSize * 4 ) * n8r + n8r ) ;
const s3 = sigmaBuff . slice ( ( i + zkey . domainSize * 8 ) * n8r , ( i + zkey . domainSize * 8 ) * n8r + n8r ) ;
const ap = Fr . add ( ch . b [ 2 ] , Fr . mul ( ch . b [ 1 ] , w ) ) ;
const bp = Fr . add ( ch . b [ 4 ] , Fr . mul ( ch . b [ 3 ] , w ) ) ;
const cp = Fr . add ( ch . b [ 6 ] , Fr . mul ( ch . b [ 5 ] , w ) ) ;
const w2 = Fr . square ( w ) ;
const zp = Fr . add ( Fr . add ( Fr . mul ( ch . b [ 7 ] , w2 ) , Fr . mul ( ch . b [ 8 ] , w ) ) , ch . b [ 9 ] ) ;
const wW = Fr . mul ( w , Fr . w [ zkey . power ] ) ;
const wW2 = Fr . square ( wW ) ;
const zWp = Fr . add ( Fr . add ( Fr . mul ( ch . b [ 7 ] , wW2 ) , Fr . mul ( ch . b [ 8 ] , wW ) ) , ch . b [ 9 ] ) ;
let pl = Fr . zero ;
for ( let j = 0 ; j < zkey . nPublic ; j ++ ) {
pl = Fr . sub ( pl , Fr . mul (
lPols . slice ( ( j * 5 * zkey . domainSize + zkey . domainSize + i ) * n8r , ( j * 5 * zkey . domainSize + zkey . domainSize + i + 1 ) * n8r ) ,
A . slice ( j * n8r , ( j + 1 ) * n8r )
) ) ;
}
let [ e1 , e1z ] = mul2 ( a , b , ap , bp , i % 4 ) ;
e1 = Fr . mul ( e1 , qm ) ;
e1z = Fr . mul ( e1z , qm ) ;
e1 = Fr . add ( e1 , Fr . mul ( a , ql ) ) ;
e1z = Fr . add ( e1z , Fr . mul ( ap , ql ) ) ;
e1 = Fr . add ( e1 , Fr . mul ( b , qr ) ) ;
e1z = Fr . add ( e1z , Fr . mul ( bp , qr ) ) ;
e1 = Fr . add ( e1 , Fr . mul ( c , qo ) ) ;
e1z = Fr . add ( e1z , Fr . mul ( cp , qo ) ) ;
e1 = Fr . add ( e1 , pl ) ;
e1 = Fr . add ( e1 , qc ) ;
const betaw = Fr . mul ( ch . beta , w ) ;
let e2a = a ;
e2a = Fr . add ( e2a , betaw ) ;
e2a = Fr . add ( e2a , ch . gamma ) ;
let e2b = b ;
e2b = Fr . add ( e2b , Fr . mul ( betaw , zkey . k1 ) ) ;
e2b = Fr . add ( e2b , ch . gamma ) ;
let e2c = c ;
e2c = Fr . add ( e2c , Fr . mul ( betaw , zkey . k2 ) ) ;
e2c = Fr . add ( e2c , ch . gamma ) ;
let e2d = z ;
let [ e2 , e2z ] = mul4 ( e2a , e2b , e2c , e2d , ap , bp , cp , zp , i % 4 ) ;
e2 = Fr . mul ( e2 , ch . alpha ) ;
e2z = Fr . mul ( e2z , ch . alpha ) ;
let e3a = a ;
e3a = Fr . add ( e3a , Fr . mul ( ch . beta , s1 ) ) ;
e3a = Fr . add ( e3a , ch . gamma ) ;
let e3b = b ;
e3b = Fr . add ( e3b , Fr . mul ( ch . beta , s2 ) ) ;
e3b = Fr . add ( e3b , ch . gamma ) ;
let e3c = c ;
e3c = Fr . add ( e3c , Fr . mul ( ch . beta , s3 ) ) ;
e3c = Fr . add ( e3c , ch . gamma ) ;
let e3d = zw ;
let [ e3 , e3z ] = mul4 ( e3a , e3b , e3c , e3d , ap , bp , cp , zWp , i % 4 ) ;
e3 = Fr . mul ( e3 , ch . alpha ) ;
e3z = Fr . mul ( e3z , ch . alpha ) ;
let e4 = Fr . sub ( z , Fr . one ) ;
e4 = Fr . mul ( e4 , lPols . slice ( ( zkey . domainSize + i ) * n8r , ( zkey . domainSize + i + 1 ) * n8r ) ) ;
e4 = Fr . mul ( e4 , Fr . mul ( ch . alpha , ch . alpha ) ) ;
let e4z = Fr . mul ( zp , lPols . slice ( ( zkey . domainSize + i ) * n8r , ( zkey . domainSize + i + 1 ) * n8r ) ) ;
e4z = Fr . mul ( e4z , Fr . mul ( ch . alpha , ch . alpha ) ) ;
let e = Fr . add ( Fr . sub ( Fr . add ( e1 , e2 ) , e3 ) , e4 ) ;
let ez = Fr . add ( Fr . sub ( Fr . add ( e1z , e2z ) , e3z ) , e4z ) ;
T . set ( e , i * n8r ) ;
Tz . set ( ez , i * n8r ) ;
w = Fr . mul ( w , Fr . w [ zkey . power + 2 ] ) ;
}
if ( logger ) logger . debug ( "ifft T" ) ;
let t = await Fr . ifft ( T ) ;
if ( logger ) logger . debug ( "dividing T/Z" ) ;
for ( let i = 0 ; i < zkey . domainSize ; i ++ ) {
t . set ( Fr . neg ( t . slice ( i * n8r , i * n8r + n8r ) ) , i * n8r ) ;
}
for ( let i = zkey . domainSize ; i < zkey . domainSize * 4 ; i ++ ) {
const a = Fr . sub (
t . slice ( ( i - zkey . domainSize ) * n8r , ( i - zkey . domainSize ) * n8r + n8r ) ,
t . slice ( i * n8r , i * n8r + n8r )
) ;
t . set ( a , i * n8r ) ;
if ( i > ( zkey . domainSize * 3 - 4 ) ) {
if ( ! Fr . isZero ( a ) ) {
throw new Error ( "T Polynomial is not divisible" ) ;
}
}
}
if ( logger ) logger . debug ( "ifft Tz" ) ;
const tz = await Fr . ifft ( Tz ) ;
for ( let i = 0 ; i < zkey . domainSize * 4 ; i ++ ) {
const a = tz . slice ( i * n8r , ( i + 1 ) * n8r ) ;
if ( i > ( zkey . domainSize * 3 + 5 ) ) {
if ( ! Fr . isZero ( a ) ) {
throw new Error ( "Tz Polynomial is not well calculated" ) ;
}
} else {
t . set (
Fr . add (
t . slice ( i * n8r , ( i + 1 ) * n8r ) ,
a
) ,
i * n8r
) ;
}
}
pol _t = t . slice ( 0 , ( zkey . domainSize * 3 + 6 ) * n8r ) ;
proof . T1 = await expTau ( t . slice ( 0 , zkey . domainSize * n8r ) , "multiexp T1" ) ;
proof . T2 = await expTau ( t . slice ( zkey . domainSize * n8r , zkey . domainSize * 2 * n8r ) , "multiexp T2" ) ;
proof . T3 = await expTau ( t . slice ( zkey . domainSize * 2 * n8r , ( zkey . domainSize * 3 + 6 ) * n8r ) , "multiexp T3" ) ;
function mul2 ( a , b , ap , bp , p ) {
let r , rz ;
const a _b = Fr . mul ( a , b ) ;
const a _bp = Fr . mul ( a , bp ) ;
const ap _b = Fr . mul ( ap , b ) ;
const ap _bp = Fr . mul ( ap , bp ) ;
r = a _b ;
let a0 = Fr . add ( a _bp , ap _b ) ;
let a1 = ap _bp ;
rz = a0 ;
if ( p ) {
rz = Fr . add ( rz , Fr . mul ( Z1 [ p ] , a1 ) ) ;
}
return [ r , rz ] ;
}
function mul4 ( a , b , c , d , ap , bp , cp , dp , p ) {
let r , rz ;
const a _b = Fr . mul ( a , b ) ;
const a _bp = Fr . mul ( a , bp ) ;
const ap _b = Fr . mul ( ap , b ) ;
const ap _bp = Fr . mul ( ap , bp ) ;
const c _d = Fr . mul ( c , d ) ;
const c _dp = Fr . mul ( c , dp ) ;
const cp _d = Fr . mul ( cp , d ) ;
const cp _dp = Fr . mul ( cp , dp ) ;
r = Fr . mul ( a _b , c _d ) ;
let a0 = Fr . mul ( ap _b , c _d ) ;
a0 = Fr . add ( a0 , Fr . mul ( a _bp , c _d ) ) ;
a0 = Fr . add ( a0 , Fr . mul ( a _b , cp _d ) ) ;
a0 = Fr . add ( a0 , Fr . mul ( a _b , c _dp ) ) ;
let a1 = Fr . mul ( ap _bp , c _d ) ;
a1 = Fr . add ( a1 , Fr . mul ( ap _b , cp _d ) ) ;
a1 = Fr . add ( a1 , Fr . mul ( ap _b , c _dp ) ) ;
a1 = Fr . add ( a1 , Fr . mul ( a _bp , cp _d ) ) ;
a1 = Fr . add ( a1 , Fr . mul ( a _bp , c _dp ) ) ;
a1 = Fr . add ( a1 , Fr . mul ( a _b , cp _dp ) ) ;
let a2 = Fr . mul ( a _bp , cp _dp ) ;
a2 = Fr . add ( a2 , Fr . mul ( ap _b , cp _dp ) ) ;
a2 = Fr . add ( a2 , Fr . mul ( ap _bp , c _dp ) ) ;
a2 = Fr . add ( a2 , Fr . mul ( ap _bp , cp _d ) ) ;
let a3 = Fr . mul ( ap _bp , cp _dp ) ;
rz = a0 ;
if ( p ) {
rz = Fr . add ( rz , Fr . mul ( Z1 [ p ] , a1 ) ) ;
rz = Fr . add ( rz , Fr . mul ( Z2 [ p ] , a2 ) ) ;
rz = Fr . add ( rz , Fr . mul ( Z3 [ p ] , a3 ) ) ;
}
return [ r , rz ] ;
}
}
async function round4 ( ) {
const pol _qm = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
await fdZKey . readToBuffer ( pol _qm , 0 , zkey . domainSize * n8r , sectionsZKey [ 7 ] [ 0 ] . p ) ;
const pol _ql = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
await fdZKey . readToBuffer ( pol _ql , 0 , zkey . domainSize * n8r , sectionsZKey [ 8 ] [ 0 ] . p ) ;
const pol _qr = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
await fdZKey . readToBuffer ( pol _qr , 0 , zkey . domainSize * n8r , sectionsZKey [ 9 ] [ 0 ] . p ) ;
const pol _qo = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
await fdZKey . readToBuffer ( pol _qo , 0 , zkey . domainSize * n8r , sectionsZKey [ 10 ] [ 0 ] . p ) ;
const pol _qc = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
await fdZKey . readToBuffer ( pol _qc , 0 , zkey . domainSize * n8r , sectionsZKey [ 11 ] [ 0 ] . p ) ;
const pol _s3 = new ffjavascript . BigBuffer ( zkey . domainSize * n8r ) ;
await fdZKey . readToBuffer ( pol _s3 , 0 , zkey . domainSize * n8r , sectionsZKey [ 12 ] [ 0 ] . p + 10 * zkey . domainSize * n8r ) ;
const transcript4 = new Uint8Array ( G1 . F . n8 * 2 * 3 ) ;
G1 . toRprUncompressed ( transcript4 , 0 , proof . T1 ) ;
G1 . toRprUncompressed ( transcript4 , G1 . F . n8 * 2 , proof . T2 ) ;
G1 . toRprUncompressed ( transcript4 , G1 . F . n8 * 4 , proof . T3 ) ;
ch . xi = hashToFr ( transcript4 ) ;
if ( logger ) logger . debug ( "xi: " + Fr . toString ( ch . xi ) ) ;
proof . eval _a = evalPol ( pol _a , ch . xi ) ;
proof . eval _b = evalPol ( pol _b , ch . xi ) ;
proof . eval _c = evalPol ( pol _c , ch . xi ) ;
proof . eval _s1 = evalPol ( pol _s1 , ch . xi ) ;
proof . eval _s2 = evalPol ( pol _s2 , ch . xi ) ;
proof . eval _t = evalPol ( pol _t , ch . xi ) ;
proof . eval _zw = evalPol ( pol _z , Fr . mul ( ch . xi , Fr . w [ zkey . power ] ) ) ;
const coef _ab = Fr . mul ( proof . eval _a , proof . eval _b ) ;
let e2a = proof . eval _a ;
const betaxi = Fr . mul ( ch . beta , ch . xi ) ;
e2a = Fr . add ( e2a , betaxi ) ;
e2a = Fr . add ( e2a , ch . gamma ) ;
let e2b = proof . eval _b ;
e2b = Fr . add ( e2b , Fr . mul ( betaxi , zkey . k1 ) ) ;
e2b = Fr . add ( e2b , ch . gamma ) ;
let e2c = proof . eval _c ;
e2c = Fr . add ( e2c , Fr . mul ( betaxi , zkey . k2 ) ) ;
e2c = Fr . add ( e2c , ch . gamma ) ;
const e2 = Fr . mul ( Fr . mul ( Fr . mul ( e2a , e2b ) , e2c ) , ch . alpha ) ;
let e3a = proof . eval _a ;
e3a = Fr . add ( e3a , Fr . mul ( ch . beta , proof . eval _s1 ) ) ;
e3a = Fr . add ( e3a , ch . gamma ) ;
let e3b = proof . eval _b ;
e3b = Fr . add ( e3b , Fr . mul ( ch . beta , proof . eval _s2 ) ) ;
e3b = Fr . add ( e3b , ch . gamma ) ;
let e3 = Fr . mul ( e3a , e3b ) ;
e3 = Fr . mul ( e3 , ch . beta ) ;
e3 = Fr . mul ( e3 , proof . eval _zw ) ;
e3 = Fr . mul ( e3 , ch . alpha ) ;
ch . xim = ch . xi ;
for ( let i = 0 ; i < zkey . power ; i ++ ) ch . xim = Fr . mul ( ch . xim , ch . xim ) ;
const eval _l1 = Fr . div (
Fr . sub ( ch . xim , Fr . one ) ,
Fr . mul ( Fr . sub ( ch . xi , Fr . one ) , Fr . e ( zkey . domainSize ) )
) ;
const e4 = Fr . mul ( eval _l1 , Fr . mul ( ch . alpha , ch . alpha ) ) ;
const coefs3 = e3 ;
const coefz = Fr . add ( e2 , e4 ) ;
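    // Build the linearization polynomial r(X): the selector, permutation and Z
    // polynomials combined with the scalar evaluations of a, b, c, s1, s2 and zw at
    // xi, so the verifier can check the quotient identity from the openings alone.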
pol _r = new ffjavascript . BigBuffer ( ( zkey . domainSize + 3 ) * n8r ) ;
for ( let i = 0 ; i < zkey . domainSize + 3 ; i ++ ) {
let v = Fr . mul ( coefz , pol _z . slice ( i * n8r , ( i + 1 ) * n8r ) ) ;
if ( i < zkey . domainSize ) {
v = Fr . add ( v , Fr . mul ( coef _ab , pol _qm . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
v = Fr . add ( v , Fr . mul ( proof . eval _a , pol _ql . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
v = Fr . add ( v , Fr . mul ( proof . eval _b , pol _qr . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
v = Fr . add ( v , Fr . mul ( proof . eval _c , pol _qo . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
v = Fr . add ( v , pol _qc . slice ( i * n8r , ( i + 1 ) * n8r ) ) ;
v = Fr . sub ( v , Fr . mul ( coefs3 , pol _s3 . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
}
pol _r . set ( v , i * n8r ) ;
}
proof . eval _r = evalPol ( pol _r , ch . xi ) ;
}
async function round5 ( ) {
const transcript5 = new Uint8Array ( n8r * 7 ) ;
Fr . toRprBE ( transcript5 , 0 , proof . eval _a ) ;
Fr . toRprBE ( transcript5 , n8r , proof . eval _b ) ;
Fr . toRprBE ( transcript5 , n8r * 2 , proof . eval _c ) ;
Fr . toRprBE ( transcript5 , n8r * 3 , proof . eval _s1 ) ;
Fr . toRprBE ( transcript5 , n8r * 4 , proof . eval _s2 ) ;
Fr . toRprBE ( transcript5 , n8r * 5 , proof . eval _zw ) ;
Fr . toRprBE ( transcript5 , n8r * 6 , proof . eval _r ) ;
ch . v = [ ] ;
ch . v [ 1 ] = hashToFr ( transcript5 ) ;
if ( logger ) logger . debug ( "v: " + Fr . toString ( ch . v [ 1 ] ) ) ;
for ( let i = 2 ; i <= 6 ; i ++ ) ch . v [ i ] = Fr . mul ( ch . v [ i - 1 ] , ch . v [ 1 ] ) ;
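    // Aggregate every polynomial to be opened at xi into one polynomial weighted by
    // the powers of v, subtract the claimed evaluations from the constant term, and
    // divide by (X - xi): the commitment to the quotient is the opening proof Wxi.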
let pol _wxi = new ffjavascript . BigBuffer ( ( zkey . domainSize + 6 ) * n8r ) ;
const xi2m = Fr . mul ( ch . xim , ch . xim ) ;
for ( let i = 0 ; i < zkey . domainSize + 6 ; i ++ ) {
let w = Fr . zero ;
w = Fr . add ( w , Fr . mul ( xi2m , pol _t . slice ( ( zkey . domainSize * 2 + i ) * n8r , ( zkey . domainSize * 2 + i + 1 ) * n8r ) ) ) ;
if ( i < zkey . domainSize + 3 ) {
w = Fr . add ( w , Fr . mul ( ch . v [ 1 ] , pol _r . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
}
if ( i < zkey . domainSize + 2 ) {
w = Fr . add ( w , Fr . mul ( ch . v [ 2 ] , pol _a . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
w = Fr . add ( w , Fr . mul ( ch . v [ 3 ] , pol _b . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
w = Fr . add ( w , Fr . mul ( ch . v [ 4 ] , pol _c . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
}
if ( i < zkey . domainSize ) {
w = Fr . add ( w , pol _t . slice ( i * n8r , ( i + 1 ) * n8r ) ) ;
w = Fr . add ( w , Fr . mul ( ch . xim , pol _t . slice ( ( zkey . domainSize + i ) * n8r , ( zkey . domainSize + i + 1 ) * n8r ) ) ) ;
w = Fr . add ( w , Fr . mul ( ch . v [ 5 ] , pol _s1 . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
w = Fr . add ( w , Fr . mul ( ch . v [ 6 ] , pol _s2 . slice ( i * n8r , ( i + 1 ) * n8r ) ) ) ;
}
pol _wxi . set ( w , i * n8r ) ;
}
let w0 = pol _wxi . slice ( 0 , n8r ) ;
w0 = Fr . sub ( w0 , proof . eval _t ) ;
w0 = Fr . sub ( w0 , Fr . mul ( ch . v [ 1 ] , proof . eval _r ) ) ;
w0 = Fr . sub ( w0 , Fr . mul ( ch . v [ 2 ] , proof . eval _a ) ) ;
w0 = Fr . sub ( w0 , Fr . mul ( ch . v [ 3 ] , proof . eval _b ) ) ;
w0 = Fr . sub ( w0 , Fr . mul ( ch . v [ 4 ] , proof . eval _c ) ) ;
w0 = Fr . sub ( w0 , Fr . mul ( ch . v [ 5 ] , proof . eval _s1 ) ) ;
w0 = Fr . sub ( w0 , Fr . mul ( ch . v [ 6 ] , proof . eval _s2 ) ) ;
pol _wxi . set ( w0 , 0 ) ;
pol _wxi = divPol1 ( pol _wxi , ch . xi ) ;
proof . Wxi = await expTau ( pol _wxi , "multiexp Wxi" ) ;
let pol _wxiw = new ffjavascript . BigBuffer ( ( zkey . domainSize + 3 ) * n8r ) ;
for ( let i = 0 ; i < zkey . domainSize + 3 ; i ++ ) {
const w = pol _z . slice ( i * n8r , ( i + 1 ) * n8r ) ;
pol _wxiw . set ( w , i * n8r ) ;
}
w0 = pol _wxiw . slice ( 0 , n8r ) ;
w0 = Fr . sub ( w0 , proof . eval _zw ) ;
pol _wxiw . set ( w0 , 0 ) ;
pol _wxiw = divPol1 ( pol _wxiw , Fr . mul ( ch . xi , Fr . w [ zkey . power ] ) ) ;
proof . Wxiw = await expTau ( pol _wxiw , "multiexp Wxiw" ) ;
}
    function hashToFr(transcript) {
        const v = ffjavascript.Scalar.fromRprBE(new Uint8Array(keccak256$1.arrayBuffer(transcript)));
        return Fr.e(v);
    }
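    /*
        hashToFr is the Fiat-Shamir step used by this prover: each challenge
        is the Keccak-256 digest of the transcript so far, reduced into the
        scalar field Fr. A minimal standalone sketch of the same idea,
        assuming only js-sha3 and an ffjavascript curve object are in scope:

            const { keccak256 } = require("js-sha3");
            const digest = new Uint8Array(keccak256.arrayBuffer(transcript)); // 32 bytes, big-endian
            const challenge = Fr.e(ffjavascript.Scalar.fromRprBE(digest));    // reduce mod r

        Reducing a 256-bit digest modulo r leaves a small bias, acceptable
        here because r is close to 2^254.
    */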
    function evalPol(P, x) {
        const n = P.byteLength / n8r;
        if (n == 0) return Fr.zero;
        let res = P.slice((n - 1) * n8r, n * n8r);
        for (let i = n - 2; i >= 0; i--) {
            res = Fr.add(Fr.mul(res, x), P.slice(i * n8r, (i + 1) * n8r));
        }
        return res;
    }
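    /*
        evalPol evaluates P(x) by Horner's rule: starting from the highest
        coefficient, res = res*x + c_i at each step, so a degree-(n-1)
        polynomial costs n-1 field multiplications. For example, with
        coefficients [c0, c1, c2] (stored lowest-first, n8r bytes each) it
        computes (c2*x + c1)*x + c0 = c2*x^2 + c1*x + c0.
    */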
    function divPol1(P, d) {
        const n = P.byteLength / n8r;
        const res = new ffjavascript.BigBuffer(n * n8r);
        res.set(Fr.zero, (n - 1) * n8r);
        res.set(P.slice((n - 1) * n8r, n * n8r), (n - 2) * n8r);
        for (let i = n - 3; i >= 0; i--) {
            res.set(
                Fr.add(
                    P.slice((i + 1) * n8r, (i + 2) * n8r),
                    Fr.mul(
                        d,
                        res.slice((i + 1) * n8r, (i + 2) * n8r)
                    )
                ),
                i * n8r
            );
        }
        if (!Fr.eq(
            P.slice(0, n8r),
            Fr.mul(
                Fr.neg(d),
                res.slice(0, n8r)
            )
        )) {
            throw new Error("Polynomial does not divide");
        }
        return res;
    }
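    /*
        divPol1 computes P(X) / (X - d) by synthetic division, working from
        the top coefficient down (q_i = p_{i+1} + d*q_{i+1}), then verifies
        the implied remainder is zero: p_0 must equal -d*q_0, which holds
        exactly when P(d) = 0. As a sanity check with ordinary integers:
        P(X) = X^2 - 5X + 6 divided by (X - 2) gives quotient coefficients
        [-3, 1] (i.e. X - 3) and remainder 0.
    */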
    async function expTau(b, name) {
        const n = b.byteLength / n8r;
        const PTauN = PTau.slice(0, n * curve.G1.F.n8 * 2);
        const bm = await curve.Fr.batchFromMontgomery(b);
        let res = await curve.G1.multiExpAffine(PTauN, bm, logger, name);
        res = curve.G1.toAffine(res);
        return res;
    }
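    /*
        expTau is the polynomial commitment step: given coefficients
        b = [b_0 .. b_{n-1}], it returns the multi-scalar multiplication
        sum_i b_i * [tau^i]_1 over the powers of tau loaded earlier from the
        zkey, i.e. a G1 point committing to the polynomial (KZG style).
        Coefficients are converted out of Montgomery form first because
        multiExpAffine expects the plain representation.
    */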
    async function to4T(A, pz) {
        pz = pz || [];
        let a = await Fr.ifft(A);
        const a4 = new ffjavascript.BigBuffer(n8r * zkey.domainSize * 4);
        a4.set(a, 0);

        const a1 = new ffjavascript.BigBuffer(n8r * (zkey.domainSize + pz.length));
        a1.set(a, 0);
        for (let i = 0; i < pz.length; i++) {
            a1.set(
                Fr.add(
                    a1.slice((zkey.domainSize + i) * n8r, (zkey.domainSize + i + 1) * n8r),
                    pz[i]
                ),
                (zkey.domainSize + i) * n8r
            );
            a1.set(
                Fr.sub(
                    a1.slice(i * n8r, (i + 1) * n8r),
                    pz[i]
                ),
                i * n8r
            );
        }
        const A4 = await Fr.fft(a4);
        return [a1, A4];
    }
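    /*
        to4T interpolates evaluations A over the base domain into coefficient
        form (ifft) and returns [a1, A4]: a1 is the coefficient form extended
        with the blinding scalars pz, where each pz[i] is added at degree
        domainSize+i and subtracted at degree i, i.e. the polynomial gains
        pz[i] * X^i * (X^n - 1), which vanishes on the whole domain; A4 is
        the (unblinded) polynomial re-evaluated on the 4x larger domain so
        products of polynomials can be formed without aliasing.
    */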
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function plonkFullProve$1(input, wasmFile, zkeyFileName, logger) {
    const wtns = {
        type: "mem"
    };
    await wtnsCalculate$1(input, wasmFile, wtns);
    return await plonk16Prove(zkeyFileName, wtns, logger);
}
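/*
    plonkFullProve chains the two halves of proving: it computes the witness
    in memory from the input signals and the circuit wasm, then runs the
    PLONK prover against the zkey. A hedged usage sketch (file names are
    placeholders, not part of this module):

        const { proof, publicSignals } =
            await plonkFullProve({ a: 3, b: 11 }, "circuit.wasm", "circuit_final.zkey");

    The result is whatever the PLONK prover returns, an object holding the
    proof and the public signals, ready to be serialized as JSON.
*/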
/*
    Copyright 2021 0kims association.

    This file is part of snarkjs.

    snarkjs is a free software: you can redistribute it and/or
    modify it under the terms of the GNU General Public License as published by the
    Free Software Foundation, either version 3 of the License, or (at your option)
    any later version.

    snarkjs is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
    more details.

    You should have received a copy of the GNU General Public License along with
    snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
const { unstringifyBigInts: unstringifyBigInts$1 } = ffjavascript.utils;

const { keccak256 } = jsSha3__default["default"];

async function plonkVerify$1(vk_verifier, publicSignals, proof, logger) {
    vk_verifier = unstringifyBigInts$1(vk_verifier);
    proof = unstringifyBigInts$1(proof);
    publicSignals = unstringifyBigInts$1(publicSignals);

    const curve = await getCurveFromName(vk_verifier.curve);

    const Fr = curve.Fr;
    const G1 = curve.G1;

    proof = fromObjectProof(curve, proof);
    vk_verifier = fromObjectVk(curve, vk_verifier);
    if (!isWellConstructed(curve, proof)) {
        if (logger) logger.error("Proof is not well constructed");
        return false;
    }
    const challanges = calculateChallanges(curve, proof);
    if (logger) {
        logger.debug("beta: " + Fr.toString(challanges.beta, 16));
        logger.debug("gamma: " + Fr.toString(challanges.gamma, 16));
        logger.debug("alpha: " + Fr.toString(challanges.alpha, 16));
        logger.debug("xi: " + Fr.toString(challanges.xi, 16));
        logger.debug("v1: " + Fr.toString(challanges.v[1], 16));
        logger.debug("v6: " + Fr.toString(challanges.v[6], 16));
        logger.debug("u: " + Fr.toString(challanges.u, 16));
    }
    const L = calculateLagrangeEvaluations(curve, challanges, vk_verifier);
    if (logger) {
        logger.debug("Lagrange Evaluations: ");
        for (let i = 1; i < L.length; i++) {
            logger.debug(`L${i}(xi)=` + Fr.toString(L[i], 16));
        }
    }
    if (publicSignals.length != vk_verifier.nPublic) {
        if (logger) logger.error("Number of public signals does not match with vk");
        return false;
    }
    const pl = calculatePl(curve, publicSignals, L);
    if (logger) {
        logger.debug("Pl: " + Fr.toString(pl, 16));
    }
    const t = calculateT(curve, proof, challanges, pl, L[1]);
    if (logger) {
        logger.debug("t: " + Fr.toString(t, 16));
    }
    const D = calculateD(curve, proof, challanges, vk_verifier, L[1]);
    if (logger) {
        logger.debug("D: " + G1.toString(G1.toAffine(D), 16));
    }
    const F = calculateF(curve, proof, challanges, vk_verifier, D);
    if (logger) {
        logger.debug("F: " + G1.toString(G1.toAffine(F), 16));
    }
    const E = calculateE(curve, proof, challanges, vk_verifier, t);
    if (logger) {
        logger.debug("E: " + G1.toString(G1.toAffine(E), 16));
    }
    const res = await isValidPairing(curve, proof, challanges, vk_verifier, E, F);
    if (logger) {
        if (res) {
            logger.info("OK!");
        } else {
            logger.warn("Invalid Proof");
        }
    }
    return res;
}
function fromObjectProof(curve, proof) {
    const G1 = curve.G1;
    const Fr = curve.Fr;
    const res = {};
    res.A = G1.fromObject(proof.A);
    res.B = G1.fromObject(proof.B);
    res.C = G1.fromObject(proof.C);
    res.Z = G1.fromObject(proof.Z);
    res.T1 = G1.fromObject(proof.T1);
    res.T2 = G1.fromObject(proof.T2);
    res.T3 = G1.fromObject(proof.T3);
    res.eval_a = Fr.fromObject(proof.eval_a);
    res.eval_b = Fr.fromObject(proof.eval_b);
    res.eval_c = Fr.fromObject(proof.eval_c);
    res.eval_zw = Fr.fromObject(proof.eval_zw);
    res.eval_s1 = Fr.fromObject(proof.eval_s1);
    res.eval_s2 = Fr.fromObject(proof.eval_s2);
    res.eval_r = Fr.fromObject(proof.eval_r);
    res.Wxi = G1.fromObject(proof.Wxi);
    res.Wxiw = G1.fromObject(proof.Wxiw);
    return res;
}
function fromObjectVk(curve, vk) {
    const G1 = curve.G1;
    const G2 = curve.G2;
    const Fr = curve.Fr;
    const res = vk;
    res.Qm = G1.fromObject(vk.Qm);
    res.Ql = G1.fromObject(vk.Ql);
    res.Qr = G1.fromObject(vk.Qr);
    res.Qo = G1.fromObject(vk.Qo);
    res.Qc = G1.fromObject(vk.Qc);
    res.S1 = G1.fromObject(vk.S1);
    res.S2 = G1.fromObject(vk.S2);
    res.S3 = G1.fromObject(vk.S3);
    res.k1 = Fr.fromObject(vk.k1);
    res.k2 = Fr.fromObject(vk.k2);
    res.X_2 = G2.fromObject(vk.X_2);
    return res;
}
function isWellConstructed(curve, proof) {
    const G1 = curve.G1;
    if (!G1.isValid(proof.A)) return false;
    if (!G1.isValid(proof.B)) return false;
    if (!G1.isValid(proof.C)) return false;
    if (!G1.isValid(proof.Z)) return false;
    if (!G1.isValid(proof.T1)) return false;
    if (!G1.isValid(proof.T2)) return false;
    if (!G1.isValid(proof.T3)) return false;
    if (!G1.isValid(proof.Wxi)) return false;
    if (!G1.isValid(proof.Wxiw)) return false;
    return true;
}
function calculateChallanges(curve, proof) {
    const G1 = curve.G1;
    const Fr = curve.Fr;
    const n8r = curve.Fr.n8;
    const res = {};

    const transcript1 = new Uint8Array(G1.F.n8 * 2 * 3);
    G1.toRprUncompressed(transcript1, 0, proof.A);
    G1.toRprUncompressed(transcript1, G1.F.n8 * 2, proof.B);
    G1.toRprUncompressed(transcript1, G1.F.n8 * 4, proof.C);
    res.beta = hashToFr(curve, transcript1);

    const transcript2 = new Uint8Array(n8r);
    Fr.toRprBE(transcript2, 0, res.beta);
    res.gamma = hashToFr(curve, transcript2);

    const transcript3 = new Uint8Array(G1.F.n8 * 2);
    G1.toRprUncompressed(transcript3, 0, proof.Z);
    res.alpha = hashToFr(curve, transcript3);

    const transcript4 = new Uint8Array(G1.F.n8 * 2 * 3);
    G1.toRprUncompressed(transcript4, 0, proof.T1);
    G1.toRprUncompressed(transcript4, G1.F.n8 * 2, proof.T2);
    G1.toRprUncompressed(transcript4, G1.F.n8 * 4, proof.T3);
    res.xi = hashToFr(curve, transcript4);

    const transcript5 = new Uint8Array(n8r * 7);
    Fr.toRprBE(transcript5, 0, proof.eval_a);
    Fr.toRprBE(transcript5, n8r, proof.eval_b);
    Fr.toRprBE(transcript5, n8r * 2, proof.eval_c);
    Fr.toRprBE(transcript5, n8r * 3, proof.eval_s1);
    Fr.toRprBE(transcript5, n8r * 4, proof.eval_s2);
    Fr.toRprBE(transcript5, n8r * 5, proof.eval_zw);
    Fr.toRprBE(transcript5, n8r * 6, proof.eval_r);
    res.v = [];
    res.v[1] = hashToFr(curve, transcript5);
    for (let i = 2; i <= 6; i++) res.v[i] = Fr.mul(res.v[i - 1], res.v[1]);

    const transcript6 = new Uint8Array(G1.F.n8 * 2 * 2);
    G1.toRprUncompressed(transcript6, 0, proof.Wxi);
    G1.toRprUncompressed(transcript6, G1.F.n8 * 2, proof.Wxiw);
    res.u = hashToFr(curve, transcript6);

    return res;
}
function calculateLagrangeEvaluations(curve, challanges, vk) {
    const Fr = curve.Fr;

    let xin = challanges.xi;
    let domainSize = 1;
    for (let i = 0; i < vk.power; i++) {
        xin = Fr.square(xin);
        domainSize *= 2;
    }
    challanges.xin = xin;
    challanges.zh = Fr.sub(xin, Fr.one);

    const L = [];
    const n = Fr.e(domainSize);
    let w = Fr.one;
    for (let i = 1; i <= Math.max(1, vk.nPublic); i++) {
        L[i] = Fr.div(Fr.mul(w, challanges.zh), Fr.mul(n, Fr.sub(challanges.xi, w)));
        w = Fr.mul(w, Fr.w[vk.power]);
    }
    return L;
}
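/*
    The verifier only needs the first max(1, nPublic) Lagrange basis
    polynomials evaluated at the challenge xi. With domain size n, domain
    generator w, and Z_H(xi) = xi^n - 1 (stored as challanges.zh), the loop
    above uses the closed form

        L_i(xi) = w^{i-1} * (xi^n - 1) / (n * (xi - w^{i-1}))

    so each evaluation costs one field division instead of an O(n)
    interpolation.
*/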
function hashToFr(curve, transcript) {
    const v = ffjavascript.Scalar.fromRprBE(new Uint8Array(keccak256.arrayBuffer(transcript)));
    return curve.Fr.e(v);
}
function calculatePl(curve, publicSignals, L) {
    const Fr = curve.Fr;
    let pl = Fr.zero;
    for (let i = 0; i < publicSignals.length; i++) {
        const w = Fr.e(publicSignals[i]);
        pl = Fr.sub(pl, Fr.mul(w, L[i + 1]));
    }
    return pl;
}
function calculateT(curve, proof, challanges, pl, l1) {
    const Fr = curve.Fr;
    let num = proof.eval_r;
    num = Fr.add(num, pl);

    let e1 = proof.eval_a;
    e1 = Fr.add(e1, Fr.mul(challanges.beta, proof.eval_s1));
    e1 = Fr.add(e1, challanges.gamma);

    let e2 = proof.eval_b;
    e2 = Fr.add(e2, Fr.mul(challanges.beta, proof.eval_s2));
    e2 = Fr.add(e2, challanges.gamma);

    let e3 = proof.eval_c;
    e3 = Fr.add(e3, challanges.gamma);

    let e = Fr.mul(Fr.mul(e1, e2), e3);
    e = Fr.mul(e, proof.eval_zw);
    e = Fr.mul(e, challanges.alpha);

    num = Fr.sub(num, e);
    num = Fr.sub(num, Fr.mul(l1, Fr.square(challanges.alpha)));

    const t = Fr.div(num, challanges.zh);
    return t;
}
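/*
    calculateT recovers the claimed quotient evaluation t(xi) from the
    linearization value r(xi), the public-input term pl, and the permutation
    argument:

        t(xi) = ( r(xi) + PI(xi)
                  - (a + beta*s1 + gamma)(b + beta*s2 + gamma)(c + gamma) * zw * alpha
                  - L1(xi) * alpha^2 ) / Z_H(xi)

    where the evaluations a, b, c, s1, s2, zw all come from the proof.
*/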
function calculateD(curve, proof, challanges, vk, l1) {
    const G1 = curve.G1;
    const Fr = curve.Fr;

    let s1 = Fr.mul(Fr.mul(proof.eval_a, proof.eval_b), challanges.v[1]);
    let res = G1.timesFr(vk.Qm, s1);

    let s2 = Fr.mul(proof.eval_a, challanges.v[1]);
    res = G1.add(res, G1.timesFr(vk.Ql, s2));

    let s3 = Fr.mul(proof.eval_b, challanges.v[1]);
    res = G1.add(res, G1.timesFr(vk.Qr, s3));

    let s4 = Fr.mul(proof.eval_c, challanges.v[1]);
    res = G1.add(res, G1.timesFr(vk.Qo, s4));

    res = G1.add(res, G1.timesFr(vk.Qc, challanges.v[1]));

    const betaxi = Fr.mul(challanges.beta, challanges.xi);
    let s6a = proof.eval_a;
    s6a = Fr.add(s6a, betaxi);
    s6a = Fr.add(s6a, challanges.gamma);

    let s6b = proof.eval_b;
    s6b = Fr.add(s6b, Fr.mul(betaxi, vk.k1));
    s6b = Fr.add(s6b, challanges.gamma);

    let s6c = proof.eval_c;
    s6c = Fr.add(s6c, Fr.mul(betaxi, vk.k2));
    s6c = Fr.add(s6c, challanges.gamma);

    let s6 = Fr.mul(Fr.mul(s6a, s6b), s6c);
    s6 = Fr.mul(s6, Fr.mul(challanges.alpha, challanges.v[1]));

    let s6d = Fr.mul(Fr.mul(l1, Fr.square(challanges.alpha)), challanges.v[1]);
    s6 = Fr.add(s6, s6d);
    s6 = Fr.add(s6, challanges.u);
    res = G1.add(res, G1.timesFr(proof.Z, s6));

    let s7a = proof.eval_a;
    s7a = Fr.add(s7a, Fr.mul(challanges.beta, proof.eval_s1));
    s7a = Fr.add(s7a, challanges.gamma);

    let s7b = proof.eval_b;
    s7b = Fr.add(s7b, Fr.mul(challanges.beta, proof.eval_s2));
    s7b = Fr.add(s7b, challanges.gamma);

    let s7 = Fr.mul(s7a, s7b);
    s7 = Fr.mul(s7, challanges.alpha);
    s7 = Fr.mul(s7, challanges.v[1]);
    s7 = Fr.mul(s7, challanges.beta);
    s7 = Fr.mul(s7, proof.eval_zw);
    res = G1.sub(res, G1.timesFr(vk.S3, s7));

    return res;
}
function calculateF(curve, proof, challanges, vk, D) {
    const G1 = curve.G1;
    const Fr = curve.Fr;

    let res = proof.T1;
    res = G1.add(res, G1.timesFr(proof.T2, challanges.xin));
    res = G1.add(res, G1.timesFr(proof.T3, Fr.square(challanges.xin)));
    res = G1.add(res, D);
    res = G1.add(res, G1.timesFr(proof.A, challanges.v[2]));
    res = G1.add(res, G1.timesFr(proof.B, challanges.v[3]));
    res = G1.add(res, G1.timesFr(proof.C, challanges.v[4]));
    res = G1.add(res, G1.timesFr(vk.S1, challanges.v[5]));
    res = G1.add(res, G1.timesFr(vk.S2, challanges.v[6]));

    return res;
}
function calculateE(curve, proof, challanges, vk, t) {
    const G1 = curve.G1;
    const Fr = curve.Fr;

    let s = t;
    s = Fr.add(s, Fr.mul(challanges.v[1], proof.eval_r));
    s = Fr.add(s, Fr.mul(challanges.v[2], proof.eval_a));
    s = Fr.add(s, Fr.mul(challanges.v[3], proof.eval_b));
    s = Fr.add(s, Fr.mul(challanges.v[4], proof.eval_c));
    s = Fr.add(s, Fr.mul(challanges.v[5], proof.eval_s1));
    s = Fr.add(s, Fr.mul(challanges.v[6], proof.eval_s2));
    s = Fr.add(s, Fr.mul(challanges.u, proof.eval_zw));

    const res = G1.timesFr(G1.one, s);

    return res;
}
async function isValidPairing(curve, proof, challanges, vk, E, F) {
    const G1 = curve.G1;
    const Fr = curve.Fr;

    let A1 = proof.Wxi;
    A1 = G1.add(A1, G1.timesFr(proof.Wxiw, challanges.u));

    let B1 = G1.timesFr(proof.Wxi, challanges.xi);
    const s = Fr.mul(Fr.mul(challanges.u, challanges.xi), Fr.w[vk.power]);
    B1 = G1.add(B1, G1.timesFr(proof.Wxiw, s));
    B1 = G1.add(B1, F);
    B1 = G1.sub(B1, E);

    const res = await curve.pairingEq(
        G1.neg(A1), vk.X_2,
        B1, curve.G2.one
    );

    return res;
}
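/*
    The whole verification reduces to a single pairing equation. With
    A1 = Wxi + u*Wxiw and B1 = xi*Wxi + u*xi*w*Wxiw + F - E as computed
    above, the proof is accepted iff

        e(-A1, [x]_2) * e(B1, [1]_2) == 1

    which is exactly what curve.pairingEq evaluates: it asserts that both
    opening proofs are consistent with the committed polynomials at the
    points xi and xi*w.
*/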
/*
    Copyright 2021 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
function i2hex(i) {
    return ("0" + i.toString(16)).slice(-2);
}

function p256(n) {
    let nstr = n.toString(16);
    while (nstr.length < 64) nstr = "0" + nstr;
    nstr = `"0x${nstr}"`;
    return nstr;
}
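/*
    Both helpers only format output: i2hex renders one byte as two hex
    digits (i2hex(10) === "0a"), and p256 left-pads a big integer to a full
    256-bit quoted hex word, e.g. p256(255n) returns "0x00...00ff" with 64
    hex digits, the fixed-width form expected in Solidity calldata.
*/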
async function plonkExportSolidityCallData(proof, pub) {
    const curve = await getCurveFromName(proof.curve);
    const G1 = curve.G1;
    const Fr = curve.Fr;

    let inputs = "";
    for (let i = 0; i < pub.length; i++) {
        if (inputs != "") inputs = inputs + ",";
        inputs = inputs + p256(pub[i]);
    }

    const proofBuff = new Uint8Array(G1.F.n8 * 2 * 9 + Fr.n8 * 7);
    G1.toRprUncompressed(proofBuff, 0, G1.e(proof.A));
    G1.toRprUncompressed(proofBuff, G1.F.n8 * 2, G1.e(proof.B));
    G1.toRprUncompressed(proofBuff, G1.F.n8 * 4, G1.e(proof.C));
    G1.toRprUncompressed(proofBuff, G1.F.n8 * 6, G1.e(proof.Z));
    G1.toRprUncompressed(proofBuff, G1.F.n8 * 8, G1.e(proof.T1));
    G1.toRprUncompressed(proofBuff, G1.F.n8 * 10, G1.e(proof.T2));
    G1.toRprUncompressed(proofBuff, G1.F.n8 * 12, G1.e(proof.T3));
    G1.toRprUncompressed(proofBuff, G1.F.n8 * 14, G1.e(proof.Wxi));
    G1.toRprUncompressed(proofBuff, G1.F.n8 * 16, G1.e(proof.Wxiw));
    Fr.toRprBE(proofBuff, G1.F.n8 * 18, Fr.e(proof.eval_a));
    Fr.toRprBE(proofBuff, G1.F.n8 * 18 + Fr.n8, Fr.e(proof.eval_b));
    Fr.toRprBE(proofBuff, G1.F.n8 * 18 + Fr.n8 * 2, Fr.e(proof.eval_c));
    Fr.toRprBE(proofBuff, G1.F.n8 * 18 + Fr.n8 * 3, Fr.e(proof.eval_s1));
    Fr.toRprBE(proofBuff, G1.F.n8 * 18 + Fr.n8 * 4, Fr.e(proof.eval_s2));
    Fr.toRprBE(proofBuff, G1.F.n8 * 18 + Fr.n8 * 5, Fr.e(proof.eval_zw));
    Fr.toRprBE(proofBuff, G1.F.n8 * 18 + Fr.n8 * 6, Fr.e(proof.eval_r));

    const proofHex = Array.from(proofBuff).map(i2hex).join("");

    const S = "0x" + proofHex + ",[" + inputs + "]";

    return S;
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function wtnsDebug$1(input, wasmFileName, wtnsFileName, symName, options, logger) {

    const fdWasm = await fastFile__namespace.readExisting(wasmFileName);
    const wasm = await fdWasm.read(fdWasm.totalSize);
    await fdWasm.close();

    let wcOps = {
        sanityCheck: true
    };
    let sym = await loadSymbols(symName);
    if (options.set) {
        if (!sym) sym = await loadSymbols(symName);
        wcOps.logSetSignal = function (labelIdx, value) {
            if (logger) logger.info("SET " + sym.labelIdx2Name[labelIdx] + " <-- " + value.toString());
        };
    }
    if (options.get) {
        if (!sym) sym = await loadSymbols(symName);
        wcOps.logGetSignal = function (varIdx, value) {
            if (logger) logger.info("GET " + sym.labelIdx2Name[varIdx] + " --> " + value.toString());
        };
    }
    if (options.trigger) {
        if (!sym) sym = await loadSymbols(symName);
        wcOps.logStartComponent = function (cIdx) {
            if (logger) logger.info("START: " + sym.componentIdx2Name[cIdx]);
        };
        wcOps.logFinishComponent = function (cIdx) {
            if (logger) logger.info("FINISH: " + sym.componentIdx2Name[cIdx]);
        };
    }
    wcOps.sym = sym;

    const wc = await circom_runtime.WitnessCalculatorBuilder(wasm, wcOps);

    const w = await wc.calculateWitness(input);

    const fdWtns = await binFileUtils__namespace.createBinFile(wtnsFileName, "wtns", 2, 2);

    await write(fdWtns, w, wc.prime);

    await fdWtns.close();
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
async function wtnsExportJson$1(wtnsFileName) {
    const w = await read(wtnsFileName);
    return w;
}
/*
    Copyright 2018 0KIMS association.

    This file is part of snarkJS.

    snarkJS is a free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    snarkJS is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
    License for more details.

    You should have received a copy of the GNU General Public License
    along with snarkJS. If not, see <https://www.gnu.org/licenses/>.
*/
const { stringifyBigInts, unstringifyBigInts } = ffjavascript.utils;

const logger = Logger__default["default"].create("snarkJS", { showTimestamp: false });
Logger__default["default"].setLogLevel("INFO");

const __dirname$1 = path__default["default"].dirname(new URL((typeof document === 'undefined' ? new (require('u' + 'rl').URL)('file:' + __filename).href : (document.currentScript && document.currentScript.src || new URL('cli.cjs', document.baseURI).href))).pathname);
const commands = [
    {
        cmd: "powersoftau new <curve> <power> [powersoftau_0000.ptau]",
        description: "Starts a powers of tau ceremony",
        alias: ["ptn"],
        options: "-verbose|v",
        action: powersOfTauNew
    },
    {
        cmd: "powersoftau contribute <powersoftau.ptau> <new_powersoftau.ptau>",
        description: "creates a ptau file with a new contribution",
        alias: ["ptc"],
        options: "-verbose|v -name|n -entropy|e",
        action: powersOfTauContribute
    },
    {
        cmd: "powersoftau export challenge <powersoftau_0000.ptau> [challenge]",
        description: "Creates a challenge",
        alias: ["ptec"],
        options: "-verbose|v",
        action: powersOfTauExportChallenge
    },
    {
        cmd: "powersoftau challenge contribute <curve> <challenge> [response]",
        description: "Contribute to a challenge",
        alias: ["ptcc"],
        options: "-verbose|v -entropy|e",
        action: powersOfTauChallengeContribute
    },
    {
        cmd: "powersoftau import response <powersoftau_old.ptau> <response> <powersoftau_new.ptau>",
        description: "import a response to a ptau file",
        alias: ["ptir"],
        options: "-verbose|v -nopoints -nocheck -name|n",
        action: powersOfTauImport
    },
    {
        cmd: "powersoftau beacon <old_powersoftau.ptau> <new_powersoftau.ptau> <beaconHash(Hex)> <numIterationsExp>",
        description: "adds a beacon",
        alias: ["ptb"],
        options: "-verbose|v -name|n",
        action: powersOfTauBeacon
    },
    {
        cmd: "powersoftau prepare phase2 <powersoftau.ptau> <new_powersoftau.ptau>",
        description: "Prepares phase 2.",
        longDescription: " This process calculates the evaluation of the Lagrange polynomials at tau for alpha*tau and beta*tau",
        alias: ["pt2"],
        options: "-verbose|v",
        action: powersOfTauPreparePhase2
    },
    {
        cmd: "powersoftau convert <old_powersoftau.ptau> <new_powersoftau.ptau>",
        description: "Convert ptau",
        longDescription: " This process calculates the evaluation of the Lagrange polynomials at tau for alpha*tau and beta*tau",
        alias: ["ptcv"],
        options: "-verbose|v",
        action: powersOfTauConvert
    },
    {
        cmd: "powersoftau truncate <powersoftau.ptau>",
        description: "Generate different powers of tau with smaller sizes",
        longDescription: " This process generates smaller ptau files from a bigger power ptau",
        alias: ["ptt"],
        options: "-verbose|v",
        action: powersOfTauTruncate
    },
    {
        cmd: "powersoftau verify <powersoftau.ptau>",
        description: "verifies a powers of tau file",
        alias: ["ptv"],
        options: "-verbose|v",
        action: powersOfTauVerify
    },
    {
        cmd: "powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>",
        description: "Exports a power of tau file to a JSON",
        alias: ["ptej"],
        options: "-verbose|v",
        action: powersOfTauExportJson
    },
    {
        cmd: "r1cs info [circuit.r1cs]",
        description: "Print statistics of a circuit",
        alias: ["ri", "info -r|r1cs:circuit.r1cs"],
        action: r1csInfo
    },
    {
        cmd: "r1cs print [circuit.r1cs] [circuit.sym]",
        description: "Print the constraints of a circuit",
        alias: ["rp", "print -r|r1cs:circuit.r1cs -s|sym"],
        action: r1csPrint
    },
    {
        cmd: "r1cs export json [circuit.r1cs] [circuit.json]",
        description: "Export r1cs to JSON file",
        alias: ["rej"],
        action: r1csExportJSON
    },
    {
        cmd: "wtns calculate [circuit.wasm] [input.json] [witness.wtns]",
        description: "Calculate specific witness of a circuit given an input",
        alias: ["wc", "calculatewitness -ws|wasm:circuit.wasm -i|input:input.json -wt|witness:witness.wtns"],
        action: wtnsCalculate
    },
    {
        cmd: "wtns debug [circuit.wasm] [input.json] [witness.wtns] [circuit.sym]",
        description: "Calculate the witness with debug info.",
        longDescription: "Calculate the witness with debug info. \nOptions:\n-g or --g : Log signal gets\n-s or --s : Log signal sets\n-t or --trigger : Log triggers ",
        options: "-get|g -set|s -trigger|t",
        alias: ["wd"],
        action: wtnsDebug
    },
    {
        cmd: "wtns export json [witness.wtns] [witness.json]",
        description: "Export a binary witness file to JSON.",
        options: "-verbose|v",
        alias: ["wej"],
        action: wtnsExportJson
    },
    {
        cmd: "zkey contribute <circuit_old.zkey> <circuit_new.zkey>",
        description: "creates a zkey file with a new contribution",
        alias: ["zkc"],
        options: "-verbose|v -entropy|e -name|n",
        action: zkeyContribute
    },
    {
        cmd: "zkey export bellman <circuit_xxxx.zkey> [circuit.mpcparams]",
        description: "Export a zKey to a MPCParameters file compatible with kobi/phase2 (Bellman)",
        alias: ["zkeb"],
        options: "-verbose|v",
        action: zkeyExportBellman
    },
    {
        cmd: "zkey bellman contribute <curve> <circuit.mpcparams> <circuit_response.mpcparams>",
        description: "contributes to a challenge file in bellman format",
        alias: ["zkbc"],
        options: "-verbose|v -entropy|e",
        action: zkeyBellmanContribute
    },
    {
        cmd: "zkey import bellman <circuit_old.zkey> <circuit.mpcparams> <circuit_new.zkey>",
        description: "Import a response from a MPCParameters file compatible with kobi/phase2 (Bellman)",
        alias: ["zkib"],
        options: "-verbose|v -name|n",
        action: zkeyImportBellman
    },
    {
        cmd: "zkey beacon <circuit_old.zkey> <circuit_new.zkey> <beaconHash(Hex)> <numIterationsExp>",
        description: "adds a beacon",
        alias: ["zkb"],
        options: "-verbose|v -name|n",
        action: zkeyBeacon
    },
    {
        cmd: "zkey verify r1cs [circuit.r1cs] [powersoftau.ptau] [circuit_final.zkey]",
        description: "Verify zkey file contributions and verify that matches with the original circuit.r1cs and ptau",
        alias: ["zkv", "zkvr", "zkey verify"],
        options: "-verbose|v",
        action: zkeyVerifyFromR1cs
    },
    {
        cmd: "zkey verify init [circuit_0000.zkey] [powersoftau.ptau] [circuit_final.zkey]",
        description: "Verify zkey file contributions and verify that matches with the original circuit.r1cs and ptau",
        alias: ["zkvi"],
        options: "-verbose|v",
        action: zkeyVerifyFromInit
    },
    {
        cmd: "zkey export verificationkey [circuit_final.zkey] [verification_key.json]",
        description: "Exports a verification key",
        alias: ["zkev"],
        action: zkeyExportVKey
    },
    {
        cmd: "zkey export json [circuit_final.zkey] [circuit_final.zkey.json]",
        description: "Exports a circuit key to a JSON file",
        alias: ["zkej"],
        options: "-verbose|v",
        action: zkeyExportJson
    },
    {
        cmd: "zkey export solidityverifier [circuit_final.zkey] [verifier.sol]",
        description: "Creates a verifier in solidity",
        alias: ["zkesv", "generateverifier -vk|verificationkey -v|verifier"],
        action: zkeyExportSolidityVerifier
    },
    {
        cmd: "zkey export soliditycalldata [public.json] [proof.json]",
        description: "Generates call parameters ready to be called.",
        alias: ["zkesc", "generatecall -pub|public -p|proof"],
        action: zkeyExportSolidityCalldata
    },
    {
        cmd: "groth16 setup [circuit.r1cs] [powersoftau.ptau] [circuit_0000.zkey]",
        description: "Creates an initial groth16 pkey file with zero contributions",
        alias: ["g16s", "zkn", "zkey new"],
        options: "-verbose|v",
        action: zkeyNew
    },
    {
        cmd: "groth16 prove [circuit_final.zkey] [witness.wtns] [proof.json] [public.json]",
        description: "Generates a zk Proof from witness",
        alias: ["g16p", "zpw", "zksnark proof", "proof -pk|provingkey -wt|witness -p|proof -pub|public"],
        options: "-verbose|v -protocol",
        action: groth16Prove
    },
    {
        cmd: "groth16 fullprove [input.json] [circuit_final.wasm] [circuit_final.zkey] [proof.json] [public.json]",
        description: "Generates a zk Proof from input",
        alias: ["g16f", "g16i"],
        options: "-verbose|v -protocol",
        action: groth16FullProve
    },
    {
        cmd: "groth16 verify [verification_key.json] [public.json] [proof.json]",
        description: "Verify a zk Proof",
        alias: ["g16v", "verify -vk|verificationkey -pub|public -p|proof"],
        action: groth16Verify
    },
    {
        cmd: "plonk setup [circuit.r1cs] [powersoftau.ptau] [circuit.zkey]",
        description: "Creates an initial PLONK pkey",
        alias: ["pks"],
        options: "-verbose|v",
        action: plonkSetup
    },
    {
        cmd: "plonk prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]",
        description: "Generates a PLONK Proof from witness",
        alias: ["pkp"],
        options: "-verbose|v -protocol",
        action: plonkProve
    },
    {
        cmd: "plonk fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]",
        description: "Generates a PLONK Proof from input",
        alias: ["pkf"],
        options: "-verbose|v -protocol",
        action: plonkFullProve
    },
    {
        cmd: "plonk verify [verification_key.json] [public.json] [proof.json]",
        description: "Verify a PLONK Proof",
        alias: ["pkv"],
        options: "-verbose|v",
        action: plonkVerify
    }
];
clProcessor(commands).then((res) => {
    process.exit(res);
}, (err) => {
    logger.error(err);
    process.exit(1);
});
/*
TODO COMMANDS
=============
    {
        cmd: "zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]",
        description: "Run a simple setup for a circuit generating the proving key.",
        alias: ["zs", "setup -r1cs|r -provingkey|pk -verificationkey|vk"],
        options: "-verbose|v -protocol",
        action: zksnarkSetup
    },
    {
        cmd: "witness verify <circuit.r1cs> <witness.wtns>",
        description: "Verify a witness against an r1cs",
        alias: ["wv"],
        action: witnessVerify
    },
    {
        cmd: "powersOfTau export response"
    }
*/
function changeExt(fileName, newExt) {
    let S = fileName;
    while ((S.length > 0) && (S[S.length - 1] != ".")) S = S.slice(0, S.length - 1);
    if (S.length > 0) {
        return S + newExt;
    } else {
        return fileName + "." + newExt;
    }
}
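/*
    changeExt swaps a file extension by trimming back to the last dot:
    changeExt("circuit.r1cs", "sym") returns "circuit.sym", while a name
    without a dot, e.g. changeExt("challenge", "response"), falls through
    to appending and returns "challenge.response".
*/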
// r1cs info [circuit.r1cs]
async function r1csInfo(params, options) {
    const r1csName = params[0] || "circuit.r1cs";

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    await r1csInfo$1(r1csName, logger);

    return 0;
}
// r1cs print [circuit.r1cs] [circuit.sym]
async function r1csPrint(params, options) {
    const r1csName = params[0] || "circuit.r1cs";
    const symName = params[1] || changeExt(r1csName, "sym");

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const cir = await r1csfile.readR1cs(r1csName, true, true, false);

    const sym = await loadSymbols(symName);

    await r1csPrint$1(cir, sym, logger);

    return 0;
}
// r1cs export json [circuit.r1cs] [circuit.json]
async function r1csExportJSON(params, options) {
    const r1csName = params[0] || "circuit.r1cs";
    const jsonName = params[1] || changeExt(r1csName, "json");

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const r1csObj = await r1csExportJson(r1csName, logger);

    const S = JSON.stringify(r1csObj, null, 1);
    await fs__default["default"].promises.writeFile(jsonName, S);

    return 0;
}
// wtns calculate <circuit.wasm> <input.json> <witness.wtns>
async function wtnsCalculate(params, options) {
    const wasmName = params[0] || "circuit.wasm";
    const inputName = params[1] || "input.json";
    const witnessName = params[2] || "witness.wtns";

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const input = unstringifyBigInts(JSON.parse(await fs__default["default"].promises.readFile(inputName, "utf8")));

    await wtnsCalculate$1(input, wasmName, witnessName);

    return 0;
}
// wtns debug <circuit.wasm> <input.json> <witness.wtns> <circuit.sym>
// -get|g -set|s -trigger|t
async function wtnsDebug(params, options) {
    const wasmName = params[0] || "circuit.wasm";
    const inputName = params[1] || "input.json";
    const witnessName = params[2] || "witness.wtns";
    const symName = params[3] || changeExt(wasmName, "sym");

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const input = unstringifyBigInts(JSON.parse(await fs__default["default"].promises.readFile(inputName, "utf8")));

    await wtnsDebug$1(input, wasmName, witnessName, symName, options, logger);

    return 0;
}
// wtns export json [witness.wtns] [witness.json]
async function wtnsExportJson(params, options) {
    const wtnsName = params[0] || "witness.wtns";
    const jsonName = params[1] || "witness.json";

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const w = await wtnsExportJson$1(wtnsName);

    await fs__default["default"].promises.writeFile(jsonName, JSON.stringify(stringifyBigInts(w), null, 1));

    return 0;
}
/*
// zksnark setup [circuit.r1cs] [circuit.zkey] [verification_key.json]
async function zksnarkSetup(params, options) {

    const r1csName = params[0] || "circuit.r1cs";
    const zkeyName = params[1] || changeExt(r1csName, "zkey");
    const verificationKeyName = params[2] || "verification_key.json";

    const protocol = options.protocol || "groth16";

    const cir = await readR1cs(r1csName, true);

    if (!zkSnark[protocol]) throw new Error("Invalid protocol");
    const setup = zkSnark[protocol].setup(cir, options.verbose);

    await zkey.utils.write(zkeyName, setup.vk_proof);
    // await fs.promises.writeFile(provingKeyName, JSON.stringify(stringifyBigInts(setup.vk_proof), null, 1), "utf-8");

    await fs.promises.writeFile(verificationKeyName, JSON.stringify(stringifyBigInts(setup.vk_verifier), null, 1), "utf-8");

    return 0;
}
*/
// groth16 prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
async function groth16Prove(params, options) {
    const zkeyName = params[0] || "circuit_final.zkey";
    const witnessName = params[1] || "witness.wtns";
    const proofName = params[2] || "proof.json";
    const publicName = params[3] || "public.json";

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const { proof, publicSignals } = await groth16Prove$1(zkeyName, witnessName, logger);

    await fs__default["default"].promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
    await fs__default["default"].promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");

    return 0;
}
// groth16 fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]
async function groth16FullProve(params, options) {
    const inputName = params[0] || "input.json";
    const wasmName = params[1] || "circuit.wasm";
    const zkeyName = params[2] || "circuit_final.zkey";
    const proofName = params[3] || "proof.json";
    const publicName = params[4] || "public.json";

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const input = unstringifyBigInts(JSON.parse(await fs__default["default"].promises.readFile(inputName, "utf8")));

    const { proof, publicSignals } = await groth16FullProve$1(input, wasmName, zkeyName, logger);

    await fs__default["default"].promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
    await fs__default["default"].promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");

    return 0;
}
// groth16 verify [verification_key.json] [public.json] [proof.json]
async function groth16Verify(params, options) {
    const verificationKeyName = params[0] || "verification_key.json";
    const publicName = params[1] || "public.json";
    const proofName = params[2] || "proof.json";

    const verificationKey = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(verificationKeyName, "utf8")));
    const pub = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(publicName, "utf8")));
    const proof = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(proofName, "utf8")));

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const isValid = await groth16Verify$1(verificationKey, pub, proof, logger);

    if (isValid) {
        return 0;
    } else {
        return 1;
    }
}
// zkey export vkey [circuit_final.zkey] [verification_key.json]
async function zkeyExportVKey(params, options) {
    const zkeyName = params[0] || "circuit_final.zkey";
    const verificationKeyName = params[1] || "verification_key.json";

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const vKey = await zkeyExportVerificationKey(zkeyName);

    const S = JSON.stringify(ffjavascript.utils.stringifyBigInts(vKey), null, 1);
    await fs__default["default"].promises.writeFile(verificationKeyName, S);
}
// zkey export json [circuit_final.zkey] [circuit_final.zkey.json]
async function zkeyExportJson(params, options) {
    const zkeyName = params[0] || "circuit_final.zkey";
    const zkeyJsonName = params[1] || "circuit_final.zkey.json";

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const zKey = await zkeyExportJson$1(zkeyName);

    const S = JSON.stringify(ffjavascript.utils.stringifyBigInts(zKey), null, 1);
    await fs__default["default"].promises.writeFile(zkeyJsonName, S);
}
async function fileExists(file) {
    return fs__default["default"].promises.access(file, fs__default["default"].constants.F_OK)
        .then(() => true)
        .catch(() => false);
}
// solidity genverifier [circuit_final.zkey] [verifier.sol]
async function zkeyExportSolidityVerifier(params, options) {
    let zkeyName;
    let verifierName;

    if (params.length < 1) {
        zkeyName = "circuit_final.zkey";
    } else {
        zkeyName = params[0];
    }

    if (params.length < 2) {
        verifierName = "verifier.sol";
    } else {
        verifierName = params[1];
    }

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const templates = {};

    if (await fileExists(path__default["default"].join(__dirname$1, "templates"))) {
        templates.groth16 = await fs__default["default"].promises.readFile(path__default["default"].join(__dirname$1, "templates", "verifier_groth16.sol.ejs"), "utf8");
        templates.plonk = await fs__default["default"].promises.readFile(path__default["default"].join(__dirname$1, "templates", "verifier_plonk.sol.ejs"), "utf8");
    } else {
        templates.groth16 = await fs__default["default"].promises.readFile(path__default["default"].join(__dirname$1, "..", "templates", "verifier_groth16.sol.ejs"), "utf8");
        templates.plonk = await fs__default["default"].promises.readFile(path__default["default"].join(__dirname$1, "..", "templates", "verifier_plonk.sol.ejs"), "utf8");
    }

    const verifierCode = await exportSolidityVerifier(zkeyName, templates);

    fs__default["default"].writeFileSync(verifierName, verifierCode, "utf-8");

    return 0;
}
// solidity gencall <public.json> <proof.json>
async function zkeyExportSolidityCalldata(params, options) {
    let publicName;
    let proofName;

    if (params.length < 1) {
        publicName = "public.json";
    } else {
        publicName = params[0];
    }

    if (params.length < 2) {
        proofName = "proof.json";
    } else {
        proofName = params[1];
    }

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const pub = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(publicName, "utf8")));
    const proof = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(proofName, "utf8")));

    let res;
    if (proof.protocol == "groth16") {
        res = await groth16ExportSolidityCallData(proof, pub);
    } else if (proof.protocol == "plonk") {
        res = await plonkExportSolidityCallData(proof, pub);
    } else {
        throw new Error("Invalid Protocol");
    }
    console.log(res);

    return 0;
}
// powersoftau new <curve> <power> [powersoftau_0000.ptau]
async function powersOfTauNew(params, options) {
    let curveName;
    let power;
    let ptauName;

    curveName = params[0];

    power = parseInt(params[1]);
    if ((power < 1) || (power > 28)) {
        throw new Error("Power must be between 1 and 28");
    }

    if (params.length < 3) {
        ptauName = "powersOfTau" + power + "_0000.ptau";
    } else {
        ptauName = params[2];
    }

    const curve = await getCurveFromName(curveName);

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return await newAccumulator(curve, power, ptauName, logger);
}
async function powersOfTauExportChallenge(params, options) {
    let ptauName;
    let challengeName;

    ptauName = params[0];

    if (params.length < 2) {
        challengeName = "challenge";
    } else {
        challengeName = params[1];
    }

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return await exportChallenge(ptauName, challengeName, logger);
}
// powersoftau challenge contribute <curve> <challenge> [response]
async function powersOfTauChallengeContribute(params, options) {
    let challengeName;
    let responseName;

    const curve = await getCurveFromName(params[0]);

    challengeName = params[1];

    if (params.length < 3) {
        responseName = changeExt(challengeName, "response");
    } else {
        responseName = params[2];
    }

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return await challengeContribute(curve, challengeName, responseName, options.entropy, logger);
}
async function powersOfTauImport(params, options) {
    let oldPtauName;
    let response;
    let newPtauName;
    let importPoints = true;
    let doCheck = true;

    oldPtauName = params[0];
    response = params[1];
    newPtauName = params[2];

    if (options.nopoints) importPoints = false;
    if (options.nocheck) doCheck = false;

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const res = await importResponse(oldPtauName, response, newPtauName, options.name, importPoints, logger);

    if (res) return res;
    if (!doCheck) return;

    // TODO Verify
}
async function powersOfTauVerify(params, options) {
    let ptauName;

    ptauName = params[0];

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const res = await verify(ptauName, logger);

    if (res === true) {
        return 0;
    } else {
        return 1;
    }
}
async function powersOfTauBeacon(params, options) {
    let oldPtauName;
    let newPtauName;
    let beaconHashStr;
    let numIterationsExp;

    oldPtauName = params[0];
    newPtauName = params[1];
    beaconHashStr = params[2];
    numIterationsExp = params[3];

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return await beacon$1(oldPtauName, newPtauName, options.name, beaconHashStr, numIterationsExp, logger);
}
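// Example invocation (illustrative; file names and beacon hash are placeholders):
//   snarkjs powersoftau beacon pot12_0004.ptau pot12_beacon.ptau 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon"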
async function powersOfTauContribute(params, options) {
    let oldPtauName;
    let newPtauName;

    oldPtauName = params[0];
    newPtauName = params[1];

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return await contribute(oldPtauName, newPtauName, options.name, options.entropy, logger);
}
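// Example invocation (illustrative; file names are placeholders):
//   snarkjs powersoftau contribute pot12_0000.ptau pot12_0001.ptau --name="First contribution" -v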
async function powersOfTauPreparePhase2(params, options) {
    let oldPtauName;
    let newPtauName;

    oldPtauName = params[0];
    newPtauName = params[1];

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return await preparePhase2(oldPtauName, newPtauName, logger);
}
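// Example invocation (illustrative; file names are placeholders):
//   snarkjs powersoftau prepare phase2 pot12_beacon.ptau pot12_final.ptau -v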
async function powersOfTauConvert(params, options) {
    let oldPtauName;
    let newPtauName;

    oldPtauName = params[0];
    newPtauName = params[1];

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return await convert(oldPtauName, newPtauName, logger);
}
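// Example invocation, assuming this handler is wired to "powersoftau convert"
// as in upstream snarkjs (file names are placeholders):
//   snarkjs powersoftau convert pot12_old.ptau pot12_new.ptau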
async function powersOfTauTruncate(params, options) {
    let ptauName;
    ptauName = params[0];

    // Derive the output-name template: strip the extension from the input
    // file name and append "_" (e.g. "pot12_final.ptau" -> "pot12_final_").
    let template = ptauName;
    while ((template.length > 0) && (template[template.length - 1] != ".")) template = template.slice(0, template.length - 1);
    template = template.slice(0, template.length - 1);
    template = template + "_";

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return await truncate(ptauName, template, logger);
}
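// Example invocation, assuming this handler is wired to "powersoftau truncate"
// as in upstream snarkjs; the output files take the derived template as prefix:
//   snarkjs powersoftau truncate pot12_final.ptau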
// powersoftau export json <powersoftau_0000.ptau> <powersoftau_0000.json>
async function powersOfTauExportJson(params, options) {
    let ptauName;
    let jsonName;

    ptauName = params[0];
    jsonName = params[1];

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const pTau = await exportJson(ptauName, logger);

    const S = JSON.stringify(stringifyBigInts(pTau), null, 1);
    await fs__default["default"].promises.writeFile(jsonName, S);
}
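// Example invocation (illustrative; file names are placeholders):
//   snarkjs powersoftau export json pot12_final.ptau pot12_final.json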
// phase2 new <circuit.r1cs> <powersoftau.ptau> <circuit_0000.zkey>
async function zkeyNew(params, options) {
    let r1csName;
    let ptauName;
    let zkeyName;

    if (params.length < 1) {
        r1csName = "circuit.r1cs";
    } else {
        r1csName = params[0];
    }
    if (params.length < 2) {
        ptauName = "powersoftau.ptau";
    } else {
        ptauName = params[1];
    }
    if (params.length < 3) {
        zkeyName = "circuit_0000.zkey";
    } else {
        zkeyName = params[2];
    }

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return newZKey(r1csName, ptauName, zkeyName, logger);
}
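// Example invocation (illustrative; file names are placeholders):
//   snarkjs zkey new circuit.r1cs pot12_final.ptau circuit_0000.zkey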
// zkey export bellman [circuit_0000.zkey] [circuit.mpcparams]
async function zkeyExportBellman(params, options) {
    let zkeyName;
    let mpcparamsName;

    zkeyName = params[0];
    if (params.length < 2) {
        mpcparamsName = "circuit.mpcparams";
    } else {
        mpcparamsName = params[1];
    }

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return phase2exportMPCParams(zkeyName, mpcparamsName, logger);
}
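// Example invocation (illustrative; file names are placeholders):
//   snarkjs zkey export bellman circuit_0002.zkey circuit.mpcparams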
// zkey import bellman <circuit_old.zkey> <circuit.mpcparams> <circuit_new.zkey>
async function zkeyImportBellman(params, options) {
    let zkeyNameOld;
    let mpcParamsName;
    let zkeyNameNew;

    zkeyNameOld = params[0];
    mpcParamsName = params[1];
    zkeyNameNew = params[2];

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return phase2importMPCParams(zkeyNameOld, mpcParamsName, zkeyNameNew, options.name, logger);
}
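// Example invocation (illustrative; file names are placeholders):
//   snarkjs zkey import bellman circuit_0002.zkey response_phase2_0003 circuit_0003.zkey -n="Third contribution name"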
// phase2 verify r1cs [circuit.r1cs] [powersoftau.ptau] [circuit_final.zkey]
async function zkeyVerifyFromR1cs(params, options) {
    let r1csName;
    let ptauName;
    let zkeyName;

    if (params.length < 1) {
        r1csName = "circuit.r1cs";
    } else {
        r1csName = params[0];
    }
    if (params.length < 2) {
        ptauName = "powersoftau.ptau";
    } else {
        ptauName = params[1];
    }
    if (params.length < 3) {
        zkeyName = "circuit_final.zkey";
    } else {
        zkeyName = params[2];
    }

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const res = await phase2verifyFromR1cs(r1csName, ptauName, zkeyName, logger);
    if (res === true) {
        return 0;
    } else {
        return 1;
    }
}
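// Example invocation (illustrative; file names are placeholders):
//   snarkjs zkey verify circuit.r1cs pot12_final.ptau circuit_final.zkey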
// phase2 verify [circuit_0000.zkey] [powersoftau.ptau] [circuit_final.zkey]
async function zkeyVerifyFromInit(params, options) {
    let initZKeyName;
    let ptauName;
    let zkeyName;

    if (params.length < 1) {
        initZKeyName = "circuit_0000.zkey";
    } else {
        initZKeyName = params[0];
    }
    if (params.length < 2) {
        ptauName = "powersoftau.ptau";
    } else {
        ptauName = params[1];
    }
    if (params.length < 3) {
        zkeyName = "circuit_final.zkey";
    } else {
        zkeyName = params[2];
    }

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const res = await phase2verifyFromInit(initZKeyName, ptauName, zkeyName, logger);
    if (res === true) {
        return 0;
    } else {
        return 1;
    }
}
// zkey contribute <circuit_old.zkey> <circuit_new.zkey>
async function zkeyContribute(params, options) {
    let zkeyOldName;
    let zkeyNewName;

    zkeyOldName = params[0];
    zkeyNewName = params[1];

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return phase2contribute(zkeyOldName, zkeyNewName, options.name, options.entropy, logger);
}
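// Example invocation (illustrative; file names are placeholders):
//   snarkjs zkey contribute circuit_0000.zkey circuit_0001.zkey --name="1st Contributor Name" -v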
// zkey beacon <circuit_old.zkey> <circuit_new.zkey> <beaconHash(Hex)> <numIterationsExp>
async function zkeyBeacon(params, options) {
    let zkeyOldName;
    let zkeyNewName;
    let beaconHashStr;
    let numIterationsExp;

    zkeyOldName = params[0];
    zkeyNewName = params[1];
    beaconHashStr = params[2];
    numIterationsExp = params[3];

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return await beacon(zkeyOldName, zkeyNewName, options.name, beaconHashStr, numIterationsExp, logger);
}
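// Example invocation (illustrative; file names and beacon hash are placeholders):
//   snarkjs zkey beacon circuit_0003.zkey circuit_final.zkey 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon phase2"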
// zkey challenge contribute <curve> <challenge> [response]
async function zkeyBellmanContribute(params, options) {
    let challengeName;
    let responseName;

    const curve = await getCurveFromName(params[0]);

    challengeName = params[1];
    if (params.length < 3) {
        responseName = changeExt(challengeName, "response");
    } else {
        responseName = params[2];
    }

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return bellmanContribute(curve, challengeName, responseName, options.entropy, logger);
}
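// Example invocation (illustrative; file names and entropy are placeholders):
//   snarkjs zkey bellman contribute bn128 challenge_phase2_0003 response_phase2_0003 -e="some random text"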
// plonk setup <circuit.r1cs> <powersoftau.ptau> <circuit.zkey>
async function plonkSetup(params, options) {
    let r1csName;
    let ptauName;
    let zkeyName;

    if (params.length < 1) {
        r1csName = "circuit.r1cs";
    } else {
        r1csName = params[0];
    }
    if (params.length < 2) {
        ptauName = "powersoftau.ptau";
    } else {
        ptauName = params[1];
    }
    if (params.length < 3) {
        zkeyName = "circuit.zkey";
    } else {
        zkeyName = params[2];
    }

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    return plonkSetup$1(r1csName, ptauName, zkeyName, logger);
}
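// Example invocation (illustrative; file names are placeholders):
//   snarkjs plonk setup circuit.r1cs pot12_final.ptau circuit_final.zkey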
// plonk prove [circuit.zkey] [witness.wtns] [proof.json] [public.json]
async function plonkProve(params, options) {
    const zkeyName = params[0] || "circuit.zkey";
    const witnessName = params[1] || "witness.wtns";
    const proofName = params[2] || "proof.json";
    const publicName = params[3] || "public.json";

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const {proof, publicSignals} = await plonk16Prove(zkeyName, witnessName, logger);

    await fs__default["default"].promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
    await fs__default["default"].promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");

    return 0;
}
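// Example invocation (illustrative; file names are placeholders):
//   snarkjs plonk prove circuit_final.zkey witness.wtns proof.json public.json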
// plonk fullprove [input.json] [circuit.wasm] [circuit.zkey] [proof.json] [public.json]
async function plonkFullProve(params, options) {
    const inputName = params[0] || "input.json";
    const wasmName = params[1] || "circuit.wasm";
    const zkeyName = params[2] || "circuit.zkey";
    const proofName = params[3] || "proof.json";
    const publicName = params[4] || "public.json";

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const input = unstringifyBigInts(JSON.parse(await fs__default["default"].promises.readFile(inputName, "utf8")));

    const {proof, publicSignals} = await plonkFullProve$1(input, wasmName, zkeyName, logger);

    await fs__default["default"].promises.writeFile(proofName, JSON.stringify(stringifyBigInts(proof), null, 1), "utf-8");
    await fs__default["default"].promises.writeFile(publicName, JSON.stringify(stringifyBigInts(publicSignals), null, 1), "utf-8");

    return 0;
}
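// Example invocation (illustrative; file names are placeholders):
//   snarkjs plonk fullprove input.json circuit.wasm circuit_final.zkey proof.json public.json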
// plonk verify [verification_key.json] [public.json] [proof.json]
async function plonkVerify(params, options) {
    const verificationKeyName = params[0] || "verification_key.json";
    const publicName = params[1] || "public.json";
    const proofName = params[2] || "proof.json";

    const verificationKey = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(verificationKeyName, "utf8")));
    const pub = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(publicName, "utf8")));
    const proof = unstringifyBigInts(JSON.parse(fs__default["default"].readFileSync(proofName, "utf8")));

    if (options.verbose) Logger__default["default"].setLogLevel("DEBUG");

    const isValid = await plonkVerify$1(verificationKey, pub, proof, logger);
    if (isValid) {
        return 0;
    } else {
        return 1;
    }
}
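// Example invocation (illustrative; file names are placeholders):
//   snarkjs plonk verify verification_key.json public.json proof.json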