'use strict' ;
var workerThreads = require ( 'worker_threads' ) ;
var ffjavascript = require ( 'ffjavascript' ) ;
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : { } ;
function getDefaultExportFromCjs ( x ) {
return x && x . _ _esModule && Object . prototype . hasOwnProperty . call ( x , 'default' ) ? x [ 'default' ] : x ;
}
var lib = { } ;
var FixedMerkleTree = { } ;
var simpleHash = { } ;
var hasRequiredSimpleHash ;
function requireSimpleHash ( ) {
if ( hasRequiredSimpleHash ) return simpleHash ;
hasRequiredSimpleHash = 1 ;
Object . defineProperty ( simpleHash , "__esModule" , { value : true } ) ;
simpleHash . simpleHash = void 0 ;
/**
 * This is an insecure hash function, just for example only
 * @param data
 * @param seed
 * @param hashLength
 */
function simpleHash$1 ( data , seed , hashLength = 40 ) {
const str = data . join ( '' ) ;
let i , l , hval = seed !== null && seed !== void 0 ? seed : 0x811c9dcc5 ;
for ( i = 0 , l = str . length ; i < l ; i ++ ) {
hval ^= str . charCodeAt ( i ) ;
hval += ( hval << 1 ) + ( hval << 4 ) + ( hval << 6 ) + ( hval << 8 ) + ( hval << 24 ) ;
}
const hash = ( hval >>> 0 ) . toString ( 16 ) ;
return BigInt ( '0x' + hash . padEnd ( hashLength - ( hash . length - 1 ) , '0' ) ) . toString ( 10 ) ;
}
simpleHash . simpleHash = simpleHash$1 ;
simpleHash . default = ( left , right ) => simpleHash$1 ( [ left , right ] ) ;
return simpleHash ;
}
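// Illustrative note (not part of the original bundle): simpleHash$1 above is the default
// 2-to-1 node hash for the Merkle tree. It concatenates the stringified inputs, folds them
// with an FNV-style loop, and returns the result as a decimal string. A hedged usage sketch,
// using the export assembled further down in requireLib():
//
//   const parent = libExports.simpleHash(['1', '2']); // decimal string node value
//
// As the comment above warns, this hash is insecure and only intended as an example default.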
var BaseTree = { } ;
var hasRequiredBaseTree ;
function requireBaseTree ( ) {
if ( hasRequiredBaseTree ) return BaseTree ;
hasRequiredBaseTree = 1 ;
Object . defineProperty ( BaseTree , "__esModule" , { value : true } ) ;
BaseTree . BaseTree = void 0 ;
let BaseTree$1 = class BaseTree {
get capacity ( ) {
return 2 ** this . levels ;
}
get layers ( ) {
return this . _layers . slice ( ) ;
}
get zeros ( ) {
return this . _zeros . slice ( ) ;
}
get elements ( ) {
return this . _layers [ 0 ] . slice ( ) ;
}
get root ( ) {
var _a ;
return ( _a = this . _layers [ this . levels ] [ 0 ] ) !== null && _a !== void 0 ? _a : this . _zeros [ this . levels ] ;
}
/**
 * Find an element in the tree
 * @param elements elements of tree
 * @param element An element to find
 * @param comparator A function that checks leaf value equality
 * @param fromIndex The index to start the search at. If the index is greater than or equal to the array's length, -1 is returned
 * @returns {number} Index if element is found, otherwise -1
 */
static indexOf ( elements , element , fromIndex , comparator ) {
if ( comparator ) {
return elements . findIndex ( ( el ) => comparator ( element , el ) ) ;
}
else {
return elements . indexOf ( element , fromIndex ) ;
}
}
/**
 * Insert a new element into the tree
 * @param element Element to insert
 */
insert ( element ) {
if ( this . _layers [ 0 ] . length >= this . capacity ) {
throw new Error ( 'Tree is full' ) ;
}
this . update ( this . _layers [ 0 ] . length , element ) ;
}
/**
 * Insert multiple elements into the tree.
 * @param {Array} elements Elements to insert
 */
bulkInsert ( elements ) {
if ( ! elements . length ) {
return ;
}
if ( this . _layers [ 0 ] . length + elements . length > this . capacity ) {
throw new Error ( 'Tree is full' ) ;
}
// First we insert all elements except the last one
// updating only full subtree hashes (all layers where inserted element has odd index)
// the last element will update the full path to the root making the tree consistent again
for ( let i = 0 ; i < elements . length - 1 ; i ++ ) {
this . _layers [ 0 ] . push ( elements [ i ] ) ;
let level = 0 ;
let index = this . _layers [ 0 ] . length - 1 ;
while ( index % 2 === 1 ) {
level ++ ;
index >>= 1 ;
const left = this . _layers [ level - 1 ] [ index * 2 ] ;
const right = this . _layers [ level - 1 ] [ index * 2 + 1 ] ;
this . _layers [ level ] [ index ] = this . _hashFn ( left , right ) ;
}
}
this . insert ( elements [ elements . length - 1 ] ) ;
}
/**
 * Change an element in the tree
 * @param {number} index Index of element to change
 * @param element Updated element value
 */
update ( index , element ) {
if ( isNaN ( Number ( index ) ) || index < 0 || index > this . _layers [ 0 ] . length || index >= this . capacity ) {
throw new Error ( 'Insert index out of bounds: ' + index ) ;
}
this . _layers [ 0 ] [ index ] = element ;
this . _processUpdate ( index ) ;
}
/**
 * Get merkle path to a leaf
 * @param {number} index Leaf index to generate path for
 * @returns {{pathElements: Object[], pathIndex: number[]}} An object containing adjacent elements and left-right index
 */
path ( index ) {
if ( isNaN ( Number ( index ) ) || index < 0 || index >= this . _layers [ 0 ] . length ) {
throw new Error ( 'Index out of bounds: ' + index ) ;
}
let elIndex = + index ;
const pathElements = [ ] ;
const pathIndices = [ ] ;
const pathPositions = [ ] ;
for ( let level = 0 ; level < this . levels ; level ++ ) {
pathIndices [ level ] = elIndex % 2 ;
const leafIndex = elIndex ^ 1 ;
if ( leafIndex < this . _layers [ level ] . length ) {
pathElements [ level ] = this . _layers [ level ] [ leafIndex ] ;
pathPositions [ level ] = leafIndex ;
}
else {
pathElements [ level ] = this . _zeros [ level ] ;
pathPositions [ level ] = 0 ;
}
elIndex >>= 1 ;
}
return {
pathElements ,
pathIndices ,
pathPositions ,
pathRoot : this . root ,
} ;
}
_buildZeros ( ) {
this . _zeros = [ this . zeroElement ] ;
for ( let i = 1 ; i <= this . levels ; i ++ ) {
this . _zeros [ i ] = this . _hashFn ( this . _zeros [ i - 1 ] , this . _zeros [ i - 1 ] ) ;
}
}
_processNodes ( nodes , layerIndex ) {
const length = nodes . length ;
let currentLength = Math . ceil ( length / 2 ) ;
const currentLayer = new Array ( currentLength ) ;
currentLength -- ;
const starFrom = length - ( ( length % 2 ) ^ 1 ) ;
let j = 0 ;
for ( let i = starFrom ; i >= 0 ; i -= 2 ) {
if ( nodes [ i - 1 ] === undefined )
break ;
const left = nodes [ i - 1 ] ;
const right = ( i === starFrom && length % 2 === 1 ) ? this . _zeros [ layerIndex - 1 ] : nodes [ i ] ;
currentLayer [ currentLength - j ] = this . _hashFn ( left , right ) ;
j ++ ;
}
return currentLayer ;
}
_processUpdate ( index ) {
for ( let level = 1 ; level <= this . levels ; level ++ ) {
index >>= 1 ;
const left = this . _layers [ level - 1 ] [ index * 2 ] ;
const right = index * 2 + 1 < this . _layers [ level - 1 ] . length
? this . _layers [ level - 1 ] [ index * 2 + 1 ]
: this . _zeros [ level - 1 ] ;
this . _layers [ level ] [ index ] = this . _hashFn ( left , right ) ;
}
}
} ;
BaseTree . BaseTree = BaseTree$1 ;
return BaseTree ;
}
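// Hedged sketch (assumption, not part of the bundle): a path returned by BaseTree#path(index)
// can be checked by folding the sibling elements back up to the root. pathIndices[i] is 0 when
// the running node is a left child at level i, and 1 when it is a right child.
//
//   function verifyPath({ pathElements, pathIndices, pathRoot }, leaf, hashFn) {
//     let node = leaf;
//     for (let i = 0; i < pathElements.length; i++) {
//       node = pathIndices[i] === 0
//         ? hashFn(node, pathElements[i])  // node on the left, sibling on the right
//         : hashFn(pathElements[i], node); // sibling on the left, node on the right
//     }
//     return String(node) === String(pathRoot);
//   }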
var hasRequiredFixedMerkleTree ;
function requireFixedMerkleTree ( ) {
if ( hasRequiredFixedMerkleTree ) return FixedMerkleTree ;
hasRequiredFixedMerkleTree = 1 ;
var _ _importDefault = ( FixedMerkleTree && FixedMerkleTree . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( FixedMerkleTree , "__esModule" , { value : true } ) ;
const simpleHash _1 = _ _importDefault ( /*@__PURE__*/ requireSimpleHash ( ) ) ;
const BaseTree _1 = /*@__PURE__*/ requireBaseTree ( ) ;
class MerkleTree extends BaseTree _1 . BaseTree {
constructor ( levels , elements = [ ] , { hashFunction = simpleHash _1 . default , zeroElement = 0 , } = { } ) {
super ( ) ;
this . levels = levels ;
if ( elements . length > this . capacity ) {
throw new Error ( 'Tree is full' ) ;
}
this . _hashFn = hashFunction ;
this . zeroElement = zeroElement ;
this . _layers = [ ] ;
const leaves = elements . slice ( ) ;
this . _layers = [ leaves ] ;
this . _buildZeros ( ) ;
this . _buildHashes ( ) ;
}
_buildHashes ( ) {
for ( let layerIndex = 1 ; layerIndex <= this . levels ; layerIndex ++ ) {
const nodes = this . _layers [ layerIndex - 1 ] ;
this . _layers [ layerIndex ] = this . _processNodes ( nodes , layerIndex ) ;
}
}
/**
 * Insert multiple elements into the tree.
 * @param {Array} elements Elements to insert
 */
bulkInsert ( elements ) {
if ( ! elements . length ) {
return ;
}
if ( this . _layers [ 0 ] . length + elements . length > this . capacity ) {
throw new Error ( 'Tree is full' ) ;
}
// First we insert all elements except the last one
// updating only full subtree hashes (all layers where inserted element has odd index)
// the last element will update the full path to the root making the tree consistent again
for ( let i = 0 ; i < elements . length - 1 ; i ++ ) {
this . _layers [ 0 ] . push ( elements [ i ] ) ;
let level = 0 ;
let index = this . _layers [ 0 ] . length - 1 ;
while ( index % 2 === 1 ) {
level ++ ;
index >>= 1 ;
this . _layers [ level ] [ index ] = this . _hashFn ( this . _layers [ level - 1 ] [ index * 2 ] , this . _layers [ level - 1 ] [ index * 2 + 1 ] ) ;
}
}
this . insert ( elements [ elements . length - 1 ] ) ;
}
indexOf ( element , comparator ) {
return BaseTree _1 . BaseTree . indexOf ( this . _layers [ 0 ] , element , 0 , comparator ) ;
}
proof ( element ) {
const index = this . indexOf ( element ) ;
return this . path ( index ) ;
}
getTreeEdge ( edgeIndex ) {
const edgeElement = this . _layers [ 0 ] [ edgeIndex ] ;
if ( edgeElement === undefined ) {
throw new Error ( 'Element not found' ) ;
}
const edgePath = this . path ( edgeIndex ) ;
return { edgePath , edgeElement , edgeIndex , edgeElementsCount : this . _layers [ 0 ] . length } ;
}
/**
 * 🪓
 * @param count
 */
getTreeSlices ( count = 4 ) {
const length = this . _layers [ 0 ] . length ;
let size = Math . ceil ( length / count ) ;
if ( size % 2 )
size ++ ;
const slices = [ ] ;
for ( let i = 0 ; i < length ; i += size ) {
const edgeLeft = i ;
const edgeRight = i + size ;
slices . push ( { edge : this . getTreeEdge ( edgeLeft ) , elements : this . elements . slice ( edgeLeft , edgeRight ) } ) ;
}
return slices ;
}
/**
 * Serialize entire tree state including intermediate layers into a plain object
 * Deserializing it back will not require recomputing any hashes
 * Elements are not converted to a plain type, this is the responsibility of the caller
 */
serialize ( ) {
return {
levels : this . levels ,
_zeros : this . _zeros ,
_layers : this . _layers ,
} ;
}
/**
 * Deserialize data into a MerkleTree instance
 * Make sure to provide the same hashFunction as was used in the source tree,
 * otherwise the tree state will be invalid
 */
static deserialize ( data , hashFunction ) {
const instance = Object . assign ( Object . create ( this . prototype ) , data ) ;
instance . _hashFn = hashFunction || simpleHash _1 . default ;
instance . zeroElement = instance . _zeros [ 0 ] ;
return instance ;
}
toString ( ) {
return JSON . stringify ( this . serialize ( ) ) ;
}
}
FixedMerkleTree . default = MerkleTree ;
return FixedMerkleTree ;
}
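// Hedged usage sketch for the fixed-depth tree above (illustrative; libExports is assembled
// below in requireLib()):
//
//   const MerkleTree = libExports.MerkleTree;                  // default export of this module
//   const tree = new MerkleTree(20, [], { zeroElement: 0 });   // 20 levels => capacity 2 ** 20
//   tree.bulkInsert(['1', '2', '3']);
//   const proof = tree.proof('2');                             // { pathElements, pathIndices, pathRoot, ... }
//   const copy = MerkleTree.deserialize(JSON.parse(tree.toString())); // same hashFunction assumed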
var PartialMerkleTree = { } ;
var hasRequiredPartialMerkleTree ;
function requirePartialMerkleTree ( ) {
if ( hasRequiredPartialMerkleTree ) return PartialMerkleTree ;
hasRequiredPartialMerkleTree = 1 ;
var _ _importDefault = ( PartialMerkleTree && PartialMerkleTree . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( PartialMerkleTree , "__esModule" , { value : true } ) ;
PartialMerkleTree . PartialMerkleTree = void 0 ;
const simpleHash _1 = _ _importDefault ( /*@__PURE__*/ requireSimpleHash ( ) ) ;
const BaseTree _1 = /*@__PURE__*/ requireBaseTree ( ) ;
let PartialMerkleTree$1 = class PartialMerkleTree extends BaseTree _1 . BaseTree {
constructor ( levels , { edgePath , edgeElement , edgeIndex , edgeElementsCount , } , leaves , { hashFunction , zeroElement } = { } ) {
super ( ) ;
if ( edgeIndex + leaves . length !== edgeElementsCount )
throw new Error ( 'Invalid number of elements' ) ;
this . _edgeLeafProof = edgePath ;
this . _initialRoot = edgePath . pathRoot ;
this . zeroElement = zeroElement !== null && zeroElement !== void 0 ? zeroElement : 0 ;
this . _edgeLeaf = { data : edgeElement , index : edgeIndex } ;
this . _leavesAfterEdge = leaves ;
this . levels = levels ;
this . _hashFn = hashFunction || simpleHash _1 . default ;
this . _createProofMap ( ) ;
this . _buildTree ( ) ;
}
get edgeIndex ( ) {
return this . _edgeLeaf . index ;
}
get edgeElement ( ) {
return this . _edgeLeaf . data ;
}
get edgeLeafProof ( ) {
return this . _edgeLeafProof ;
}
_createProofMap ( ) {
this . _proofMap = this . edgeLeafProof . pathPositions . reduce ( ( p , c , i ) => {
p . set ( i , [ c , this . edgeLeafProof . pathElements [ i ] ] ) ;
return p ;
} , new Map ( ) ) ;
this . _proofMap . set ( this . levels , [ 0 , this . edgeLeafProof . pathRoot ] ) ;
}
_buildTree ( ) {
const edgeLeafIndex = this . _edgeLeaf . index ;
this . _leaves = Array ( edgeLeafIndex ) . concat ( this . _leavesAfterEdge ) ;
if ( this . _proofMap . has ( 0 ) ) {
const [ proofPos , proofEl ] = this . _proofMap . get ( 0 ) ;
this . _leaves [ proofPos ] = proofEl ;
}
this . _layers = [ this . _leaves ] ;
this . _buildZeros ( ) ;
this . _buildHashes ( ) ;
}
_buildHashes ( ) {
for ( let layerIndex = 1 ; layerIndex <= this . levels ; layerIndex ++ ) {
const nodes = this . _layers [ layerIndex - 1 ] ;
const currentLayer = this . _processNodes ( nodes , layerIndex ) ;
if ( this . _proofMap . has ( layerIndex ) ) {
const [ proofPos , proofEl ] = this . _proofMap . get ( layerIndex ) ;
if ( ! currentLayer [ proofPos ] )
currentLayer [ proofPos ] = proofEl ;
}
this . _layers [ layerIndex ] = currentLayer ;
}
}
/**
 * Change an element in the tree
 * @param {number} index Index of element to change
 * @param element Updated element value
 */
update ( index , element ) {
if ( isNaN ( Number ( index ) ) || index < 0 || index > this . _layers [ 0 ] . length || index >= this . capacity ) {
throw new Error ( 'Insert index out of bounds: ' + index ) ;
}
if ( index < this . _edgeLeaf . index ) {
throw new Error ( ` Index ${ index } is below the edge: ${ this . _edgeLeaf . index } ` ) ;
}
this . _layers [ 0 ] [ index ] = element ;
this . _processUpdate ( index ) ;
}
path ( index ) {
var _a ;
if ( isNaN ( Number ( index ) ) || index < 0 || index >= this . _layers [ 0 ] . length ) {
throw new Error ( 'Index out of bounds: ' + index ) ;
}
if ( index < this . _edgeLeaf . index ) {
throw new Error ( ` Index ${ index } is below the edge: ${ this . _edgeLeaf . index } ` ) ;
}
let elIndex = Number ( index ) ;
const pathElements = [ ] ;
const pathIndices = [ ] ;
const pathPositions = [ ] ;
for ( let level = 0 ; level < this . levels ; level ++ ) {
pathIndices [ level ] = elIndex % 2 ;
const leafIndex = elIndex ^ 1 ;
if ( leafIndex < this . _layers [ level ] . length ) {
pathElements [ level ] = this . _layers [ level ] [ leafIndex ] ;
pathPositions [ level ] = leafIndex ;
}
else {
pathElements [ level ] = this . _zeros [ level ] ;
pathPositions [ level ] = 0 ;
}
const [ proofPos , proofEl ] = this . _proofMap . get ( level ) ;
pathElements [ level ] = ( _a = pathElements [ level ] ) !== null && _a !== void 0 ? _a : ( proofPos === leafIndex ? proofEl : this . _zeros [ level ] ) ;
elIndex >>= 1 ;
}
return {
pathElements ,
pathIndices ,
pathPositions ,
pathRoot : this . root ,
} ;
}
indexOf ( element , comparator ) {
return BaseTree _1 . BaseTree . indexOf ( this . _layers [ 0 ] , element , this . edgeIndex , comparator ) ;
}
proof ( element ) {
const index = this . indexOf ( element ) ;
return this . path ( index ) ;
}
/**
 * Shifts the edge of the tree to the left
 * @param edge new TreeEdge below current edge
 * @param elements leaves between old and new edge
 */
shiftEdge ( edge , elements ) {
if ( this . _edgeLeaf . index <= edge . edgeIndex ) {
throw new Error ( ` New edgeIndex should be smaller than ${ this . _edgeLeaf . index } ` ) ;
}
if ( elements . length !== ( this . _edgeLeaf . index - edge . edgeIndex ) ) {
throw new Error ( ` Elements length should be ${ this . _edgeLeaf . index - edge . edgeIndex } ` ) ;
}
this . _edgeLeafProof = edge . edgePath ;
this . _edgeLeaf = { index : edge . edgeIndex , data : edge . edgeElement } ;
this . _leavesAfterEdge = [ ... elements , ... this . _leavesAfterEdge ] ;
this . _createProofMap ( ) ;
this . _buildTree ( ) ;
}
serialize ( ) {
return {
_edgeLeafProof : this . _edgeLeafProof ,
_edgeLeaf : this . _edgeLeaf ,
_layers : this . _layers ,
_zeros : this . _zeros ,
levels : this . levels ,
} ;
}
static deserialize ( data , hashFunction ) {
const instance = Object . assign ( Object . create ( this . prototype ) , data ) ;
instance . _hashFn = hashFunction || simpleHash _1 . default ;
instance . _initialRoot = data . _edgeLeafProof . pathRoot ;
instance . zeroElement = instance . _zeros [ 0 ] ;
instance . _leavesAfterEdge = instance . _layers [ 0 ] . slice ( data . _edgeLeaf . index ) ;
instance . _createProofMap ( ) ;
return instance ;
}
toString ( ) {
return JSON . stringify ( this . serialize ( ) ) ;
}
} ;
PartialMerkleTree . PartialMerkleTree = PartialMerkleTree$1 ;
return PartialMerkleTree ;
}
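// Hedged sketch of how a PartialMerkleTree is typically produced from a full tree: the full
// tree yields an edge via getTreeEdge(i), and the partial tree keeps only the leaves from that
// edge onward, reusing the edge proof for everything to its left (names illustrative):
//
//   const edge = fullTree.getTreeEdge(1000);            // proof and metadata for leaf #1000
//   const tail = fullTree.elements.slice(1000);         // leaves at and after the edge
//   const partial = new libExports.PartialMerkleTree(fullTree.levels, edge, tail, { hashFunction });
//   // partial.root should match fullTree.root when the inputs are consistent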
var hasRequiredLib ;
function requireLib ( ) {
if ( hasRequiredLib ) return lib ;
hasRequiredLib = 1 ;
( function ( exports ) {
var _ _importDefault = ( lib && lib . _ _importDefault ) || function ( mod ) {
return ( mod && mod . _ _esModule ) ? mod : { "default" : mod } ;
} ;
Object . defineProperty ( exports , "__esModule" , { value : true } ) ;
exports . MerkleTree = exports . simpleHash = exports . PartialMerkleTree = void 0 ;
const FixedMerkleTree _1 = _ _importDefault ( /*@__PURE__*/ requireFixedMerkleTree ( ) ) ;
Object . defineProperty ( exports , "MerkleTree" , { enumerable : true , get : function ( ) { return FixedMerkleTree _1 . default ; } } ) ;
var PartialMerkleTree _1 = /*@__PURE__*/ requirePartialMerkleTree ( ) ;
Object . defineProperty ( exports , "PartialMerkleTree" , { enumerable : true , get : function ( ) { return PartialMerkleTree _1 . PartialMerkleTree ; } } ) ;
var simpleHash _1 = /*@__PURE__*/ requireSimpleHash ( ) ;
Object . defineProperty ( exports , "simpleHash" , { enumerable : true , get : function ( ) { return simpleHash _1 . simpleHash ; } } ) ;
exports . default = FixedMerkleTree _1 . default ;
} ( lib ) ) ;
return lib ;
}
var libExports = /*@__PURE__*/ requireLib ( ) ;
const version$2 = "logger/5.7.0" ;
let _permanentCensorErrors = false ;
let _censorErrors = false ;
const LogLevels = { debug : 1 , "default" : 2 , info : 2 , warning : 3 , error : 4 , off : 5 } ;
let _logLevel = LogLevels [ "default" ] ;
let _globalLogger = null ;
function _checkNormalize ( ) {
try {
const missing = [ ] ;
// Make sure all forms of normalization are supported
[ "NFD" , "NFC" , "NFKD" , "NFKC" ] . forEach ( ( form ) => {
try {
if ( "test" . normalize ( form ) !== "test" ) {
throw new Error ( "bad normalize" ) ;
}
}
catch ( error ) {
missing . push ( form ) ;
}
} ) ;
if ( missing . length ) {
throw new Error ( "missing " + missing . join ( ", " ) ) ;
}
if ( String . fromCharCode ( 0xe9 ) . normalize ( "NFD" ) !== String . fromCharCode ( 0x65 , 0x0301 ) ) {
throw new Error ( "broken implementation" ) ;
}
}
catch ( error ) {
return error . message ;
}
return null ;
}
const _normalizeError = _checkNormalize ( ) ;
var LogLevel ;
( function ( LogLevel ) {
LogLevel [ "DEBUG" ] = "DEBUG" ;
LogLevel [ "INFO" ] = "INFO" ;
LogLevel [ "WARNING" ] = "WARNING" ;
LogLevel [ "ERROR" ] = "ERROR" ;
LogLevel [ "OFF" ] = "OFF" ;
} ) ( LogLevel || ( LogLevel = { } ) ) ;
var ErrorCode ;
( function ( ErrorCode ) {
///////////////////
// Generic Errors
// Unknown Error
ErrorCode [ "UNKNOWN_ERROR" ] = "UNKNOWN_ERROR" ;
// Not Implemented
ErrorCode [ "NOT_IMPLEMENTED" ] = "NOT_IMPLEMENTED" ;
// Unsupported Operation
// - operation
ErrorCode [ "UNSUPPORTED_OPERATION" ] = "UNSUPPORTED_OPERATION" ;
// Network Error (i.e. Ethereum Network, such as an invalid chain ID)
// - event ("noNetwork" is not re-thrown in provider.ready; otherwise thrown)
ErrorCode [ "NETWORK_ERROR" ] = "NETWORK_ERROR" ;
// Some sort of bad response from the server
ErrorCode [ "SERVER_ERROR" ] = "SERVER_ERROR" ;
// Timeout
ErrorCode [ "TIMEOUT" ] = "TIMEOUT" ;
///////////////////
// Operational Errors
// Buffer Overrun
ErrorCode [ "BUFFER_OVERRUN" ] = "BUFFER_OVERRUN" ;
// Numeric Fault
// - operation: the operation being executed
// - fault: the reason this faulted
ErrorCode [ "NUMERIC_FAULT" ] = "NUMERIC_FAULT" ;
///////////////////
// Argument Errors
// Missing new operator to an object
// - name: The name of the class
ErrorCode [ "MISSING_NEW" ] = "MISSING_NEW" ;
// Invalid argument (e.g. value is incompatible with type) to a function:
// - argument: The argument name that was invalid
// - value: The value of the argument
ErrorCode [ "INVALID_ARGUMENT" ] = "INVALID_ARGUMENT" ;
// Missing argument to a function:
// - count: The number of arguments received
// - expectedCount: The number of arguments expected
ErrorCode [ "MISSING_ARGUMENT" ] = "MISSING_ARGUMENT" ;
// Too many arguments
// - count: The number of arguments received
// - expectedCount: The number of arguments expected
ErrorCode [ "UNEXPECTED_ARGUMENT" ] = "UNEXPECTED_ARGUMENT" ;
///////////////////
// Blockchain Errors
// Call exception
// - transaction: the transaction
// - address?: the contract address
// - args?: The arguments passed into the function
// - method?: The Solidity method signature
// - errorSignature?: The EIP848 error signature
// - errorArgs?: The EIP848 error parameters
// - reason: The reason (only for EIP848 "Error(string)")
ErrorCode [ "CALL_EXCEPTION" ] = "CALL_EXCEPTION" ;
// Insufficient funds (< value + gasLimit * gasPrice)
// - transaction: the transaction attempted
ErrorCode [ "INSUFFICIENT_FUNDS" ] = "INSUFFICIENT_FUNDS" ;
// Nonce has already been used
// - transaction: the transaction attempted
ErrorCode [ "NONCE_EXPIRED" ] = "NONCE_EXPIRED" ;
// The replacement fee for the transaction is too low
// - transaction: the transaction attempted
ErrorCode [ "REPLACEMENT_UNDERPRICED" ] = "REPLACEMENT_UNDERPRICED" ;
// The gas limit could not be estimated
// - transaction: the transaction passed to estimateGas
ErrorCode [ "UNPREDICTABLE_GAS_LIMIT" ] = "UNPREDICTABLE_GAS_LIMIT" ;
// The transaction was replaced by one with a higher gas price
// - reason: "cancelled", "replaced" or "repriced"
// - cancelled: true if reason == "cancelled" or reason == "replaced")
// - hash: original transaction hash
// - replacement: the full TransactionsResponse for the replacement
// - receipt: the receipt of the replacement
ErrorCode [ "TRANSACTION_REPLACED" ] = "TRANSACTION_REPLACED" ;
///////////////////
// Interaction Errors
// The user rejected the action, such as signing a message or sending
// a transaction
ErrorCode [ "ACTION_REJECTED" ] = "ACTION_REJECTED" ;
} ) ( ErrorCode || ( ErrorCode = { } ) ) ;
const HEX = "0123456789abcdef" ;
class Logger {
constructor ( version ) {
Object . defineProperty ( this , "version" , {
enumerable : true ,
value : version ,
writable : false
} ) ;
}
_log ( logLevel , args ) {
const level = logLevel . toLowerCase ( ) ;
if ( LogLevels [ level ] == null ) {
this . throwArgumentError ( "invalid log level name" , "logLevel" , logLevel ) ;
}
if ( _logLevel > LogLevels [ level ] ) {
return ;
}
console . log . apply ( console , args ) ;
}
debug ( ... args ) {
this . _log ( Logger . levels . DEBUG , args ) ;
}
info ( ... args ) {
this . _log ( Logger . levels . INFO , args ) ;
}
warn ( ... args ) {
this . _log ( Logger . levels . WARNING , args ) ;
}
makeError ( message , code , params ) {
// Errors are being censored
if ( _censorErrors ) {
return this . makeError ( "censored error" , code , { } ) ;
}
if ( ! code ) {
code = Logger . errors . UNKNOWN _ERROR ;
}
if ( ! params ) {
params = { } ;
}
const messageDetails = [ ] ;
Object . keys ( params ) . forEach ( ( key ) => {
const value = params [ key ] ;
try {
if ( value instanceof Uint8Array ) {
let hex = "" ;
for ( let i = 0 ; i < value . length ; i ++ ) {
hex += HEX [ value [ i ] >> 4 ] ;
hex += HEX [ value [ i ] & 0x0f ] ;
}
messageDetails . push ( key + "=Uint8Array(0x" + hex + ")" ) ;
}
else {
messageDetails . push ( key + "=" + JSON . stringify ( value ) ) ;
}
}
catch ( error ) {
messageDetails . push ( key + "=" + JSON . stringify ( params [ key ] . toString ( ) ) ) ;
}
} ) ;
messageDetails . push ( ` code= ${ code } ` ) ;
messageDetails . push ( ` version= ${ this . version } ` ) ;
const reason = message ;
let url = "" ;
switch ( code ) {
case ErrorCode . NUMERIC _FAULT : {
url = "NUMERIC_FAULT" ;
const fault = message ;
switch ( fault ) {
case "overflow" :
case "underflow" :
case "division-by-zero" :
url += "-" + fault ;
break ;
case "negative-power" :
case "negative-width" :
url += "-unsupported" ;
break ;
case "unbound-bitwise-result" :
url += "-unbound-result" ;
break ;
}
break ;
}
case ErrorCode . CALL _EXCEPTION :
case ErrorCode . INSUFFICIENT _FUNDS :
case ErrorCode . MISSING _NEW :
case ErrorCode . NONCE _EXPIRED :
case ErrorCode . REPLACEMENT _UNDERPRICED :
case ErrorCode . TRANSACTION _REPLACED :
case ErrorCode . UNPREDICTABLE _GAS _LIMIT :
url = code ;
break ;
}
if ( url ) {
message += " [ See: https:/\/links.ethers.org/v5-errors-" + url + " ]" ;
}
if ( messageDetails . length ) {
message += " (" + messageDetails . join ( ", " ) + ")" ;
}
// @TODO: Any??
const error = new Error ( message ) ;
error . reason = reason ;
error . code = code ;
Object . keys ( params ) . forEach ( function ( key ) {
error [ key ] = params [ key ] ;
} ) ;
return error ;
}
throwError ( message , code , params ) {
throw this . makeError ( message , code , params ) ;
}
throwArgumentError ( message , name , value ) {
return this . throwError ( message , Logger . errors . INVALID _ARGUMENT , {
argument : name ,
value : value
} ) ;
}
assert ( condition , message , code , params ) {
if ( ! ! condition ) {
return ;
}
this . throwError ( message , code , params ) ;
}
assertArgument ( condition , message , name , value ) {
if ( ! ! condition ) {
return ;
}
this . throwArgumentError ( message , name , value ) ;
}
checkNormalize ( message ) {
if ( _normalizeError ) {
this . throwError ( "platform missing String.prototype.normalize" , Logger . errors . UNSUPPORTED _OPERATION , {
operation : "String.prototype.normalize" , form : _normalizeError
} ) ;
}
}
checkSafeUint53 ( value , message ) {
if ( typeof ( value ) !== "number" ) {
return ;
}
if ( message == null ) {
message = "value not safe" ;
}
if ( value < 0 || value >= 0x1fffffffffffff ) {
this . throwError ( message , Logger . errors . NUMERIC _FAULT , {
operation : "checkSafeInteger" ,
fault : "out-of-safe-range" ,
value : value
} ) ;
}
if ( value % 1 ) {
this . throwError ( message , Logger . errors . NUMERIC _FAULT , {
operation : "checkSafeInteger" ,
fault : "non-integer" ,
value : value
} ) ;
}
}
checkArgumentCount ( count , expectedCount , message ) {
if ( message ) {
message = ": " + message ;
}
else {
message = "" ;
}
if ( count < expectedCount ) {
this . throwError ( "missing argument" + message , Logger . errors . MISSING _ARGUMENT , {
count : count ,
expectedCount : expectedCount
} ) ;
}
if ( count > expectedCount ) {
this . throwError ( "too many arguments" + message , Logger . errors . UNEXPECTED _ARGUMENT , {
count : count ,
expectedCount : expectedCount
} ) ;
}
}
checkNew ( target , kind ) {
if ( target === Object || target == null ) {
this . throwError ( "missing new" , Logger . errors . MISSING _NEW , { name : kind . name } ) ;
}
}
checkAbstract ( target , kind ) {
if ( target === kind ) {
this . throwError ( "cannot instantiate abstract class " + JSON . stringify ( kind . name ) + " directly; use a sub-class" , Logger . errors . UNSUPPORTED _OPERATION , { name : target . name , operation : "new" } ) ;
}
else if ( target === Object || target == null ) {
this . throwError ( "missing new" , Logger . errors . MISSING _NEW , { name : kind . name } ) ;
}
}
static globalLogger ( ) {
if ( ! _globalLogger ) {
_globalLogger = new Logger ( version$2 ) ;
}
return _globalLogger ;
}
static setCensorship ( censorship , permanent ) {
if ( ! censorship && permanent ) {
this . globalLogger ( ) . throwError ( "cannot permanently disable censorship" , Logger . errors . UNSUPPORTED _OPERATION , {
operation : "setCensorship"
} ) ;
}
if ( _permanentCensorErrors ) {
if ( ! censorship ) {
return ;
}
this . globalLogger ( ) . throwError ( "error censorship permanent" , Logger . errors . UNSUPPORTED _OPERATION , {
operation : "setCensorship"
} ) ;
}
_censorErrors = ! ! censorship ;
_permanentCensorErrors = ! ! permanent ;
}
static setLogLevel ( logLevel ) {
const level = LogLevels [ logLevel . toLowerCase ( ) ] ;
if ( level == null ) {
Logger . globalLogger ( ) . warn ( "invalid log level - " + logLevel ) ;
return ;
}
_logLevel = level ;
}
static from ( version ) {
return new Logger ( version ) ;
}
}
Logger . errors = ErrorCode ;
Logger . levels = LogLevel ;
const version$1 = "bytes/5.7.0" ;
const logger$1 = new Logger ( version$1 ) ;
///////////////////////////////
function isHexable ( value ) {
return ! ! ( value . toHexString ) ;
}
function addSlice ( array ) {
if ( array . slice ) {
return array ;
}
array . slice = function ( ) {
const args = Array . prototype . slice . call ( arguments ) ;
return addSlice ( new Uint8Array ( Array . prototype . slice . apply ( array , args ) ) ) ;
} ;
return array ;
}
function isInteger ( value ) {
return ( typeof ( value ) === "number" && value == value && ( value % 1 ) === 0 ) ;
}
function isBytes ( value ) {
if ( value == null ) {
return false ;
}
if ( value . constructor === Uint8Array ) {
return true ;
}
if ( typeof ( value ) === "string" ) {
return false ;
}
if ( ! isInteger ( value . length ) || value . length < 0 ) {
return false ;
}
for ( let i = 0 ; i < value . length ; i ++ ) {
const v = value [ i ] ;
if ( ! isInteger ( v ) || v < 0 || v >= 256 ) {
return false ;
}
}
return true ;
}
function arrayify ( value , options ) {
if ( ! options ) {
options = { } ;
}
if ( typeof ( value ) === "number" ) {
logger$1 . checkSafeUint53 ( value , "invalid arrayify value" ) ;
const result = [ ] ;
while ( value ) {
result . unshift ( value & 0xff ) ;
value = parseInt ( String ( value / 256 ) ) ;
}
if ( result . length === 0 ) {
result . push ( 0 ) ;
}
return addSlice ( new Uint8Array ( result ) ) ;
}
if ( options . allowMissingPrefix && typeof ( value ) === "string" && value . substring ( 0 , 2 ) !== "0x" ) {
value = "0x" + value ;
}
if ( isHexable ( value ) ) {
value = value . toHexString ( ) ;
}
if ( isHexString ( value ) ) {
let hex = value . substring ( 2 ) ;
if ( hex . length % 2 ) {
if ( options . hexPad === "left" ) {
hex = "0" + hex ;
}
else if ( options . hexPad === "right" ) {
hex += "0" ;
}
else {
logger$1 . throwArgumentError ( "hex data is odd-length" , "value" , value ) ;
}
}
const result = [ ] ;
for ( let i = 0 ; i < hex . length ; i += 2 ) {
result . push ( parseInt ( hex . substring ( i , i + 2 ) , 16 ) ) ;
}
return addSlice ( new Uint8Array ( result ) ) ;
}
if ( isBytes ( value ) ) {
return addSlice ( new Uint8Array ( value ) ) ;
}
return logger$1 . throwArgumentError ( "invalid arrayify value" , "value" , value ) ;
}
function isHexString ( value , length ) {
if ( typeof ( value ) !== "string" || ! value . match ( /^0x[0-9A-Fa-f]*$/ ) ) {
return false ;
}
return true ;
}
var sha3$1 = { exports : { } } ;
/**
 * [js-sha3]{@link https://github.com/emn178/js-sha3}
 *
 * @version 0.8.0
 * @author Chen, Yi-Cyuan [emn178@gmail.com]
 * @copyright Chen, Yi-Cyuan 2015-2018
 * @license MIT
 */
var hasRequiredSha3 ;
function requireSha3 ( ) {
if ( hasRequiredSha3 ) return sha3$1 . exports ;
hasRequiredSha3 = 1 ;
( function ( module ) {
/*jslint bitwise: true */
( function ( ) {
var INPUT _ERROR = 'input is invalid type' ;
var FINALIZE _ERROR = 'finalize already called' ;
var WINDOW = typeof window === 'object' ;
var root = WINDOW ? window : { } ;
if ( root . JS _SHA3 _NO _WINDOW ) {
WINDOW = false ;
}
var WEB _WORKER = ! WINDOW && typeof self === 'object' ;
var NODE _JS = ! root . JS _SHA3 _NO _NODE _JS && typeof process === 'object' && process . versions && process . versions . node ;
if ( NODE _JS ) {
root = commonjsGlobal ;
} else if ( WEB _WORKER ) {
root = self ;
}
var COMMON _JS = ! root . JS _SHA3 _NO _COMMON _JS && 'object' === 'object' && module . exports ;
var ARRAY _BUFFER = ! root . JS _SHA3 _NO _ARRAY _BUFFER && typeof ArrayBuffer !== 'undefined' ;
var HEX _CHARS = '0123456789abcdef' . split ( '' ) ;
var SHAKE _PADDING = [ 31 , 7936 , 2031616 , 520093696 ] ;
var CSHAKE _PADDING = [ 4 , 1024 , 262144 , 67108864 ] ;
var KECCAK _PADDING = [ 1 , 256 , 65536 , 16777216 ] ;
var PADDING = [ 6 , 1536 , 393216 , 100663296 ] ;
var SHIFT = [ 0 , 8 , 16 , 24 ] ;
var RC = [ 1 , 0 , 32898 , 0 , 32906 , 2147483648 , 2147516416 , 2147483648 , 32907 , 0 , 2147483649 ,
0 , 2147516545 , 2147483648 , 32777 , 2147483648 , 138 , 0 , 136 , 0 , 2147516425 , 0 ,
2147483658 , 0 , 2147516555 , 0 , 139 , 2147483648 , 32905 , 2147483648 , 32771 ,
2147483648 , 32770 , 2147483648 , 128 , 2147483648 , 32778 , 0 , 2147483658 , 2147483648 ,
2147516545 , 2147483648 , 32896 , 2147483648 , 2147483649 , 0 , 2147516424 , 2147483648 ] ;
var BITS = [ 224 , 256 , 384 , 512 ] ;
var SHAKE _BITS = [ 128 , 256 ] ;
var OUTPUT _TYPES = [ 'hex' , 'buffer' , 'arrayBuffer' , 'array' , 'digest' ] ;
var CSHAKE _BYTEPAD = {
'128' : 168 ,
'256' : 136
} ;
if ( root . JS _SHA3 _NO _NODE _JS || ! Array . isArray ) {
Array . isArray = function ( obj ) {
return Object . prototype . toString . call ( obj ) === '[object Array]' ;
} ;
}
if ( ARRAY _BUFFER && ( root . JS _SHA3 _NO _ARRAY _BUFFER _IS _VIEW || ! ArrayBuffer . isView ) ) {
ArrayBuffer . isView = function ( obj ) {
return typeof obj === 'object' && obj . buffer && obj . buffer . constructor === ArrayBuffer ;
} ;
}
var createOutputMethod = function ( bits , padding , outputType ) {
return function ( message ) {
return new Keccak ( bits , padding , bits ) . update ( message ) [ outputType ] ( ) ;
} ;
} ;
var createShakeOutputMethod = function ( bits , padding , outputType ) {
return function ( message , outputBits ) {
return new Keccak ( bits , padding , outputBits ) . update ( message ) [ outputType ] ( ) ;
} ;
} ;
var createCshakeOutputMethod = function ( bits , padding , outputType ) {
return function ( message , outputBits , n , s ) {
return methods [ 'cshake' + bits ] . update ( message , outputBits , n , s ) [ outputType ] ( ) ;
} ;
} ;
var createKmacOutputMethod = function ( bits , padding , outputType ) {
return function ( key , message , outputBits , s ) {
return methods [ 'kmac' + bits ] . update ( key , message , outputBits , s ) [ outputType ] ( ) ;
} ;
} ;
var createOutputMethods = function ( method , createMethod , bits , padding ) {
for ( var i = 0 ; i < OUTPUT _TYPES . length ; ++ i ) {
var type = OUTPUT _TYPES [ i ] ;
method [ type ] = createMethod ( bits , padding , type ) ;
}
return method ;
} ;
var createMethod = function ( bits , padding ) {
var method = createOutputMethod ( bits , padding , 'hex' ) ;
method . create = function ( ) {
return new Keccak ( bits , padding , bits ) ;
} ;
method . update = function ( message ) {
return method . create ( ) . update ( message ) ;
} ;
return createOutputMethods ( method , createOutputMethod , bits , padding ) ;
} ;
var createShakeMethod = function ( bits , padding ) {
var method = createShakeOutputMethod ( bits , padding , 'hex' ) ;
method . create = function ( outputBits ) {
return new Keccak ( bits , padding , outputBits ) ;
} ;
method . update = function ( message , outputBits ) {
return method . create ( outputBits ) . update ( message ) ;
} ;
return createOutputMethods ( method , createShakeOutputMethod , bits , padding ) ;
} ;
var createCshakeMethod = function ( bits , padding ) {
var w = CSHAKE _BYTEPAD [ bits ] ;
var method = createCshakeOutputMethod ( bits , padding , 'hex' ) ;
method . create = function ( outputBits , n , s ) {
if ( ! n && ! s ) {
return methods [ 'shake' + bits ] . create ( outputBits ) ;
} else {
return new Keccak ( bits , padding , outputBits ) . bytepad ( [ n , s ] , w ) ;
}
} ;
method . update = function ( message , outputBits , n , s ) {
return method . create ( outputBits , n , s ) . update ( message ) ;
} ;
return createOutputMethods ( method , createCshakeOutputMethod , bits , padding ) ;
} ;
var createKmacMethod = function ( bits , padding ) {
var w = CSHAKE _BYTEPAD [ bits ] ;
var method = createKmacOutputMethod ( bits , padding , 'hex' ) ;
method . create = function ( key , outputBits , s ) {
return new Kmac ( bits , padding , outputBits ) . bytepad ( [ 'KMAC' , s ] , w ) . bytepad ( [ key ] , w ) ;
} ;
method . update = function ( key , message , outputBits , s ) {
return method . create ( key , outputBits , s ) . update ( message ) ;
} ;
return createOutputMethods ( method , createKmacOutputMethod , bits , padding ) ;
} ;
var algorithms = [
{ name : 'keccak' , padding : KECCAK _PADDING , bits : BITS , createMethod : createMethod } ,
{ name : 'sha3' , padding : PADDING , bits : BITS , createMethod : createMethod } ,
{ name : 'shake' , padding : SHAKE _PADDING , bits : SHAKE _BITS , createMethod : createShakeMethod } ,
{ name : 'cshake' , padding : CSHAKE _PADDING , bits : SHAKE _BITS , createMethod : createCshakeMethod } ,
{ name : 'kmac' , padding : CSHAKE _PADDING , bits : SHAKE _BITS , createMethod : createKmacMethod }
] ;
var methods = { } , methodNames = [ ] ;
for ( var i = 0 ; i < algorithms . length ; ++ i ) {
var algorithm = algorithms [ i ] ;
var bits = algorithm . bits ;
for ( var j = 0 ; j < bits . length ; ++ j ) {
var methodName = algorithm . name + '_' + bits [ j ] ;
methodNames . push ( methodName ) ;
methods [ methodName ] = algorithm . createMethod ( bits [ j ] , algorithm . padding ) ;
if ( algorithm . name !== 'sha3' ) {
var newMethodName = algorithm . name + bits [ j ] ;
methodNames . push ( newMethodName ) ;
methods [ newMethodName ] = methods [ methodName ] ;
}
}
}
function Keccak ( bits , padding , outputBits ) {
this . blocks = [ ] ;
this . s = [ ] ;
this . padding = padding ;
this . outputBits = outputBits ;
this . reset = true ;
this . finalized = false ;
this . block = 0 ;
this . start = 0 ;
this . blockCount = ( 1600 - ( bits << 1 ) ) >> 5 ;
this . byteCount = this . blockCount << 2 ;
this . outputBlocks = outputBits >> 5 ;
this . extraBytes = ( outputBits & 31 ) >> 3 ;
for ( var i = 0 ; i < 50 ; ++ i ) {
this . s [ i ] = 0 ;
}
}
Keccak . prototype . update = function ( message ) {
if ( this . finalized ) {
throw new Error ( FINALIZE _ERROR ) ;
}
var notString , type = typeof message ;
if ( type !== 'string' ) {
if ( type === 'object' ) {
if ( message === null ) {
throw new Error ( INPUT _ERROR ) ;
} else if ( ARRAY _BUFFER && message . constructor === ArrayBuffer ) {
message = new Uint8Array ( message ) ;
} else if ( ! Array . isArray ( message ) ) {
if ( ! ARRAY _BUFFER || ! ArrayBuffer . isView ( message ) ) {
throw new Error ( INPUT _ERROR ) ;
}
}
} else {
throw new Error ( INPUT _ERROR ) ;
}
notString = true ;
}
var blocks = this . blocks , byteCount = this . byteCount , length = message . length ,
blockCount = this . blockCount , index = 0 , s = this . s , i , code ;
while ( index < length ) {
if ( this . reset ) {
this . reset = false ;
blocks [ 0 ] = this . block ;
for ( i = 1 ; i < blockCount + 1 ; ++ i ) {
blocks [ i ] = 0 ;
}
}
if ( notString ) {
for ( i = this . start ; index < length && i < byteCount ; ++ index ) {
blocks [ i >> 2 ] |= message [ index ] << SHIFT [ i ++ & 3 ] ;
}
} else {
for ( i = this . start ; index < length && i < byteCount ; ++ index ) {
code = message . charCodeAt ( index ) ;
if ( code < 0x80 ) {
blocks [ i >> 2 ] |= code << SHIFT [ i ++ & 3 ] ;
} else if ( code < 0x800 ) {
blocks [ i >> 2 ] |= ( 0xc0 | ( code >> 6 ) ) << SHIFT [ i ++ & 3 ] ;
blocks [ i >> 2 ] |= ( 0x80 | ( code & 0x3f ) ) << SHIFT [ i ++ & 3 ] ;
} else if ( code < 0xd800 || code >= 0xe000 ) {
blocks [ i >> 2 ] |= ( 0xe0 | ( code >> 12 ) ) << SHIFT [ i ++ & 3 ] ;
blocks [ i >> 2 ] |= ( 0x80 | ( ( code >> 6 ) & 0x3f ) ) << SHIFT [ i ++ & 3 ] ;
blocks [ i >> 2 ] |= ( 0x80 | ( code & 0x3f ) ) << SHIFT [ i ++ & 3 ] ;
} else {
code = 0x10000 + ( ( ( code & 0x3ff ) << 10 ) | ( message . charCodeAt ( ++ index ) & 0x3ff ) ) ;
blocks [ i >> 2 ] |= ( 0xf0 | ( code >> 18 ) ) << SHIFT [ i ++ & 3 ] ;
blocks [ i >> 2 ] |= ( 0x80 | ( ( code >> 12 ) & 0x3f ) ) << SHIFT [ i ++ & 3 ] ;
blocks [ i >> 2 ] |= ( 0x80 | ( ( code >> 6 ) & 0x3f ) ) << SHIFT [ i ++ & 3 ] ;
blocks [ i >> 2 ] |= ( 0x80 | ( code & 0x3f ) ) << SHIFT [ i ++ & 3 ] ;
}
}
}
this . lastByteIndex = i ;
if ( i >= byteCount ) {
this . start = i - byteCount ;
this . block = blocks [ blockCount ] ;
for ( i = 0 ; i < blockCount ; ++ i ) {
s [ i ] ^= blocks [ i ] ;
}
f ( s ) ;
this . reset = true ;
} else {
this . start = i ;
}
}
return this ;
} ;
Keccak . prototype . encode = function ( x , right ) {
var o = x & 255 , n = 1 ;
var bytes = [ o ] ;
x = x >> 8 ;
o = x & 255 ;
while ( o > 0 ) {
bytes . unshift ( o ) ;
x = x >> 8 ;
o = x & 255 ;
++ n ;
}
if ( right ) {
bytes . push ( n ) ;
} else {
bytes . unshift ( n ) ;
}
this . update ( bytes ) ;
return bytes . length ;
} ;
Keccak . prototype . encodeString = function ( str ) {
var notString , type = typeof str ;
if ( type !== 'string' ) {
if ( type === 'object' ) {
if ( str === null ) {
throw new Error ( INPUT _ERROR ) ;
} else if ( ARRAY _BUFFER && str . constructor === ArrayBuffer ) {
str = new Uint8Array ( str ) ;
} else if ( ! Array . isArray ( str ) ) {
if ( ! ARRAY _BUFFER || ! ArrayBuffer . isView ( str ) ) {
throw new Error ( INPUT _ERROR ) ;
}
}
} else {
throw new Error ( INPUT _ERROR ) ;
}
notString = true ;
}
var bytes = 0 , length = str . length ;
if ( notString ) {
bytes = length ;
} else {
for ( var i = 0 ; i < str . length ; ++ i ) {
var code = str . charCodeAt ( i ) ;
if ( code < 0x80 ) {
bytes += 1 ;
} else if ( code < 0x800 ) {
bytes += 2 ;
} else if ( code < 0xd800 || code >= 0xe000 ) {
bytes += 3 ;
} else {
code = 0x10000 + ( ( ( code & 0x3ff ) << 10 ) | ( str . charCodeAt ( ++ i ) & 0x3ff ) ) ;
bytes += 4 ;
}
}
}
bytes += this . encode ( bytes * 8 ) ;
this . update ( str ) ;
return bytes ;
} ;
Keccak . prototype . bytepad = function ( strs , w ) {
var bytes = this . encode ( w ) ;
for ( var i = 0 ; i < strs . length ; ++ i ) {
bytes += this . encodeString ( strs [ i ] ) ;
}
var paddingBytes = w - bytes % w ;
var zeros = [ ] ;
zeros . length = paddingBytes ;
this . update ( zeros ) ;
return this ;
} ;
Keccak . prototype . finalize = function ( ) {
if ( this . finalized ) {
return ;
}
this . finalized = true ;
var blocks = this . blocks , i = this . lastByteIndex , blockCount = this . blockCount , s = this . s ;
blocks [ i >> 2 ] |= this . padding [ i & 3 ] ;
if ( this . lastByteIndex === this . byteCount ) {
blocks [ 0 ] = blocks [ blockCount ] ;
for ( i = 1 ; i < blockCount + 1 ; ++ i ) {
blocks [ i ] = 0 ;
}
}
blocks [ blockCount - 1 ] |= 0x80000000 ;
for ( i = 0 ; i < blockCount ; ++ i ) {
s [ i ] ^= blocks [ i ] ;
}
f ( s ) ;
} ;
Keccak . prototype . toString = Keccak . prototype . hex = function ( ) {
this . finalize ( ) ;
var blockCount = this . blockCount , s = this . s , outputBlocks = this . outputBlocks ,
extraBytes = this . extraBytes , i = 0 , j = 0 ;
var hex = '' , block ;
while ( j < outputBlocks ) {
for ( i = 0 ; i < blockCount && j < outputBlocks ; ++ i , ++ j ) {
block = s [ i ] ;
hex += HEX _CHARS [ ( block >> 4 ) & 0x0F ] + HEX _CHARS [ block & 0x0F ] +
HEX _CHARS [ ( block >> 12 ) & 0x0F ] + HEX _CHARS [ ( block >> 8 ) & 0x0F ] +
HEX _CHARS [ ( block >> 20 ) & 0x0F ] + HEX _CHARS [ ( block >> 16 ) & 0x0F ] +
HEX _CHARS [ ( block >> 28 ) & 0x0F ] + HEX _CHARS [ ( block >> 24 ) & 0x0F ] ;
}
if ( j % blockCount === 0 ) {
f ( s ) ;
i = 0 ;
}
}
if ( extraBytes ) {
block = s [ i ] ;
hex += HEX _CHARS [ ( block >> 4 ) & 0x0F ] + HEX _CHARS [ block & 0x0F ] ;
if ( extraBytes > 1 ) {
hex += HEX _CHARS [ ( block >> 12 ) & 0x0F ] + HEX _CHARS [ ( block >> 8 ) & 0x0F ] ;
}
if ( extraBytes > 2 ) {
hex += HEX _CHARS [ ( block >> 20 ) & 0x0F ] + HEX _CHARS [ ( block >> 16 ) & 0x0F ] ;
}
}
return hex ;
} ;
Keccak . prototype . arrayBuffer = function ( ) {
this . finalize ( ) ;
var blockCount = this . blockCount , s = this . s , outputBlocks = this . outputBlocks ,
extraBytes = this . extraBytes , i = 0 , j = 0 ;
var bytes = this . outputBits >> 3 ;
var buffer ;
if ( extraBytes ) {
buffer = new ArrayBuffer ( ( outputBlocks + 1 ) << 2 ) ;
} else {
buffer = new ArrayBuffer ( bytes ) ;
}
var array = new Uint32Array ( buffer ) ;
while ( j < outputBlocks ) {
for ( i = 0 ; i < blockCount && j < outputBlocks ; ++ i , ++ j ) {
array [ j ] = s [ i ] ;
}
if ( j % blockCount === 0 ) {
f ( s ) ;
}
}
if ( extraBytes ) {
array [ i ] = s [ i ] ;
buffer = buffer . slice ( 0 , bytes ) ;
}
return buffer ;
} ;
Keccak . prototype . buffer = Keccak . prototype . arrayBuffer ;
Keccak . prototype . digest = Keccak . prototype . array = function ( ) {
this . finalize ( ) ;
var blockCount = this . blockCount , s = this . s , outputBlocks = this . outputBlocks ,
extraBytes = this . extraBytes , i = 0 , j = 0 ;
var array = [ ] , offset , block ;
while ( j < outputBlocks ) {
for ( i = 0 ; i < blockCount && j < outputBlocks ; ++ i , ++ j ) {
offset = j << 2 ;
block = s [ i ] ;
array [ offset ] = block & 0xFF ;
array [ offset + 1 ] = ( block >> 8 ) & 0xFF ;
array [ offset + 2 ] = ( block >> 16 ) & 0xFF ;
array [ offset + 3 ] = ( block >> 24 ) & 0xFF ;
}
if ( j % blockCount === 0 ) {
f ( s ) ;
}
}
if ( extraBytes ) {
offset = j << 2 ;
block = s [ i ] ;
array [ offset ] = block & 0xFF ;
if ( extraBytes > 1 ) {
array [ offset + 1 ] = ( block >> 8 ) & 0xFF ;
}
if ( extraBytes > 2 ) {
array [ offset + 2 ] = ( block >> 16 ) & 0xFF ;
}
}
return array ;
} ;
function Kmac ( bits , padding , outputBits ) {
Keccak . call ( this , bits , padding , outputBits ) ;
}
Kmac . prototype = new Keccak ( ) ;
Kmac . prototype . finalize = function ( ) {
this . encode ( this . outputBits , true ) ;
return Keccak . prototype . finalize . call ( this ) ;
} ;
var f = function ( s ) {
var h , l , n , c0 , c1 , c2 , c3 , c4 , c5 , c6 , c7 , c8 , c9 ,
b0 , b1 , b2 , b3 , b4 , b5 , b6 , b7 , b8 , b9 , b10 , b11 , b12 , b13 , b14 , b15 , b16 , b17 ,
b18 , b19 , b20 , b21 , b22 , b23 , b24 , b25 , b26 , b27 , b28 , b29 , b30 , b31 , b32 , b33 ,
b34 , b35 , b36 , b37 , b38 , b39 , b40 , b41 , b42 , b43 , b44 , b45 , b46 , b47 , b48 , b49 ;
for ( n = 0 ; n < 48 ; n += 2 ) {
c0 = s [ 0 ] ^ s [ 10 ] ^ s [ 20 ] ^ s [ 30 ] ^ s [ 40 ] ;
c1 = s [ 1 ] ^ s [ 11 ] ^ s [ 21 ] ^ s [ 31 ] ^ s [ 41 ] ;
c2 = s [ 2 ] ^ s [ 12 ] ^ s [ 22 ] ^ s [ 32 ] ^ s [ 42 ] ;
c3 = s [ 3 ] ^ s [ 13 ] ^ s [ 23 ] ^ s [ 33 ] ^ s [ 43 ] ;
c4 = s [ 4 ] ^ s [ 14 ] ^ s [ 24 ] ^ s [ 34 ] ^ s [ 44 ] ;
c5 = s [ 5 ] ^ s [ 15 ] ^ s [ 25 ] ^ s [ 35 ] ^ s [ 45 ] ;
c6 = s [ 6 ] ^ s [ 16 ] ^ s [ 26 ] ^ s [ 36 ] ^ s [ 46 ] ;
c7 = s [ 7 ] ^ s [ 17 ] ^ s [ 27 ] ^ s [ 37 ] ^ s [ 47 ] ;
c8 = s [ 8 ] ^ s [ 18 ] ^ s [ 28 ] ^ s [ 38 ] ^ s [ 48 ] ;
c9 = s [ 9 ] ^ s [ 19 ] ^ s [ 29 ] ^ s [ 39 ] ^ s [ 49 ] ;
h = c8 ^ ( ( c2 << 1 ) | ( c3 >>> 31 ) ) ;
l = c9 ^ ( ( c3 << 1 ) | ( c2 >>> 31 ) ) ;
s [ 0 ] ^= h ;
s [ 1 ] ^= l ;
s [ 10 ] ^= h ;
s [ 11 ] ^= l ;
s [ 20 ] ^= h ;
s [ 21 ] ^= l ;
s [ 30 ] ^= h ;
s [ 31 ] ^= l ;
s [ 40 ] ^= h ;
s [ 41 ] ^= l ;
h = c0 ^ ( ( c4 << 1 ) | ( c5 >>> 31 ) ) ;
l = c1 ^ ( ( c5 << 1 ) | ( c4 >>> 31 ) ) ;
s [ 2 ] ^= h ;
s [ 3 ] ^= l ;
s [ 12 ] ^= h ;
s [ 13 ] ^= l ;
s [ 22 ] ^= h ;
s [ 23 ] ^= l ;
s [ 32 ] ^= h ;
s [ 33 ] ^= l ;
s [ 42 ] ^= h ;
s [ 43 ] ^= l ;
h = c2 ^ ( ( c6 << 1 ) | ( c7 >>> 31 ) ) ;
l = c3 ^ ( ( c7 << 1 ) | ( c6 >>> 31 ) ) ;
s [ 4 ] ^= h ;
s [ 5 ] ^= l ;
s [ 14 ] ^= h ;
s [ 15 ] ^= l ;
s [ 24 ] ^= h ;
s [ 25 ] ^= l ;
s [ 34 ] ^= h ;
s [ 35 ] ^= l ;
s [ 44 ] ^= h ;
s [ 45 ] ^= l ;
h = c4 ^ ( ( c8 << 1 ) | ( c9 >>> 31 ) ) ;
l = c5 ^ ( ( c9 << 1 ) | ( c8 >>> 31 ) ) ;
s [ 6 ] ^= h ;
s [ 7 ] ^= l ;
s [ 16 ] ^= h ;
s [ 17 ] ^= l ;
s [ 26 ] ^= h ;
s [ 27 ] ^= l ;
s [ 36 ] ^= h ;
s [ 37 ] ^= l ;
s [ 46 ] ^= h ;
s [ 47 ] ^= l ;
h = c6 ^ ( ( c0 << 1 ) | ( c1 >>> 31 ) ) ;
l = c7 ^ ( ( c1 << 1 ) | ( c0 >>> 31 ) ) ;
s [ 8 ] ^= h ;
s [ 9 ] ^= l ;
s [ 18 ] ^= h ;
s [ 19 ] ^= l ;
s [ 28 ] ^= h ;
s [ 29 ] ^= l ;
s [ 38 ] ^= h ;
s [ 39 ] ^= l ;
s [ 48 ] ^= h ;
s [ 49 ] ^= l ;
b0 = s [ 0 ] ;
b1 = s [ 1 ] ;
b32 = ( s [ 11 ] << 4 ) | ( s [ 10 ] >>> 28 ) ;
b33 = ( s [ 10 ] << 4 ) | ( s [ 11 ] >>> 28 ) ;
b14 = ( s [ 20 ] << 3 ) | ( s [ 21 ] >>> 29 ) ;
b15 = ( s [ 21 ] << 3 ) | ( s [ 20 ] >>> 29 ) ;
b46 = ( s [ 31 ] << 9 ) | ( s [ 30 ] >>> 23 ) ;
b47 = ( s [ 30 ] << 9 ) | ( s [ 31 ] >>> 23 ) ;
b28 = ( s [ 40 ] << 18 ) | ( s [ 41 ] >>> 14 ) ;
b29 = ( s [ 41 ] << 18 ) | ( s [ 40 ] >>> 14 ) ;
b20 = ( s [ 2 ] << 1 ) | ( s [ 3 ] >>> 31 ) ;
b21 = ( s [ 3 ] << 1 ) | ( s [ 2 ] >>> 31 ) ;
b2 = ( s [ 13 ] << 12 ) | ( s [ 12 ] >>> 20 ) ;
b3 = ( s [ 12 ] << 12 ) | ( s [ 13 ] >>> 20 ) ;
b34 = ( s [ 22 ] << 10 ) | ( s [ 23 ] >>> 22 ) ;
b35 = ( s [ 23 ] << 10 ) | ( s [ 22 ] >>> 22 ) ;
b16 = ( s [ 33 ] << 13 ) | ( s [ 32 ] >>> 19 ) ;
b17 = ( s [ 32 ] << 13 ) | ( s [ 33 ] >>> 19 ) ;
b48 = ( s [ 42 ] << 2 ) | ( s [ 43 ] >>> 30 ) ;
b49 = ( s [ 43 ] << 2 ) | ( s [ 42 ] >>> 30 ) ;
b40 = ( s [ 5 ] << 30 ) | ( s [ 4 ] >>> 2 ) ;
b41 = ( s [ 4 ] << 30 ) | ( s [ 5 ] >>> 2 ) ;
b22 = ( s [ 14 ] << 6 ) | ( s [ 15 ] >>> 26 ) ;
b23 = ( s [ 15 ] << 6 ) | ( s [ 14 ] >>> 26 ) ;
b4 = ( s [ 25 ] << 11 ) | ( s [ 24 ] >>> 21 ) ;
b5 = ( s [ 24 ] << 11 ) | ( s [ 25 ] >>> 21 ) ;
b36 = ( s [ 34 ] << 15 ) | ( s [ 35 ] >>> 17 ) ;
b37 = ( s [ 35 ] << 15 ) | ( s [ 34 ] >>> 17 ) ;
b18 = ( s [ 45 ] << 29 ) | ( s [ 44 ] >>> 3 ) ;
b19 = ( s [ 44 ] << 29 ) | ( s [ 45 ] >>> 3 ) ;
b10 = ( s [ 6 ] << 28 ) | ( s [ 7 ] >>> 4 ) ;
b11 = ( s [ 7 ] << 28 ) | ( s [ 6 ] >>> 4 ) ;
b42 = ( s [ 17 ] << 23 ) | ( s [ 16 ] >>> 9 ) ;
b43 = ( s [ 16 ] << 23 ) | ( s [ 17 ] >>> 9 ) ;
b24 = ( s [ 26 ] << 25 ) | ( s [ 27 ] >>> 7 ) ;
b25 = ( s [ 27 ] << 25 ) | ( s [ 26 ] >>> 7 ) ;
b6 = ( s [ 36 ] << 21 ) | ( s [ 37 ] >>> 11 ) ;
b7 = ( s [ 37 ] << 21 ) | ( s [ 36 ] >>> 11 ) ;
b38 = ( s [ 47 ] << 24 ) | ( s [ 46 ] >>> 8 ) ;
b39 = ( s [ 46 ] << 24 ) | ( s [ 47 ] >>> 8 ) ;
b30 = ( s [ 8 ] << 27 ) | ( s [ 9 ] >>> 5 ) ;
b31 = ( s [ 9 ] << 27 ) | ( s [ 8 ] >>> 5 ) ;
b12 = ( s [ 18 ] << 20 ) | ( s [ 19 ] >>> 12 ) ;
b13 = ( s [ 19 ] << 20 ) | ( s [ 18 ] >>> 12 ) ;
b44 = ( s [ 29 ] << 7 ) | ( s [ 28 ] >>> 25 ) ;
b45 = ( s [ 28 ] << 7 ) | ( s [ 29 ] >>> 25 ) ;
b26 = ( s [ 38 ] << 8 ) | ( s [ 39 ] >>> 24 ) ;
b27 = ( s [ 39 ] << 8 ) | ( s [ 38 ] >>> 24 ) ;
b8 = ( s [ 48 ] << 14 ) | ( s [ 49 ] >>> 18 ) ;
b9 = ( s [ 49 ] << 14 ) | ( s [ 48 ] >>> 18 ) ;
s [ 0 ] = b0 ^ ( ~ b2 & b4 ) ;
s [ 1 ] = b1 ^ ( ~ b3 & b5 ) ;
s [ 10 ] = b10 ^ ( ~ b12 & b14 ) ;
s [ 11 ] = b11 ^ ( ~ b13 & b15 ) ;
s [ 20 ] = b20 ^ ( ~ b22 & b24 ) ;
s [ 21 ] = b21 ^ ( ~ b23 & b25 ) ;
s [ 30 ] = b30 ^ ( ~ b32 & b34 ) ;
s [ 31 ] = b31 ^ ( ~ b33 & b35 ) ;
s [ 40 ] = b40 ^ ( ~ b42 & b44 ) ;
s [ 41 ] = b41 ^ ( ~ b43 & b45 ) ;
s [ 2 ] = b2 ^ ( ~ b4 & b6 ) ;
s [ 3 ] = b3 ^ ( ~ b5 & b7 ) ;
s [ 12 ] = b12 ^ ( ~ b14 & b16 ) ;
s [ 13 ] = b13 ^ ( ~ b15 & b17 ) ;
s [ 22 ] = b22 ^ ( ~ b24 & b26 ) ;
s [ 23 ] = b23 ^ ( ~ b25 & b27 ) ;
s [ 32 ] = b32 ^ ( ~ b34 & b36 ) ;
s [ 33 ] = b33 ^ ( ~ b35 & b37 ) ;
s [ 42 ] = b42 ^ ( ~ b44 & b46 ) ;
s [ 43 ] = b43 ^ ( ~ b45 & b47 ) ;
s [ 4 ] = b4 ^ ( ~ b6 & b8 ) ;
s [ 5 ] = b5 ^ ( ~ b7 & b9 ) ;
s [ 14 ] = b14 ^ ( ~ b16 & b18 ) ;
s [ 15 ] = b15 ^ ( ~ b17 & b19 ) ;
s [ 24 ] = b24 ^ ( ~ b26 & b28 ) ;
s [ 25 ] = b25 ^ ( ~ b27 & b29 ) ;
s [ 34 ] = b34 ^ ( ~ b36 & b38 ) ;
s [ 35 ] = b35 ^ ( ~ b37 & b39 ) ;
s [ 44 ] = b44 ^ ( ~ b46 & b48 ) ;
s [ 45 ] = b45 ^ ( ~ b47 & b49 ) ;
s [ 6 ] = b6 ^ ( ~ b8 & b0 ) ;
s [ 7 ] = b7 ^ ( ~ b9 & b1 ) ;
s [ 16 ] = b16 ^ ( ~ b18 & b10 ) ;
s [ 17 ] = b17 ^ ( ~ b19 & b11 ) ;
s [ 26 ] = b26 ^ ( ~ b28 & b20 ) ;
s [ 27 ] = b27 ^ ( ~ b29 & b21 ) ;
s [ 36 ] = b36 ^ ( ~ b38 & b30 ) ;
s [ 37 ] = b37 ^ ( ~ b39 & b31 ) ;
s [ 46 ] = b46 ^ ( ~ b48 & b40 ) ;
s [ 47 ] = b47 ^ ( ~ b49 & b41 ) ;
s [ 8 ] = b8 ^ ( ~ b0 & b2 ) ;
s [ 9 ] = b9 ^ ( ~ b1 & b3 ) ;
s [ 18 ] = b18 ^ ( ~ b10 & b12 ) ;
s [ 19 ] = b19 ^ ( ~ b11 & b13 ) ;
s [ 28 ] = b28 ^ ( ~ b20 & b22 ) ;
s [ 29 ] = b29 ^ ( ~ b21 & b23 ) ;
s [ 38 ] = b38 ^ ( ~ b30 & b32 ) ;
s [ 39 ] = b39 ^ ( ~ b31 & b33 ) ;
s [ 48 ] = b48 ^ ( ~ b40 & b42 ) ;
s [ 49 ] = b49 ^ ( ~ b41 & b43 ) ;
s [ 0 ] ^= RC [ n ] ;
s [ 1 ] ^= RC [ n + 1 ] ;
}
} ;
if ( COMMON _JS ) {
module . exports = methods ;
} else {
for ( i = 0 ; i < methodNames . length ; ++ i ) {
root [ methodNames [ i ] ] = methods [ methodNames [ i ] ] ;
}
}
} ) ( ) ;
} ( sha3$1 ) ) ;
return sha3$1 . exports ;
}
var sha3Exports = /*@__PURE__*/ requireSha3 ( ) ;
var sha3 = /*@__PURE__*/ getDefaultExportFromCjs ( sha3Exports ) ;
function keccak256 ( data ) {
return '0x' + sha3 . keccak _256 ( arrayify ( data ) ) ;
}
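// keccak256 above mirrors ethers v5: it arrayifies its input (hex string, Uint8Array, array of
// bytes) and returns a 0x-prefixed hex digest from js-sha3's keccak_256. Illustrative call,
// assuming toUtf8Bytes defined further below:
//
//   const c = keccak256(toUtf8Bytes('mimcsponge_iv')); // '0x' + 64 hex chars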
const version = "strings/5.7.0" ;
const logger = new Logger ( version ) ;
///////////////////////////////
var UnicodeNormalizationForm ;
( function ( UnicodeNormalizationForm ) {
UnicodeNormalizationForm [ "current" ] = "" ;
UnicodeNormalizationForm [ "NFC" ] = "NFC" ;
UnicodeNormalizationForm [ "NFD" ] = "NFD" ;
UnicodeNormalizationForm [ "NFKC" ] = "NFKC" ;
UnicodeNormalizationForm [ "NFKD" ] = "NFKD" ;
} ) ( UnicodeNormalizationForm || ( UnicodeNormalizationForm = { } ) ) ;
var Utf8ErrorReason ;
( function ( Utf8ErrorReason ) {
// A continuation byte was present where there was nothing to continue
// - offset = the index the codepoint began in
Utf8ErrorReason [ "UNEXPECTED_CONTINUE" ] = "unexpected continuation byte" ;
// An invalid (non-continuation) byte to start a UTF-8 codepoint was found
// - offset = the index the codepoint began in
Utf8ErrorReason [ "BAD_PREFIX" ] = "bad codepoint prefix" ;
// The string is too short to process the expected codepoint
// - offset = the index the codepoint began in
Utf8ErrorReason [ "OVERRUN" ] = "string overrun" ;
// A missing continuation byte was expected but not found
// - offset = the index the continuation byte was expected at
Utf8ErrorReason [ "MISSING_CONTINUE" ] = "missing continuation byte" ;
// The computed code point is outside the range for UTF-8
// - offset = start of this codepoint
// - badCodepoint = the computed codepoint; outside the UTF-8 range
Utf8ErrorReason [ "OUT_OF_RANGE" ] = "out of UTF-8 range" ;
// UTF-8 strings may not contain UTF-16 surrogate pairs
// - offset = start of this codepoint
// - badCodepoint = the computed codepoint; inside the UTF-16 surrogate range
Utf8ErrorReason [ "UTF16_SURROGATE" ] = "UTF-16 surrogate" ;
// The string is an overlong representation
// - offset = start of this codepoint
// - badCodepoint = the computed codepoint; already bounds checked
Utf8ErrorReason [ "OVERLONG" ] = "overlong representation" ;
} ) ( Utf8ErrorReason || ( Utf8ErrorReason = { } ) ) ;
// http://stackoverflow.com/questions/18729405/how-to-convert-utf8-string-to-byte-array
function toUtf8Bytes ( str , form = UnicodeNormalizationForm . current ) {
if ( form != UnicodeNormalizationForm . current ) {
logger . checkNormalize ( ) ;
str = str . normalize ( form ) ;
}
let result = [ ] ;
for ( let i = 0 ; i < str . length ; i ++ ) {
const c = str . charCodeAt ( i ) ;
if ( c < 0x80 ) {
result . push ( c ) ;
}
else if ( c < 0x800 ) {
result . push ( ( c >> 6 ) | 0xc0 ) ;
result . push ( ( c & 0x3f ) | 0x80 ) ;
}
else if ( ( c & 0xfc00 ) == 0xd800 ) {
i ++ ;
const c2 = str . charCodeAt ( i ) ;
if ( i >= str . length || ( c2 & 0xfc00 ) !== 0xdc00 ) {
throw new Error ( "invalid utf-8 string" ) ;
}
// Surrogate Pair
const pair = 0x10000 + ( ( c & 0x03ff ) << 10 ) + ( c2 & 0x03ff ) ;
result . push ( ( pair >> 18 ) | 0xf0 ) ;
result . push ( ( ( pair >> 12 ) & 0x3f ) | 0x80 ) ;
result . push ( ( ( pair >> 6 ) & 0x3f ) | 0x80 ) ;
result . push ( ( pair & 0x3f ) | 0x80 ) ;
}
else {
result . push ( ( c >> 12 ) | 0xe0 ) ;
result . push ( ( ( c >> 6 ) & 0x3f ) | 0x80 ) ;
result . push ( ( c & 0x3f ) | 0x80 ) ;
}
}
return arrayify ( result ) ;
}
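// Worked example of the encoding above: 'é' is U+00E9, which UTF-8 encodes as the two bytes
// 0xc3 0xa9, so toUtf8Bytes('é') returns a Uint8Array containing [0xc3, 0xa9].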
const SEED = "mimcsponge" ;
const NROUNDS = 220 ;
async function buildMimcSponge ( ) {
const bn128 = await ffjavascript . getCurveFromName ( "bn128" , true ) ;
return new MimcSponge ( bn128 . Fr ) ;
}
class MimcSponge {
constructor ( F ) {
this . F = F ;
this . cts = this . getConstants ( SEED , NROUNDS ) ;
}
getIV ( seed ) {
const F = this . F ;
if ( typeof seed === "undefined" ) seed = SEED ;
const c = keccak256 ( toUtf8Bytes ( seed + "_iv" ) ) ;
const cn = ffjavascript . Scalar . e ( c ) ;
const iv = cn . mod ( F . p ) ;
return iv ;
} ;
getConstants ( seed , nRounds ) {
const F = this . F ;
if ( typeof nRounds === "undefined" ) nRounds = NROUNDS ;
const cts = new Array ( nRounds ) ;
let c = keccak256 ( toUtf8Bytes ( SEED ) ) ; for ( let i = 1 ; i < nRounds ; i ++ ) {
c = keccak256 ( c ) ;
cts [ i ] = F . e ( c ) ;
}
cts [ 0 ] = F . e ( 0 ) ;
cts [ cts . length - 1 ] = F . e ( 0 ) ;
return cts ;
} ;
hash ( _xL _in , _xR _in , _k ) {
const F = this . F ;
let xL = F . e ( _xL _in ) ;
let xR = F . e ( _xR _in ) ;
const k = F . e ( _k ) ;
for ( let i = 0 ; i < NROUNDS ; i ++ ) {
const c = this . cts [ i ] ;
const t = ( i == 0 ) ? F . add ( xL , k ) : F . add ( F . add ( xL , k ) , c ) ;
const t2 = F . square ( t ) ;
const t4 = F . square ( t2 ) ;
const t5 = F . mul ( t4 , t ) ;
const xR _tmp = F . e ( xR ) ;
if ( i < ( NROUNDS - 1 ) ) {
xR = xL ;
xL = F . add ( xR _tmp , t5 ) ;
} else {
xR = F . add ( xR _tmp , t5 ) ;
}
}
return {
xL : xL ,
xR : xR
} ;
}
multiHash ( arr , key , numOutputs ) {
const F = this . F ;
if ( typeof ( numOutputs ) === "undefined" ) {
numOutputs = 1 ;
}
if ( typeof ( key ) === "undefined" ) {
key = F . zero ;
}
let R = F . zero ;
let C = F . zero ;
for ( let i = 0 ; i < arr . length ; i ++ ) {
R = F . add ( R , F . e ( arr [ i ] ) ) ;
const S = this . hash ( R , C , key ) ;
R = S . xL ;
C = S . xR ;
}
let outputs = [ R ] ;
for ( let i = 1 ; i < numOutputs ; i ++ ) {
const S = this . hash ( R , C , key ) ;
R = S . xL ;
C = S . xR ;
outputs . push ( R ) ;
}
if ( numOutputs == 1 ) {
return outputs [ 0 ] ;
} else {
return outputs ;
}
}
}
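// multiHash([left, right]) above is the 2-to-1 compression used for Merkle nodes: each input is
// absorbed into the Feistel state (xL, xR) through `hash`, and the first output xL is the node
// value. A hedged sketch of direct use inside an async context:
//
//   const sponge = await buildMimcSponge();
//   const node = sponge.F.toString(sponge.multiHash([1n, 2n])); // decimal string field element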
var _ _async$1 = ( _ _this , _ _arguments , generator ) => {
return new Promise ( ( resolve , reject ) => {
var fulfilled = ( value ) => {
try {
step ( generator . next ( value ) ) ;
} catch ( e ) {
reject ( e ) ;
}
} ;
var rejected = ( value ) => {
try {
step ( generator . throw ( value ) ) ;
} catch ( e ) {
reject ( e ) ;
}
} ;
var step = ( x ) => x . done ? resolve ( x . value ) : Promise . resolve ( x . value ) . then ( fulfilled , rejected ) ;
step ( ( generator = generator . apply ( _ _this , _ _arguments ) ) . next ( ) ) ;
} ) ;
} ;
class Mimc {
constructor ( ) {
this . mimcPromise = this . initMimc ( ) ;
}
initMimc ( ) {
return _ _async$1 ( this , null , function * ( ) {
this . sponge = yield buildMimcSponge ( ) ;
this . hash = ( left , right ) => {
var _a , _b ;
return ( _b = this . sponge ) == null ? void 0 : _b . F . toString ( ( _a = this . sponge ) == null ? void 0 : _a . multiHash ( [ BigInt ( left ) , BigInt ( right ) ] ) ) ;
} ;
} ) ;
}
getHash ( ) {
return _ _async$1 ( this , null , function * ( ) {
yield this . mimcPromise ;
return {
sponge : this . sponge ,
hash : this . hash
} ;
} ) ;
}
}
const mimc = new Mimc ( ) ;
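// The Mimc wrapper above lazily builds the sponge once and exposes hash(left, right) as a plain
// function returning a decimal string, which is the hashFunction shape the Merkle tree expects.
// Hedged sketch:
//
//   const { hash } = await mimc.getHash();  // resolves after buildMimcSponge() completes
//   const parent = hash('1', '2');          // decimal string suitable as a tree node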
BigInt . prototype . toJSON = function ( ) {
return this . toString ( ) ;
} ;
const isNode = ! process . browser && typeof globalThis . window === "undefined" ;
var _ _async = ( _ _this , _ _arguments , generator ) => {
return new Promise ( ( resolve , reject ) => {
var fulfilled = ( value ) => {
try {
step ( generator . next ( value ) ) ;
} catch ( e ) {
reject ( e ) ;
}
} ;
var rejected = ( value ) => {
try {
step ( generator . throw ( value ) ) ;
} catch ( e ) {
reject ( e ) ;
}
} ;
var step = ( x ) => x . done ? resolve ( x . value ) : Promise . resolve ( x . value ) . then ( fulfilled , rejected ) ;
step ( ( generator = generator . apply ( _ _this , _ _arguments ) ) . next ( ) ) ;
} ) ;
} ;
function nodePostWork ( ) {
return _ _async ( this , null , function * ( ) {
const { hash : hashFunction } = yield mimc . getHash ( ) ;
const { merkleTreeHeight , edge , elements , zeroElement } = workerThreads . workerData ;
if ( edge ) {
const merkleTree2 = new libExports . PartialMerkleTree ( merkleTreeHeight , edge , elements , {
zeroElement ,
hashFunction
} ) ;
workerThreads . parentPort . postMessage ( merkleTree2 . toString ( ) ) ;
return ;
}
const merkleTree = new libExports . MerkleTree ( merkleTreeHeight , elements , {
zeroElement ,
hashFunction
} ) ;
workerThreads . parentPort . postMessage ( merkleTree . toString ( ) ) ;
} ) ;
}
if ( isNode && workerThreads ) {
nodePostWork ( ) ;
} else if ( ! isNode && typeof addEventListener === "function" && typeof postMessage === "function" ) {
addEventListener ( "message" , ( e ) => _ _async ( undefined , null , function * ( ) {
let data ;
if ( e . data ) {
data = e . data ;
} else {
data = e ;
}
const { hash : hashFunction } = yield mimc . getHash ( ) ;
const { merkleTreeHeight , edge , elements , zeroElement } = data ;
if ( edge ) {
const merkleTree2 = new libExports . PartialMerkleTree ( merkleTreeHeight , edge , elements , {
zeroElement ,
hashFunction
} ) ;
postMessage ( merkleTree2 . toString ( ) ) ;
return ;
}
const merkleTree = new libExports . MerkleTree ( merkleTreeHeight , elements , {
zeroElement ,
hashFunction
} ) ;
postMessage ( merkleTree . toString ( ) ) ;
} ) ) ;
} else {
throw new Error ( "This browser / environment does not support workers!" ) ;
}
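// Hedged sketch of how a caller might drive this worker. The message shape follows the
// workerData / postMessage handling above; the file names are assumptions about the build output.
//
//   // Node.js side (worker_threads)
//   const { Worker } = require('worker_threads');
//   const worker = new Worker('./merkleTreeWorker.cjs', {
//     workerData: { merkleTreeHeight: 20, elements: ['1', '2'], zeroElement: 0 },
//   });
//   worker.once('message', (serialized) => {
//     // `serialized` is the MerkleTree#toString() JSON; rebuild with MerkleTree.deserialize
//   });
//
//   // Browser side (Web Worker)
//   const webWorker = new Worker('merkleTreeWorker.js');
//   webWorker.postMessage({ merkleTreeHeight: 20, elements: ['1', '2'], zeroElement: 0 });
//   webWorker.onmessage = (e) => { /* e.data is the serialized tree JSON string */ };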