eth/catalyst: implement kintsugi-spec v3 (#24067)
parent f5f5c0855a
commit 2295640ebd
@@ -19,6 +19,7 @@ package forkid
 import (
 "bytes"
 "math"
+"math/big"
 "testing"
 
 "github.com/ethereum/go-ethereum/common"
@@ -29,6 +30,8 @@ import (
 // TestCreation tests that different genesis and fork rule combinations result in
 // the correct fork ID.
 func TestCreation(t *testing.T) {
+mergeConfig := *params.MainnetChainConfig
+mergeConfig.MergeForkBlock = big.NewInt(15000000)
 type testcase struct {
 head uint64
 want ID
@@ -65,7 +68,7 @@ func TestCreation(t *testing.T) {
 {12964999, ID{Hash: checksumToBytes(0x0eb440f6), Next: 12965000}}, // Last Berlin block
 {12965000, ID{Hash: checksumToBytes(0xb715077d), Next: 13773000}}, // First London block
 {13772999, ID{Hash: checksumToBytes(0xb715077d), Next: 13773000}}, // Last London block
-{13773000, ID{Hash: checksumToBytes(0x20c327fc), Next: 0}}, /// First Arrow Glacier block
+{13773000, ID{Hash: checksumToBytes(0x20c327fc), Next: 0}}, // First Arrow Glacier block
 {20000000, ID{Hash: checksumToBytes(0x20c327fc), Next: 0}}, // Future Arrow Glacier block
 },
 },
@@ -133,6 +136,38 @@ func TestCreation(t *testing.T) {
 {6000000, ID{Hash: checksumToBytes(0xB8C6299D), Next: 0}}, // Future London block
 },
 },
+// Merge test cases
+{
+&mergeConfig,
+params.MainnetGenesisHash,
+[]testcase{
+{0, ID{Hash: checksumToBytes(0xfc64ec04), Next: 1150000}}, // Unsynced
+{1149999, ID{Hash: checksumToBytes(0xfc64ec04), Next: 1150000}}, // Last Frontier block
+{1150000, ID{Hash: checksumToBytes(0x97c2c34c), Next: 1920000}}, // First Homestead block
+{1919999, ID{Hash: checksumToBytes(0x97c2c34c), Next: 1920000}}, // Last Homestead block
+{1920000, ID{Hash: checksumToBytes(0x91d1f948), Next: 2463000}}, // First DAO block
+{2462999, ID{Hash: checksumToBytes(0x91d1f948), Next: 2463000}}, // Last DAO block
+{2463000, ID{Hash: checksumToBytes(0x7a64da13), Next: 2675000}}, // First Tangerine block
+{2674999, ID{Hash: checksumToBytes(0x7a64da13), Next: 2675000}}, // Last Tangerine block
+{2675000, ID{Hash: checksumToBytes(0x3edd5b10), Next: 4370000}}, // First Spurious block
+{4369999, ID{Hash: checksumToBytes(0x3edd5b10), Next: 4370000}}, // Last Spurious block
+{4370000, ID{Hash: checksumToBytes(0xa00bc324), Next: 7280000}}, // First Byzantium block
+{7279999, ID{Hash: checksumToBytes(0xa00bc324), Next: 7280000}}, // Last Byzantium block
+{7280000, ID{Hash: checksumToBytes(0x668db0af), Next: 9069000}}, // First and last Constantinople, first Petersburg block
+{9068999, ID{Hash: checksumToBytes(0x668db0af), Next: 9069000}}, // Last Petersburg block
+{9069000, ID{Hash: checksumToBytes(0x879d6e30), Next: 9200000}}, // First Istanbul and first Muir Glacier block
+{9199999, ID{Hash: checksumToBytes(0x879d6e30), Next: 9200000}}, // Last Istanbul and first Muir Glacier block
+{9200000, ID{Hash: checksumToBytes(0xe029e991), Next: 12244000}}, // First Muir Glacier block
+{12243999, ID{Hash: checksumToBytes(0xe029e991), Next: 12244000}}, // Last Muir Glacier block
+{12244000, ID{Hash: checksumToBytes(0x0eb440f6), Next: 12965000}}, // First Berlin block
+{12964999, ID{Hash: checksumToBytes(0x0eb440f6), Next: 12965000}}, // Last Berlin block
+{12965000, ID{Hash: checksumToBytes(0xb715077d), Next: 13773000}}, // First London block
+{13772999, ID{Hash: checksumToBytes(0xb715077d), Next: 13773000}}, // Last London block
+{13773000, ID{Hash: checksumToBytes(0x20c327fc), Next: 15000000}}, // First Arrow Glacier block
+{15000000, ID{Hash: checksumToBytes(0xe3abe201), Next: 0}}, // First Merge Start block
+{20000000, ID{Hash: checksumToBytes(0xe3abe201), Next: 0}}, // Future Merge Start block
+},
+},
 }
 for i, tt := range tests {
 for j, ttt := range tt.cases {
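Note on the new Merge test cases: the expected hashes follow EIP-2124, where a fork ID is the CRC32 checksum of the genesis hash folded with each past fork block number. A minimal standalone sketch of that checksum, assuming the EIP-2124 construction (this is an illustrative stand-in, not the forkid package's internal API):

package main

import (
	"encoding/binary"
	"fmt"
	"hash/crc32"
)

// forkHash folds fork block numbers into a CRC32 of the genesis hash,
// mirroring the EIP-2124 construction.
func forkHash(genesis [32]byte, forkBlocks []uint64) uint32 {
	sum := crc32.ChecksumIEEE(genesis[:])
	for _, block := range forkBlocks {
		var blob [8]byte
		binary.BigEndian.PutUint64(blob[:], block)
		sum = crc32.Update(sum, crc32.IEEETable, blob[:])
	}
	return sum
}

func main() {
	var genesis [32]byte // zero hash, illustrative only
	fmt.Printf("%#x\n", forkHash(genesis, []uint64{1150000, 1920000, 15000000}))
}

Scheduling MergeForkBlock at 15000000 therefore changes the Arrow Glacier entry's Next field and introduces the 0xe3abe201 hash once the head crosses 15000000, as the table above shows.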
@@ -46,10 +46,11 @@ var (
 VALID = GenericStringResponse{"VALID"}
 SUCCESS = GenericStringResponse{"SUCCESS"}
 INVALID = ForkChoiceResponse{Status: "INVALID", PayloadID: nil}
-SYNCING = ForkChoiceResponse{Status: "INVALID", PayloadID: nil}
-UnknownHeader = rpc.CustomError{Code: -32000, Message: "unknown header"}
-UnknownPayload = rpc.CustomError{Code: -32001, Message: "unknown payload"}
-InvalidPayloadID = rpc.CustomError{Code: 1, Message: "invalid payload id"}
+SYNCING = ForkChoiceResponse{Status: "SYNCING", PayloadID: nil}
+GenericServerError = rpc.CustomError{Code: -32000, ValidationError: "Server error"}
+UnknownPayload = rpc.CustomError{Code: -32001, ValidationError: "Unknown payload"}
+InvalidTB = rpc.CustomError{Code: -32002, ValidationError: "Invalid terminal block"}
+InvalidPayloadID = rpc.CustomError{Code: 1, ValidationError: "invalid payload id"}
 )
 
 // Register adds catalyst APIs to the full node.
@@ -232,7 +233,7 @@ func computePayloadId(headBlockHash common.Hash, params *PayloadAttributesV1) []
 hasher.Write(headBlockHash[:])
 binary.Write(hasher, binary.BigEndian, params.Timestamp)
 hasher.Write(params.Random[:])
-hasher.Write(params.FeeRecipient[:])
+hasher.Write(params.SuggestedFeeRecipient[:])
 return hasher.Sum([]byte{})[:8]
 }
 
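The hunk above only swaps FeeRecipient for SuggestedFeeRecipient in the payload-ID derivation; the shape of the computation is unchanged. A self-contained sketch of that derivation, assuming a SHA-256 hasher and plain byte-array stand-ins for the common.Hash/common.Address types:

package main

import (
	"crypto/sha256"
	"encoding/binary"
	"fmt"
)

// payloadID hashes the head block hash together with the payload attributes
// and keeps only the first 8 bytes, matching the truncation in the hunk above.
func payloadID(headBlockHash [32]byte, timestamp uint64, random [32]byte, suggestedFeeRecipient [20]byte) []byte {
	hasher := sha256.New()
	hasher.Write(headBlockHash[:])
	binary.Write(hasher, binary.BigEndian, timestamp) // big-endian uint64, as above
	hasher.Write(random[:])
	hasher.Write(suggestedFeeRecipient[:])
	return hasher.Sum(nil)[:8]
}

func main() {
	var head, random [32]byte
	var recipient [20]byte
	fmt.Printf("%#x\n", payloadID(head, 1639000000, random, recipient))
}

Because only the first 8 bytes are kept, identical attributes on the same head always map to the same payload ID.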
@@ -308,7 +309,7 @@ func (api *ConsensusAPI) assembleBlock(parentHash common.Hash, params *PayloadAt
 log.Warn("Producing block too far in the future", "diff", common.PrettyDuration(diff))
 }
 pending := api.eth.TxPool().Pending(true)
-coinbase := params.FeeRecipient
+coinbase := params.SuggestedFeeRecipient
 num := parent.Number()
 header := &types.Header{
 ParentHash: parent.Hash(),
@@ -419,10 +420,10 @@ func ExecutableDataToBlock(params ExecutableDataV1) (*types.Block, error) {
 header := &types.Header{
 ParentHash: params.ParentHash,
 UncleHash: types.EmptyUncleHash,
-Coinbase: params.Coinbase,
+Coinbase: params.FeeRecipient,
 Root: params.StateRoot,
 TxHash: types.DeriveSha(types.Transactions(txs), trie.NewStackTrie(nil)),
-ReceiptHash: params.ReceiptRoot,
+ReceiptHash: params.ReceiptsRoot,
 Bloom: types.BytesToBloom(params.LogsBloom),
 Difficulty: common.Big0,
 Number: number,
@@ -444,14 +445,14 @@ func BlockToExecutableData(block *types.Block, random common.Hash) *ExecutableDa
 return &ExecutableDataV1{
 BlockHash: block.Hash(),
 ParentHash: block.ParentHash(),
-Coinbase: block.Coinbase(),
+FeeRecipient: block.Coinbase(),
 StateRoot: block.Root(),
 Number: block.NumberU64(),
 GasLimit: block.GasLimit(),
 GasUsed: block.GasUsed(),
 BaseFeePerGas: block.BaseFee(),
 Timestamp: block.Time(),
-ReceiptRoot: block.ReceiptHash(),
+ReceiptsRoot: block.ReceiptHash(),
 LogsBloom: block.Bloom().Bytes(),
 Transactions: encodeTransactions(block.Transactions()),
 Random: random,
@@ -475,11 +476,11 @@ func (api *ConsensusAPI) checkTerminalTotalDifficulty(head common.Hash) error {
 // make sure the parent has enough terminal total difficulty
 newHeadBlock := api.eth.BlockChain().GetBlockByHash(head)
 if newHeadBlock == nil {
-return &UnknownHeader
+return &GenericServerError
 }
 td := api.eth.BlockChain().GetTd(newHeadBlock.Hash(), newHeadBlock.NumberU64())
 if td != nil && td.Cmp(api.eth.BlockChain().Config().TerminalTotalDifficulty) < 0 {
-return errors.New("total difficulty not reached yet")
+return &InvalidTB
 }
 return nil
 }
@@ -494,7 +495,7 @@ func (api *ConsensusAPI) setHead(newHead common.Hash) error {
 }
 newHeadHeader := api.les.BlockChain().GetHeaderByHash(newHead)
 if newHeadHeader == nil {
-return &UnknownHeader
+return &GenericServerError
 }
 if err := api.les.BlockChain().SetChainHead(newHeadHeader); err != nil {
 return err
@@ -508,15 +509,11 @@ func (api *ConsensusAPI) setHead(newHead common.Hash) error {
 }
 headBlock := api.eth.BlockChain().CurrentBlock()
 if headBlock.Hash() == newHead {
-// Trigger the transition if it's the first `NewHead` event.
-if merger := api.merger(); !merger.PoSFinalized() {
-merger.FinalizePoS()
-}
 return nil
 }
 newHeadBlock := api.eth.BlockChain().GetBlockByHash(newHead)
 if newHeadBlock == nil {
-return &UnknownHeader
+return &GenericServerError
 }
 if err := api.eth.BlockChain().SetChainHead(newHeadBlock); err != nil {
 return err
@@ -376,7 +376,7 @@ func TestFullAPI(t *testing.T) {
 params := PayloadAttributesV1{
 Timestamp: parent.Time() + 1,
 Random: crypto.Keccak256Hash([]byte{byte(i)}),
-FeeRecipient: parent.Coinbase(),
+SuggestedFeeRecipient: parent.Coinbase(),
 }
 fcState := ForkchoiceStateV1{
 HeadBlockHash: parent.Hash(),
@@ -29,7 +29,7 @@ import (
 type PayloadAttributesV1 struct {
 Timestamp uint64 `json:"timestamp" gencodec:"required"`
 Random common.Hash `json:"random" gencodec:"required"`
-FeeRecipient common.Address `json:"feeRecipient" gencodec:"required"`
+SuggestedFeeRecipient common.Address `json:"suggestedFeeRecipient" gencodec:"required"`
 }
 
 // JSON type overrides for PayloadAttributesV1.
@@ -42,9 +42,9 @@ type payloadAttributesMarshaling struct {
 // Structure described at https://github.com/ethereum/execution-apis/src/engine/specification.md
 type ExecutableDataV1 struct {
 ParentHash common.Hash `json:"parentHash" gencodec:"required"`
-Coinbase common.Address `json:"coinbase" gencodec:"required"`
+FeeRecipient common.Address `json:"feeRecipient" gencodec:"required"`
 StateRoot common.Hash `json:"stateRoot" gencodec:"required"`
-ReceiptRoot common.Hash `json:"receiptRoot" gencodec:"required"`
+ReceiptsRoot common.Hash `json:"receiptsRoot" gencodec:"required"`
 LogsBloom []byte `json:"logsBloom" gencodec:"required"`
 Random common.Hash `json:"random" gencodec:"required"`
 Number uint64 `json:"blockNumber" gencodec:"required"`
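The renames in this file are wire-format changes: the JSON keys move from coinbase, receiptRoot and feeRecipient to feeRecipient, receiptsRoot and suggestedFeeRecipient. A small sketch of how the new payload-attributes tags serialize (a local struct mirroring the fields shown above, not the catalyst types themselves):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/common/hexutil"
)

// payloadAttributes mirrors the JSON tags of PayloadAttributesV1 from the diff.
type payloadAttributes struct {
	Timestamp             hexutil.Uint64 `json:"timestamp"`
	Random                common.Hash    `json:"random"`
	SuggestedFeeRecipient common.Address `json:"suggestedFeeRecipient"`
}

func main() {
	b, _ := json.Marshal(payloadAttributes{
		Timestamp:             hexutil.Uint64(1639000000),
		Random:                common.HexToHash("0x01"),
		SuggestedFeeRecipient: common.HexToAddress("0xdeadbeef"),
	})
	// The timestamp is emitted as a hex quantity, hashes and addresses as 0x-prefixed hex data.
	fmt.Println(string(b))
}

The MarshalJSON/UnmarshalJSON hunks that follow are the mechanical fallout of the same field renames in the generated codecs.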
@@ -17,12 +17,12 @@ func (p PayloadAttributesV1) MarshalJSON() ([]byte, error) {
 type PayloadAttributesV1 struct {
 Timestamp hexutil.Uint64 `json:"timestamp" gencodec:"required"`
 Random common.Hash `json:"random" gencodec:"required"`
-FeeRecipient common.Address `json:"feeRecipient" gencodec:"required"`
+SuggestedFeeRecipient common.Address `json:"suggestedFeeRecipient" gencodec:"required"`
 }
 var enc PayloadAttributesV1
 enc.Timestamp = hexutil.Uint64(p.Timestamp)
 enc.Random = p.Random
-enc.FeeRecipient = p.FeeRecipient
+enc.SuggestedFeeRecipient = p.SuggestedFeeRecipient
 return json.Marshal(&enc)
 }
 
@@ -31,7 +31,7 @@ func (p *PayloadAttributesV1) UnmarshalJSON(input []byte) error {
 type PayloadAttributesV1 struct {
 Timestamp *hexutil.Uint64 `json:"timestamp" gencodec:"required"`
 Random *common.Hash `json:"random" gencodec:"required"`
-FeeRecipient *common.Address `json:"feeRecipient" gencodec:"required"`
+SuggestedFeeRecipient *common.Address `json:"suggestedFeeRecipient" gencodec:"required"`
 }
 var dec PayloadAttributesV1
 if err := json.Unmarshal(input, &dec); err != nil {
@@ -45,9 +45,9 @@ func (p *PayloadAttributesV1) UnmarshalJSON(input []byte) error {
 return errors.New("missing required field 'random' for PayloadAttributesV1")
 }
 p.Random = *dec.Random
-if dec.FeeRecipient == nil {
-return errors.New("missing required field 'feeRecipient' for PayloadAttributesV1")
+if dec.SuggestedFeeRecipient == nil {
+return errors.New("missing required field 'suggestedFeeRecipient' for PayloadAttributesV1")
 }
-p.FeeRecipient = *dec.FeeRecipient
+p.SuggestedFeeRecipient = *dec.SuggestedFeeRecipient
 return nil
 }
@@ -17,9 +17,9 @@ var _ = (*executableDataMarshaling)(nil)
 func (e ExecutableDataV1) MarshalJSON() ([]byte, error) {
 type ExecutableDataV1 struct {
 ParentHash common.Hash `json:"parentHash" gencodec:"required"`
-Coinbase common.Address `json:"coinbase" gencodec:"required"`
+FeeRecipient common.Address `json:"feeRecipient" gencodec:"required"`
 StateRoot common.Hash `json:"stateRoot" gencodec:"required"`
-ReceiptRoot common.Hash `json:"receiptRoot" gencodec:"required"`
+ReceiptsRoot common.Hash `json:"receiptsRoot" gencodec:"required"`
 LogsBloom hexutil.Bytes `json:"logsBloom" gencodec:"required"`
 Random common.Hash `json:"random" gencodec:"required"`
 Number hexutil.Uint64 `json:"blockNumber" gencodec:"required"`
@@ -33,9 +33,9 @@ func (e ExecutableDataV1) MarshalJSON() ([]byte, error) {
 }
 var enc ExecutableDataV1
 enc.ParentHash = e.ParentHash
-enc.Coinbase = e.Coinbase
+enc.FeeRecipient = e.FeeRecipient
 enc.StateRoot = e.StateRoot
-enc.ReceiptRoot = e.ReceiptRoot
+enc.ReceiptsRoot = e.ReceiptsRoot
 enc.LogsBloom = e.LogsBloom
 enc.Random = e.Random
 enc.Number = hexutil.Uint64(e.Number)
@@ -58,9 +58,9 @@ func (e ExecutableDataV1) MarshalJSON() ([]byte, error) {
 func (e *ExecutableDataV1) UnmarshalJSON(input []byte) error {
 type ExecutableDataV1 struct {
 ParentHash *common.Hash `json:"parentHash" gencodec:"required"`
-Coinbase *common.Address `json:"coinbase" gencodec:"required"`
+FeeRecipient *common.Address `json:"feeRecipient" gencodec:"required"`
 StateRoot *common.Hash `json:"stateRoot" gencodec:"required"`
-ReceiptRoot *common.Hash `json:"receiptRoot" gencodec:"required"`
+ReceiptsRoot *common.Hash `json:"receiptsRoot" gencodec:"required"`
 LogsBloom *hexutil.Bytes `json:"logsBloom" gencodec:"required"`
 Random *common.Hash `json:"random" gencodec:"required"`
 Number *hexutil.Uint64 `json:"blockNumber" gencodec:"required"`
@@ -80,18 +80,18 @@ func (e *ExecutableDataV1) UnmarshalJSON(input []byte) error {
 return errors.New("missing required field 'parentHash' for ExecutableDataV1")
 }
 e.ParentHash = *dec.ParentHash
-if dec.Coinbase == nil {
-return errors.New("missing required field 'coinbase' for ExecutableDataV1")
+if dec.FeeRecipient == nil {
+return errors.New("missing required field 'feeRecipient' for ExecutableDataV1")
 }
-e.Coinbase = *dec.Coinbase
+e.FeeRecipient = *dec.FeeRecipient
 if dec.StateRoot == nil {
 return errors.New("missing required field 'stateRoot' for ExecutableDataV1")
 }
 e.StateRoot = *dec.StateRoot
-if dec.ReceiptRoot == nil {
-return errors.New("missing required field 'receiptRoot' for ExecutableDataV1")
+if dec.ReceiptsRoot == nil {
+return errors.New("missing required field 'receiptsRoot' for ExecutableDataV1")
 }
-e.ReceiptRoot = *dec.ReceiptRoot
+e.ReceiptsRoot = *dec.ReceiptsRoot
 if dec.LogsBloom == nil {
 return errors.New("missing required field 'logsBloom' for ExecutableDataV1")
 }
@@ -257,16 +257,16 @@ var (
 //
 // This configuration is intentionally not using keyed fields to force anyone
 // adding flags to the config to also have to set these fields.
-AllEthashProtocolChanges = &ChainConfig{big.NewInt(1337), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, new(EthashConfig), nil}
+AllEthashProtocolChanges = &ChainConfig{big.NewInt(1337), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, nil, new(EthashConfig), nil}
 
 // AllCliqueProtocolChanges contains every protocol change (EIPs) introduced
 // and accepted by the Ethereum core developers into the Clique consensus.
 //
 // This configuration is intentionally not using keyed fields to force anyone
 // adding flags to the config to also have to set these fields.
-AllCliqueProtocolChanges = &ChainConfig{big.NewInt(1337), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, nil, nil, &CliqueConfig{Period: 0, Epoch: 30000}}
+AllCliqueProtocolChanges = &ChainConfig{big.NewInt(1337), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, nil, nil, nil, &CliqueConfig{Period: 0, Epoch: 30000}}
 
-TestChainConfig = &ChainConfig{big.NewInt(1), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, new(EthashConfig), nil}
+TestChainConfig = &ChainConfig{big.NewInt(1), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, nil, new(EthashConfig), nil}
 TestRules = TestChainConfig.Rules(new(big.Int))
 )
@@ -346,6 +346,7 @@ type ChainConfig struct {
 BerlinBlock *big.Int `json:"berlinBlock,omitempty"` // Berlin switch block (nil = no fork, 0 = already on berlin)
 LondonBlock *big.Int `json:"londonBlock,omitempty"` // London switch block (nil = no fork, 0 = already on london)
 ArrowGlacierBlock *big.Int `json:"arrowGlacierBlock,omitempty"` // Eip-4345 (bomb delay) switch block (nil = no fork, 0 = already activated)
+MergeForkBlock *big.Int `json:"mergeForkBlock,omitempty"` // EIP-3675 (TheMerge) switch block (nil = no fork, 0 = already in merge proceedings)
 
 // TerminalTotalDifficulty is the amount of total difficulty reached by
 // the network that triggers the consensus upgrade.
@@ -386,7 +387,7 @@ func (c *ChainConfig) String() string {
 default:
 engine = "unknown"
 }
-return fmt.Sprintf("{ChainID: %v Homestead: %v DAO: %v DAOSupport: %v EIP150: %v EIP155: %v EIP158: %v Byzantium: %v Constantinople: %v Petersburg: %v Istanbul: %v, Muir Glacier: %v, Berlin: %v, London: %v, Arrow Glacier: %v, Engine: %v}",
+return fmt.Sprintf("{ChainID: %v Homestead: %v DAO: %v DAOSupport: %v EIP150: %v EIP155: %v EIP158: %v Byzantium: %v Constantinople: %v Petersburg: %v Istanbul: %v, Muir Glacier: %v, Berlin: %v, London: %v, Arrow Glacier: %v, MergeFork: %v, Engine: %v}",
 c.ChainID,
 c.HomesteadBlock,
 c.DAOForkBlock,
@@ -402,6 +403,7 @@ func (c *ChainConfig) String() string {
 c.BerlinBlock,
 c.LondonBlock,
 c.ArrowGlacierBlock,
+c.MergeForkBlock,
 engine,
 )
 }
@@ -522,6 +524,7 @@ func (c *ChainConfig) CheckConfigForkOrder() error {
 {name: "berlinBlock", block: c.BerlinBlock},
 {name: "londonBlock", block: c.LondonBlock},
 {name: "arrowGlacierBlock", block: c.ArrowGlacierBlock, optional: true},
+{name: "mergeStartBlock", block: c.MergeForkBlock, optional: true},
 } {
 if lastFork.name != "" {
 // Next one must be higher number
@@ -594,6 +597,9 @@ func (c *ChainConfig) checkCompatible(newcfg *ChainConfig, head *big.Int) *Confi
 if isForkIncompatible(c.ArrowGlacierBlock, newcfg.ArrowGlacierBlock, head) {
 return newCompatError("Arrow Glacier fork block", c.ArrowGlacierBlock, newcfg.ArrowGlacierBlock)
 }
+if isForkIncompatible(c.MergeForkBlock, newcfg.MergeForkBlock, head) {
+return newCompatError("Merge Start fork block", c.MergeForkBlock, newcfg.MergeForkBlock)
+}
 return nil
 }
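The new MergeForkBlock check reuses the existing compatibility rule: a stored config and an updated config only conflict if they schedule a fork differently at or below the current head. A hedged, self-contained sketch of that rule (local helpers approximating the params package logic, not its exported API):

package main

import (
	"fmt"
	"math/big"
)

// isForked reports whether a fork scheduled at `fork` is already active at `head`.
func isForked(fork, head *big.Int) bool {
	return fork != nil && head != nil && fork.Cmp(head) <= 0
}

// isForkIncompatible reports whether two schedules for the same fork disagree
// in a way that already affects blocks at or below head.
func isForkIncompatible(stored, updated, head *big.Int) bool {
	differ := (stored == nil) != (updated == nil) ||
		(stored != nil && updated != nil && stored.Cmp(updated) != 0)
	return differ && (isForked(stored, head) || isForked(updated, head))
}

func main() {
	head := big.NewInt(14_000_000)
	fmt.Println(isForkIncompatible(nil, big.NewInt(15_000_000), head)) // false: fork scheduled above head
	fmt.Println(isForkIncompatible(big.NewInt(13_000_000), nil, head)) // true: fork removed below head
}

Rescheduling the merge fork above the current head therefore stays compatible, which is consistent with mergeStartBlock being marked optional in CheckConfigForkOrder above.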
@@ -105,9 +105,9 @@ func (e *invalidParamsError) Error() string { return e.message }
 
 type CustomError struct {
 Code int
-Message string
+ValidationError string
 }
 
 func (e *CustomError) ErrorCode() int { return e.Code }
 
-func (e *CustomError) Error() string { return e.Message }
+func (e *CustomError) Error() string { return e.ValidationError }
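The rpc.CustomError change above renames Message to ValidationError but keeps the error interface intact: ErrorCode() supplies the JSON-RPC error code and Error() the human-readable text. A minimal stand-alone sketch (a local copy of the struct from the diff, not an import of the rpc package):

package main

import "fmt"

// CustomError is a local stand-in mirroring the shape in the diff above.
type CustomError struct {
	Code            int
	ValidationError string
}

func (e *CustomError) ErrorCode() int { return e.Code }
func (e *CustomError) Error() string  { return e.ValidationError }

func main() {
	err := &CustomError{Code: -32002, ValidationError: "Invalid terminal block"}
	// A JSON-RPC response built from this error would carry this code and message.
	fmt.Printf("code=%d message=%q\n", err.ErrorCode(), err.Error())
}

On the wire this surfaces as a standard JSON-RPC error object, e.g. code -32002 with the "Invalid terminal block" text defined in the catalyst error variables earlier in this commit.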