Merge pull request #2048 from Pythonberg1997/bc-fusion
chore: merge with develop branch
This commit is contained in commit 3414e5672a.
.github/workflows/build-test.yml (vendored, 6 changed lines)

@@ -7,7 +7,9 @@ on:
- develop
pull_request:
branches:
- master
- develop
jobs:
unit-test:
@@ -45,5 +47,3 @@ jobs:
run: |
go mod download
make geth
.github/workflows/commit-lint.yml (vendored, 4 changed lines)

@@ -7,7 +7,9 @@ on:
- develop
pull_request:
branches:
- master
- develop
jobs:
commitlint:
.github/workflows/integration-test.yml (vendored, 3 changed lines)

@@ -7,6 +7,9 @@ on:
- develop
pull_request:
branches:
- master
- develop
jobs:
truffle-test:
.github/workflows/lint.yml (vendored, 4 changed lines)

@@ -7,7 +7,9 @@ on:
- develop
pull_request:
branches:
- master
- develop
jobs:
golang-lint:
.github/workflows/unit-test.yml (vendored, 7 changed lines)

@@ -7,7 +7,9 @@ on:
- develop
pull_request:
branches:
- master
- develop
jobs:
unit-test:
@@ -48,6 +50,5 @@ jobs:
ANDROID_HOME: "" # Skip android test
run: |
git submodule update --init --depth 1 --recursive
go mod tidy
go mod download
make test
.gitignore (vendored, 1 changed line)

@@ -14,7 +14,6 @@
*/**/*tx_database*
*/**/*dapps*
build/_vendor/pkg
/tests/truffle/storage

#*
.#*
CHANGELOG.md (33 changed lines)

@@ -1,4 +1,37 @@
# Changelog

## v1.3.5
FEATURE
* [\#1970](https://github.com/bnb-chain/bsc/pull/1970) core: enable Shanghai EIPs
* [\#1973](https://github.com/bnb-chain/bsc/pull/1973) core/systemcontracts: include BEP-319 on kepler hardfork

BUGFIX
* [\#1964](https://github.com/bnb-chain/bsc/pull/1964) consensus/parlia: hardfork block can be epoch block
* [\#1979](https://github.com/bnb-chain/bsc/pull/1979) fix: upgrade pebble and improve config
* [\#1980](https://github.com/bnb-chain/bsc/pull/1980) internal/ethapi: fix null effectiveGasPrice in GetTransactionReceipt

IMPROVEMENT
* [\#1977](https://github.com/bnb-chain/bsc/pull/1977) doc: add instructions for starting fullnode with pbss

## v1.3.4
BUGFIX
* fix: remove pipecommit in miner
* add a hard fork: Hertzfix

## v1.3.3
BUGFIX
* [\#1986](https://github.com/bnb-chain/bsc/pull/1986) fix(cmd): check pruneancient when creating db

IMPROVEMENT
* [\#2000](https://github.com/bnb-chain/bsc/pull/2000) cmd/utils: exit process if txlookuplimit flag is set

## v1.3.2
BUGFIX
* fix: remove sharedPool

IMPROVEMENT
* [\#2007](https://github.com/bnb-chain/bsc/pull/2007) consensus/parlia: increase size of snapshot cache in parlia
* [\#2008](https://github.com/bnb-chain/bsc/pull/2008) consensus/parlia: recover faster when snapshot of parlia is gone in disk

## v1.3.1
FEATURE
* [\#1881](https://github.com/bnb-chain/bsc/pull/1881) feat: active pbss

@@ -29,6 +29,7 @@ import (
"github.com/ethereum/go-ethereum/cmd/utils"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/internal/flags"
"github.com/ethereum/go-ethereum/signer/core"
)

@@ -38,19 +39,16 @@ const (
)

var (
au = aurora.NewAurora(true)
privateKeyFlag = &cli.StringFlag{
Name: "private-key",
Usage: "Hex string for the BLS12-381 private key you wish encrypt into a keystore file",
Value: "",
}
au = aurora.NewAurora(true)
showPrivateKeyFlag = &cli.BoolFlag{
Name: "show-private-key",
Usage: "Show the BLS12-381 private key you will encrypt into a keystore file",
Name: "show-private-key",
Usage: "Show the BLS12-381 private key you will encrypt into a keystore file",
Category: flags.AccountCategory,
}
blsAccountPasswordFileFlag = &cli.StringFlag{
Name: "blsaccountpassword",
Usage: "File path for the BLS account password, which contains the password to encrypt private key into keystore file for managing votes in fast_finality feature",
importedAccountPasswordFileFlag = &cli.StringFlag{
Name: "importedaccountpassword",
Usage: "Password file path for the imported BLS account , which contains the password to get the private key by decrypting the keystore file",
Category: flags.AccountCategory,
}
chainIdFlag = &cli.Int64Flag{
Name: "chain-id",
@@ -137,10 +135,8 @@ Make sure you backup your BLS keys regularly.`,
Category: "BLS ACCOUNT COMMANDS",
Flags: []cli.Flag{
utils.DataDirFlag,
privateKeyFlag,
showPrivateKeyFlag,
utils.BLSPasswordFileFlag,
blsAccountPasswordFileFlag,
},
Description: `
geth bls account new
@@ -156,17 +152,17 @@ You must remember this password to unlock your account in the future.`,
Name: "import",
Usage: "Import a BLS account",
Action: blsAccountImport,
ArgsUsage: "<keystore file>",
ArgsUsage: "<keyFile>",
Category: "BLS ACCOUNT COMMANDS",
Flags: []cli.Flag{
utils.DataDirFlag,
utils.BLSPasswordFileFlag,
blsAccountPasswordFileFlag,
importedAccountPasswordFileFlag,
},
Description: `
geth bls account import <keyFile>

Import a encrypted BLS account from keystore file <keyFile> into the BLS wallet.
Import a encrypted BLS account or a BLS12-381 private key from file <keyFile> into the BLS wallet.

If the BLS wallet not created yet, it will try to create BLS wallet first.`,
},
@@ -242,7 +238,10 @@ func blsWalletCreate(ctx *cli.Context) error {
utils.Fatalf("BLS wallet already exists in <DATADIR>/bls/wallet.")
}

password := utils.GetPassPhraseWithList("Your new BLS wallet will be locked with a password. Please give a password. Do not forget this password.", true, 0, GetBLSPassword(ctx))
password := utils.GetPassPhraseWithList("Your new BLS wallet will be locked with a password. Please give a password. Do not forget this password.", true, 0, utils.MakePasswordListFromPath(ctx.String(utils.BLSPasswordFileFlag.Name)))
if err := core.ValidatePasswordFormat(password); err != nil {
utils.Fatalf("Password invalid: %v.", err)
}

opts := []accounts.Option{}
opts = append(opts, accounts.WithWalletDir(dir))
@@ -272,7 +271,10 @@ func openOrCreateBLSWallet(ctx *cli.Context, cfg *gethConfig) (*wallet.Wallet, e
}
if !dirExists {
fmt.Println("BLS wallet not exists, creating BLS wallet...")
password := utils.GetPassPhraseWithList("Your new BLS wallet will be locked with a password. Please give a password. Do not forget this password.", true, 0, GetBLSPassword(ctx))
password := utils.GetPassPhraseWithList("Your new BLS wallet will be locked with a password. Please give a password. Do not forget this password.", true, 0, utils.MakePasswordListFromPath(ctx.String(utils.BLSPasswordFileFlag.Name)))
if err := core.ValidatePasswordFormat(password); err != nil {
utils.Fatalf("Password invalid: %v.", err)
}

opts := []accounts.Option{}
opts = append(opts, accounts.WithWalletDir(walletDir))
@@ -292,7 +294,7 @@ func openOrCreateBLSWallet(ctx *cli.Context, cfg *gethConfig) (*wallet.Wallet, e
return w, nil
}

walletPassword := utils.GetPassPhraseWithList("Enter the password for your BLS wallet.", false, 0, GetBLSPassword(ctx))
walletPassword := utils.GetPassPhraseWithList("Enter the password for your BLS wallet.", false, 0, utils.MakePasswordListFromPath(ctx.String(utils.BLSPasswordFileFlag.Name)))
w, err = wallet.OpenWallet(context.Background(), &wallet.Config{
WalletDir: walletDir,
WalletPassword: walletPassword,
@@ -332,27 +334,11 @@ func blsAccountCreate(ctx *cli.Context) error {
if err := os.MkdirAll(keystoreDir, 0755); err != nil {
utils.Fatalf("Could not access keystore dir: %v.", err)
}
accountPassword := utils.GetPassPhraseWithList("Your new BLS account will be encrypted with a password. Please give a password. Do not forget this password.", true, 0, GetBLSAccountPassword(ctx))
if err := core.ValidatePasswordFormat(accountPassword); err != nil {
utils.Fatalf("Password invalid: %v.", err)
}
accountPassword := w.Password()

encryptor := keystorev4.New()
secretKey, err := bls.RandKey()
privateKeyString := ctx.String(privateKeyFlag.Name)
if privateKeyString != "" {
if len(privateKeyString) > 2 && strings.Contains(privateKeyString, "0x") {
privateKeyString = privateKeyString[2:] // Strip the 0x prefix, if any.
}
bytesValue, err := hex.DecodeString(privateKeyString)
if err != nil {
utils.Fatalf("could not decode as hex string: %s", privateKeyString)
}
secretKey, err = bls.SecretKeyFromBytes(bytesValue)
if err != nil {
utils.Fatalf("not a valid BLS12-381 private key")
}
} else if err != nil {
if err != nil {
utils.Fatalf("Could not generate BLS secret key: %v.", err)
}

@@ -406,22 +392,6 @@ func blsAccountCreate(ctx *cli.Context) error {

// blsAccountImport imports a BLS account into the BLS wallet.
func blsAccountImport(ctx *cli.Context) error {
keyfile := ctx.Args().First()
if len(keyfile) == 0 {
utils.Fatalf("The keystore file must be given as argument.")
}
keyJSON, err := os.ReadFile(keyfile)
if err != nil {
utils.Fatalf("Could not read keystore file: %v", err)
}
keystore := &keymanager.Keystore{}
if err := json.Unmarshal(keyJSON, keystore); err != nil {
utils.Fatalf("Could not decode keystore file: %v.", err)
}
if keystore.Pubkey == "" {
utils.Fatalf(" Missing public key, wrong keystore file.")
}

cfg := gethConfig{Node: defaultNodeConfig()}
// Load config file.
if file := ctx.String(configFileFlag.Name); file != "" {
@@ -444,13 +414,53 @@ func blsAccountImport(ctx *cli.Context) error {
utils.Fatalf("The BLS keymanager cannot import keystores")
}

password := utils.GetPassPhraseWithList("Enter the password for your imported account.", false, 0, GetBLSAccountPassword(ctx))
keyfile := ctx.Args().First()
if len(keyfile) == 0 {
utils.Fatalf("The keystore file must be given as argument.")
}
keyInfo, err := os.ReadFile(keyfile)
if err != nil {
utils.Fatalf("Could not read keystore file: %v", err)
}
keystore := &keymanager.Keystore{}
var importedAccountPassword string
if err := json.Unmarshal(keyInfo, keystore); err != nil {
secretKey, err := bls.SecretKeyFromBytes(common.FromHex(strings.TrimRight(string(keyInfo), "\r\n")))
if err != nil {
utils.Fatalf("keyFile is neither a keystore file or include a valid BLS12-381 private key: %v.", err)
}
pubKeyBytes := secretKey.PublicKey().Marshal()
encryptor := keystorev4.New()
importedAccountPassword = w.Password()
cryptoFields, err := encryptor.Encrypt(secretKey.Marshal(), importedAccountPassword)
if err != nil {
utils.Fatalf("Could not encrypt secret key: %v.", err)
}
id, err := uuid.NewRandom()
if err != nil {
utils.Fatalf("Could not generate uuid: %v.", err)
}
keystore = &keymanager.Keystore{
Crypto: cryptoFields,
ID: id.String(),
Pubkey: fmt.Sprintf("%x", pubKeyBytes),
Version: encryptor.Version(),
Name: encryptor.Name(),
}
}
if keystore.Pubkey == "" {
utils.Fatalf(" Missing public key, wrong keystore file.")
}

if importedAccountPassword == "" {
importedAccountPassword = utils.GetPassPhraseWithList("Enter the password for your imported account.", false, 0, utils.MakePasswordListFromPath(ctx.String(importedAccountPasswordFileFlag.Name)))
}

fmt.Println("Importing BLS account, this may take a while...")
statuses, err := accounts.ImportAccounts(context.Background(), &accounts.ImportAccountsConfig{
Importer: k,
Keystores: []*keymanager.Keystore{keystore},
AccountPassword: password,
AccountPassword: importedAccountPassword,
})
if err != nil {
utils.Fatalf("Import BLS account failed: %v.", err)
@@ -481,7 +491,7 @@ func blsAccountList(ctx *cli.Context) error {
utils.Fatalf("BLS wallet not exists.")
}

walletPassword := utils.GetPassPhraseWithList("Enter the password for your BLS wallet.", false, 0, GetBLSPassword(ctx))
walletPassword := utils.GetPassPhraseWithList("Enter the password for your BLS wallet.", false, 0, utils.MakePasswordListFromPath(ctx.String(utils.BLSPasswordFileFlag.Name)))
w, err := wallet.OpenWallet(context.Background(), &wallet.Config{
WalletDir: walletDir,
WalletPassword: walletPassword,
@@ -560,7 +570,7 @@ func blsAccountDelete(ctx *cli.Context) error {
utils.Fatalf("BLS wallet not exists.")
}

walletPassword := utils.GetPassPhraseWithList("Enter the password for your BLS wallet.", false, 0, GetBLSPassword(ctx))
walletPassword := utils.GetPassPhraseWithList("Enter the password for your BLS wallet.", false, 0, utils.MakePasswordListFromPath(ctx.String(utils.BLSPasswordFileFlag.Name)))
w, err := wallet.OpenWallet(context.Background(), &wallet.Config{
WalletDir: walletDir,
WalletPassword: walletPassword,
@@ -663,7 +673,7 @@ func blsAccountGenerateProof(ctx *cli.Context) error {
utils.Fatalf("BLS wallet not exists.")
}

walletPassword := utils.GetPassPhraseWithList("Enter the password for your BLS wallet.", false, 0, GetBLSPassword(ctx))
walletPassword := utils.GetPassPhraseWithList("Enter the password for your BLS wallet.", false, 0, utils.MakePasswordListFromPath(ctx.String(utils.BLSPasswordFileFlag.Name)))
w, err := wallet.OpenWallet(context.Background(), &wallet.Config{
WalletDir: walletDir,
WalletPassword: walletPassword,
@@ -697,27 +707,3 @@ func blsAccountGenerateProof(ctx *cli.Context) error {

return nil
}

func GetBLSPassword(ctx *cli.Context) []string {
path := ctx.String(utils.BLSPasswordFileFlag.Name)
if path == "" {
return nil
}
text, err := os.ReadFile(path)
if err != nil {
utils.Fatalf("Failed to read wallet password file: %v", err)
}
return []string{string(text)}
}

func GetBLSAccountPassword(ctx *cli.Context) []string {
path := ctx.String(blsAccountPasswordFileFlag.Name)
if path == "" {
return nil
}
text, err := os.ReadFile(path)
if err != nil {
utils.Fatalf("Failed to read account password file: %v", err)
}
return []string{string(text)}
}
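The reworked import path in blsAccountImport above now accepts either an EIP-2335 keystore JSON file or a file containing a raw hex-encoded BLS12-381 private key. The following is a minimal, self-contained Go sketch of that dispatch logic only; it is not the PR's code, and the printed messages and 32-byte length check are illustrative assumptions.

// Sketch: decide whether a key file is keystore JSON or a raw hex private key,
// mirroring the json.Unmarshal / common.FromHex fallback shown in the diff.
package main

import (
	"encoding/json"
	"fmt"
	"os"
	"strings"

	"github.com/ethereum/go-ethereum/common"
)

func main() {
	if len(os.Args) < 2 {
		fmt.Fprintln(os.Stderr, "usage: blsimport-sketch <keyFile>")
		os.Exit(1)
	}
	raw, err := os.ReadFile(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, "read key file:", err)
		os.Exit(1)
	}
	// Try keystore JSON first, as blsAccountImport does.
	var keystore map[string]any
	if err := json.Unmarshal(raw, &keystore); err == nil {
		fmt.Println("looks like an EIP-2335 keystore, pubkey:", keystore["pubkey"])
		return
	}
	// Otherwise treat the contents as a raw hex private key (0x prefix optional).
	keyBytes := common.FromHex(strings.TrimRight(string(raw), "\r\n"))
	if len(keyBytes) != 32 {
		fmt.Fprintln(os.Stderr, "not a keystore file and not a 32-byte hex BLS12-381 private key")
		os.Exit(1)
	}
	fmt.Printf("raw BLS12-381 private key accepted (%d bytes)\n", len(keyBytes))
}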
cmd/geth/testdata/bls-account-usage-demo.sh (vendored, new file, 33 lines)

@@ -0,0 +1,33 @@
#!/usr/bin/env bash

echo "0. prepare---------------------------------------------------------------------------------"
echo 123abc7890 > bls-password.txt
echo 123abc7891 > bls-password1.txt
basedir=$(cd `dirname $0`; pwd)
workspace=${basedir}/../../../

echo "1. create a bls account--------------------------------------------------------------------"
${workspace}/build/bin/geth bls account new --blspassword ./bls-password.txt --datadir ./bls
${workspace}/build/bin/geth bls account list --blspassword ./bls-password.txt --datadir ./bls

echo "2. import a bls account by passing file including a private key-----------------------------"
secretKey=`${workspace}/build/bin/geth bls account new --show-private-key --blspassword ./bls-password1.txt --datadir ./bls1 | grep private | awk '{print $NF}'`
echo ${secretKey} > ./bls1/secretKey
${workspace}/build/bin/geth bls account import --blspassword ./bls-password.txt --datadir ./bls ./bls1/secretKey
${workspace}/build/bin/geth bls account list --blspassword ./bls-password.txt --datadir ./bls

echo "3. delete the imported account above--------------------------------------------------------"
publicKey=`${workspace}/build/bin/geth bls account list --blspassword ./bls-password.txt --datadir ./bls |grep public | tail -1 | awk '{print $NF}'`
${workspace}/build/bin/geth bls account delete --blspassword ./bls-password.txt --datadir ./bls ${publicKey}
${workspace}/build/bin/geth bls account list --blspassword ./bls-password.txt --datadir ./bls

echo "4. import a bls account by passing a keystore file------------------------------------------"
keystoreFile=`ls bls1/bls/keystore`
${workspace}/build/bin/geth bls account import --importedaccountpassword ./bls-password1.txt --blspassword ./bls-password.txt --datadir ./bls ./bls1/bls/keystore/${keystoreFile}
${workspace}/build/bin/geth bls account list --blspassword ./bls-password.txt --datadir ./bls

echo "5. clearup----------------------------------------------------------------------------------"
rm -rf bls
rm -rf bls1
rm -rf bls-password.txt
rm -rf bls-password1.txt

@@ -1092,14 +1092,14 @@ Please note that --` + MetricsHTTPFlag.Name + ` must be set to start the server.

BLSPasswordFileFlag = &cli.StringFlag{
Name: "blspassword",
Usage: "File path for the BLS password, which contains the password to unlock BLS wallet for managing votes in fast_finality feature",
Category: flags.FastFinalityCategory,
Usage: "Password file path for the BLS wallet, which contains the password to unlock BLS wallet for managing votes in fast_finality feature",
Category: flags.AccountCategory,
}

BLSWalletDirFlag = &flags.DirectoryFlag{
Name: "blswallet",
Usage: "Path for the blsWallet dir in fast finality feature (default = inside the datadir)",
Category: flags.FastFinalityCategory,
Category: flags.AccountCategory,
}

VoteJournalDirFlag = &flags.DirectoryFlag{
@@ -1462,7 +1462,10 @@ func setEtherbase(ctx *cli.Context, cfg *ethconfig.Config) {

// MakePasswordList reads password lines from the file specified by the global --password flag.
func MakePasswordList(ctx *cli.Context) []string {
path := ctx.Path(PasswordFileFlag.Name)
return MakePasswordListFromPath(ctx.Path(PasswordFileFlag.Name))
}

func MakePasswordListFromPath(path string) []string {
if path == "" {
return nil
}
@@ -1919,7 +1922,7 @@ func SetEthConfig(ctx *cli.Context, stack *node.Node, cfg *ethconfig.Config) {
cfg.EnableTrustProtocol = ctx.IsSet(EnableTrustProtocolFlag.Name)
}
if ctx.IsSet(PipeCommitFlag.Name) {
cfg.PipeCommit = ctx.Bool(PipeCommitFlag.Name)
log.Warn("The --pipecommit flag is deprecated and could be removed in the future!")
}
if ctx.IsSet(RangeLimitFlag.Name) {
cfg.RangeLimit = ctx.Bool(RangeLimitFlag.Name)
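The refactor above splits MakePasswordList so that any caller holding a file path, not only the global --password flag, can build a password list via MakePasswordListFromPath. The hunk shows only the first lines of the shared body; the sketch below assumes the remainder matches upstream geth's behaviour of splitting the file into lines and trimming carriage returns, and the local function name is the author's rendering, not the PR's code.

// Sketch of MakePasswordListFromPath-style behaviour (assumed, stdlib only).
package main

import (
	"fmt"
	"os"
	"strings"
)

func makePasswordListFromPath(path string) []string {
	if path == "" {
		return nil
	}
	text, err := os.ReadFile(path)
	if err != nil {
		fmt.Fprintln(os.Stderr, "Failed to read password file:", err)
		os.Exit(1)
	}
	lines := strings.Split(string(text), "\n")
	for i := range lines {
		lines[i] = strings.TrimRight(lines[i], "\r") // tolerate Windows line endings
	}
	return lines
}

func main() {
	fmt.Println(makePasswordListFromPath("bls-password.txt"))
}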

@@ -4481,6 +4481,54 @@ const stakeABI = `
],
"stateMutability": "view"
},
{
"type": "function",
"name": "getValidatorRewardRecord",
"inputs": [
{
"name": "operatorAddress",
"type": "address",
"internalType": "address"
},
{
"name": "dayIndex",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "getValidatorTotalPooledBNBRecord",
"inputs": [
{
"name": "operatorAddress",
"type": "address",
"internalType": "address"
},
{
"name": "dayIndex",
"type": "uint256",
"internalType": "uint256"
}
],
"outputs": [
{
"name": "",
"type": "uint256",
"internalType": "uint256"
}
],
"stateMutability": "view"
},
{
"type": "function",
"name": "initialize",

@@ -204,14 +204,14 @@ func getTopValidatorsByVotingPower(validatorItems []ValidatorItem, maxElectedVal
hp := &validatorHeap
heap.Init(hp)

length := int(maxElectedValidators.Int64())
if length > len(validatorHeap) {
length = len(validatorHeap)
topN := int(maxElectedValidators.Int64())
if topN > len(validatorHeap) {
topN = len(validatorHeap)
}
eValidators := make([]common.Address, length)
eVotingPowers := make([]uint64, length)
eVoteAddrs := make([][]byte, length)
for i := 0; i < length; i++ {
eValidators := make([]common.Address, topN)
eVotingPowers := make([]uint64, topN)
eVoteAddrs := make([][]byte, topN)
for i := 0; i < topN; i++ {
item := heap.Pop(hp).(ValidatorItem)
eValidators[i] = item.address
// as the decimal in BNB Beacon Chain is 1e8 and in BNB Smart Chain is 1e18, we need to divide it by 1e10
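The comment above notes that BNB Beacon Chain amounts carry 8 decimals while BNB Smart Chain amounts carry 18, so pooled stake is divided by 1e10 before being used as a voting power. A small worked example follows; it is not taken from the diff and the variable names are invented.

// Illustration of the 18-decimal to 8-decimal conversion (divide by 1e10).
package main

import (
	"fmt"
	"math/big"
)

func main() {
	// 2,000,000 BNB of pooled stake expressed in wei (18 decimals).
	pooledBNBWei, _ := new(big.Int).SetString("2000000000000000000000000", 10)
	// Dividing by 1e10 yields the 8-decimal figure used as voting power.
	votingPower := new(big.Int).Div(pooledBNBWei, big.NewInt(1e10)).Uint64()
	fmt.Println(votingPower) // 200000000000000, i.e. 2,000,000 * 1e8
}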

@@ -19,6 +19,7 @@ func preHertzConfig() *params.ChainConfig {
config.LondonBlock = nil
config.BerlinBlock = nil
config.HertzBlock = nil
config.HertzfixBlock = nil
return &config
}

@@ -59,7 +59,9 @@ func (p *statePrefetcher) Prefetch(block *types.Block, statedb *state.StateDB, c
for i := 0; i < prefetchThread; i++ {
go func() {
newStatedb := statedb.CopyDoPrefetch()
newStatedb.EnableWriteOnSharedStorage()
if !p.config.IsHertzfix(header.Number) {
newStatedb.EnableWriteOnSharedStorage()
}
gaspool := new(GasPool).AddGas(block.GasLimit())
blockContext := NewEVMBlockContext(header, p.bc, nil)
evm := vm.NewEVM(blockContext, vm.TxContext{}, statedb, p.config, *cfg)
@@ -106,7 +108,9 @@ func (p *statePrefetcher) PrefetchMining(txs TransactionsByPriceAndNonce, header
go func(startCh <-chan *types.Transaction, stopCh <-chan struct{}) {
idx := 0
newStatedb := statedb.CopyDoPrefetch()
newStatedb.EnableWriteOnSharedStorage()
if !p.config.IsHertzfix(header.Number) {
newStatedb.EnableWriteOnSharedStorage()
}
gaspool := new(GasPool).AddGas(gasLimit)
blockContext := NewEVMBlockContext(header, p.bc, nil)
evm := vm.NewEVM(blockContext, vm.TxContext{}, statedb, p.config, cfg)
File diff suppressed because one or more lines are too long
@@ -1027,6 +1027,9 @@ func (pool *LegacyPool) addRemoteSync(tx *types.Transaction) error {

// addTxs attempts to queue a batch of transactions if they are valid.
func (pool *LegacyPool) addTxs(txs []*types.Transaction, local, sync bool) []error {
// Do not treat as local if local transactions have been disabled
local = local && !pool.config.NoLocals

// Filter out known ones without obtaining the pool lock or recovering signatures
var (
errs = make([]error, len(txs))

@@ -1453,6 +1453,9 @@ func (c *verifyDoubleSignEvidence) Run(input []byte) ([]byte, error) {
// check sig
msgHash1 := types.SealHash(header1, evidence.ChainId)
msgHash2 := types.SealHash(header2, evidence.ChainId)
if bytes.Equal(msgHash1.Bytes(), msgHash2.Bytes()) {
return nil, ErrExecutionReverted
}
pubkey1, err := secp256k1.RecoverPubkey(msgHash1.Bytes(), sig1)
if err != nil {
return nil, ErrExecutionReverted

@@ -18,6 +18,7 @@ RUN apk add --no-cache ca-certificates npm nodejs bash alpine-sdk expect

RUN git clone https://github.com/bnb-chain/bsc-genesis-contract.git /root/genesis \
&& cd /root/genesis && npm install
#RUN curl -L https://foundry.paradigm.xyz | bash

COPY --from=bsc /usr/local/bin/geth /usr/local/bin/geth

@@ -170,9 +170,9 @@ type TxFetcher struct {
alternates map[common.Hash]map[string]struct{} // In-flight transaction alternate origins if retrieval fails

// Callbacks
hasTx func(common.Hash) bool // Retrieves a tx from the local txpool
addTxs func([]*txpool.Transaction) []error // Insert a batch of transactions into local txpool
fetchTxs func(string, []common.Hash) error // Retrieves a set of txs from a remote peer
hasTx func(common.Hash) bool // Retrieves a tx from the local txpool
addTxs func(string, []*txpool.Transaction) []error // Insert a batch of transactions into local txpool
fetchTxs func(string, []common.Hash) error // Retrieves a set of txs from a remote peer

step chan struct{} // Notification channel when the fetcher loop iterates
clock mclock.Clock // Time wrapper to simulate in tests
@@ -181,14 +181,14 @@ type TxFetcher struct {

// NewTxFetcher creates a transaction fetcher to retrieve transaction
// based on hash announcements.
func NewTxFetcher(hasTx func(common.Hash) bool, addTxs func([]*txpool.Transaction) []error, fetchTxs func(string, []common.Hash) error) *TxFetcher {
func NewTxFetcher(hasTx func(common.Hash) bool, addTxs func(string, []*txpool.Transaction) []error, fetchTxs func(string, []common.Hash) error) *TxFetcher {
return NewTxFetcherForTests(hasTx, addTxs, fetchTxs, mclock.System{}, nil)
}

// NewTxFetcherForTests is a testing method to mock out the realtime clock with
// a simulated version and the internal randomness with a deterministic one.
func NewTxFetcherForTests(
hasTx func(common.Hash) bool, addTxs func([]*txpool.Transaction) []error, fetchTxs func(string, []common.Hash) error,
hasTx func(common.Hash) bool, addTxs func(string, []*txpool.Transaction) []error, fetchTxs func(string, []common.Hash) error,
clock mclock.Clock, rand *mrand.Rand) *TxFetcher {
return &TxFetcher{
notify: make(chan *txAnnounce),
@@ -300,7 +300,7 @@ func (f *TxFetcher) Enqueue(peer string, txs []*types.Transaction, direct bool)
for j, tx := range batch {
wrapped[j] = &txpool.Transaction{Tx: tx}
}
for j, err := range f.addTxs(wrapped) {
for j, err := range f.addTxs(peer, wrapped) {
// Track the transaction hash if the price is too low for us.
// Avoid re-request this transaction when we receive another
// announcement.
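The signature change above threads the announcing peer's ID through the addTxs callback, so the caller that inserts transactions into the pool can attribute rejections to a peer (the eth handler below uses this for blacklist accounting). The following sketch only shows the shape of a peer-aware callback against this fork's fetcher package after the change; the wrapper function and its logging are illustrative assumptions, not the PR's code.

// Sketch: constructing a TxFetcher with the new peer-aware insert callback.
package main

import (
	"log"

	"github.com/ethereum/go-ethereum/common"
	"github.com/ethereum/go-ethereum/core/txpool"
	"github.com/ethereum/go-ethereum/eth/fetcher"
)

// newPeerAwareFetcher wraps a plain add function so rejections can be
// attributed to the peer that delivered the transactions.
func newPeerAwareFetcher(hasTx func(common.Hash) bool, add func([]*txpool.Transaction) []error) *fetcher.TxFetcher {
	addTxs := func(peer string, txs []*txpool.Transaction) []error {
		errs := add(txs)
		for _, err := range errs {
			if err != nil {
				log.Printf("tx rejected, peer=%s err=%v", peer, err) // illustrative logging only
			}
		}
		return errs
	}
	fetchTxs := func(peer string, hashes []common.Hash) error { return nil } // placeholder
	return fetcher.NewTxFetcher(hasTx, addTxs, fetchTxs)
}

func main() {
	f := newPeerAwareFetcher(
		func(common.Hash) bool { return false },
		func(txs []*txpool.Transaction) []error { return make([]error, len(txs)) },
	)
	_ = f
}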
@@ -378,7 +378,7 @@ func TestTransactionFetcherCleanup(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -417,7 +417,7 @@ func TestTransactionFetcherCleanupEmpty(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -455,7 +455,7 @@ func TestTransactionFetcherMissingRescheduling(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -501,7 +501,7 @@ func TestTransactionFetcherMissingCleanup(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -539,7 +539,7 @@ func TestTransactionFetcherBroadcasts(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -644,7 +644,7 @@ func TestTransactionFetcherTimeoutRescheduling(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -865,7 +865,7 @@ func TestTransactionFetcherUnderpricedDedup(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
errs := make([]error, len(txs))
for i := 0; i < len(errs); i++ {
if i%2 == 0 {
@@ -938,7 +938,7 @@ func TestTransactionFetcherUnderpricedDoSProtection(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
errs := make([]error, len(txs))
for i := 0; i < len(errs); i++ {
errs[i] = txpool.ErrUnderpriced
@@ -964,7 +964,7 @@ func TestTransactionFetcherOutOfBoundDeliveries(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -1017,7 +1017,7 @@ func TestTransactionFetcherDrop(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -1083,7 +1083,7 @@ func TestTransactionFetcherDropRescheduling(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -1128,7 +1128,7 @@ func TestTransactionFetcherFuzzCrash01(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -1155,7 +1155,7 @@ func TestTransactionFetcherFuzzCrash02(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -1184,7 +1184,7 @@ func TestTransactionFetcherFuzzCrash03(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },
@@ -1217,7 +1217,7 @@ func TestTransactionFetcherFuzzCrash04(t *testing.T) {
init: func() *TxFetcher {
return NewTxFetcher(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error {

@@ -146,6 +146,7 @@ func newTestBackend(t *testing.T, londonBlock *big.Int, pending bool) *testBacke
config.LubanBlock = nil
config.PlatoBlock = nil
config.HertzBlock = nil
config.HertzfixBlock = nil
config.TerminalTotalDifficulty = common.Big0
engine := ethash.NewFaker()

@@ -33,6 +33,7 @@ import (
"github.com/ethereum/go-ethereum/core/monitor"
"github.com/ethereum/go-ethereum/core/rawdb"
"github.com/ethereum/go-ethereum/core/txpool"
"github.com/ethereum/go-ethereum/core/txpool/legacypool"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/eth/downloader"
"github.com/ethereum/go-ethereum/eth/fetcher"
@@ -65,7 +66,8 @@ const (
)

var (
syncChallengeTimeout = 15 * time.Second // Time allowance for a node to reply to the sync progress challenge
syncChallengeTimeout = 15 * time.Second // Time allowance for a node to reply to the sync progress challenge
accountBlacklistPeerCounter = metrics.NewRegisteredCounter("eth/count/blacklist", nil)
)

// txPool defines the methods needed from a transaction pool implementation to
@@ -342,8 +344,21 @@ func newHandler(config *handlerConfig) (*handler, error) {
}
return p.RequestTxs(hashes)
}
addTxs := func(txs []*txpool.Transaction) []error {
return h.txpool.Add(txs, false, false)
addTxs := func(peer string, txs []*txpool.Transaction) []error {
errors := h.txpool.Add(txs, false, false)
for _, err := range errors {
if err == legacypool.ErrInBlackList {
accountBlacklistPeerCounter.Inc(1)
p := h.peers.peer(peer)
if p != nil {
remoteAddr := p.remoteAddr()
if remoteAddr != nil {
log.Warn("blacklist account detected from other peer", "remoteAddr", remoteAddr, "ID", p.ID())
}
}
}
}
return errors
}
h.txFetcher = fetcher.NewTxFetcher(h.txpool.Has, addTxs, fetchTx)
h.chainSync = newChainSyncer(h)

@@ -17,6 +17,8 @@
package eth

import (
"net"

"github.com/ethereum/go-ethereum/eth/protocols/bsc"
"github.com/ethereum/go-ethereum/eth/protocols/trust"

@@ -45,6 +47,13 @@ func (p *ethPeer) info() *ethPeerInfo {
}
}

func (p *ethPeer) remoteAddr() net.Addr {
if p.Peer != nil && p.Peer.Peer != nil {
return p.Peer.Peer.RemoteAddr()
}
return nil
}

// snapPeerInfo represents a short summary of the `snap` sub-protocol metadata known
// about a connected peer.
type snapPeerInfo struct {

@@ -1159,12 +1159,14 @@ func (w *worker) commit(env *environment, interval func(), update bool, start ti
if interval != nil {
interval()
}
/*

err := env.state.WaitPipeVerification()
if err != nil {
return err
}
env.state.CorrectAccountsRoot(w.chain.CurrentBlock().Root)
err := env.state.WaitPipeVerification()
if err != nil {
return err
}
env.state.CorrectAccountsRoot(w.chain.CurrentBlock().Root)
*/

// Withdrawals are set to nil here, because this is only called in PoW.
finalizeStart := time.Now()

@@ -210,6 +210,9 @@ func (p *Peer) RemoteAddr() net.Addr {
}
log.Warn("RemoteAddr", "invalid testRemoteAddr", p.testRemoteAddr)
}
if p.rw == nil {
return nil
}
return p.rw.fd.RemoteAddr()
}

@@ -164,6 +164,10 @@ var (
BerlinBlock: big.NewInt(31302048),
LondonBlock: big.NewInt(31302048),
HertzBlock: big.NewInt(31302048),
HertzfixBlock: big.NewInt(34140700),
// UnixTime: 1705996800 is January 23, 2024 8:00:00 AM UTC
ShanghaiTime: newUint64(1705996800),
KeplerTime: newUint64(1705996800),

// TODO
FeynmanTime: nil,
@@ -199,6 +203,10 @@ var (
BerlinBlock: big.NewInt(31103030),
LondonBlock: big.NewInt(31103030),
HertzBlock: big.NewInt(31103030),
HertzfixBlock: big.NewInt(35682300),
// UnixTime: 1702972800 is December 19, 2023 8:00:00 AM UTC
ShanghaiTime: newUint64(1702972800),
KeplerTime: newUint64(1702972800),

// TODO
FeynmanTime: nil,
@@ -233,6 +241,7 @@ var (
PlatoBlock: nil,
BerlinBlock: nil,
HertzBlock: nil,
HertzfixBlock: nil,

// TODO
FeynmanTime: nil,
@@ -268,7 +277,7 @@ var (
BerlinBlock: big.NewInt(0),
LondonBlock: big.NewInt(0),
HertzBlock: big.NewInt(0),

HertzfixBlock: big.NewInt(0),
Parlia: &ParliaConfig{
Period: 3,
Epoch: 200,
@@ -484,7 +493,7 @@ type ChainConfig struct {
LubanBlock *big.Int `json:"lubanBlock,omitempty" toml:",omitempty"` // lubanBlock switch block (nil = no fork, 0 = already activated)
PlatoBlock *big.Int `json:"platoBlock,omitempty" toml:",omitempty"` // platoBlock switch block (nil = no fork, 0 = already activated)
HertzBlock *big.Int `json:"hertzBlock,omitempty" toml:",omitempty"` // hertzBlock switch block (nil = no fork, 0 = already activated)

HertzfixBlock *big.Int `json:"hertzfixBlock,omitempty" toml:",omitempty"` // hertzfixBlock switch block (nil = no fork, 0 = already activated)
// Various consensus engines
Ethash *EthashConfig `json:"ethash,omitempty" toml:",omitempty"`
Clique *CliqueConfig `json:"clique,omitempty" toml:",omitempty"`
@@ -556,7 +565,7 @@ func (c *ChainConfig) String() string {
FeynmanTime = big.NewInt(0).SetUint64(*c.FeynmanTime)
}

return fmt.Sprintf("{ChainID: %v Homestead: %v DAO: %v DAOSupport: %v EIP150: %v EIP155: %v EIP158: %v Byzantium: %v Constantinople: %v Petersburg: %v Istanbul: %v, Muir Glacier: %v, Ramanujan: %v, Niels: %v, MirrorSync: %v, Bruno: %v, Berlin: %v, YOLO v3: %v, CatalystBlock: %v, London: %v, ArrowGlacier: %v, MergeFork:%v, Euler: %v, Gibbs: %v, Nano: %v, Moran: %v, Planck: %v,Luban: %v, Plato: %v, Hertz: %v, ShanghaiTime: %v, KeplerTime: %v, FeynmanTime: %v, Engine: %v}",
return fmt.Sprintf("{ChainID: %v Homestead: %v DAO: %v DAOSupport: %v EIP150: %v EIP155: %v EIP158: %v Byzantium: %v Constantinople: %v Petersburg: %v Istanbul: %v, Muir Glacier: %v, Ramanujan: %v, Niels: %v, MirrorSync: %v, Bruno: %v, Berlin: %v, YOLO v3: %v, CatalystBlock: %v, London: %v, ArrowGlacier: %v, MergeFork:%v, Euler: %v, Gibbs: %v, Nano: %v, Moran: %v, Planck: %v,Luban: %v, Plato: %v, Hertz: %v, Hertzfix: %v, ShanghaiTime: %v, KeplerTime: %v, FeynmanTime: %v, Engine: %v}",
c.ChainID,
c.HomesteadBlock,
c.DAOForkBlock,
@@ -587,6 +596,7 @@ func (c *ChainConfig) String() string {
c.LubanBlock,
c.PlatoBlock,
c.HertzBlock,
c.HertzfixBlock,
ShanghaiTime,
KeplerTime,
FeynmanTime,
@@ -709,6 +719,14 @@ func (c *ChainConfig) IsOnHertz(num *big.Int) bool {
return configBlockEqual(c.HertzBlock, num)
}

func (c *ChainConfig) IsHertzfix(num *big.Int) bool {
return isBlockForked(c.HertzfixBlock, num)
}

func (c *ChainConfig) IsOnHertzfix(num *big.Int) bool {
return configBlockEqual(c.HertzfixBlock, num)
}

// IsMuirGlacier returns whether num is either equal to the Muir Glacier (EIP-2384) fork block or greater.
func (c *ChainConfig) IsMuirGlacier(num *big.Int) bool {
return isBlockForked(c.MuirGlacierBlock, num)
@@ -884,6 +902,7 @@ func (c *ChainConfig) CheckConfigForkOrder() error {
{name: "lubanBlock", block: c.LubanBlock},
{name: "platoBlock", block: c.PlatoBlock},
{name: "hertzBlock", block: c.HertzBlock},
{name: "hertzfixBlock", block: c.HertzfixBlock},
{name: "shanghaiTime", timestamp: c.ShanghaiTime},
{name: "keplerTime", timestamp: c.KeplerTime},
{name: "feynmanTime", timestamp: c.FeynmanTime},
@@ -1017,6 +1036,9 @@ func (c *ChainConfig) checkCompatible(newcfg *ChainConfig, headNumber *big.Int,
if isForkBlockIncompatible(c.HertzBlock, newcfg.HertzBlock, headNumber) {
return newBlockCompatError("hertz fork block", c.HertzBlock, newcfg.HertzBlock)
}
if isForkBlockIncompatible(c.HertzfixBlock, newcfg.HertzfixBlock, headNumber) {
return newBlockCompatError("hertzfix fork block", c.HertzfixBlock, newcfg.HertzfixBlock)
}
if isForkTimestampIncompatible(c.ShanghaiTime, newcfg.ShanghaiTime, headTimestamp) {
return newTimestampCompatError("Shanghai fork timestamp", c.ShanghaiTime, newcfg.ShanghaiTime)
}
@@ -1186,6 +1208,7 @@ type Rules struct {
IsLuban bool
IsPlato bool
IsHertz bool
IsHertzfix bool
IsShanghai, IsKepler, IsFeynman, IsCancun, IsPrague bool
IsVerkle bool
}
@@ -1215,6 +1238,7 @@ func (c *ChainConfig) Rules(num *big.Int, isMerge bool, timestamp uint64) Rules
IsLuban: c.IsLuban(num),
IsPlato: c.IsPlato(num),
IsHertz: c.IsHertz(num),
IsHertzfix: c.IsHertzfix(num),
IsShanghai: c.IsShanghai(num, timestamp),
IsKepler: c.IsKepler(num, timestamp),
IsFeynman: c.IsFeynman(num, timestamp),
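The new Hertzfix switches above follow the existing fork pattern: IsHertzfix(num) reports whether a block number is at or past HertzfixBlock, which is how callers such as the state prefetcher earlier in this diff gate behaviour on the fork. A brief sketch of that usage follows; it is not from the PR, and the chain config literal is made up for illustration (only the 714 chain ID is taken from the .env change below).

// Sketch: consuming the new HertzfixBlock switch via IsHertzfix.
package main

import (
	"fmt"
	"math/big"

	"github.com/ethereum/go-ethereum/params"
)

func main() {
	cfg := &params.ChainConfig{
		ChainID:       big.NewInt(714),
		HertzfixBlock: big.NewInt(100), // hypothetical activation block
	}
	fmt.Println(cfg.IsHertzfix(big.NewInt(99)))  // false: before the fork block
	fmt.Println(cfg.IsHertzfix(big.NewInt(100))) // true: active from the fork block itself
}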

@@ -23,7 +23,7 @@ import (
const (
VersionMajor = 1 // Major version component of the current release
VersionMinor = 3 // Minor version component of the current release
VersionPatch = 1 // Patch version component of the current release
VersionPatch = 5 // Patch version component of the current release
VersionMeta = "" // Version metadata to append to the version string
)

@@ -26,10 +26,15 @@ import (
"time"

"github.com/ethereum/go-ethereum/common/gopool"
"github.com/ethereum/go-ethereum/metrics"

"github.com/ethereum/go-ethereum/log"
)

var (
accountBlacklistRpcCounter = metrics.NewRegisteredCounter("rpc/count/blacklist", nil)
)

// handler handles JSON-RPC messages. There is one handler per connection. Note that
// handler is not safe for concurrent use. Message handling never blocks indefinitely
// because RPCs are processed on background goroutines launched by handler.
@@ -476,6 +481,11 @@ func (h *handler) handleCallMsg(ctx *callProc, reqCtx context.Context, msg *json
xForward := reqCtx.Value("X-Forwarded-For")
h.log.Warn("Served "+msg.Method, "reqid", idForLog{msg.ID}, "t", time.Since(start), "err", resp.Error.Message, "X-Forwarded-For", xForward)

monitoredError := "sender or to in black list" // using legacypool.ErrInBlackList.Error() will cause `import cycle`
if strings.Contains(resp.Error.Message, monitoredError) {
accountBlacklistRpcCounter.Inc(1)
log.Warn("blacklist account detected from direct rpc", "remoteAddr", h.conn.remoteAddr())
}
ctx = append(ctx, "err", resp.Error.Message)
if resp.Error.Data != nil {
ctx = append(ctx, "errdata", resp.Error.Data)

@@ -80,7 +80,7 @@ func Fuzz(input []byte) int {

f := fetcher.NewTxFetcherForTests(
func(common.Hash) bool { return false },
func(txs []*txpool.Transaction) []error {
func(peer string, txs []*txpool.Transaction) []error {
return make([]error, len(txs))
},
func(string, []common.Hash) error { return nil },

@@ -1,4 +1,4 @@
BSC_CHAIN_ID=99
BSC_CHAIN_ID=714
CLUSTER_CIDR=99.1.0.0/16
BOOTSTRAP_PUB_KEY=177ae5db445a2f70db781b019aedd928f5b1528a7a43448840b022408f9a21509adcce0b37c87d59da68d47a16879cc1e95a62bbac9723f7b22f4365b2afabbe
BOOTSTRAP_TCP_PORT=30311

@@ -7,26 +7,29 @@ function prepare() {
echo "geth do not exist!"
exit 1
fi
rm -rf ${workspace}/storage/*
cd ${workspace}/genesis
rm -rf validators.conf
cp ${workspace}/storage/genesis.json ${workspace}/genesis/genesis.json
}

function init_validator() {
node_id=$1
rm -rf ${workspace}/storage/${node_id}
mkdir -p ${workspace}/storage/${node_id}
geth --datadir ${workspace}/storage/${node_id} account new --password /dev/null > ${workspace}/storage/${node_id}Info
validatorAddr=`cat ${workspace}/storage/${node_id}Info|grep 'Public address of the key'|awk '{print $6}'`
echo "${validatorAddr},${validatorAddr},${validatorAddr},0x0000000010000000" >> ${workspace}/genesis/validators.conf
echo ${validatorAddr} > ${workspace}/storage/${node_id}/address
cp -r ${workspace}/storage/keystore ${workspace}/storage/${node_id}/
cp ${workspace}/storage/address ${workspace}/storage/${node_id}/address
}

function generate_genesis() {
cd ${workspace}/genesis/scripts/
node generate-validator.js
INIT_HOLDER_ADDRESSES=$(ls ${workspace}/init-holders | tr '\n' ',')
INIT_HOLDER_ADDRESSES=${INIT_HOLDER_ADDRESSES/%,/}
node generate-initHolders.js --initHolders ${INIT_HOLDER_ADDRESSES}
node generate-genesis.js --chainid ${BSC_CHAIN_ID}

cd ${workspace}/genesis
#source /root/.profile && foundryup
#forge install --no-git --no-commit foundry-rs/forge-std@v1.1.1
bash ${workspace}/genesis/scripts/generate.sh local
}

function init_genesis_data() {
@@ -43,8 +46,8 @@ function init_genesis_data() {

function prepareBLSWallet(){
node_id=$1
echo "123456" > ${workspace}/storage/${node_id}/blspassword.txt
expect ${workspace}/scripts/create_bls_key.sh ${workspace}/storage/${node_id}
echo "1234567890" > ${workspace}/storage/${node_id}/blspassword.txt
geth bls account new --datadir ${workspace}/storage/${node_id} --blspassword ${workspace}/storage/${node_id}/blspassword.txt

sed -i -e 's/DataDir/BLSPasswordFile = \"{{BLSPasswordFile}}\"\nBLSWalletDir = \"{{BLSWalletDir}}\"\nDataDir/g' ${workspace}/storage/${node_id}/config.toml
PassWordPath="/root/.ethereum/blspassword.txt"
@@ -54,14 +57,14 @@ function prepareBLSWallet(){
}

prepare

NUMS_OF_VALIDATOR=1
# Step 1, generate config for each validator
for((i=1;i<=${NUMS_OF_VALIDATOR};i++)); do
init_validator "bsc-validator${i}"
done

# Step 2, use validator configs to generate genesis file
generate_genesis
#generate_genesis

# Step 3, use genesis file to init cluster data
init_genesis_data bsc-rpc bsc-rpc

@@ -13,4 +13,4 @@ done
geth --config ${DATA_DIR}/config.toml --datadir ${DATA_DIR} --netrestrict ${CLUSTER_CIDR} \
--verbosity ${VERBOSE} --nousb \
--rpc.allow-unprotected-txs --history.transactions 15768000 \
-unlock ${unlock_sequences} --password /dev/null
-unlock ${unlock_sequences} --password /dev/null >${DATA_DIR}/bscnode-rpc.log

@@ -15,4 +15,4 @@ geth --config ${DATA_DIR}/config.toml --datadir ${DATA_DIR} --netrestrict ${CLUS
--bootnodes enode://${BOOTSTRAP_PUB_KEY}@${BOOTSTRAP_IP}:${BOOTSTRAP_TCP_PORT} \
--mine -unlock ${VALIDATOR_ADDR} --miner.etherbase ${VALIDATOR_ADDR} --password /dev/null \
--light.serve 50 \
--rpc.allow-unprotected-txs --history.transactions 15768000
--rpc.allow-unprotected-txs --history.transactions 15768000 >${DATA_DIR}/bscnode-validator.log
@ -1,17 +0,0 @@
|
||||
#!/usr/bin/expect
|
||||
# 6 num wanted
|
||||
set wallet_password 123456
|
||||
# 10 characters at least wanted
|
||||
set account_password 1234567890
|
||||
|
||||
set timeout 5
|
||||
spawn geth bls account new --datadir [lindex $argv 0]
|
||||
expect "*assword:*"
|
||||
send "$wallet_password\r"
|
||||
expect "*assword:*"
|
||||
send "$wallet_password\r"
|
||||
expect "*assword:*"
|
||||
send "$account_password\r"
|
||||
expect "*assword:*"
|
||||
send "$account_password\r"
|
||||
expect EOF
|
tests/truffle/storage/address (new file, 1 line)

@@ -0,0 +1 @@
0x03735c2ED70a56CD221e0024eB4bF90243C9d6E9
tests/truffle/storage/genesis.json (new file, 130 lines)

File diff suppressed because one or more lines are too long
tests/truffle/storage/keystore/UTC--2023-12-07T03-21-00.035782000Z--03735c2ed70a56cd221e0024eb4bf90243c9d6e9 (new file, 1 line)

@@ -0,0 +1 @@
{"address":"03735c2ed70a56cd221e0024eb4bf90243c9d6e9","crypto":{"cipher":"aes-128-ctr","ciphertext":"b66550ef67345005ead82c9e2835d311fb0e8787191af3696119977064f6120e","cipherparams":{"iv":"5b8f4ed6026ab6c733857e3bed90a869"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"salt":"68fc3d0829122badcf8c6efd40c6f2e086a5167db489bed099e3d2b97c1be496"},"mac":"71fd9f23862f7252ca88a6ea2c29fd0ab3527c5b9f5d1f827856263247c92f46"},"id":"7f75b57a-a4ba-4877-a42b-652f966d8aa6","version":3}