[app]
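# chain_id 1 is Ethereum mainnet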
chain_id = 1

# no database
# no influxdb
# no redis
# no sentry
# no public limits means anon gets full access

# no thundering herd protection
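# note: these two settings act as readiness thresholds; serving is expected to start
# once at least `min_synced_rpcs` backends are synced and their combined `soft_limit`
# reaches `min_sum_soft_limit`. with only one public backend, both stay at the minimum of 1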
min_sum_soft_limit = 1
min_synced_rpcs = 1

# 1GB of cache
response_cache_max_bytes = 1_000_000_000

[balanced_rpcs]
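# servers in balanced_rpcs share the request load, weighted by their soft_limit.
# the three entries below all point at the same public LlamaNodes endpoint and only
# differ in transport: websocket + http, http-only, and websocket-only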
[balanced_rpcs.llama_public_both]
# TODO: what should we do if all rpcs are disabled? warn and wait for a config change?
disabled = false
display_name = "LlamaNodes Both"
ws_url = "wss://eth.llamarpc.com/"
http_url = "https://eth.llamarpc.com/"
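# soft_limit is a rough estimate of how many requests per second this backend can handle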
soft_limit = 1_000

[balanced_rpcs.llama_public_https]
disabled = false
display_name = "LlamaNodes HTTPS"
http_url = "https://eth.llamarpc.com/"
soft_limit = 1_000

[balanced_rpcs.llama_public_wss]
# TODO: what should we do if all rpcs are disabled? warn and wait for a config change?
disabled = false
display_name = "LlamaNodes WSS"
ws_url = "wss://eth.llamarpc.com/"
soft_limit = 1_000