[app]
chain_id = 1

# no database
# no influxdb
# no redis
# no sentry
# no public limits means anon gets full access
# no thundering herd protection
min_sum_soft_limit = 1
min_synced_rpcs = 1

# 1GB of cache
response_cache_max_bytes = 1_000_000_000

[balanced_rpcs]

[balanced_rpcs.llama_public_both]
# TODO: what should we do if all rpcs are disabled? warn and wait for a config change?
disabled = false
display_name = "LlamaNodes Both"
ws_url = "wss://eth.llamarpc.com/"
http_url = "https://eth.llamarpc.com/"
soft_limit = 1_000
tier = 0

[balanced_rpcs.llama_public_https]
disabled = false
display_name = "LlamaNodes HTTPS"
http_url = "https://eth.llamarpc.com/"
soft_limit = 1_000
tier = 0

[balanced_rpcs.llama_public_wss]
# TODO: what should we do if all rpcs are disabled? warn and wait for a config change?
disabled = false
display_name = "LlamaNodes WSS"
ws_url = "wss://eth.llamarpc.com/"
soft_limit = 1_000
tier = 0