web3-proxy/web3_proxy/src/frontend/status.rs

use crate::app::Web3ProxyApp;
use axum::{http::StatusCode, response::IntoResponse, Extension, Json};
use moka::future::ConcurrentCacheExt;
use serde_json::json;
use std::sync::Arc;

/// Health check page for load balancers to use
pub async fn health(Extension(app): Extension<Arc<Web3ProxyApp>>) -> impl IntoResponse {
    // TODO: also check that the head block is not too old
    if app.balanced_rpcs.synced() {
        (StatusCode::OK, "OK")
    } else {
        (StatusCode::SERVICE_UNAVAILABLE, ":(")
    }
}

/// Prometheus metrics
/// TODO: when done debugging, remove this and only allow access on a different port
pub async fn prometheus(Extension(app): Extension<Arc<Web3ProxyApp>>) -> impl IntoResponse {
    app.prometheus_metrics()
}

/// Very basic status page
/// TODO: replace this with proper stats and monitoring
pub async fn status(Extension(app): Extension<Arc<Web3ProxyApp>>) -> impl IntoResponse {
    // moka caches update their statistics lazily; sync() runs any pending
    // maintenance so the counts reported below are current
    app.pending_transactions.sync();
    app.user_cache.sync();

    // TODO: what else should we include? uptime, cache hit rates, cpu load
    let body = json!({
        "pending_transactions_count": app.pending_transactions.entry_count(),
        "pending_transactions_size": app.pending_transactions.weighted_size(),
        "user_cache_count": app.user_cache.entry_count(),
        "user_cache_size": app.user_cache.weighted_size(),
        "balanced_rpcs": app.balanced_rpcs,
        "private_rpcs": app.private_rpcs,
    });

    Json(body)
}
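
// A minimal sketch (not taken from this repository) of how these three
// handlers could be mounted in an axum Router. The route paths, the
// `frontend_router` name, and wiring the shared Arc<Web3ProxyApp> through an
// Extension layer are assumptions for illustration, not the project's actual
// frontend setup.
//
//     use axum::{routing::get, Extension, Router};
//
//     fn frontend_router(app: Arc<Web3ProxyApp>) -> Router {
//         Router::new()
//             .route("/health", get(health))
//             .route("/prometheus", get(prometheus))
//             .route("/status", get(status))
//             .layer(Extension(app))
//     }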