web3-proxy/web3_proxy/src/prometheus.rs

use axum::headers::HeaderName;
use axum::http::HeaderValue;
use axum::response::{IntoResponse, Response};
use axum::{routing::get, Extension, Router};
use std::net::SocketAddr;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use tokio::sync::broadcast;
use tracing::info;

use crate::app::Web3ProxyApp;
use crate::errors::Web3ProxyResult;

/// Run a prometheus metrics server on the app's configured port.
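///
/// A minimal usage sketch, assuming the caller already has an `Arc<Web3ProxyApp>`
/// named `app` and wires up its own broadcast shutdown channel (the names below
/// are illustrative, not part of this module):
///
/// ```rust,ignore
/// let (shutdown_sender, shutdown_receiver) = tokio::sync::broadcast::channel(1);
///
/// // run the metrics server in the background
/// let metrics_handle = tokio::spawn(crate::prometheus::serve(app.clone(), shutdown_receiver));
///
/// // ... later, tell it to stop and wait for it to exit cleanly
/// let _ = shutdown_sender.send(());
/// metrics_handle.await??;
/// ```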
pub async fn serve(
    app: Arc<Web3ProxyApp>,
    mut shutdown_receiver: broadcast::Receiver<()>,
) -> Web3ProxyResult<()> {
    // routes should be ordered most to least common
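    // the Extension layer hands a clone of the shared app handle to the `root`
    // handler on every request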
    let router = Router::new()
        .route("/", get(root))
        .layer(Extension(app.clone()));

    // note: the port here might be 0
    let port = app.prometheus_port.load(Ordering::Relaxed);

    // TODO: config for the host?
    let addr = SocketAddr::from(([0, 0, 0, 0], port));

    let service = router.into_make_service();

    // `axum::Server` is a re-export of `hyper::Server`
    let server = axum::Server::bind(&addr).serve(service);
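
    // the config may have asked for port 0, in which case the OS just picked a
    // free port; read back the port that was actually bound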
    let port = server.local_addr().port();
    info!("prometheus listening on port {}", port);

    app.prometheus_port.store(port, Ordering::Relaxed);
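
    // serve until anything is received on the shutdown channel (or every sender
    // has been dropped), then finish in-flight requests before returning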
    server
        .with_graceful_shutdown(async move {
            let _ = shutdown_receiver.recv().await;
        })
        .await
        .map_err(Into::into)
}

async fn root(Extension(app): Extension<Arc<Web3ProxyApp>>) -> Response {
    let serialized = app.prometheus_metrics().await;

    let mut r = serialized.into_response();

    // TODO: is there an easier way to do this?
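    // prometheus understands the OpenMetrics exposition format; advertise it
    // explicitly instead of relying on the plain-text default content-type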
    r.headers_mut().insert(
        HeaderName::from_static("content-type"),
        HeaderValue::from_static("application/openmetrics-text; version=1.0.0; charset=utf-8"),
    );

    r
}