delete unused code after the rate limit refactor

Bryan Stitt 2022-08-21 09:44:53 +00:00
parent b16aa8d813
commit bda666eb6c
6 changed files with 12 additions and 50 deletions

View File

@@ -1,18 +0,0 @@
use axum::{extract::Query, routing::get, Router};
use serde::{de, Deserialize, Deserializer};
use std::{fmt, str::FromStr};
/// Serde deserialization decorator to map empty Strings to None,
/// https://github.com/tokio-rs/axum/blob/1fe45583626a4c9c890cc01131d38c57f8728686/examples/query-params-with-empty-strings/src/main.rs
pub fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
where
D: Deserializer<'de>,
T: FromStr,
T::Err: fmt::Display,
{
let opt = Option::<String>::deserialize(de)?;
match opt.as_deref() {
None | Some("") => Ok(None),
Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some),
}
}
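For reference, this deleted helper was meant to be wired into handlers through serde's deserialize_with attribute, as in the upstream axum example it links to. A minimal sketch of that usage, reusing the imports and the empty_string_as_none function shown above; the Params struct and its limit field are hypothetical:

#[derive(Debug, Deserialize)]
struct Params {
    // an empty `?limit=` query string becomes None instead of a parse error
    #[serde(default, deserialize_with = "empty_string_as_none")]
    limit: Option<u64>,
}

async fn handler(Query(params): Query<Params>) -> String {
    format!("{:?}", params)
}

fn router() -> Router {
    Router::new().route("/", get(handler))
}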

View File

@@ -1,9 +1,8 @@
use super::errors::{anyhow_error_into_response, FrontendResult};
use super::rate_limit::{rate_limit_by_ip, rate_limit_by_user_key, RateLimitResult};
use super::errors::FrontendResult;
use super::rate_limit::{rate_limit_by_ip, rate_limit_by_user_key};
use crate::stats::{Protocol, ProxyRequestLabels};
use crate::{app::Web3ProxyApp, jsonrpc::JsonRpcRequestEnum};
use axum::extract::Path;
use axum::response::Response;
use axum::{http::StatusCode, response::IntoResponse, Extension, Json};
use axum_client_ip::ClientIp;
use std::net::IpAddr;

View File

@@ -1,4 +1,3 @@
mod axum_ext;
mod errors;
mod http;
mod http_proxy;
@@ -7,12 +6,10 @@ mod users;
mod ws_proxy;
use crate::app::Web3ProxyApp;
use ::http::{Request, StatusCode};
use ::http::Request;
use axum::{
body::Body,
error_handling::HandleError,
handler::Handler,
response::Response,
routing::{get, post},
Extension, Router,
};
@@ -22,16 +19,6 @@ use tower_http::trace::TraceLayer;
use tower_request_id::{RequestId, RequestIdLayer};
use tracing::{error_span, info};
// handle errors by converting them into something that implements
// `IntoResponse`
async fn handle_anyhow_error(err: anyhow::Error) -> (StatusCode, String) {
// TODO: I don't like this, but let's see if it works. Needs to be moved to the errors module to replace the version that is there
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("Something went wrong: {}", err),
)
}
/// http and websocket frontend for customers
pub async fn serve(port: u16, proxy_app: Arc<Web3ProxyApp>) -> anyhow::Result<()> {
// create a tracing span for each request with a random request id and the method
@@ -67,6 +54,7 @@ pub async fn serve(port: u16, proxy_app: Arc<Web3ProxyApp>) -> anyhow::Result<()>
.route("/status", get(http::status))
.route("/login/:user_address", get(users::get_login))
.route("/login/:user_address/:message_eip", get(users::get_login))
.route("/login", post(users::post_login))
.route("/users", post(users::post_user))
// layers are ordered bottom up
// the last layer is first for requests and last for responses
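The removed handle_anyhow_error above was a stop-gap; as its TODO notes, the conversion now lives in the errors module, where errors that reach a handler boundary are turned into responses. Independent of this crate's FrontendResult specifics, the usual axum shape for that is a wrapper type implementing IntoResponse plus a blanket From so handlers can use the ? operator. A minimal self-contained sketch (AppError is a hypothetical name, not this crate's type):

use axum::{http::StatusCode, response::{IntoResponse, Response}};

// hypothetical wrapper around anyhow::Error; the real conversion lives in the errors module
struct AppError(anyhow::Error);

impl IntoResponse for AppError {
    fn into_response(self) -> Response {
        // mirror the removed helper: a single 500 with a short message
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Something went wrong: {}", self.0),
        )
            .into_response()
    }
}

// let `?` convert anything that can become an anyhow::Error
impl<E: Into<anyhow::Error>> From<E> for AppError {
    fn from(err: E) -> Self {
        Self(err.into())
    }
}

// handlers then return Result<_, AppError> instead of calling a converter by hand
async fn status_example() -> Result<&'static str, AppError> {
    let _contents = std::fs::read_to_string("missing-config.toml")?;
    Ok("ok")
}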

View File

@@ -1,7 +1,7 @@
use super::errors::{anyhow_error_into_response, FrontendErrorResponse, FrontendResult};
use super::errors::{anyhow_error_into_response, FrontendErrorResponse};
use crate::app::{UserCacheValue, Web3ProxyApp};
use axum::response::Response;
use derive_more::{From, TryInto};
use derive_more::From;
use entities::user_keys;
use redis_rate_limit::ThrottleResult;
use reqwest::StatusCode;
@@ -71,10 +71,10 @@ pub async fn rate_limit_by_user_key(
// TODO: change this to a Ulid
user_key: Uuid,
) -> RateLimitFrontendResult {
let rate_limit_result = app.rate_limit_by_key(user_key).await?.into();
let rate_limit_result = app.rate_limit_by_key(user_key).await?;
match rate_limit_result {
RateLimitResult::AllowedIp(x) => panic!("only user keys or errors are expected here"),
RateLimitResult::AllowedIp(_) => panic!("only user keys or errors are expected here"),
RateLimitResult::AllowedUser(x) => Ok(x.into()),
rate_limit_result => {
let _: RequestFrom = rate_limit_result.try_into()?;
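The match above leans on From/TryFrom conversions between the project's rate-limit outcome and its request/error types. Those types are project-specific, but the control flow is plain Rust: pull out the one variant the caller expects, panic on the variant that should be impossible on this path, and route everything else through a fallible conversion. A self-contained sketch of the same shape, with made-up names throughout:

#[derive(Debug)]
enum Outcome {
    AllowedIp(std::net::IpAddr),
    AllowedUser(u64),
    RateLimitedUser(u64),
    UnknownKey,
}

// anything that is not an "allowed" variant converts into a printable denial
#[derive(Debug)]
struct Denied(String);

impl TryFrom<Outcome> for Denied {
    type Error = anyhow::Error;

    fn try_from(outcome: Outcome) -> Result<Self, Self::Error> {
        match outcome {
            Outcome::RateLimitedUser(id) => Ok(Denied(format!("user {} is rate limited", id))),
            Outcome::UnknownKey => Ok(Denied("unknown user key".to_string())),
            other => Err(anyhow::anyhow!("not a denial: {:?}", other)),
        }
    }
}

fn check_user_key(outcome: Outcome) -> anyhow::Result<u64> {
    match outcome {
        // IP outcomes should be impossible on the user-key path, mirroring the panic above
        Outcome::AllowedIp(_) => panic!("only user keys or errors are expected here"),
        Outcome::AllowedUser(user_id) => Ok(user_id),
        other => {
            let denied: Denied = other.try_into()?;
            anyhow::bail!("request denied: {}", denied.0)
        }
    }
}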

View File

@@ -7,15 +7,12 @@
// I wonder how we handle payment
// probably have to do manual withdrawals
use super::errors::FrontendResult;
use super::rate_limit::rate_limit_by_ip;
use super::{
errors::{anyhow_error_into_response, FrontendResult},
rate_limit::RateLimitResult,
};
use crate::app::Web3ProxyApp;
use axum::{
extract::{Path, Query},
response::{IntoResponse, Response},
response::IntoResponse,
Extension, Json,
};
use axum_client_ip::ClientIp;
@@ -24,7 +21,6 @@ use entities::{user, user_keys};
use ethers::{prelude::Address, types::Bytes};
use hashbrown::HashMap;
use redis_rate_limit::redis::AsyncCommands;
use reqwest::StatusCode;
use sea_orm::ActiveModelTrait;
use serde::Deserialize;
use siwe::Message;
@@ -33,9 +29,6 @@ use std::{net::IpAddr, ops::Add};
use time::{Duration, OffsetDateTime};
use ulid::Ulid;
#[allow(unused)]
use super::axum_ext::empty_string_as_none;
// TODO: how do we customize axum's error response? I think we probably want an enum that implements IntoResponse instead
#[debug_handler]
pub async fn get_login(
@@ -233,7 +226,7 @@ pub async fn post_user(
Extension(app): Extension<Arc<Web3ProxyApp>>,
ClientIp(ip): ClientIp,
) -> FrontendResult {
todo!("finish post_login");
todo!("finish post_user");
// let user = user::ActiveModel {
// address: sea_orm::Set(payload.address.to_fixed_bytes().into()),
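The commented-out lines above hint at where post_user is headed: build a user::ActiveModel from the submitted address and insert it with sea-orm. A rough sketch of that step under the same assumptions; the user entity's columns are not shown in this diff, so the field list and helper name are guesses:

use entities::user;
use sea_orm::{ActiveModelTrait, DatabaseConnection, Set};

// hypothetical helper: persist a new user row for a verified address
async fn save_user(db: &DatabaseConnection, address: ethers::prelude::Address) -> anyhow::Result<()> {
    let new_user = user::ActiveModel {
        // the address is 20 raw bytes; store it in the entity's byte column
        address: Set(address.to_fixed_bytes().into()),
        // let defaults and the database fill the remaining columns
        ..Default::default()
    };

    new_user.insert(db).await?;

    Ok(())
}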

View File

@@ -3,7 +3,7 @@ use super::rate_limit::{rate_limit_by_ip, rate_limit_by_user_key};
use axum::{
extract::ws::{Message, WebSocket, WebSocketUpgrade},
extract::Path,
response::{IntoResponse, Redirect, Response},
response::{IntoResponse, Redirect},
Extension,
};
use axum_client_ip::ClientIp;
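These imports outline the usual axum shape for this endpoint: upgrade the connection when the client actually speaks websockets, and fall back to a redirect for plain browsers. A minimal standalone sketch of that pattern, assuming axum 0.6+ with the "ws" feature (where Redirect::to takes a &str); the echo loop is purely illustrative, while the real handler proxies JSON-RPC frames:

use axum::{
    extract::ws::{Message, WebSocket, WebSocketUpgrade},
    response::{IntoResponse, Redirect},
};

async fn websocket_handler(ws: Option<WebSocketUpgrade>) -> impl IntoResponse {
    match ws {
        // a websocket client: finish the upgrade and hand the socket off
        Some(ws) => ws.on_upgrade(handle_socket).into_response(),
        // a plain http request: send the browser somewhere human-readable
        None => Redirect::to("/status").into_response(),
    }
}

async fn handle_socket(mut socket: WebSocket) {
    // illustrative echo loop
    while let Some(Ok(Message::Text(text))) = socket.recv().await {
        if socket.send(Message::Text(text)).await.is_err() {
            break;
        }
    }
}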