DRY user data caching

Bryan Stitt 2022-09-03 19:43:19 +00:00
parent 8225285bb8
commit 1c2f3e1445
3 changed files with 67 additions and 53 deletions

@@ -134,6 +134,9 @@
- [ ] web3connection3.block(...) might wait forever. be sure to do it safely
- [ ] search for all "todo!"
- [ ] replace all `.context("no servers in sync")` with proper error type
- [ ] when using a bunch of slow public servers, i see "no servers in sync" even when things should be right
- [ ] i think checking the parents of the heaviest chain works most of the time, but not always
- maybe iterate connection heads by total weight? i still think we need to include parent hashes (rough sketch below)
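
A rough sketch of the total-weight idea, with hypothetical types (the real `Web3Connections` state is richer than this): each connection votes for its own head and, via the parent hash, for the block behind it, so servers one block apart still agree on the parent.

```rust
use std::collections::HashMap;

/// hypothetical, simplified view of one connection's head block
struct ConnectionHead {
    hash: [u8; 32],
    parent_hash: [u8; 32],
    /// this connection's vote weight (e.g. its soft limit)
    weight: u64,
}

/// pick the head hash that the most total weight stands behind
fn consensus_head(heads: &[ConnectionHead]) -> Option<[u8; 32]> {
    let total_weight: u64 = heads.iter().map(|h| h.weight).sum();

    // every head votes for itself and for its parent
    let mut weight_by_hash: HashMap<[u8; 32], u64> = HashMap::new();
    for head in heads {
        *weight_by_hash.entry(head.hash).or_default() += head.weight;
        *weight_by_hash.entry(head.parent_hash).or_default() += head.weight;
    }

    // only consider hashes that are actually someone's head, require a
    // majority of the weight, and prefer the most-agreed-upon block even
    // if a few servers are already a block ahead of it
    heads
        .iter()
        .map(|h| h.hash)
        .filter(|hash| weight_by_hash[hash] * 2 >= total_weight)
        .max_by_key(|hash| weight_by_hash[hash])
}
```

Breaking ties toward the heaviest hash means a lone server that raced ahead can't drag the consensus head forward by itself.
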
## V1
@@ -179,6 +182,7 @@
- [ ] refactor from_anyhow_error to have consistent error codes and http codes. maybe implement the Error trait
- [ ] when handling errors from axum parsing the Json...Enum, the errors don't get wrapped in json. i think we need a Layer
- [ ] don't "unwrap" anywhere. give proper errors
- [ ] tool to revoke bearer tokens that also clears redis (see the sketch after this list)
new endpoints for users:
- [x] GET /u/:api_key
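
A minimal sketch of that revocation tool (hypothetical helper; it assumes the `bearer:{token}` redis key format used in post_login further down):

```rust
use redis::AsyncCommands;

/// hypothetical admin helper: revoke a bearer token by deleting its redis
/// entry, so the `bearer:{token}` lookup set by post_login stops resolving
async fn revoke_bearer_token(
    redis_conn: &mut redis::aio::MultiplexedConnection,
    bearer_token: &str,
) -> anyhow::Result<()> {
    let bearer_key = format!("bearer:{}", bearer_token);

    // DEL is idempotent, so revoking an already-revoked token is a no-op
    let _: () = redis_conn.del(bearer_key).await?;

    Ok(())
}
```
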

@@ -1,5 +1,6 @@
use super::errors::{anyhow_error_into_response, FrontendErrorResponse};
use crate::app::{UserCacheValue, Web3ProxyApp};
use anyhow::Context;
use axum::response::Response;
use derive_more::From;
use entities::user_keys;
@@ -151,6 +152,54 @@ impl Web3ProxyApp {
        Ok(RateLimitResult::AllowedIp(ip))
    }

    /// fetch a user key's data from the database and save it in the local cache
    pub(crate) async fn cache_user_data(&self, user_key: Uuid) -> anyhow::Result<UserCacheValue> {
        let db = self.db_conn.as_ref().context("no database")?;

        /// helper enum for querying just a few columns instead of the entire table
        #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
        enum QueryAs {
            UserId,
            RequestsPerMinute,
        }

        // TODO: join the user table to this to return the User? we don't always need it
        let user_data = match user_keys::Entity::find()
            .select_only()
            .column_as(user_keys::Column::UserId, QueryAs::UserId)
            .column_as(
                user_keys::Column::RequestsPerMinute,
                QueryAs::RequestsPerMinute,
            )
            .filter(user_keys::Column::ApiKey.eq(user_key))
            .filter(user_keys::Column::Active.eq(true))
            .into_values::<_, QueryAs>()
            .one(db)
            .await?
        {
            Some((user_id, requests_per_minute)) => {
                UserCacheValue::from((
                    // TODO: how long should this cache last? get this from config
                    Instant::now() + Duration::from_secs(60),
                    user_id,
                    requests_per_minute,
                ))
            }
            None => {
                // unknown or inactive key: cache a dummy entry (user_id 0) so
                // repeated lookups don't hit the database
                // TODO: think about this more
                UserCacheValue::from((
                    // TODO: how long should this cache last? get this from config
                    Instant::now() + Duration::from_secs(60),
                    0,
                    0,
                ))
            }
        };

        // save for the next run
        self.user_cache.write().insert(user_key, user_data);

        Ok(user_data)
    }
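
Both `how long should this cache last? get this from config` TODOs could be answered with a config-driven TTL. A minimal sketch, assuming a hypothetical `user_cache_ttl_seconds` field that the real config does not have yet:

```rust
use std::time::{Duration, Instant};

/// hypothetical config struct; the real one lives elsewhere in the app
struct AppConfig {
    /// how long cached user data stays fresh; None falls back to 60 seconds
    user_cache_ttl_seconds: Option<u64>,
}

impl AppConfig {
    fn user_cache_expiry(&self) -> Instant {
        // one place to change the TTL instead of two hard-coded 60s literals
        Instant::now() + Duration::from_secs(self.user_cache_ttl_seconds.unwrap_or(60))
    }
}
```

Both `UserCacheValue::from((...))` arms could then call this instead of repeating the `Instant::now() + Duration::from_secs(60)` math.
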
    pub async fn rate_limit_by_key(&self, user_key: Uuid) -> anyhow::Result<RateLimitResult> {
        // check the local cache
        let user_data = if let Some(cached_user) = self.user_cache.read().get(&user_key) {
@@ -168,58 +217,14 @@ impl Web3ProxyApp {
        };

        // if cache was empty, check the database
        // TODO: i think there is a cleaner way to do this
        let user_data = if user_data.is_none() {
            if let Some(db) = &self.db_conn {
                /// helper enum for querying just a few columns instead of the entire table
                #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
                enum QueryAs {
                    UserId,
                    RequestsPerMinute,
                }

                // TODO: join the user table to this to return the User? we don't always need it
                let user_data = match user_keys::Entity::find()
                    .select_only()
                    .column_as(user_keys::Column::UserId, QueryAs::UserId)
                    .column_as(
                        user_keys::Column::RequestsPerMinute,
                        QueryAs::RequestsPerMinute,
                    )
                    .filter(user_keys::Column::ApiKey.eq(user_key))
                    .filter(user_keys::Column::Active.eq(true))
                    .into_values::<_, QueryAs>()
                    .one(db)
                    .await?
                {
                    Some((user_id, requests_per_minute)) => {
                        UserCacheValue::from((
                            // TODO: how long should this cache last? get this from config
                            Instant::now() + Duration::from_secs(60),
                            user_id,
                            requests_per_minute,
                        ))
                    }
                    None => {
                        // TODO: think about this more
                        UserCacheValue::from((
                            // TODO: how long should this cache last? get this from config
                            Instant::now() + Duration::from_secs(60),
                            0,
                            0,
                        ))
                    }
                };

                // save for the next run
                self.user_cache.write().insert(user_key, user_data);

                user_data
            } else {
                // TODO: rate limit with only local caches?
                unimplemented!("no cache hit and no database connection")
            }
            self.cache_user_data(user_key)
                .await
                .context("no user data")?
        } else {
            // unwrap the cache's result
            user_data.unwrap()
            user_data.context("no user data")?
        };

        if user_data.user_id == 0 {

@@ -178,7 +178,7 @@ pub async fn post_login(
        .await
        .unwrap();

    let (u_id, response) = match u {
    let (u, uk, response) = match u {
        None => {
            let txn = db.begin().await?;
@@ -213,7 +213,7 @@ pub async fn post_login(
            let response = (StatusCode::CREATED, Json(response_json)).into_response();

            (u.id, response)
            (u, uk, response)
        }
        Some(u) => {
            // the user is already registered
@@ -232,17 +232,22 @@ pub async fn post_login(
            let response = (StatusCode::OK, Json(response_json)).into_response();

            (u.id, response)
            (u, uk, response)
        }
    };

    // TODO: set a session cookie with the bearer token?

    // save the bearer token in redis with a long (7 or 30 day?) expiry. or in the database?
    let mut redis_conn = app.redis_conn().await?;

    let bearer_key = format!("bearer:{}", bearer_token);

    redis_conn.set(bearer_key, u_id.to_string()).await?;
    redis_conn.set(bearer_key, u.id.to_string()).await?;

    // save the user data in the local cache with a short expiry
    // TODO: we already have uk, so this could be more efficient. it works for now
    app.cache_user_data(uk.api_key).await?;

    Ok(response)
}
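
For the `long (7 or 30 day?) expiry` TODO above, SETEX stores the value and its expiry in one call. A sketch assuming the redis crate's `AsyncCommands::set_ex` (the app's actual redis wrapper may differ):

```rust
use redis::AsyncCommands;

// hypothetical answer to the "7 or 30 day?" question
const BEARER_TOKEN_TTL_SECONDS: usize = 60 * 60 * 24 * 30; // 30 days

async fn save_bearer_token(
    redis_conn: &mut redis::aio::MultiplexedConnection,
    bearer_token: &str,
    user_id: u64,
) -> anyhow::Result<()> {
    let bearer_key = format!("bearer:{}", bearer_token);

    // SETEX sets the value and the expiry atomically, so a crash between a
    // SET and a later EXPIRE can't leave an immortal token behind
    let _: () = redis_conn
        .set_ex(bearer_key, user_id.to_string(), BEARER_TOKEN_TTL_SECONDS)
        .await?;

    Ok(())
}
```
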