Skip to content

Commit

Permalink
feat: add json cache
Browse files Browse the repository at this point in the history
  • Loading branch information
MasterPtato committed Jun 22, 2024
1 parent 115f02e commit 384771d
Show file tree
Hide file tree
Showing 4 changed files with 88 additions and 0 deletions.
1 change: 1 addition & 0 deletions lib/cache/build/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ rivet-metrics = { path = "../../metrics" }
rivet-pools = { path = "../../pools" }
rivet-util = { path = "../../util/core" }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0"
tokio = { version = "1.29", features = ["full"] }
tracing = "0.1"
Expand Down
6 changes: 6 additions & 0 deletions lib/cache/build/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,12 @@ pub enum Error {
#[error("proto encode: {0}")]
ProtoEncode(prost::EncodeError),

#[error("serde decode: {0}")]
SerdeDecode(serde_json::Error),

#[error("serde encode: {0}")]
SerdeEncode(serde_json::Error),

#[error("optimistic lock failed too many times")]
OptimisticLockFailedTooManyTimes,
}
80 changes: 80 additions & 0 deletions lib/cache/build/src/req_config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ use std::{
use redis::AsyncCommands;
use rivet_pools::prelude::*;
use tracing::Instrument;
use serde::{Serialize, de::DeserializeOwned};

use super::*;
use crate::{
Expand Down Expand Up @@ -660,6 +661,85 @@ impl RequestConfig {
}
}

// MARK: JSON fetch
impl RequestConfig {
	/// Fetches a single JSON-serialized value for `key`, calling `getter` to
	/// resolve it when needed (presumably only on a cache miss — the actual
	/// caching policy lives in `fetch_all_convert`; confirm there).
	///
	/// Returns `Ok(None)` when no value was resolved for `key`.
	#[tracing::instrument(err, skip(key, getter))]
	pub async fn fetch_one_json<Key, Value, Getter, Fut>(
		self,
		base_key: impl ToString + Debug,
		key: Key,
		getter: Getter,
	) -> Result<Option<Value>, Error>
	where
		Key: CacheKey + Send + Sync,
		Value: Serialize + DeserializeOwned + Debug + Send + Sync,
		Getter: Fn(GetterCtx<Key, Value>, Key) -> Fut + Clone,
		Fut: Future<Output = GetterResult<GetterCtx<Key, Value>>>,
	{
		// Delegate to the batch variant with a single-element key list,
		// adapting the one-key getter to the batch getter signature.
		let results = self
			.fetch_all_json_with_keys(base_key, [key], move |cache, keys| {
				let getter = getter.clone();
				async move {
					debug_assert_eq!(1, keys.len());
					match keys.into_iter().next() {
						Some(key) => getter(cache, key).await,
						None => {
							// Should be unreachable: we always pass exactly one key.
							tracing::error!("no keys provided to fetch one");
							Ok(cache)
						}
					}
				}
			})
			.await?;

		// At most one entry comes back for the single key we passed.
		Ok(results.into_iter().map(|(_, value)| value).next())
	}

	/// Batch variant of [`RequestConfig::fetch_one_json`]: fetches many
	/// JSON-serialized values at once and returns only the values, dropping
	/// the keys reported by `fetch_all_json_with_keys`.
	#[tracing::instrument(err, skip(keys, getter))]
	pub async fn fetch_all_json<Key, Value, Getter, Fut>(
		self,
		base_key: impl ToString + Debug,
		keys: impl IntoIterator<Item = Key>,
		getter: Getter,
	) -> Result<Vec<Value>, Error>
	where
		Key: CacheKey + Send + Sync,
		Value: Serialize + DeserializeOwned + Debug + Send + Sync,
		Getter: Fn(GetterCtx<Key, Value>, Vec<Key>) -> Fut + Clone,
		Fut: Future<Output = GetterResult<GetterCtx<Key, Value>>>,
	{
		let keyed = self
			.fetch_all_json_with_keys::<Key, Value, Getter, Fut>(base_key, keys, getter)
			.await?;
		// TODO: Find a way to not allocate another vec here
		Ok(keyed.into_iter().map(|(_, value)| value).collect())
	}

	/// Fetches many values keyed by `keys`, using JSON as the cache wire
	/// format.
	///
	/// Thin wrapper around `fetch_all_convert`: values are encoded with
	/// `serde_json::to_vec` (failures map to [`Error::SerdeEncode`]) and
	/// decoded with `serde_json::from_slice` (failures map to
	/// [`Error::SerdeDecode`]).
	#[tracing::instrument(err, skip(keys, getter))]
	pub async fn fetch_all_json_with_keys<Key, Value, Getter, Fut>(
		self,
		base_key: impl ToString + Debug,
		keys: impl IntoIterator<Item = Key>,
		getter: Getter,
	) -> Result<Vec<(Key, Value)>, Error>
	where
		Key: CacheKey + Send + Sync,
		Value: Serialize + DeserializeOwned + Debug + Send + Sync,
		Getter: Fn(GetterCtx<Key, Value>, Vec<Key>) -> Fut + Clone,
		Fut: Future<Output = GetterResult<GetterCtx<Key, Value>>>,
	{
		// Name the codec closures up front so the delegation reads cleanly.
		let encode = |value: &Value| serde_json::to_vec(value).map_err(Error::SerdeEncode);
		let decode =
			|bytes: &Vec<u8>| serde_json::from_slice::<Value>(bytes).map_err(Error::SerdeDecode);

		self.fetch_all_convert(base_key, keys, getter, encode, decode)
			.await
	}
}

// #[tracing::instrument(skip(conn))]
// async fn unwatch_gracefully(conn: &mut RedisPool) {
// tracing::debug!("unwatching");
Expand Down
1 change: 1 addition & 0 deletions svc/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 384771d

Please sign in to comment.