refactor(feed): use FeedUrl instead of String
ymgyt committed Apr 20, 2024
1 parent 802892a commit 759950b
Showing 5 changed files with 135 additions and 21 deletions.
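Note: the point of the refactor is to replace bare `String` URLs with a validating `FeedUrl` newtype across the fetch/cache/parse APIs. A minimal caller-side sketch of the new contract (hypothetical code, not part of this commit; it assumes `FeedUrl` is re-exported at `synd_feed::types`, as the `types/mod.rs` change below suggests):

use synd_feed::types::FeedUrl;

fn main() {
    // URLs are now validated once, at construction time, via `TryFrom<&str>`,
    // so every later `fetch_feed(url)` call starts from an already-parsed URL.
    let url = FeedUrl::try_from("https://blog.ymgyt.io/atom.xml").expect("valid url");
    assert_eq!(url.as_str(), "https://blog.ymgyt.io/atom.xml");

    // Invalid input now fails before any I/O happens.
    assert!(FeedUrl::try_from("not a url").is_err());
}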
1 change: 1 addition & 0 deletions crates/synd_feed/Cargo.toml
@@ -26,6 +26,7 @@ serde_json = { workspace = true }
 thiserror = { workspace = true }
 tokio = { workspace = true }
 tracing = { workspace = true }
+url = { workspace = true, features = ["serde"] }
 
 [features]
 graphql = ["dep:async-graphql"]
18 changes: 10 additions & 8 deletions crates/synd_feed/src/feed/cache.rs
@@ -5,7 +5,7 @@ use moka::future::Cache;
 
 use crate::{
     feed::parser::{FetchFeed, FetchFeedResult},
-    types::{self},
+    types::{self, FeedUrl},
 };
 
 #[derive(Clone, Copy)]
@@ -44,18 +44,20 @@ impl CacheConfig {
 
 #[async_trait]
 pub trait FetchCachedFeed: Send + Sync {
-    async fn fetch_feed(&self, url: String) -> FetchFeedResult<Arc<types::Feed>>;
+    async fn fetch_feed(&self, url: FeedUrl) -> FetchFeedResult<Arc<types::Feed>>;
     /// Fetch feeds by spawning tasks
-    async fn fetch_feeds_parallel(&self, urls: &[String])
-        -> Vec<FetchFeedResult<Arc<types::Feed>>>;
+    async fn fetch_feeds_parallel(
+        &self,
+        urls: &[FeedUrl],
+    ) -> Vec<FetchFeedResult<Arc<types::Feed>>>;
 }
 
 #[derive(Clone)]
 pub struct CacheLayer<S> {
     service: S,
     // Use Arc to avoid expensive clone
     // https://github.com/moka-rs/moka?tab=readme-ov-file#avoiding-to-clone-the-value-at-get
-    cache: Cache<String, Arc<types::Feed>>,
+    cache: Cache<FeedUrl, Arc<types::Feed>>,
 }
 impl<S> CacheLayer<S> {
     /// Construct `CacheLayer` with default config
@@ -88,10 +90,10 @@ where
     S: FetchFeed + Clone + 'static,
 {
     #[tracing::instrument(skip_all, fields(%url))]
-    async fn fetch_feed(&self, url: String) -> FetchFeedResult<Arc<types::Feed>> {
+    async fn fetch_feed(&self, url: FeedUrl) -> FetchFeedResult<Arc<types::Feed>> {
         // lookup cache
         if let Some(feed) = self.cache.get(&url).await {
-            tracing::debug!(url, "Feed cache hit");
+            tracing::debug!(url = url.as_str(), "Feed cache hit");
             return Ok(feed);
         }
 
@@ -105,7 +107,7 @@ where
     /// Fetch feeds by spawning tasks
     async fn fetch_feeds_parallel(
         &self,
-        urls: &[String],
+        urls: &[FeedUrl],
     ) -> Vec<FetchFeedResult<Arc<types::Feed>>> {
         let mut handles = Vec::with_capacity(urls.len());
 
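The key type change above compiles because `FeedUrl` (added later in this commit) derives `Hash`, `Eq`, and `Clone`, which `moka`'s `Cache<K, V>` requires of its keys. A standalone sketch of the same pattern (hypothetical, simplified value type; the async `get`/`insert` API matches the `cache.get(&url).await` call in this diff):

use std::sync::Arc;

use moka::future::Cache;
use synd_feed::types::FeedUrl;

#[tokio::main]
async fn main() {
    // FeedUrl derives Hash + Eq, so it can key the cache directly,
    // and Arc keeps reads cheap: get() clones only the Arc, not the value.
    let cache: Cache<FeedUrl, Arc<String>> = Cache::new(1024);

    let url = FeedUrl::try_from("https://blog.ymgyt.io/atom.xml").unwrap();
    cache
        .insert(url.clone(), Arc::new("cached feed body".to_owned()))
        .await;

    assert!(cache.get(&url).await.is_some());
}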
19 changes: 9 additions & 10 deletions crates/synd_feed/src/feed/parser.rs
@@ -3,7 +3,7 @@ use std::{sync::Arc, time::Duration};
 use async_trait::async_trait;
 use feed_rs::parser::{ParseErrorKind, ParseFeedError, Parser};
 
-use crate::types::Feed;
+use crate::types::{Feed, FeedUrl};
 
 pub type FetchFeedResult<T> = std::result::Result<T, FetchFeedError>;
 
@@ -29,21 +29,21 @@ pub enum FetchFeedError {
 
 #[async_trait]
 pub trait FetchFeed: Send + Sync {
-    async fn fetch_feed(&self, url: String) -> FetchFeedResult<Feed>;
+    async fn fetch_feed(&self, url: FeedUrl) -> FetchFeedResult<Feed>;
     /// Fetch feeds by spawning tasks
-    async fn fetch_feeds_parallel(&self, urls: &[String]) -> FetchFeedResult<Vec<Feed>>;
+    async fn fetch_feeds_parallel(&self, urls: &[FeedUrl]) -> FetchFeedResult<Vec<Feed>>;
 }
 
 #[async_trait]
 impl<T> FetchFeed for Arc<T>
 where
     T: FetchFeed,
 {
-    async fn fetch_feed(&self, url: String) -> FetchFeedResult<Feed> {
+    async fn fetch_feed(&self, url: FeedUrl) -> FetchFeedResult<Feed> {
         self.fetch_feed(url).await
     }
     /// Fetch feeds by spawning tasks
-    async fn fetch_feeds_parallel(&self, urls: &[String]) -> FetchFeedResult<Vec<Feed>> {
+    async fn fetch_feeds_parallel(&self, urls: &[FeedUrl]) -> FetchFeedResult<Vec<Feed>> {
         self.fetch_feeds_parallel(urls).await
     }
 }
@@ -57,11 +57,11 @@ pub struct FeedService {
 
 #[async_trait]
 impl FetchFeed for FeedService {
-    async fn fetch_feed(&self, url: String) -> FetchFeedResult<Feed> {
+    async fn fetch_feed(&self, url: FeedUrl) -> FetchFeedResult<Feed> {
         use futures_util::StreamExt;
         let mut stream = self
             .http
-            .get(&url)
+            .get(url.clone().into_inner())
             .send()
             .await
             .map_err(FetchFeedError::Fetch)?
@@ -82,7 +82,7 @@ impl FetchFeed for FeedService {
         self.parse(url, buff.as_slice())
     }
 
-    async fn fetch_feeds_parallel(&self, urls: &[String]) -> FetchFeedResult<Vec<Feed>> {
+    async fn fetch_feeds_parallel(&self, urls: &[FeedUrl]) -> FetchFeedResult<Vec<Feed>> {
        // Order matters, so we cannot use tokio's JoinSet or futures' FuturesUnordered.
        // Should we use FuturesOrdered?
        let mut handles = Vec::with_capacity(urls.len());
@@ -115,11 +115,10 @@ impl FeedService {
         Self { http, buff_limit }
     }
 
-    pub fn parse<S>(&self, url: impl Into<String>, source: S) -> FetchFeedResult<Feed>
+    pub fn parse<S>(&self, url: FeedUrl, source: S) -> FetchFeedResult<Feed>
     where
         S: std::io::Read,
     {
-        let url = url.into();
         let parser = Self::build_parser(&url);
 
         parser
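Because the trait itself changed, every `FetchFeed` implementor and caller moves to `FeedUrl` at once. A hedged sketch of a generic caller (hypothetical code; it assumes the trait is publicly reachable at `synd_feed::feed::parser`, mirroring the internal `crate::feed::parser` path used in cache.rs):

use synd_feed::{
    feed::parser::{FetchFeed, FetchFeedResult},
    types::{Feed, FeedUrl},
};

// Generic over any fetcher (FeedService, CacheLayer, Arc<T>, ...):
// the signature now proves the URLs were validated before the call.
async fn refresh_all<S: FetchFeed>(
    fetcher: &S,
    urls: &[FeedUrl],
) -> FetchFeedResult<Vec<Feed>> {
    fetcher.fetch_feeds_parallel(urls).await
}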
9 changes: 6 additions & 3 deletions crates/synd_feed/src/types/mod.rs
@@ -6,14 +6,17 @@ use feed_rs::model::{self as feedrs, Generator, Link, Person, Text};
 pub use feedrs::FeedType;
 
 pub type Time = DateTime<Utc>;
-pub type FeedUrl = String;
+// pub type FeedUrl = String;
 
 mod requirement;
 pub use requirement::Requirement;
 
 mod category;
 pub use category::Category;
 
+mod url;
+pub use url::FeedUrl;
+
 #[derive(PartialEq, Eq, Debug, Clone)]
 pub struct EntryId<'a>(Cow<'a, str>);
 
@@ -136,8 +139,8 @@ impl FeedMeta {
         &self.feed_type
     }
 
-    pub fn url(&self) -> &str {
-        self.url.as_str()
+    pub fn url(&self) -> &FeedUrl {
+        &self.url
     }
 
     pub fn title(&self) -> Option<&str> {
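Callers of `FeedMeta::url` that still want a `&str` now ask for it explicitly. A hypothetical caller sketch (assuming `FeedMeta` is public in `synd_feed::types`, which this commit does not show):

use synd_feed::types::FeedMeta;

// Before this commit `meta.url()` already was a `&str`; now the string
// view is an explicit, zero-cost `as_str()` on the typed URL.
fn feed_url_for_display(meta: &FeedMeta) -> &str {
    meta.url().as_str()
}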
109 changes: 109 additions & 0 deletions crates/synd_feed/src/types/url.rs
@@ -0,0 +1,109 @@
+use core::fmt;
+
+use serde::{Deserialize, Serialize};
+use thiserror::Error;
+use url::Url;
+
+#[derive(Error, Debug)]
+pub enum FeedUrlError {
+    #[error("invalid url: {0}")]
+    InvalidUrl(url::ParseError),
+}
+
+/// Feed URL which serves RSS or Atom
+#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
+pub struct FeedUrl(Url);
+
+impl TryFrom<&str> for FeedUrl {
+    type Error = FeedUrlError;
+
+    fn try_from(s: &str) -> Result<Self, Self::Error> {
+        Url::parse(s).map(FeedUrl).map_err(FeedUrlError::InvalidUrl)
+    }
+}
+
+impl AsRef<str> for FeedUrl {
+    fn as_ref(&self) -> &str {
+        self.0.as_str()
+    }
+}
+
+impl From<Url> for FeedUrl {
+    fn from(url: Url) -> Self {
+        Self(url)
+    }
+}
+
+impl From<FeedUrl> for Url {
+    fn from(url: FeedUrl) -> Self {
+        url.0
+    }
+}
+
+impl fmt::Display for FeedUrl {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+impl FeedUrl {
+    pub fn into_inner(self) -> Url {
+        self.0
+    }
+
+    pub fn as_str(&self) -> &str {
+        self.0.as_str()
+    }
+}
+
+#[cfg(feature = "graphql")]
+#[async_graphql::Scalar]
+impl async_graphql::ScalarType for FeedUrl {
+    fn parse(value: async_graphql::Value) -> async_graphql::InputValueResult<Self> {
+        let async_graphql::Value::String(s) = value else {
+            return Err(async_graphql::InputValueError::expected_type(value));
+        };
+
+        match Url::parse(&s) {
+            Ok(url) => Ok(FeedUrl::from(url)),
+            Err(err) => Err(async_graphql::InputValueError::custom(err)),
+        }
+    }
+
+    fn to_value(&self) -> async_graphql::Value {
+        // Is this clone avoidable?
+        async_graphql::Value::String(self.0.clone().into())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn backward_compatible() {
+        let org = "https://blog.ymgyt.io/atom.xml";
+        let u = FeedUrl::from(Url::parse(org).unwrap());
+
+        assert_eq!(u.as_str(), org);
+        assert_eq!(format!("{u}").as_str(), org);
+    }
+
+    #[test]
+    fn deserialize_from_strings() {
+        let data = vec![
+            "https://blog.ymgyt.io/atom.xml",
+            "https://blog.ymgyt.io/atom2.xml",
+        ];
+        let serialized = serde_json::to_string(&data).unwrap();
+        let deserialized: Vec<FeedUrl> = serde_json::from_str(&serialized).unwrap();
+
+        assert_eq!(
+            deserialized,
+            vec![
+                FeedUrl::from(Url::parse("https://blog.ymgyt.io/atom.xml").unwrap()),
+                FeedUrl::from(Url::parse("https://blog.ymgyt.io/atom2.xml").unwrap()),
+            ],
+        );
+    }
+}
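Taken together, the conversions make `FeedUrl` a thin, lossless wrapper: `From<Url>` wraps without re-parsing, `Display`/`AsRef<str>`/`as_str` round-trip the original string, and `into_inner` recovers the `Url` (as `fetch_feed` does for the HTTP client). A small usage sketch built only from the APIs in this file:

use synd_feed::types::FeedUrl;
use url::Url;

fn main() {
    let parsed = Url::parse("https://blog.ymgyt.io/atom.xml").unwrap();

    // Wrapping is free: no re-parse, no allocation beyond the clone here.
    let feed_url = FeedUrl::from(parsed.clone());
    assert_eq!(feed_url.to_string(), parsed.to_string());

    // into_inner() hands the Url back, e.g. to pass to an HTTP client.
    let unwrapped: Url = feed_url.into_inner();
    assert_eq!(unwrapped, parsed);
}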
