Skip to content

Commit

Permalink
feat(feed): add feed annotations
Browse files Browse the repository at this point in the history
  • Loading branch information
ymgyt committed Apr 10, 2024
1 parent 2f1bac7 commit 6f9f1fe
Show file tree
Hide file tree
Showing 5 changed files with 154 additions and 30 deletions.
27 changes: 16 additions & 11 deletions crates/synd_feed/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,22 @@ readme = "README.md"
version = "0.1.5"

[dependencies]
anyhow = { workspace = true }
async-trait = { workspace = true }
chrono = { workspace = true }
feed-rs = { workspace = true }
futures-util = { workspace = true }
moka = { workspace = true, features = ["future"] }
reqwest = { workspace = true, features = ["stream"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
anyhow = { workspace = true }
async-graphql = { workspace = true, optional = true }
async-trait = { workspace = true }
chrono = { workspace = true }
feed-rs = { workspace = true }
futures-util = { workspace = true }
moka = { workspace = true, features = ["future"] }
reqwest = { workspace = true, features = ["stream"] }
serde = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }

[features]
graphql = ["dep:async-graphql"]

[lints]
workspace = true
Expand Down
2 changes: 1 addition & 1 deletion crates/synd_feed/src/feed/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use moka::future::Cache;

use crate::{
feed::parser::{FetchFeed, FetchFeedResult},
types,
types::{self},
};

#[derive(Clone, Copy)]
Expand Down
54 changes: 54 additions & 0 deletions crates/synd_feed/src/types/category.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
use std::borrow::Cow;

use serde::{Deserialize, Serialize};
use thiserror::Error;

/// Validation failures raised by [`Category::new`].
#[derive(Error, Debug)]
pub enum CategoryError {
    /// The category was empty after trimming surrounding whitespace.
    #[error("not empty validation is violated")]
    NotEmptyViolated,
    /// The normalized category exceeded the maximum allowed length
    /// (`Category::MAX_LEN` bytes).
    #[error("len max validation is violated")]
    LenMaxViolated,
}

/// A normalized feed category label.
///
/// The inner value is always trimmed and ASCII-lowercased by [`Category::new`],
/// so two categories that differ only in surrounding whitespace or ASCII case
/// compare equal.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Category<'a>(Cow<'a, str>);

impl<'a> Category<'a> {
    /// Maximum accepted length in bytes, checked after normalization.
    const MAX_LEN: usize = 30;

    /// Construct a `Category`, normalizing the input by trimming surrounding
    /// whitespace and lowercasing ASCII characters.
    ///
    /// # Errors
    ///
    /// * [`CategoryError::NotEmptyViolated`] if the normalized value is empty.
    /// * [`CategoryError::LenMaxViolated`] if the normalized value is longer
    ///   than [`Self::MAX_LEN`] bytes.
    pub fn new(c: impl Into<Cow<'a, str>>) -> Result<Self, CategoryError> {
        // `to_ascii_lowercase` always yields an owned `String`, so the
        // stored `Cow` ends up owned regardless of the input variant.
        let normalized = c.into().trim().to_ascii_lowercase();

        if normalized.is_empty() {
            Err(CategoryError::NotEmptyViolated)
        } else if normalized.len() > Self::MAX_LEN {
            Err(CategoryError::LenMaxViolated)
        } else {
            Ok(Self(normalized.into()))
        }
    }

    /// Consume the category and return the normalized inner value.
    pub fn into_inner(self) -> Cow<'a, str> {
        self.0
    }
}

#[cfg(feature = "graphql")]
#[async_graphql::Scalar]
impl<'s> async_graphql::ScalarType for Category<'s> {
    /// Parse a GraphQL string value into a validated `Category`;
    /// non-string values and validation failures become input errors.
    fn parse(value: async_graphql::Value) -> async_graphql::InputValueResult<Self> {
        match value {
            async_graphql::Value::String(s) => {
                Category::new(s).map_err(|err| async_graphql::InputValueError::custom(err))
            }
            other => Err(async_graphql::InputValueError::expected_type(other)),
        }
    }

    fn to_value(&self) -> async_graphql::Value {
        // `Value::String` requires an owned `String`, so a copy of the
        // inner str is made here (cheap: categories are at most 30 bytes).
        async_graphql::Value::String(self.0.clone().into_owned())
    }
}
73 changes: 55 additions & 18 deletions crates/synd_feed/src/types.rs → crates/synd_feed/src/types/mod.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,19 @@
use std::{borrow::Cow, fmt::Display};

use chrono::{DateTime, Utc};
use feed_rs::model as feedrs;
use feed_rs::model::{self as feedrs, Generator, Link, Person, Text};

pub use feedrs::FeedType;

pub type Time = DateTime<Utc>;
pub type FeedUrl = String;

mod requirement;
pub use requirement::Requirement;

mod category;
pub use category::Category;

#[derive(PartialEq, Eq, Debug, Clone)]
pub struct EntryId<'a>(Cow<'a, str>);

Expand Down Expand Up @@ -87,50 +93,60 @@ impl Entry {
#[derive(Debug, Clone)]
pub struct FeedMeta {
url: FeedUrl,
// TODO: extract feedrs data
// no entries
feed: feedrs::Feed,
// feed_rs models
feed_type: FeedType,
title: Option<Text>,
updated: Option<Time>,
authors: Vec<Person>,
description: Option<Text>,
links: Vec<Link>,
generator: Option<Generator>,
published: Option<Time>,
}

/// Wraps a feed value of type `T` together with its optional annotations.
#[derive(Debug, Clone)]
pub struct Annotated<T> {
    /// The annotated feed value.
    pub feed: T,
    /// How important the feed is (see [`Requirement`]); `None` when absent.
    pub requirement: Option<Requirement>,
    /// Category assigned to the feed; `None` when absent.
    pub category: Option<Category<'static>>,
}

impl FeedMeta {
pub fn r#type(&self) -> &FeedType {
&self.feed.feed_type
&self.feed_type
}

pub fn url(&self) -> &str {
self.url.as_str()
}

pub fn title(&self) -> Option<&str> {
self.feed.title.as_ref().map(|text| text.content.as_str())
self.title.as_ref().map(|text| text.content.as_str())
}

pub fn updated(&self) -> Option<Time> {
self.feed.updated
self.updated.or(self.published)
}

pub fn authors(&self) -> impl Iterator<Item = &str> {
self.feed.authors.iter().map(|person| person.name.as_str())
self.authors.iter().map(|person| person.name.as_str())
}

pub fn description(&self) -> Option<&str> {
self.feed
.description
.as_ref()
.map(|text| text.content.as_str())
self.description.as_ref().map(|text| text.content.as_str())
}

pub fn links(&self) -> impl Iterator<Item = &feedrs::Link> {
self.feed.links.iter()
self.links.iter()
}

/// Return website link to which feed syndicate
pub fn website_url(&self) -> Option<&str> {
link::find_website_url(self.r#type(), &self.feed.links)
link::find_website_url(self.r#type(), &self.links)
}

pub fn generator(&self) -> Option<&str> {
self.feed.generator.as_ref().map(|g| g.content.as_str())
self.generator.as_ref().map(|g| g.content.as_str())
}
}

Expand Down Expand Up @@ -172,11 +188,32 @@ impl Feed {
}

impl From<(FeedUrl, feed_rs::model::Feed)> for Feed {
fn from((url, mut feed): (FeedUrl, feedrs::Feed)) -> Self {
let entries = std::mem::take(&mut feed.entries);
fn from((url, feed): (FeedUrl, feedrs::Feed)) -> Self {
let feed_rs::model::Feed {
feed_type,
title,
updated,
authors,
description,
links,
generator,
published,
entries,
..
} = feed;
let meta = FeedMeta {
url,
feed_type,
title,
updated,
authors,
description,
links,
generator,
published,
};
let entries = entries.into_iter().map(Entry).collect();

let meta = FeedMeta { url, feed };
Feed { meta, entries }
}
}
Expand Down
28 changes: 28 additions & 0 deletions crates/synd_feed/src/types/requirement.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
use serde::{Deserialize, Serialize};
use std::str::FromStr;

/// `Requirement` expresses how important the feed is
/// using an analogy to [RFC2119](https://datatracker.ietf.org/doc/html/rfc2119)
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
// Expose as a GraphQL enum only when the `graphql` feature is enabled.
#[cfg_attr(feature = "graphql", derive(async_graphql::Enum))]
pub enum Requirement {
    /// `Must` indicates it must be read
    Must,
    /// `Should` suggests it should be read unless there is a special reason not to
    Should,
    /// `May` implies it is probably worth reading
    May,
}

impl FromStr for Requirement {
    type Err = &'static str;

    /// Parse a requirement level, ignoring ASCII case
    /// (so `"must"`, `"Must"`, and `"MUST"` are all accepted).
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s.eq_ignore_ascii_case("MUST") {
            Ok(Requirement::Must)
        } else if s.eq_ignore_ascii_case("SHOULD") {
            Ok(Requirement::Should)
        } else if s.eq_ignore_ascii_case("MAY") {
            Ok(Requirement::May)
        } else {
            Err("invalid requirement, should be one of ['must', 'should', 'may']")
        }
    }
}

0 comments on commit 6f9f1fe

Please sign in to comment.