Allow marking multiple posts as read in single api call (fixes #3963) (#4048)

* Allow marking multiple posts as read in single api call (fixes #3963)

* cleanup

* limit array length

* fix test

* review

---------

Co-authored-by: Dessalines <dessalines@users.noreply.github.com>
Nutomic and dessalines committed Oct 17, 2023
1 parent 3f62135 commit 3a19af5
Showing 8 changed files with 91 additions and 99 deletions.
34 changes: 19 additions & 15 deletions crates/api/src/post/mark_read.rs
@@ -1,30 +1,34 @@
use actix_web::web::{Data, Json};
use lemmy_api_common::{
context::LemmyContext,
post::{MarkPostAsRead, PostResponse},
utils,
};
use lemmy_db_views::structs::{LocalUserView, PostView};
use lemmy_utils::error::LemmyError;
use lemmy_api_common::{context::LemmyContext, post::MarkPostAsRead, SuccessResponse};
use lemmy_db_schema::source::post::PostRead;
use lemmy_db_views::structs::LocalUserView;
use lemmy_utils::error::{LemmyError, LemmyErrorExt, LemmyErrorType, MAX_API_PARAM_ELEMENTS};
use std::collections::HashSet;

#[tracing::instrument(skip(context))]
pub async fn mark_post_as_read(
data: Json<MarkPostAsRead>,
context: Data<LemmyContext>,
local_user_view: LocalUserView,
) -> Result<Json<PostResponse>, LemmyError> {
let post_id = data.post_id;
) -> Result<Json<SuccessResponse>, LemmyError> {
let mut post_ids = data.post_ids.iter().cloned().collect::<HashSet<_>>();
post_ids.insert(data.post_id);
let person_id = local_user_view.person.id;

if post_ids.len() > MAX_API_PARAM_ELEMENTS {
Err(LemmyErrorType::TooManyItems)?;
}

// Mark the post as read / unread
if data.read {
utils::mark_post_as_read(person_id, post_id, &mut context.pool()).await?;
PostRead::mark_as_read(&mut context.pool(), post_ids, person_id)
.await
.with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)?;
} else {
utils::mark_post_as_unread(person_id, post_id, &mut context.pool()).await?;
PostRead::mark_as_unread(&mut context.pool(), post_ids, person_id)
.await
.with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)?;
}

// Fetch it
let post_view = PostView::read(&mut context.pool(), post_id, Some(person_id), false).await?;

Ok(Json(PostResponse { post_view }))
Ok(Json(SuccessResponse::default()))
}
2 changes: 2 additions & 0 deletions crates/api_common/src/post.rs
@@ -140,7 +140,9 @@ pub struct RemovePost {
#[cfg_attr(feature = "full", ts(export))]
/// Mark a post as read.
pub struct MarkPostAsRead {
/// TODO: deprecated, send `post_ids` instead
pub post_id: PostId,
pub post_ids: Vec<PostId>,
pub read: bool,
}
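For illustration, a minimal client-side sketch of the new request shape, assuming the struct has exactly the fields shown in this diff and that `PostId` is the public i32 newtype from `lemmy_db_schema::newtypes` (both assumptions, not confirmed by this diff):

```rust
use lemmy_api_common::post::MarkPostAsRead;
use lemmy_db_schema::newtypes::PostId;

// Hypothetical ids; any values work for the example.
fn build_request() -> MarkPostAsRead {
  MarkPostAsRead {
    // Deprecated single-post field, still accepted; the handler merges it into the id set.
    post_id: PostId(101),
    // New batch field; the handler rejects more than MAX_API_PARAM_ELEMENTS (1000) ids.
    post_ids: vec![PostId(102), PostId(103)],
    read: true,
  }
}
```

Note that the endpoint now responds with a plain `SuccessResponse` instead of a `PostResponse`, as shown in the `mark_read.rs` changes above.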

27 changes: 7 additions & 20 deletions crates/api_common/src/utils.rs
@@ -18,9 +18,9 @@ use lemmy_db_schema::{
password_reset_request::PasswordResetRequest,
person::{Person, PersonUpdateForm},
person_block::PersonBlock,
post::{Post, PostRead, PostReadForm},
post::{Post, PostRead},
},
traits::{Crud, Readable},
traits::Crud,
utils::DbPool,
};
use lemmy_db_views::{comment_view::CommentQuery, structs::LocalUserView};
@@ -39,6 +39,7 @@ use lemmy_utils::{
};
use regex::Regex;
use rosetta_i18n::{Language, LanguageId};
use std::collections::HashSet;
use tracing::warn;
use url::{ParseError, Url};

@@ -117,25 +118,11 @@ pub async fn mark_post_as_read(
person_id: PersonId,
post_id: PostId,
pool: &mut DbPool<'_>,
) -> Result<PostRead, LemmyError> {
let post_read_form = PostReadForm { post_id, person_id };

PostRead::mark_as_read(pool, &post_read_form)
.await
.with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)
}

#[tracing::instrument(skip_all)]
pub async fn mark_post_as_unread(
person_id: PersonId,
post_id: PostId,
pool: &mut DbPool<'_>,
) -> Result<usize, LemmyError> {
let post_read_form = PostReadForm { post_id, person_id };

PostRead::mark_as_unread(pool, &post_read_form)
) -> Result<(), LemmyError> {
PostRead::mark_as_read(pool, HashSet::from([post_id]), person_id)
.await
.with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)
.with_lemmy_type(LemmyErrorType::CouldntMarkPostAsRead)?;
Ok(())
}

pub fn check_user_valid(person: &Person) -> Result<(), LemmyError> {
14 changes: 4 additions & 10 deletions crates/apub/src/api/user_settings_backup.rs
@@ -23,18 +23,12 @@ use lemmy_db_schema::{
};
use lemmy_db_views::structs::LocalUserView;
use lemmy_utils::{
error::{LemmyError, LemmyErrorType, LemmyResult},
error::{LemmyError, LemmyErrorType, LemmyResult, MAX_API_PARAM_ELEMENTS},
spawn_try_task,
};
use serde::{Deserialize, Serialize};
use tracing::info;

/// Maximum number of follow/block URLs which can be imported at once, to prevent server overloading.
/// To import a larger backup, split it into multiple parts.
///
/// TODO: having the user manually split files will very be confusing
const MAX_URL_IMPORT_COUNT: usize = 1000;

/// Backup of user data. This struct should never be changed so that the data can be used as a
/// long-term backup in case the instance goes down unexpectedly. All fields are optional to allow
/// importing partial backups.
@@ -138,8 +132,8 @@ pub async fn import_settings(
+ data.blocked_users.len()
+ data.saved_posts.len()
+ data.saved_comments.len();
if url_count > MAX_URL_IMPORT_COUNT {
Err(LemmyErrorType::UserBackupTooLarge)?;
if url_count > MAX_API_PARAM_ELEMENTS {
Err(LemmyErrorType::TooManyItems)?;
}

spawn_try_task(async move {
@@ -434,7 +428,7 @@ mod tests {

assert_eq!(
imported.err().unwrap().error_type,
LemmyErrorType::UserBackupTooLarge
LemmyErrorType::TooManyItems
);

LocalUser::delete(&mut context.pool(), export_user.local_user.id)
94 changes: 53 additions & 41 deletions crates/db_schema/src/impls/post.rs
@@ -28,13 +28,14 @@ use crate::{
PostSavedForm,
PostUpdateForm,
},
traits::{Crud, Likeable, Readable, Saveable},
traits::{Crud, Likeable, Saveable},
utils::{get_conn, naive_now, DbPool, DELETED_REPLACEMENT_TEXT, FETCH_LIMIT_MAX},
};
use ::url::Url;
use chrono::{Duration, Utc};
use diesel::{dsl::insert_into, result::Error, ExpressionMethods, QueryDsl, TextExpressionMethods};
use diesel_async::RunQueryDsl;
use std::collections::HashSet;

#[async_trait]
impl Crud for Post {
@@ -302,34 +303,38 @@ impl Saveable for PostSaved {
}
}

#[async_trait]
impl Readable for PostRead {
type Form = PostReadForm;
async fn mark_as_read(
impl PostRead {
pub async fn mark_as_read(
pool: &mut DbPool<'_>,
post_read_form: &PostReadForm,
) -> Result<Self, Error> {
use crate::schema::post_read::dsl::{person_id, post_id, post_read};
post_ids: HashSet<PostId>,
person_id: PersonId,
) -> Result<usize, Error> {
use crate::schema::post_read::dsl::post_read;
let conn = &mut get_conn(pool).await?;

let forms = post_ids
.into_iter()
.map(|post_id| PostReadForm { post_id, person_id })
.collect::<Vec<PostReadForm>>();
insert_into(post_read)
.values(post_read_form)
.on_conflict((post_id, person_id))
.do_update()
.set(post_read_form)
.get_result::<Self>(conn)
.values(forms)
.on_conflict_do_nothing()
.execute(conn)
.await
}

async fn mark_as_unread(
pub async fn mark_as_unread(
pool: &mut DbPool<'_>,
post_read_form: &PostReadForm,
post_id_: HashSet<PostId>,
person_id_: PersonId,
) -> Result<usize, Error> {
use crate::schema::post_read::dsl::{person_id, post_id, post_read};
let conn = &mut get_conn(pool).await?;

diesel::delete(
post_read
.filter(post_id.eq(post_read_form.post_id))
.filter(person_id.eq(post_read_form.person_id)),
.filter(post_id.eq_any(post_id_))
.filter(person_id.eq(person_id_)),
)
.execute(conn)
.await
@@ -352,16 +357,16 @@ mod tests {
PostLike,
PostLikeForm,
PostRead,
PostReadForm,
PostSaved,
PostSavedForm,
PostUpdateForm,
},
},
traits::{Crud, Likeable, Readable, Saveable},
traits::{Crud, Likeable, Saveable},
utils::build_db_pool_for_tests,
};
use serial_test::serial;
use std::collections::HashSet;

#[tokio::test]
#[serial]
@@ -398,6 +403,13 @@

let inserted_post = Post::create(pool, &new_post).await.unwrap();

let new_post2 = PostInsertForm::builder()
.name("A test post 2".into())
.creator_id(inserted_person.id)
.community_id(inserted_community.id)
.build();
let inserted_post2 = Post::create(pool, &new_post2).await.unwrap();

let expected_post = Post {
id: inserted_post.id,
name: "A test post".into(),
@@ -455,19 +467,14 @@
};

// Post Read
let post_read_form = PostReadForm {
post_id: inserted_post.id,
person_id: inserted_person.id,
};

let inserted_post_read = PostRead::mark_as_read(pool, &post_read_form).await.unwrap();

let expected_post_read = PostRead {
id: inserted_post_read.id,
post_id: inserted_post.id,
person_id: inserted_person.id,
published: inserted_post_read.published,
};
let marked_as_read = PostRead::mark_as_read(
pool,
HashSet::from([inserted_post.id, inserted_post2.id]),
inserted_person.id,
)
.await
.unwrap();
assert_eq!(2, marked_as_read);

let read_post = Post::read(pool, inserted_post.id).await.unwrap();

@@ -482,11 +489,21 @@
let like_removed = PostLike::remove(pool, inserted_person.id, inserted_post.id)
.await
.unwrap();
assert_eq!(1, like_removed);
let saved_removed = PostSaved::unsave(pool, &post_saved_form).await.unwrap();
let read_removed = PostRead::mark_as_unread(pool, &post_read_form)
.await
.unwrap();
let num_deleted = Post::delete(pool, inserted_post.id).await.unwrap();
assert_eq!(1, saved_removed);
let read_removed = PostRead::mark_as_unread(
pool,
HashSet::from([inserted_post.id, inserted_post2.id]),
inserted_person.id,
)
.await
.unwrap();
assert_eq!(2, read_removed);

let num_deleted = Post::delete(pool, inserted_post.id).await.unwrap()
+ Post::delete(pool, inserted_post2.id).await.unwrap();
assert_eq!(2, num_deleted);
Community::delete(pool, inserted_community.id)
.await
.unwrap();
Expand All @@ -498,10 +515,5 @@ mod tests {
assert_eq!(expected_post, updated_post);
assert_eq!(expected_post_like, inserted_post_like);
assert_eq!(expected_post_saved, inserted_post_saved);
assert_eq!(expected_post_read, inserted_post_read);
assert_eq!(1, like_removed);
assert_eq!(1, saved_removed);
assert_eq!(1, read_removed);
assert_eq!(1, num_deleted);
}
}
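Not part of this diff, but worth noting: because the batched insert above uses `on_conflict_do_nothing()`, re-marking posts that are already read is a silent no-op, and the returned row count only reflects newly inserted `post_read` rows. A hypothetical extra assertion that could follow the `marked_as_read` check in the test above (a sketch, assuming Postgres excludes conflict-skipped rows from the affected-row count):

```rust
// Calling mark_as_read again with the same ids inserts nothing new,
// so ON CONFLICT DO NOTHING should leave the returned count at 0.
let marked_again = PostRead::mark_as_read(
  pool,
  HashSet::from([inserted_post.id, inserted_post2.id]),
  inserted_person.id,
)
.await
.unwrap();
assert_eq!(0, marked_again);
```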
2 changes: 1 addition & 1 deletion crates/db_schema/src/source/post.rs
@@ -162,7 +162,7 @@ pub struct PostRead {

#[cfg_attr(feature = "full", derive(Insertable, AsChangeset))]
#[cfg_attr(feature = "full", diesel(table_name = post_read))]
pub struct PostReadForm {
pub(crate) struct PostReadForm {
pub post_id: PostId,
pub person_id: PersonId,
}
11 changes: 0 additions & 11 deletions crates/db_schema/src/traits.rs
@@ -140,17 +140,6 @@ pub trait Blockable {
Self: Sized;
}

#[async_trait]
pub trait Readable {
type Form;
async fn mark_as_read(pool: &mut DbPool<'_>, form: &Self::Form) -> Result<Self, Error>
where
Self: Sized;
async fn mark_as_unread(pool: &mut DbPool<'_>, form: &Self::Form) -> Result<usize, Error>
where
Self: Sized;
}

#[async_trait]
pub trait Reportable {
type Form;
6 changes: 5 additions & 1 deletion crates/utils/src/error.rs
@@ -15,6 +15,9 @@ pub struct LemmyError {
pub context: SpanTrace,
}

/// Maximum number of items in an array passed as API parameter. See [[LemmyErrorType::TooManyItems]]
pub const MAX_API_PARAM_ELEMENTS: usize = 1000;

impl<T> From<T> for LemmyError
where
T: Into<anyhow::Error>,
@@ -215,8 +218,9 @@ pub enum LemmyErrorType {
InstanceBlockAlreadyExists,
/// `jwt` cookie must be marked secure and httponly
AuthCookieInsecure,
/// Thrown when an API call is submitted with more than 1000 array elements, see [[MAX_API_PARAM_ELEMENTS]]
TooManyItems,
CommunityHasNoFollowers,
UserBackupTooLarge,
Unknown(String),
}

