diff --git a/boost/app.go b/boost/app.go
index 485e40c8..19ce435a 100644
--- a/boost/app.go
+++ b/boost/app.go
@@ -42,6 +42,7 @@ func MakeApp(env *env.Environment, validator *pkg.Validator) (*App, error) {
 	router := Router{
 		Env:      env,
+		Db:       db,
 		Mux:      baseHttp.NewServeMux(),
 		Pipeline: middleware.Pipeline{
 			Env: env,
@@ -69,4 +70,5 @@ func (a *App) Boot() {
 	router.Talks()
 	router.Education()
 	router.Recommendations()
+	router.Posts()
 }
diff --git a/boost/router.go b/boost/router.go
index 8433d7fb..be4921ee 100644
--- a/boost/router.go
+++ b/boost/router.go
@@ -1,6 +1,8 @@
 package boost

 import (
+	"github.com/oullin/database"
+	"github.com/oullin/database/repository"
 	"github.com/oullin/env"
 	"github.com/oullin/handler"
 	"github.com/oullin/pkg/http"
@@ -12,6 +14,7 @@ type Router struct {
 	Env      *env.Environment
 	Mux      *baseHttp.ServeMux
 	Pipeline middleware.Pipeline
+	Db       *database.Connection
 }

 func (r *Router) PipelineFor(apiHandler http.ApiHandler) baseHttp.HandlerFunc {
@@ -28,6 +31,17 @@ func (r *Router) PipelineFor(apiHandler http.ApiHandler) baseHttp.HandlerFunc {
 	)
 }

+func (r *Router) Posts() {
+	repo := repository.Posts{DB: r.Db}
+	abstract := handler.MakePostsHandler(&repo)
+
+	index := r.PipelineFor(abstract.Index)
+	show := r.PipelineFor(abstract.Show)
+
+	r.Mux.HandleFunc("GET /posts", index)
+	r.Mux.HandleFunc("GET /posts/{slug}", show)
+}
+
 func (r *Router) Profile() {
 	abstract := handler.MakeProfileHandler("./storage/fixture/profile.json")
diff --git a/config/makefile/app.mk b/config/makefile/app.mk
index 44dcb571..e7c52b54 100644
--- a/config/makefile/app.mk
+++ b/config/makefile/app.mk
@@ -25,7 +25,7 @@ audit:

 watch: # --- Works with (air).
 	# https://github.com/air-verse/air
-	cd $(APP_PATH) && air
+	cd $(APP_PATH) && air -d

 install-air: # --- Works with (air).
diff --git a/database/connection.go b/database/connection.go
index d78c2181..2550f5d2 100644
--- a/database/connection.go
+++ b/database/connection.go
@@ -73,6 +73,10 @@ func (c *Connection) Sql() *gorm.DB {
 	return c.driver
 }

+func (c *Connection) GetSession() *gorm.Session {
+	return &gorm.Session{QueryFields: true}
+}
+
 func (c *Connection) Transaction(callback func(db *gorm.DB) error) error {
 	return c.driver.Transaction(callback)
 }
diff --git a/database/repository/pagination/paginate.go b/database/repository/pagination/paginate.go
new file mode 100644
index 00000000..0c37abb2
--- /dev/null
+++ b/database/repository/pagination/paginate.go
@@ -0,0 +1,23 @@
+package pagination
+
+type Paginate struct {
+	Page     int
+	Limit    int
+	NumItems int64
+}
+
+func (a *Paginate) SetNumItems(number int64) {
+	a.NumItems = number
+}
+
+func (a *Paginate) GetNumItemsAsInt() int64 {
+	return a.NumItems
+}
+
+func (a *Paginate) GetNumItemsAsFloat() float64 {
+	return float64(a.NumItems)
+}
+
+func (a *Paginate) GetLimit() int {
+	return a.Limit
+}
diff --git a/database/repository/pagination/pagination.go b/database/repository/pagination/pagination.go
new file mode 100644
index 00000000..0e957808
--- /dev/null
+++ b/database/repository/pagination/pagination.go
@@ -0,0 +1,90 @@
+package pagination
+
+import "math"
+
+const MinPage = 1
+const MaxLimit = 100
+
+// Pagination holds the data for a single page along with all pagination metadata.
+// It's generic and can be used for any data type.
+//
+// NextPage and PreviousPage are pointers (*int) so they can be nil (and omitted from JSON output)
+// when there isn't a next or previous page.
+type Pagination[T any] struct {
+	Data         []T   `json:"data"`
+	Page         int   `json:"page"`
+	Total        int64 `json:"total"`
+	PageSize     int   `json:"page_size"`
+	TotalPages   int   `json:"total_pages"`
+	NextPage     *int  `json:"next_page,omitempty"`
+	PreviousPage *int  `json:"previous_page,omitempty"`
+}
+
+func MakePagination[T any](data []T, paginate Paginate) *Pagination[T] {
+	pSize := float64(paginate.Limit)
+	if pSize <= 0 {
+		pSize = 10
+	}
+
+	totalPages := int(
+		math.Ceil(paginate.GetNumItemsAsFloat() / pSize),
+	)
+
+	pagination := Pagination[T]{
+		Data:         data,
+		Page:         paginate.Page,
+		Total:        paginate.GetNumItemsAsInt(),
+		PageSize:     paginate.Limit,
+		TotalPages:   totalPages,
+		NextPage:     nil,
+		PreviousPage: nil,
+	}
+
+	var nextPage *int
+	if pagination.Page < pagination.TotalPages {
+		p := pagination.Page + 1
+		nextPage = &p
+	}
+
+	var prevPage *int
+	if pagination.Page > 1 && pagination.Page <= pagination.TotalPages {
+		p := pagination.Page - 1
+		prevPage = &p
+	}
+
+	pagination.NextPage = nextPage
+	pagination.PreviousPage = prevPage
+
+	return &pagination
+}
+
+// HydratePagination transforms a paginated result containing items of a source type (S)
+// into a new result containing items of a destination type (D).
+//
+// It takes a source Pagination and a mapper function that defines the conversion
+// logic from an item of type S to an item of type D.
+//
+// Type Parameters:
+// - S: The source type (e.g., a database model like database.Post).
+// - D: The destination type (e.g., an API response DTO like PostResponse).
+//
+// The function returns a new Pagination with the transformed data, while preserving
+// all original pagination metadata (Total, Page, TotalPages, etc.).
+func HydratePagination[S any, D any](source *Pagination[S], mapper func(S) D) *Pagination[D] {
+	mappedData := make([]D, len(source.Data))
+
+	// Iterate over the source data and apply the mapper function.
+	for i, item := range source.Data {
+		mappedData[i] = mapper(item)
+	}
+
+	return &Pagination[D]{
+		Data:         mappedData,
+		Total:        source.Total,
+		Page:         source.Page,
+		PageSize:     source.PageSize,
+		TotalPages:   source.TotalPages,
+		NextPage:     source.NextPage,
+		PreviousPage: source.PreviousPage,
+	}
+}
diff --git a/database/repository/posts.go b/database/repository/posts.go
index 12c2ef0c..4b224e9f 100644
--- a/database/repository/posts.go
+++ b/database/repository/posts.go
@@ -4,6 +4,8 @@ import (
 	"fmt"
 	"github.com/google/uuid"
 	"github.com/oullin/database"
+	"github.com/oullin/database/repository/pagination"
+	"github.com/oullin/database/repository/queries"
 	"github.com/oullin/pkg/gorm"
 )

@@ -13,6 +15,67 @@ type Posts struct {
 	DB         *database.Connection
 	Categories *Categories
 	Tags       *Tags
 }

+func (p Posts) GetPosts(filters queries.PostFilters, paginate pagination.Paginate) (*pagination.Pagination[database.Post], error) {
+	var numItems int64
+	var posts []database.Post
+
+	query := p.DB.Sql().
+		Model(&database.Post{}).
+		Where("posts.published_at is not null"). // only published posts will be selected.
+		Where("posts.deleted_at is null")        // deleted posts will be discarded.
+
+	queries.ApplyPostsFilters(&filters, query)
+
+	countQuery := query.
+		Session(p.DB.GetSession()). // clone the base query.
+		Distinct("posts.id")        // count distinct posts so JOINs do not inflate the total.
+
+	if err := countQuery.Count(&numItems).Error; err != nil {
+		return nil, err
+	}
+
+	offset := (paginate.Page - 1) * paginate.Limit
+
+	err := query.Preload("Author").
+		Preload("Categories").
+		Preload("Tags").
+		Order("posts.published_at DESC").
+		Limit(paginate.Limit).
+		Offset(offset).
+		Distinct(). // remove duplicates, if any, introduced by the JOINs.
+		Find(&posts).Error
+
+	if err != nil {
+		return nil, err
+	}
+
+	paginate.SetNumItems(numItems)
+	result := pagination.MakePagination[database.Post](posts, paginate)
+
+	return result, nil
+}
+
+func (p Posts) FindBy(slug string) *database.Post {
+	post := database.Post{}
+
+	result := p.DB.Sql().
+		Preload("Author").
+		Preload("Categories").
+		Preload("Tags").
+		Where("LOWER(slug) = ?", slug).
+		First(&post)
+
+	if gorm.HasDbIssues(result.Error) {
+		return nil
+	}
+
+	if result.RowsAffected > 0 {
+		return &post
+	}
+
+	return nil
+}
+
 func (p Posts) FindCategoryBy(slug string) *database.Category {
 	return p.Categories.FindBy(slug)
 }
diff --git a/database/repository/queries/posts.go b/database/repository/queries/posts.go
new file mode 100644
index 00000000..f26dc79e
--- /dev/null
+++ b/database/repository/queries/posts.go
@@ -0,0 +1,67 @@
+package queries
+
+import (
+	"gorm.io/gorm"
+)
+
+// ApplyPostsFilters applies the given filters to a query whose base table is "posts".
+func ApplyPostsFilters(filters *PostFilters, query *gorm.DB) {
+	if filters == nil {
+		return
+	}
+
+	if filters.GetTitle() != "" {
+		query.Where("LOWER(posts.title) ILIKE ?", "%"+filters.GetTitle()+"%")
+	}
+
+	if filters.GetText() != "" {
+		query.
+			Where("LOWER(posts.slug) ILIKE ? OR LOWER(posts.excerpt) ILIKE ? OR LOWER(posts.content) ILIKE ?",
+				"%"+filters.GetText()+"%",
+				"%"+filters.GetText()+"%",
+				"%"+filters.GetText()+"%",
+			)
+	}
+
+	if filters.GetAuthor() != "" {
+		query.
+			Joins("JOIN users ON posts.author_id = users.id").
+			Where("users.deleted_at IS NULL").
+			Where("("+
+				"LOWER(users.bio) ILIKE ? OR LOWER(users.first_name) LIKE ? OR LOWER(users.last_name) LIKE ? OR LOWER(users.display_name) ILIKE ?"+
+				")",
+				"%"+filters.GetAuthor()+"%",
+				"%"+filters.GetAuthor()+"%",
+				"%"+filters.GetAuthor()+"%",
+				"%"+filters.GetAuthor()+"%",
+			)
+	}
+
+	if filters.GetCategory() != "" {
+		query.
+			Joins("JOIN post_categories ON post_categories.post_id = posts.id").
+			Joins("JOIN categories ON categories.id = post_categories.category_id").
+			Where("categories.deleted_at IS NULL").
+			Where("("+
+				"LOWER(categories.slug) ILIKE ? OR LOWER(categories.name) ILIKE ? OR LOWER(categories.description) ILIKE ?"+
+				")",
+				"%"+filters.GetCategory()+"%",
+				"%"+filters.GetCategory()+"%",
+				"%"+filters.GetCategory()+"%",
+			)
+	}
+
+	if filters.GetTag() != "" {
+		query.
+			Joins("JOIN post_tags ON post_tags.post_id = posts.id").
+			Joins("JOIN tags ON tags.id = post_tags.tag_id").
+			Where("tags.deleted_at IS NULL").
+			Where("("+
+				"LOWER(tags.slug) ILIKE ? OR LOWER(tags.name) ILIKE ? OR LOWER(tags.description) ILIKE ?"+
+				")",
+				"%"+filters.GetTag()+"%",
+				"%"+filters.GetTag()+"%",
+				"%"+filters.GetTag()+"%",
+			)
+	}
+}
diff --git a/database/repository/queries/posts_filters.go b/database/repository/queries/posts_filters.go
new file mode 100644
index 00000000..20ef2e9e
--- /dev/null
+++ b/database/repository/queries/posts_filters.go
@@ -0,0 +1,40 @@
+package queries
+
+import (
+	"github.com/oullin/pkg"
+	"strings"
+)
+
+type PostFilters struct {
+	Text     string
+	Title    string // Will perform a case-insensitive partial match
+	Author   string
+	Category string
+	Tag      string
+}
+
+func (f PostFilters) GetText() string {
+	return f.sanitiseString(f.Text)
+}
+
+func (f PostFilters) GetTitle() string {
+	return f.sanitiseString(f.Title)
+}
+
+func (f PostFilters) GetAuthor() string {
+	return f.sanitiseString(f.Author)
+}
+
+func (f PostFilters) GetCategory() string {
+	return f.sanitiseString(f.Category)
+}
+
+func (f PostFilters) GetTag() string {
+	return f.sanitiseString(f.Tag)
+}
+
+func (f PostFilters) sanitiseString(seed string) string {
+	str := pkg.MakeStringable(seed)
+
+	return strings.TrimSpace(str.ToLower())
+}
diff --git a/go.mod b/go.mod
index 1150f536..b68092ae 100644
--- a/go.mod
+++ b/go.mod
@@ -10,6 +10,7 @@ require (
 	github.com/lib/pq v1.10.9
 	golang.org/x/crypto v0.39.0
 	golang.org/x/term v0.32.0
+	golang.org/x/text v0.26.0
 	gopkg.in/yaml.v3 v3.0.1
 	gorm.io/driver/postgres v1.6.0
 	gorm.io/gorm v1.30.0
@@ -30,7 +31,6 @@ require (
 	golang.org/x/net v0.41.0 // indirect
 	golang.org/x/sync v0.15.0 // indirect
 	golang.org/x/sys v0.33.0 // indirect
-	golang.org/x/text v0.26.0 // indirect
 )

 replace github.com/oullin/boost => ./boost
diff --git a/handler/posts.go b/handler/posts.go
new file mode 100644
index 00000000..b7aa2157
--- /dev/null
+++ b/handler/posts.go
@@ -0,0 +1,81 @@
+package handler
+
+import (
+	"encoding/json"
+	"fmt"
+	"github.com/oullin/database/repository"
+	"github.com/oullin/database/repository/pagination"
+	"github.com/oullin/handler/posts"
+	"github.com/oullin/pkg"
+	"github.com/oullin/pkg/http"
+	"log/slog"
+	baseHttp "net/http"
+)
+
+type PostsHandler struct {
+	Posts *repository.Posts
+}
+
+func MakePostsHandler(posts *repository.Posts) PostsHandler {
+	return PostsHandler{
+		Posts: posts,
+	}
+}
+
+func (h *PostsHandler) Index(w baseHttp.ResponseWriter, r *baseHttp.Request) *http.ApiError {
+	defer pkg.CloseWithLog(r.Body)
+
+	payload, err := http.ParseRequestBody[posts.IndexRequestBody](r)
+
+	if err != nil {
+		slog.Error("failed to parse request body", "err", err)
+
+		return http.InternalError("There was an issue reading the request. Please try again later.")
+	}
+
+	result, err := h.Posts.GetPosts(
+		posts.GetFiltersFrom(payload),
+		posts.GetPaginateFrom(r.URL.Query()),
+	)
+
+	if err != nil {
+		slog.Error("failed to fetch posts", "err", err)
+
+		return http.InternalError("There was an issue reading the posts. Please try again later.")
+	}
+
+	items := pagination.HydratePagination(
+		result,
+		posts.GetPostsResponse,
+	)
+
+	if err := json.NewEncoder(w).Encode(items); err != nil {
+		slog.Error("failed to encode response", "err", err)
+
+		return http.InternalError("There was an issue processing the response. Please try later.")
+	}
+
+	return nil
+}
+
+func (h *PostsHandler) Show(w baseHttp.ResponseWriter, r *baseHttp.Request) *http.ApiError {
+	slug := posts.GetSlugFrom(r)
+
+	if slug == "" {
+		return http.BadRequestError("A slug is required to show a post's content")
+	}
+
+	post := h.Posts.FindBy(slug)
+	if post == nil {
+		return http.NotFound(fmt.Sprintf("The given post '%s' was not found", slug))
+	}
+
+	items := posts.GetPostsResponse(*post)
+	if err := json.NewEncoder(w).Encode(items); err != nil {
+		slog.Error(err.Error())
+
+		return http.InternalError("There was an issue processing the response. Please try later.")
+	}
+
+	return nil
+}
diff --git a/handler/posts/response.go b/handler/posts/response.go
new file mode 100644
index 00000000..ea07333c
--- /dev/null
+++ b/handler/posts/response.go
@@ -0,0 +1,62 @@
+package posts
+
+import (
+	"time"
+)
+
+type IndexRequestBody struct {
+	Title    string `json:"title"`
+	Author   string `json:"author"`
+	Category string `json:"category"`
+	Tag      string `json:"tag"`
+	Text     string `json:"text"`
+}
+
+type PostResponse struct {
+	UUID          string     `json:"uuid"`
+	Author        UserData   `json:"author"`
+	Slug          string     `json:"slug"`
+	Title         string     `json:"title"`
+	Excerpt       string     `json:"excerpt"`
+	Content       string     `json:"content"`
+	CoverImageURL string     `json:"cover_image_url"`
+	PublishedAt   *time.Time `json:"published_at"`
+	CreatedAt     time.Time  `json:"created_at"`
+	UpdatedAt     time.Time  `json:"updated_at"`
+
+	// Associations
+	Categories []CategoryData `json:"categories"`
+	Tags       []TagData      `json:"tags"`
+}
+
+type UserData struct {
+	UUID              string    `json:"uuid"`
+	FirstName         string    `json:"first_name"`
+	LastName          string    `json:"last_name"`
+	Username          string    `json:"username"`
+	DisplayName       string    `json:"display_name"`
+	Bio               string    `json:"bio"`
+	PictureFileName   string    `json:"picture_file_name"`
+	ProfilePictureURL string    `json:"profile_picture_url"`
+	IsAdmin           bool      `json:"is_admin"`
+	CreatedAt         time.Time `json:"created_at"`
+	UpdatedAt         time.Time `json:"updated_at"`
+}
+
+type CategoryData struct {
+	UUID        string    `json:"uuid"`
+	Name        string    `json:"name"`
+	Slug        string    `json:"slug"`
+	Description string    `json:"description"`
+	CreatedAt   time.Time `json:"created_at"`
+	UpdatedAt   time.Time `json:"updated_at"`
+}
+
+type TagData struct {
+	UUID        string    `json:"uuid"`
+	Name        string    `json:"name"`
+	Slug        string    `json:"slug"`
+	Description string    `json:"description"`
+	CreatedAt   time.Time `json:"created_at"`
+	UpdatedAt   time.Time `json:"updated_at"`
+}
diff --git a/handler/posts/transformer.go b/handler/posts/transformer.go
new file mode 100644
index 00000000..b87177e8
--- /dev/null
+++ b/handler/posts/transformer.go
@@ -0,0 +1,120 @@
+package posts
+
+import (
+	"github.com/oullin/database"
+	"github.com/oullin/database/repository/pagination"
+	"github.com/oullin/database/repository/queries"
+	"github.com/oullin/pkg"
+	baseHttp "net/http"
+	"net/url"
+	"strconv"
+	"strings"
+)
+
+func GetPostsResponse(p database.Post) PostResponse {
+	return PostResponse{
+		UUID:          p.UUID,
+		Slug:          p.Slug,
+		Title:         p.Title,
+		Excerpt:       p.Excerpt,
+		Content:       p.Content,
+		CoverImageURL: p.CoverImageURL,
+		PublishedAt:   p.PublishedAt,
+		CreatedAt:     p.CreatedAt,
+		UpdatedAt:     p.UpdatedAt,
+		Categories:    GetCategoriesResponse(p.Categories),
+		Tags:          GetTagsResponse(p.Tags),
+		Author: UserData{
+			UUID:              p.Author.UUID,
+			FirstName:         p.Author.FirstName,
+			LastName:          p.Author.LastName,
+			Username:          p.Author.Username,
+			DisplayName:       p.Author.DisplayName,
+			Bio:               p.Author.Bio,
+			PictureFileName:   p.Author.PictureFileName,
+			ProfilePictureURL: p.Author.ProfilePictureURL,
+			IsAdmin:           p.Author.IsAdmin,
+			CreatedAt:         p.Author.CreatedAt,
+			UpdatedAt:         p.Author.UpdatedAt,
+		},
+	}
+}
+
+func GetCategoriesResponse(categories []database.Category) []CategoryData {
+	var data []CategoryData
+
+	for _, category := range categories {
+		data = append(data, CategoryData{
+			UUID:        category.UUID,
+			Name:        category.Name,
+			Slug:        category.Slug,
+			Description: category.Description,
+			CreatedAt:   category.CreatedAt,
+			UpdatedAt:   category.UpdatedAt,
+		})
+	}
+
+	return data
+}
+
+func GetTagsResponse(tags []database.Tag) []TagData {
+	var data []TagData
+
+	for _, tag := range tags {
+		data = append(data, TagData{
+			UUID:        tag.UUID,
+			Name:        tag.Name,
+			Slug:        tag.Slug,
+			Description: tag.Description,
+			CreatedAt:   tag.CreatedAt,
+			UpdatedAt:   tag.UpdatedAt,
+		})
+	}
+
+	return data
+}
+
+func GetPaginateFrom(url url.Values) pagination.Paginate {
+	page := pagination.MinPage
+	pageSize := pagination.MaxLimit
+
+	if url.Get("page") != "" {
+		if tPage, err := strconv.Atoi(url.Get("page")); err == nil {
+			page = tPage
+		}
+	}
+
+	if url.Get("limit") != "" {
+		if limit, err := strconv.Atoi(url.Get("limit")); err == nil {
+			pageSize = limit
+		}
+	}
+
+	if page < pagination.MinPage {
+		page = pagination.MinPage
+	}
+
+	if pageSize > pagination.MaxLimit || pageSize < 1 {
+		pageSize = pagination.MaxLimit
+	}
+
+	return pagination.Paginate{
+		Page:  page,
+		Limit: pageSize,
+	}
+}
+
+func GetFiltersFrom(request IndexRequestBody) queries.PostFilters {
+	return queries.PostFilters{
+		Title:    request.Title,
+		Author:   request.Author,
+		Category: request.Category,
+		Tag:      request.Tag,
+	}
+}
+
+func GetSlugFrom(r *baseHttp.Request) string {
+	str := pkg.MakeStringable(r.PathValue("slug"))
+
+	return strings.TrimSpace(str.ToLower())
+}
diff --git a/pkg/http/request.go b/pkg/http/request.go
new file mode 100644
index 00000000..5218ac04
--- /dev/null
+++ b/pkg/http/request.go
@@ -0,0 +1,31 @@
+package http
+
+import (
+	"encoding/json"
+	"fmt"
+	"io"
+	baseHttp "net/http"
+)
+
+const MaxRequestSize = 1 << 20 // 1MB limit
+
+func ParseRequestBody[T any](r *baseHttp.Request) (T, error) {
+	var err error
+	var request T
+	var data []byte
+
+	limitedReader := io.LimitReader(r.Body, MaxRequestSize)
+	if data, err = io.ReadAll(limitedReader); err != nil {
+		return request, fmt.Errorf("failed to read the given request body: %w", err)
+	}
+
+	if len(data) == 0 {
+		return request, nil
+	}
+
+	if err = json.Unmarshal(data, &request); err != nil {
+		return request, fmt.Errorf("failed to unmarshal the given request body: %w", err)
+	}
+
+	return request, nil
+}
diff --git a/pkg/http/response.go b/pkg/http/response.go
index 0eb8129d..d589a94f 100644
--- a/pkg/http/response.go
+++ b/pkg/http/response.go
@@ -67,7 +67,21 @@ func (r *Response) RespondWithNotModified() {

 func InternalError(msg string) *ApiError {
 	return &ApiError{
-		Message: fmt.Sprintf("Internal Server Error: %s", msg),
+		Message: fmt.Sprintf("Internal server error: %s", msg),
 		Status:  baseHttp.StatusInternalServerError,
 	}
 }
+
+func BadRequestError(msg string) *ApiError {
+	return &ApiError{
+		Message: fmt.Sprintf("Bad request error: %s", msg),
+		Status:  baseHttp.StatusBadRequest,
+	}
+}
+
+func NotFound(msg string) *ApiError {
+	return &ApiError{
+		Message: fmt.Sprintf("Not found error: %s", msg),
+		Status:  baseHttp.StatusNotFound,
+	}
+}
+ "golang.org/x/text/cases" + "golang.org/x/text/language" "strings" "time" "unicode" @@ -17,6 +19,12 @@ func MakeStringable(value string) *Stringable { } } +func (s Stringable) ToLower() string { + caser := cases.Lower(language.English) + + return strings.TrimSpace(caser.String(s.value)) +} + func (s Stringable) ToSnakeCase() string { var result strings.Builder diff --git a/pkg/support.go b/pkg/support.go new file mode 100644 index 00000000..b1f1bf47 --- /dev/null +++ b/pkg/support.go @@ -0,0 +1,12 @@ +package pkg + +import ( + "io" + "log/slog" +) + +func CloseWithLog(c io.Closer) { + if err := c.Close(); err != nil { + slog.Error("failed to close resource", "err", err) + } +}