diff --git a/apps/website/app/(home)/blog/[slug]/page.tsx b/apps/website/app/(home)/blog/[slug]/page.tsx
index da837bf1d..869f97c79 100644
--- a/apps/website/app/(home)/blog/[slug]/page.tsx
+++ b/apps/website/app/(home)/blog/[slug]/page.tsx
@@ -2,7 +2,8 @@ import fs from "fs/promises";
import path from "path";
import { notFound } from "next/navigation";
import { Metadata } from "next";
-import { getBlog } from "../readBlogs";
+import { getMarkdownPage } from "~/utils/getMarkdownFile";
+import { BLOG_PATH } from "~/data/constants";
type Params = {
params: Promise<{
@@ -13,7 +14,10 @@ type Params = {
export default async function BlogPost({ params }: Params) {
try {
const { slug } = await params;
- const { data, contentHtml } = await getBlog(slug);
+ const { data, contentHtml } = await getMarkdownPage({
+ slug,
+ directory: BLOG_PATH,
+ });
return (
@@ -41,7 +45,7 @@ export default async function BlogPost({ params }: Params) {
export async function generateStaticParams() {
try {
- const blogPath = path.join(process.cwd(), "app/blog/posts");
+ const blogPath = path.resolve(process.cwd(), BLOG_PATH);
// 1) Check if the directory exists
const directoryExists = await fs
.stat(blogPath)
@@ -74,7 +78,10 @@ export async function generateStaticParams() {
export async function generateMetadata({ params }: Params): Promise<Metadata>
{
try {
const { slug } = await params;
- const { data } = await getBlog(slug);
+ const { data } = await getMarkdownPage({
+ slug,
+ directory: BLOG_PATH,
+ });
return {
title: data.title,
diff --git a/apps/website/app/(home)/blog/readBlogs.tsx b/apps/website/app/(home)/blog/readBlogs.tsx
index bfaeb3bae..2acc2bbb3 100644
--- a/apps/website/app/(home)/blog/readBlogs.tsx
+++ b/apps/website/app/(home)/blog/readBlogs.tsx
@@ -1,12 +1,10 @@
-import { remark } from "remark";
-import html from "remark-html";
-import { notFound } from "next/navigation";
import path from "path";
import fs from "fs/promises";
import matter from "gray-matter";
-import { BlogSchema, type Blog, BlogFrontmatter } from "./schema";
+import { BLOG_PATH } from "~/data/constants";
+import { PageSchema, type PageData } from "~/types/schema";
-const BLOG_DIRECTORY = path.join(process.cwd(), "app/blog/posts");
+const BLOG_DIRECTORY = path.join(process.cwd(), BLOG_PATH);
async function validateBlogDirectory(): Promise<boolean> {
try {
@@ -18,12 +16,12 @@ async function validateBlogDirectory(): Promise {
}
}
-async function processBlogFile(filename: string): Promise<Blog | null> {
+async function processBlogFile(filename: string): Promise<PageData | null> {
try {
- const filePath = path.join(BLOG_DIRECTORY, filename);
+ const filePath = path.resolve(BLOG_DIRECTORY, filename);
const fileContent = await fs.readFile(filePath, "utf-8");
const { data } = matter(fileContent);
- const validatedData = BlogSchema.parse(data);
+ const validatedData = PageSchema.parse(data);
return {
slug: filename.replace(/\.md$/, ""),
@@ -35,12 +33,7 @@ async function processBlogFile(filename: string): Promise {
}
}
-async function getMarkdownContent(content: string): Promise {
- const processedContent = await remark().use(html).process(content);
- return processedContent.toString();
-}
-
-export async function getAllBlogs(): Promise<Blog[]> {
+export async function getAllBlogs(): Promise<PageData[]> {
try {
const directoryExists = await validateBlogDirectory();
if (!directoryExists) return [];
@@ -49,7 +42,7 @@ export async function getAllBlogs(): Promise {
const blogs = await Promise.all(
files.filter((filename) => filename.endsWith(".md")).map(processBlogFile),
);
- const validBlogs = blogs.filter(Boolean) as Blog[];
+ const validBlogs = blogs.filter((blog): blog is PageData => blog !== null);
return validBlogs.filter((blog) => blog.published);
} catch (error) {
console.error("Error reading blog directory:", error);
@@ -57,32 +50,7 @@ export async function getAllBlogs(): Promise {
}
}
-export async function getBlog(
- slug: string,
-): Promise<{ data: BlogFrontmatter; contentHtml: string }> {
- try {
- const filePath = path.join(BLOG_DIRECTORY, `${slug}.md`);
- await fs.access(filePath);
-
- const fileContent = await fs.readFile(filePath, "utf-8");
- const { data: rawData, content } = matter(fileContent);
- const data = BlogSchema.parse(rawData);
-
- if (!data.published) {
- console.log(`Post ${slug} is not published`);
- return notFound();
- }
-
- const contentHtml = await getMarkdownContent(content);
-
- return { data, contentHtml };
- } catch (error) {
- console.error("Error loading blog post:", error);
- return notFound();
- }
-}
-
-export async function getLatestBlogs(): Promise<Blog[]> {
+export async function getLatestBlogs(): Promise<PageData[]> {
const blogs = await getAllBlogs();
return blogs
.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime())
diff --git a/apps/website/app/data/constants.ts b/apps/website/app/data/constants.ts
new file mode 100644
index 000000000..4267add82
--- /dev/null
+++ b/apps/website/app/data/constants.ts
@@ -0,0 +1 @@
+export const BLOG_PATH = "app/(home)/blog/posts";
diff --git a/apps/website/app/(home)/blog/schema.tsx b/apps/website/app/types/schema.tsx
similarity index 52%
rename from apps/website/app/(home)/blog/schema.tsx
rename to apps/website/app/types/schema.tsx
index 13568c765..c2fc38ea8 100644
--- a/apps/website/app/(home)/blog/schema.tsx
+++ b/apps/website/app/types/schema.tsx
@@ -1,14 +1,14 @@
import { z } from "zod";
-export const BlogSchema = z.object({
+export const PageSchema = z.object({
title: z.string(),
+ published: z.boolean().default(false),
date: z.string(),
author: z.string(),
- published: z.boolean().default(false),
});
-export type BlogFrontmatter = z.infer<typeof BlogSchema>;
+export type PageFrontmatter = z.infer<typeof PageSchema>;
-export type Blog = BlogFrontmatter & {
+export type PageData = PageFrontmatter & {
slug: string;
};
diff --git a/apps/website/app/utils/getFileContent.ts b/apps/website/app/utils/getFileContent.ts
new file mode 100644
index 000000000..7f63cbe3d
--- /dev/null
+++ b/apps/website/app/utils/getFileContent.ts
@@ -0,0 +1,25 @@
+import path from "path";
+import fs from "fs/promises";
+
+type Props = {
+ filename: string;
+ directory: string;
+};
+
+export const getFileContent = async ({
+ filename,
+ directory,
+}: Props): Promise<string> => {
+ try {
+ const safeFilename = path.basename(filename);
+ const filePath = path.join(directory, safeFilename);
+ const fileContent = await fs.readFile(filePath, "utf-8");
+ return fileContent;
+ } catch (error) {
+ throw error instanceof Error
+ ? new Error(`Failed to read file ${filename}: ${error.message}`, {
+ cause: error,
+ })
+ : new Error(`Failed to read file ${filename}: Unknown error`);
+ }
+};
diff --git a/apps/website/app/utils/getHtmlFromMarkdown.ts b/apps/website/app/utils/getHtmlFromMarkdown.ts
new file mode 100644
index 000000000..58ec759af
--- /dev/null
+++ b/apps/website/app/utils/getHtmlFromMarkdown.ts
@@ -0,0 +1,43 @@
+import { unified } from "unified";
+import remarkParse from "remark-parse";
+import remarkRehype from "remark-rehype";
+import rehypeStringify from "rehype-stringify";
+import { toString } from "mdast-util-to-string";
+import { visit } from "unist-util-visit";
+import type { Root } from "mdast";
+
+function remarkHeadingId() {
+ return (tree: Root) => {
+ visit(tree, "heading", (node) => {
+ const text = toString(node);
+ const id = text
+ .toLowerCase()
+ .replace(/[^a-z0-9]+/g, "-")
+ .replace(/(^-|-$)/g, "");
+
+ node.data = {
+ hName: `h${node.depth}`,
+ hProperties: { id },
+ };
+ });
+ };
+}
+
+export async function getHtmlFromMarkdown(markdown: string): Promise<string> {
+ if (!markdown) {
+ throw new Error('Markdown content is required');
+ }
+
+ try {
+ const htmlString = await unified()
+ .use(remarkParse)
+ .use(remarkHeadingId)
+ .use(remarkRehype)
+ .use(rehypeStringify)
+ .process(markdown);
+ return htmlString.toString();
+ } catch (error) {
+ console.error('Error processing markdown:', error);
+ throw new Error('Failed to process markdown content');
+ }
+}
diff --git a/apps/website/app/utils/getMarkdownFile.ts b/apps/website/app/utils/getMarkdownFile.ts
new file mode 100644
index 000000000..be65f55b2
--- /dev/null
+++ b/apps/website/app/utils/getMarkdownFile.ts
@@ -0,0 +1,50 @@
+import { getHtmlFromMarkdown } from "~/utils/getHtmlFromMarkdown";
+import { getFileContent } from "~/utils/getFileContent";
+import { notFound } from "next/navigation";
+import { PageFrontmatter, PageSchema } from "~/types/schema";
+import matter from "gray-matter";
+
+type Props = {
+ slug: string;
+ directory: string;
+};
+
+type ProcessedMarkdownPage = {
+ data: PageFrontmatter;
+ contentHtml: string;
+};
+
+export const getMarkdownPage = async ({
+ slug,
+ directory,
+}: Props): Promise<ProcessedMarkdownPage> => {
+ try {
+ if (!slug || !directory) {
+ throw new Error('Both slug and directory are required');
+ }
+
+ // Prevent directory traversal
+ if (slug.includes('..') || directory.includes('..')) {
+ throw new Error('Invalid path');
+ }
+
+ const fileContent = await getFileContent({
+ filename: `${slug}.md`,
+ directory,
+ });
+ const { data: rawData, content } = matter(fileContent);
+ const data = PageSchema.parse(rawData);
+
+ if (!data.published) {
+ console.log(`Post ${slug} is not published`);
+ return notFound();
+ }
+
+ const contentHtml = await getHtmlFromMarkdown(content);
+
+ return { data, contentHtml };
+ } catch (error) {
+ console.error("Error loading blog post:", error);
+ return notFound();
+ }
+};
diff --git a/apps/website/package.json b/apps/website/package.json
index 9ef7bf842..509833cbb 100644
--- a/apps/website/package.json
+++ b/apps/website/package.json
@@ -10,16 +10,18 @@
"lint": "next lint"
},
"dependencies": {
+ "@repo/types": "*",
"@repo/ui": "*",
"gray-matter": "^4.0.3",
"next": "^15.0.3",
"react": "19.0.0-rc-66855b96-20241106",
"react-dom": "19.0.0-rc-66855b96-20241106",
+ "rehype-stringify": "^10.0.1",
"remark": "^15.0.1",
"remark-html": "^16.0.1",
- "zod": "^3.24.1",
+ "remark-rehype": "^11.1.1",
"resend": "^4.0.1",
- "@repo/types": "*"
+ "zod": "^3.24.1"
},
"devDependencies": {
"@repo/eslint-config": "*",
diff --git a/package-lock.json b/package-lock.json
index 3afd26bef..1cd26b9c1 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -6399,8 +6399,10 @@
"next": "^15.0.3",
"react": "19.0.0-rc-66855b96-20241106",
"react-dom": "19.0.0-rc-66855b96-20241106",
+ "rehype-stringify": "^10.0.1",
"remark": "^15.0.1",
"remark-html": "^16.0.1",
+ "remark-rehype": "^11.1.1",
"resend": "^4.0.1",
"zod": "^3.24.1"
},
@@ -16441,6 +16443,21 @@
"jsesc": "bin/jsesc"
}
},
+ "node_modules/rehype-stringify": {
+ "version": "10.0.1",
+ "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-10.0.1.tgz",
+ "integrity": "sha512-k9ecfXHmIPuFVI61B9DeLPN0qFHfawM6RsuX48hoqlaKSF61RskNjSm1lI8PhBEM0MRdLxVVm4WmTqJQccH9mA==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "hast-util-to-html": "^9.0.0",
+ "unified": "^11.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/remark": {
"version": "15.0.1",
"resolved": "https://registry.npmjs.org/remark/-/remark-15.0.1.tgz",
@@ -16490,6 +16507,23 @@
"url": "https://opencollective.com/unified"
}
},
+ "node_modules/remark-rehype": {
+ "version": "11.1.1",
+ "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.1.tgz",
+ "integrity": "sha512-g/osARvjkBXb6Wo0XvAeXQohVta8i84ACbenPpoSsxTOQH/Ae0/RGP4WZgnMH5pMLpsj4FG7OHmcIcXxpza8eQ==",
+ "license": "MIT",
+ "dependencies": {
+ "@types/hast": "^3.0.0",
+ "@types/mdast": "^4.0.0",
+ "mdast-util-to-hast": "^13.0.0",
+ "unified": "^11.0.0",
+ "vfile": "^6.0.0"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/unified"
+ }
+ },
"node_modules/remark-stringify": {
"version": "11.0.0",
"resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz",