diff --git a/examples/next-zep-langchain/.env.local.example b/examples/next-zep-langchain/.env.local.example
new file mode 100644
index 00000000000..e7de326b82d
--- /dev/null
+++ b/examples/next-zep-langchain/.env.local.example
@@ -0,0 +1,2 @@
+OPENAI_API_KEY=xxxxxxx
+ZEP_API_KEY=xxxxxxx
\ No newline at end of file
diff --git a/examples/next-zep-langchain/.gitignore b/examples/next-zep-langchain/.gitignore
new file mode 100644
index 00000000000..8f322f0d8f4
--- /dev/null
+++ b/examples/next-zep-langchain/.gitignore
@@ -0,0 +1,35 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
+
+# testing
+/coverage
+
+# next.js
+/.next/
+/out/
+
+# production
+/build
+
+# misc
+.DS_Store
+*.pem
+
+# debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# local env files
+.env*.local
+
+# vercel
+.vercel
+
+# typescript
+*.tsbuildinfo
+next-env.d.ts
diff --git a/examples/next-zep-langchain/README.md b/examples/next-zep-langchain/README.md
new file mode 100644
index 00000000000..7f545c81a11
--- /dev/null
+++ b/examples/next-zep-langchain/README.md
@@ -0,0 +1,46 @@
+# Vercel AI SDK, Zep, Next.js, and OpenAI Chat Example
+
+This example shows how to use the [Vercel AI SDK](https://sdk.vercel.ai/docs) with [Next.js](https://nextjs.org/), [OpenAI](https://openai.com), and [LangChain](https://js.langchain.com) to create a ChatGPT-like AI-powered streaming chatbot with long-term memory (provided by [Zep](https://getzep.com)).
+
+## Deploy your own
+
+
+Deploy the example using [Vercel](https://vercel.com?utm_source=github&utm_medium=readme&utm_campaign=ai-sdk-example):
+
+
+[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fvercel%2Fai%2Ftree%2Fmain%2Fexamples%2Fnext-zep-langchain&env=OPENAI_API_KEY,ZEP_API_KEY&envDescription=OpenAI%20API%20Key&envLink=https%3A%2F%2Fplatform.openai.com%2Faccount%2Fapi-keys&project-name=vercel-ai-chat-zep-langchain&repository-name=vercel-ai-chat-zep-langchain)
+
+## How to use
+
+Execute [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app) with [npm](https://docs.npmjs.com/cli/init), [Yarn](https://yarnpkg.com/lang/en/docs/cli/create/), or [pnpm](https://pnpm.io) to bootstrap the example:
+
+```bash
+npx create-next-app --example https://github.com/vercel/ai/tree/main/examples/next-zep-langchain next-zep-langchain-app
+```
+
+```bash
+yarn create next-app --example https://github.com/vercel/ai/tree/main/examples/next-zep-langchain next-zep-langchain-app
+```
+
+```bash
+pnpm create next-app --example https://github.com/vercel/ai/tree/main/examples/next-zep-langchain next-zep-langchain-app
+```
+
+To run the example locally you need to:
+1. Sign up at [OpenAI's Developer Platform](https://platform.openai.com/signup).
+   1. Go to [OpenAI's dashboard](https://platform.openai.com/account/api-keys) and create an API key.
+   2. Set it as `OPENAI_API_KEY` in a new file called `.env.local`, following [the example env file](./.env.local.example).
+2. Sign up for a [Zep account](https://app.getzep.com) and create a new project.
+   1. Generate a Project API key and set it as `ZEP_API_KEY` in the same `.env.local` file (see the example below).
+3. Run `pnpm install` to install the required dependencies.
+4. Run `pnpm dev` to launch the development server.
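+
+Your finished `.env.local` should look like [the example env file](./.env.local.example):
+
+```bash
+OPENAI_API_KEY=xxxxxxx
+ZEP_API_KEY=xxxxxxx
+```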
+
+## Learn More
+
+To learn more about Zep, OpenAI, Next.js, and the Vercel AI SDK, take a look at the following resources:
+
+- [Vercel AI SDK docs](https://sdk.vercel.ai/docs)
+- [Vercel AI Playground](https://play.vercel.ai)
+- [Zep docs](https://help.getzep.com)
+- [OpenAI Documentation](https://platform.openai.com/docs) - learn about OpenAI features and API.
+- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
diff --git a/examples/next-zep-langchain/app/api/chat/route.ts b/examples/next-zep-langchain/app/api/chat/route.ts
new file mode 100644
index 00000000000..d79aa979f42
--- /dev/null
+++ b/examples/next-zep-langchain/app/api/chat/route.ts
@@ -0,0 +1,69 @@
+import {
+ StreamingTextResponse,
+} from 'ai';
+import { ChatPromptTemplate, MessagesPlaceholder } from "@langchain/core/prompts";
+import { BytesOutputParser } from "@langchain/core/output_parsers";
+import { ZepClient } from '@getzep/zep-js';
+import { ZepChatMessageHistory } from '@getzep/zep-js/langchain';
+import { RunnableWithMessageHistory } from "@langchain/core/runnables";
+import { ConsoleCallbackHandler } from "@langchain/core/tracers/console";
+import { ChatOpenAI } from "@langchain/openai";
+
+export const runtime = 'edge';
+
+export async function POST(req: Request) {
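+  // sessionId is generated client-side (see app/page.tsx) and keys this conversation's memory in Zep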
+ const { messages, sessionId } = await req.json();
+ const zep = await ZepClient.init(process.env.ZEP_API_KEY);
+ const currentMessageContent = messages[messages.length - 1].content;
+
+ const prompt = ChatPromptTemplate.fromMessages([
+ ["system", "Answer the user's question below. Be polite and helpful:"],
+ new MessagesPlaceholder("history"),
+ ["human", "{question}"],
+ ]);
+ /**
+ * See a full list of supported models at:
+ * https://js.langchain.com/docs/modules/model_io/models/
+ */
+ const model = new ChatOpenAI({
+ temperature: 0.8,
+ });
+
+ /**
+ * Chat models stream message chunks rather than bytes, so this
+ * output parser handles serialization and encoding.
+ */
+ const outputParser = new BytesOutputParser();
+
+ // Create a simple chain that pipes the prompt to the model with a console callback (useful for debugging)
+ const chain = prompt.pipe(model).withConfig({
+ callbacks: [new ConsoleCallbackHandler()],
+ });
+
+ // Add memory to our chain by wrapping it with a RunnableWithMessageHistory (using ZepChatMessageHistory as the history provider)
+ // This will add user and assistant messages to the chain as well as enrich model prompts with history and conversation facts
+ const chainWithHistory = new RunnableWithMessageHistory({
+ runnable: chain,
+ // Create a new ZepChatMessageHistory instance for each session. Relies on the sessionId passed as a configurable to the final chain
+ getMessageHistory: (sessionId: string) => new ZepChatMessageHistory({
+ client: zep,
+ sessionId: sessionId,
+ // Recommended memory type to use, it will enrich the model prompts with conversation facts and the most recent summary
+ memoryType: "perpetual",
+ }),
+ // The key for the input messages in the prompt, must match the human message key in the prompt
+ inputMessagesKey: "question",
+ // The key for the history messages in the prompt, must match the MessagesPlaceholder key in the prompt
+ historyMessagesKey: "history",
+ }).pipe(outputParser);
+
+ const stream = await chainWithHistory.stream({
+ question: currentMessageContent,
+ }, {
+ configurable: {
+ sessionId: sessionId,
+ }
+ });
+
+ return new StreamingTextResponse(stream);
+}
\ No newline at end of file
diff --git a/examples/next-zep-langchain/app/favicon.ico b/examples/next-zep-langchain/app/favicon.ico
new file mode 100644
index 00000000000..718d6fea483
Binary files /dev/null and b/examples/next-zep-langchain/app/favicon.ico differ
diff --git a/examples/next-zep-langchain/app/globals.css b/examples/next-zep-langchain/app/globals.css
new file mode 100644
index 00000000000..b5c61c95671
--- /dev/null
+++ b/examples/next-zep-langchain/app/globals.css
@@ -0,0 +1,3 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
diff --git a/examples/next-zep-langchain/app/layout.tsx b/examples/next-zep-langchain/app/layout.tsx
new file mode 100644
index 00000000000..34624c55418
--- /dev/null
+++ b/examples/next-zep-langchain/app/layout.tsx
@@ -0,0 +1,21 @@
+import './globals.css';
+import { Inter } from 'next/font/google';
+
+const inter = Inter({ subsets: ['latin'] });
+
+export const metadata = {
+ title: 'Create Next App',
+ description: 'Generated by create next app',
+};
+
+export default function RootLayout({
+ children,
+}: {
+ children: React.ReactNode;
+}) {
+  return (
+    <html lang="en">
+      <body className={inter.className}>{children}</body>
+    </html>
+  );
+}
diff --git a/examples/next-zep-langchain/app/page.tsx b/examples/next-zep-langchain/app/page.tsx
new file mode 100644
index 00000000000..4479e0f3e9b
--- /dev/null
+++ b/examples/next-zep-langchain/app/page.tsx
@@ -0,0 +1,40 @@
+'use client';
+import * as uuid from 'uuid';
+import { useChat } from 'ai/react';
+import { useEffect } from 'react';
+import { getQueryParam } from '@/app/utils';
+
+export default function Chat() {
+ useEffect(() => {
+ if (!getQueryParam("sessionId")) {
+ window.location.search = `sessionId=${uuid.v4()}`;
+ }
+ }, []);
+
+ const { messages, input, handleInputChange, handleSubmit } = useChat({
+ body: {
+ sessionId: getQueryParam("sessionId"),
+ },
+ api: "/api/chat"
+ });
+
+  return (
+    <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
+      {messages.map(m => (
+        <div key={m.id} className="whitespace-pre-wrap">
+          {m.role === 'user' ? 'User: ' : 'AI: '}
+          {m.content}
+        </div>
+      ))}
+
+      <form onSubmit={handleSubmit}>
+        <input
+          className="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
+          value={input}
+          placeholder="Say something..."
+          onChange={handleInputChange}
+        />
+      </form>
+    </div>
+  );
+}
diff --git a/examples/next-zep-langchain/app/utils.ts b/examples/next-zep-langchain/app/utils.ts
new file mode 100644
index 00000000000..8feacf7c74c
--- /dev/null
+++ b/examples/next-zep-langchain/app/utils.ts
@@ -0,0 +1,7 @@
+export const getQueryParam = (name: string) => {
+ if (typeof window !== "undefined") {
+ const urlParams = new URLSearchParams(window.location.search);
+ return urlParams.get(name);
+ }
+  return null;
+};
\ No newline at end of file
diff --git a/examples/next-zep-langchain/next.config.js b/examples/next-zep-langchain/next.config.js
new file mode 100644
index 00000000000..658404ac690
--- /dev/null
+++ b/examples/next-zep-langchain/next.config.js
@@ -0,0 +1,4 @@
+/** @type {import('next').NextConfig} */
+const nextConfig = {};
+
+module.exports = nextConfig;
diff --git a/examples/next-zep-langchain/package.json b/examples/next-zep-langchain/package.json
new file mode 100644
index 00000000000..94ca0c7eae0
--- /dev/null
+++ b/examples/next-zep-langchain/package.json
@@ -0,0 +1,33 @@
+{
+ "name": "next-zep",
+ "version": "0.0.0",
+ "private": true,
+ "scripts": {
+ "dev": "next dev",
+ "build": "next build",
+ "start": "next start",
+ "lint": "next lint"
+ },
+ "dependencies": {
+ "@getzep/zep-js": "2.0.0-rc.4",
+ "ai": "latest",
+ "langchain": "^0.1.29",
+ "next": "14.1.1",
+ "openai": "4.29.0",
+ "react": "18.2.0",
+ "react-dom": "^18.2.0",
+ "uuid": "^9.0.1"
+ },
+ "devDependencies": {
+ "@types/node": "^17.0.12",
+ "@types/react": "18.2.8",
+ "@types/react-dom": "18.2.4",
+ "@types/uuid": "^9.0.8",
+ "autoprefixer": "^10.4.14",
+ "eslint": "^7.32.0",
+ "eslint-config-next": "13.4.12",
+ "postcss": "^8.4.23",
+ "tailwindcss": "^3.3.2",
+ "typescript": "5.1.3"
+ }
+}
diff --git a/examples/next-zep-langchain/postcss.config.js b/examples/next-zep-langchain/postcss.config.js
new file mode 100644
index 00000000000..12a703d900d
--- /dev/null
+++ b/examples/next-zep-langchain/postcss.config.js
@@ -0,0 +1,6 @@
+module.exports = {
+ plugins: {
+ tailwindcss: {},
+ autoprefixer: {},
+ },
+};
diff --git a/examples/next-zep-langchain/tailwind.config.js b/examples/next-zep-langchain/tailwind.config.js
new file mode 100644
index 00000000000..db68cff5779
--- /dev/null
+++ b/examples/next-zep-langchain/tailwind.config.js
@@ -0,0 +1,18 @@
+/** @type {import('tailwindcss').Config} */
+module.exports = {
+ content: [
+ './pages/**/*.{js,ts,jsx,tsx,mdx}',
+ './components/**/*.{js,ts,jsx,tsx,mdx}',
+ './app/**/*.{js,ts,jsx,tsx,mdx}',
+ ],
+ theme: {
+ extend: {
+ backgroundImage: {
+ 'gradient-radial': 'radial-gradient(var(--tw-gradient-stops))',
+ 'gradient-conic':
+ 'conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))',
+ },
+ },
+ },
+ plugins: [],
+};
diff --git a/examples/next-zep-langchain/tsconfig.json b/examples/next-zep-langchain/tsconfig.json
new file mode 100644
index 00000000000..e06a4454ab0
--- /dev/null
+++ b/examples/next-zep-langchain/tsconfig.json
@@ -0,0 +1,28 @@
+{
+ "compilerOptions": {
+ "target": "es5",
+ "lib": ["dom", "dom.iterable", "esnext"],
+ "allowJs": true,
+ "skipLibCheck": true,
+ "strict": true,
+ "forceConsistentCasingInFileNames": true,
+ "noEmit": true,
+ "esModuleInterop": true,
+ "module": "esnext",
+ "moduleResolution": "node",
+ "resolveJsonModule": true,
+ "isolatedModules": true,
+ "jsx": "preserve",
+ "incremental": true,
+ "plugins": [
+ {
+ "name": "next"
+ }
+ ],
+ "paths": {
+ "@/*": ["./*"]
+ }
+ },
+ "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
+ "exclude": ["node_modules"]
+}
diff --git a/examples/next-zep/.env.local.example b/examples/next-zep/.env.local.example
new file mode 100644
index 00000000000..e7de326b82d
--- /dev/null
+++ b/examples/next-zep/.env.local.example
@@ -0,0 +1,2 @@
+OPENAI_API_KEY=xxxxxxx
+ZEP_API_KEY=xxxxxxx
\ No newline at end of file
diff --git a/examples/next-zep/.gitignore b/examples/next-zep/.gitignore
new file mode 100644
index 00000000000..8f322f0d8f4
--- /dev/null
+++ b/examples/next-zep/.gitignore
@@ -0,0 +1,35 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
+
+# testing
+/coverage
+
+# next.js
+/.next/
+/out/
+
+# production
+/build
+
+# misc
+.DS_Store
+*.pem
+
+# debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# local env files
+.env*.local
+
+# vercel
+.vercel
+
+# typescript
+*.tsbuildinfo
+next-env.d.ts
diff --git a/examples/next-zep/README.md b/examples/next-zep/README.md
new file mode 100644
index 00000000000..8355e00148b
--- /dev/null
+++ b/examples/next-zep/README.md
@@ -0,0 +1,46 @@
+# Vercel AI SDK, Zep, Next.js, and OpenAI Chat Example
+
+This example shows how to use the [Vercel AI SDK](https://sdk.vercel.ai/docs) with [Next.js](https://nextjs.org/) and [OpenAI](https://openai.com) to create a ChatGPT-like AI-powered streaming chatbot with long-term memory (provided by [Zep](https://getzep.com)).
+
+## Deploy your own
+
+
+Deploy the example using [Vercel](https://vercel.com?utm_source=github&utm_medium=readme&utm_campaign=ai-sdk-example):
+
+
+[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fvercel%2Fai%2Ftree%2Fmain%2Fexamples%2Fnext-zep&env=OPENAI_API_KEY,ZEP_API_KEY&envDescription=OpenAI%20API%20Key&envLink=https%3A%2F%2Fplatform.openai.com%2Faccount%2Fapi-keys&project-name=vercel-ai-chat-zep&repository-name=vercel-ai-chat-zep)
+
+## How to use
+
+Execute [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app) with [npm](https://docs.npmjs.com/cli/init), [Yarn](https://yarnpkg.com/lang/en/docs/cli/create/), or [pnpm](https://pnpm.io) to bootstrap the example:
+
+```bash
+npx create-next-app --example https://github.com/vercel/ai/tree/main/examples/next-zep next-zep-app
+```
+
+```bash
+yarn create next-app --example https://github.com/vercel/ai/tree/main/examples/next-zep next-zep-app
+```
+
+```bash
+pnpm create next-app --example https://github.com/vercel/ai/tree/main/examples/next-zep next-zep-app
+```
+
+To run the example locally you need to:
+1. Sign up at [OpenAI's Developer Platform](https://platform.openai.com/signup).
+   1. Go to [OpenAI's dashboard](https://platform.openai.com/account/api-keys) and create an API key.
+   2. Set it as `OPENAI_API_KEY` in a new file called `.env.local`, following [the example env file](./.env.local.example).
+2. Sign up for a [Zep account](https://app.getzep.com) and create a new project.
+   1. Generate a Project API key and set it as `ZEP_API_KEY` in the same `.env.local` file (see the example below).
+3. Run `pnpm install` to install the required dependencies.
+4. Run `pnpm dev` to launch the development server.
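+
+Your finished `.env.local` should look like [the example env file](./.env.local.example):
+
+```bash
+OPENAI_API_KEY=xxxxxxx
+ZEP_API_KEY=xxxxxxx
+```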
+
+## Learn More
+
+To learn more about Zep, OpenAI, Next.js, and the Vercel AI SDK, take a look at the following resources:
+
+- [Vercel AI SDK docs](https://sdk.vercel.ai/docs)
+- [Vercel AI Playground](https://play.vercel.ai)
+- [Zep docs](https://help.getzep.com)
+- [OpenAI Documentation](https://platform.openai.com/docs) - learn about OpenAI features and API.
+- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
diff --git a/examples/next-zep/app/api/chat/route.ts b/examples/next-zep/app/api/chat/route.ts
new file mode 100644
index 00000000000..9e13b158644
--- /dev/null
+++ b/examples/next-zep/app/api/chat/route.ts
@@ -0,0 +1,75 @@
+import OpenAI from 'openai';
+import { OpenAIStream, StreamingTextResponse } from 'ai';
+import { Memory, Message, ZepClient } from '@getzep/zep-js';
+import { ChatCompletionMessageParam } from 'ai/prompts';
+
+// Create an OpenAI API client (that's edge friendly!)
+const openai = new OpenAI({
+ apiKey: process.env.OPENAI_API_KEY,
+});
+
+// IMPORTANT! Set the runtime to edge
+export const runtime = 'edge';
+
+export async function POST(req: Request) {
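+  // sessionId is generated client-side (see app/page.tsx) and keys this conversation's memory in Zep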
+ const { messages, sessionId } = await req.json();
+ const lastMessage = messages[messages.length - 1];
+
+ const zep = await ZepClient.init(process.env.ZEP_API_KEY);
+ // Add the user message to the memory
+ await zep.memory.addMemory(
+ sessionId,
+ new Memory({
+ messages: [
+ new Message({
+ role: lastMessage.role,
+ content: lastMessage.content,
+ role_type: 'user'
+ })
+ ]
+ })
+ );
+ // Retrieve the memory and create a system message with conversation facts + most recent summary
+ const memory = await zep.memory.getMemory(sessionId, 'perpetual');
+ let systemContent = '';
+ if (memory?.summary) {
+ systemContent += memory.summary.content;
+ }
+ if (memory?.facts) {
+ systemContent += `\n${memory.facts.join('\n')}`;
+ }
+ const systemMessage: ChatCompletionMessageParam = {
+ content: systemContent,
+ role: 'system'
+ };
+ // Create a list of openai friendly memory messages
+ const memoryMessages: ChatCompletionMessageParam[] = (memory?.messages ?? []).map((message) => ({
+ content: message.content,
+ role: message.role_type as 'assistant' | 'system' | 'user'
+ }));
+ const response = await openai.chat.completions.create({
+ model: 'gpt-3.5-turbo',
+ stream: true,
+ messages: [systemMessage, ...memoryMessages]
+ });
+
+ const stream = OpenAIStream(response, {
+ // Add the completion to the memory
+ async onFinal(completion: string) {
+ await zep.memory.addMemory(
+ sessionId,
+ new Memory({
+ messages: [
+ new Message({
+ role: 'ai',
+ content: completion,
+ role_type: 'assistant'
+ })
+ ]
+ })
+ );
+ }
+ });
+ // Respond with the stream
+ return new StreamingTextResponse(stream);
+}
\ No newline at end of file
diff --git a/examples/next-zep/app/favicon.ico b/examples/next-zep/app/favicon.ico
new file mode 100644
index 00000000000..718d6fea483
Binary files /dev/null and b/examples/next-zep/app/favicon.ico differ
diff --git a/examples/next-zep/app/globals.css b/examples/next-zep/app/globals.css
new file mode 100644
index 00000000000..b5c61c95671
--- /dev/null
+++ b/examples/next-zep/app/globals.css
@@ -0,0 +1,3 @@
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
diff --git a/examples/next-zep/app/layout.tsx b/examples/next-zep/app/layout.tsx
new file mode 100644
index 00000000000..34624c55418
--- /dev/null
+++ b/examples/next-zep/app/layout.tsx
@@ -0,0 +1,21 @@
+import './globals.css';
+import { Inter } from 'next/font/google';
+
+const inter = Inter({ subsets: ['latin'] });
+
+export const metadata = {
+ title: 'Create Next App',
+ description: 'Generated by create next app',
+};
+
+export default function RootLayout({
+ children,
+}: {
+ children: React.ReactNode;
+}) {
+  return (
+    <html lang="en">
+      <body className={inter.className}>{children}</body>
+    </html>
+  );
+}
diff --git a/examples/next-zep/app/page.tsx b/examples/next-zep/app/page.tsx
new file mode 100644
index 00000000000..4479e0f3e9b
--- /dev/null
+++ b/examples/next-zep/app/page.tsx
@@ -0,0 +1,40 @@
+'use client';
+import * as uuid from 'uuid';
+import { useChat } from 'ai/react';
+import { useEffect } from 'react';
+import { getQueryParam } from '@/app/utils';
+
+export default function Chat() {
+ useEffect(() => {
+ if (!getQueryParam("sessionId")) {
+ window.location.search = `sessionId=${uuid.v4()}`;
+ }
+ }, []);
+
+ const { messages, input, handleInputChange, handleSubmit } = useChat({
+ body: {
+ sessionId: getQueryParam("sessionId"),
+ },
+ api: "/api/chat"
+ });
+
+  return (
+    <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
+      {messages.map(m => (
+        <div key={m.id} className="whitespace-pre-wrap">
+          {m.role === 'user' ? 'User: ' : 'AI: '}
+          {m.content}
+        </div>
+      ))}
+
+      <form onSubmit={handleSubmit}>
+        <input
+          className="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
+          value={input}
+          placeholder="Say something..."
+          onChange={handleInputChange}
+        />
+      </form>
+    </div>
+  );
+}
diff --git a/examples/next-zep/app/utils.ts b/examples/next-zep/app/utils.ts
new file mode 100644
index 00000000000..8feacf7c74c
--- /dev/null
+++ b/examples/next-zep/app/utils.ts
@@ -0,0 +1,7 @@
+export const getQueryParam = (name: string) => {
+ if (typeof window !== "undefined") {
+ const urlParams = new URLSearchParams(window.location.search);
+ return urlParams.get(name);
+ }
+  return null;
+};
\ No newline at end of file
diff --git a/examples/next-zep/next.config.js b/examples/next-zep/next.config.js
new file mode 100644
index 00000000000..658404ac690
--- /dev/null
+++ b/examples/next-zep/next.config.js
@@ -0,0 +1,4 @@
+/** @type {import('next').NextConfig} */
+const nextConfig = {};
+
+module.exports = nextConfig;
diff --git a/examples/next-zep/package.json b/examples/next-zep/package.json
new file mode 100644
index 00000000000..6b0fab47c5e
--- /dev/null
+++ b/examples/next-zep/package.json
@@ -0,0 +1,32 @@
+{
+ "name": "next-zep",
+ "version": "0.0.0",
+ "private": true,
+ "scripts": {
+ "dev": "next dev",
+ "build": "next build",
+ "start": "next start",
+ "lint": "next lint"
+ },
+ "dependencies": {
+ "@getzep/zep-js": "2.0.0-rc.4",
+ "ai": "latest",
+ "next": "14.1.1",
+ "openai": "4.29.0",
+ "react": "18.2.0",
+ "react-dom": "^18.2.0",
+ "uuid": "^9.0.1"
+ },
+ "devDependencies": {
+ "@types/node": "^17.0.12",
+ "@types/react": "18.2.8",
+ "@types/react-dom": "18.2.4",
+ "@types/uuid": "^9.0.8",
+ "autoprefixer": "^10.4.14",
+ "eslint": "^7.32.0",
+ "eslint-config-next": "13.4.12",
+ "postcss": "^8.4.23",
+ "tailwindcss": "^3.3.2",
+ "typescript": "5.1.3"
+ }
+}
diff --git a/examples/next-zep/postcss.config.js b/examples/next-zep/postcss.config.js
new file mode 100644
index 00000000000..12a703d900d
--- /dev/null
+++ b/examples/next-zep/postcss.config.js
@@ -0,0 +1,6 @@
+module.exports = {
+ plugins: {
+ tailwindcss: {},
+ autoprefixer: {},
+ },
+};
diff --git a/examples/next-zep/tailwind.config.js b/examples/next-zep/tailwind.config.js
new file mode 100644
index 00000000000..db68cff5779
--- /dev/null
+++ b/examples/next-zep/tailwind.config.js
@@ -0,0 +1,18 @@
+/** @type {import('tailwindcss').Config} */
+module.exports = {
+ content: [
+ './pages/**/*.{js,ts,jsx,tsx,mdx}',
+ './components/**/*.{js,ts,jsx,tsx,mdx}',
+ './app/**/*.{js,ts,jsx,tsx,mdx}',
+ ],
+ theme: {
+ extend: {
+ backgroundImage: {
+ 'gradient-radial': 'radial-gradient(var(--tw-gradient-stops))',
+ 'gradient-conic':
+ 'conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))',
+ },
+ },
+ },
+ plugins: [],
+};
diff --git a/examples/next-zep/tsconfig.json b/examples/next-zep/tsconfig.json
new file mode 100644
index 00000000000..e06a4454ab0
--- /dev/null
+++ b/examples/next-zep/tsconfig.json
@@ -0,0 +1,28 @@
+{
+ "compilerOptions": {
+ "target": "es5",
+ "lib": ["dom", "dom.iterable", "esnext"],
+ "allowJs": true,
+ "skipLibCheck": true,
+ "strict": true,
+ "forceConsistentCasingInFileNames": true,
+ "noEmit": true,
+ "esModuleInterop": true,
+ "module": "esnext",
+ "moduleResolution": "node",
+ "resolveJsonModule": true,
+ "isolatedModules": true,
+ "jsx": "preserve",
+ "incremental": true,
+ "plugins": [
+ {
+ "name": "next"
+ }
+ ],
+ "paths": {
+ "@/*": ["./*"]
+ }
+ },
+ "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
+ "exclude": ["node_modules"]
+}
diff --git a/turbo.json b/turbo.json
index 95dbfbaffb0..1cc01e5f472 100644
--- a/turbo.json
+++ b/turbo.json
@@ -20,7 +20,8 @@
"ASSISTANT_ID",
"INKEEP_API_KEY",
"INKEEP_INTEGRATION_ID",
- "VERCEL_URL"
+ "VERCEL_URL",
+ "ZEP_API_KEY"
],
"outputs": ["dist/**", ".next/**", "!.next/cache/**"]
},