From f3f8fa450e163c043669495ee9a65aa3dc0b7165 Mon Sep 17 00:00:00 2001
From: Thuc Pham <51660321+thucpn@users.noreply.github.com>
Date: Thu, 1 Aug 2024 09:31:15 +0700
Subject: [PATCH 01/38] feat: call APIs to get llamacloud projects and pipelines

---
 .../typescript/streaming/service.ts | 58 +++++++++++++++----
 1 file changed, 48 insertions(+), 10 deletions(-)

diff --git a/templates/components/llamaindex/typescript/streaming/service.ts b/templates/components/llamaindex/typescript/streaming/service.ts
index 6b6c4206c..469257684 100644
--- a/templates/components/llamaindex/typescript/streaming/service.ts
+++ b/templates/components/llamaindex/typescript/streaming/service.ts
@@ -13,7 +13,39 @@ interface LlamaCloudFile {
   project_id: string;
 }
 
+interface LLamaCloudProject {
+  id: string;
+  organization_id: string;
+  name: string;
+  is_default: boolean;
+}
+
+interface LLamaCloudPipeline {
+  id: string;
+  name: string;
+  project_id: string;
+}
+
 export class LLamaCloudFileService {
+  private static readonly headers = {
+    Accept: "application/json",
+    Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
+  };
+
+  public static async getAllProjectsAndPipelines() {
+    try {
+      const projects = await this.getAllProjects();
+      const pipelines = await this.getAllPipelines();
+      return projects.map((project) => ({
+        ...project,
+        pipelines: pipelines.filter((p) => p.project_id === project.id),
+      }));
+    } catch (error) {
+      console.error("Error listing projects and pipelines:", error);
+      return [];
+    }
+  }
+
   public static async downloadFiles(nodes: NodeWithScore[]) {
     const files = this.nodesToDownloadFiles(nodes);
     if (!files.length) return;
@@ -104,11 +136,7 @@ export class LLamaCloudFileService {
     fileId: string,
   ): Promise<string> {
     const url = `${LLAMA_CLOUD_BASE_URL}/files/${fileId}/content?project_id=${projectId}`;
-    const headers = {
-      Accept: "application/json",
-      Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
-    };
-    const response = await fetch(url, { method: "GET", headers });
+    const response = await fetch(url, { method: "GET", headers: this.headers });
     const data = (await response.json()) as { url: string };
     return data.url;
   }
@@ -117,12 +145,22 @@ private static async getFiles(
     pipelineId: string,
   ): Promise<LlamaCloudFile[]> {
     const url = `${LLAMA_CLOUD_BASE_URL}/pipelines/${pipelineId}/files`;
-    const headers = {
-      Accept: "application/json",
-      Authorization: `Bearer ${process.env.LLAMA_CLOUD_API_KEY}`,
-    };
-    const response = await fetch(url, { method: "GET", headers });
+    const response = await fetch(url, { method: "GET", headers: this.headers });
     const data = await response.json();
     return data;
   }
+
+  private static async getAllProjects(): Promise<LLamaCloudProject[]> {
+    const url = `${LLAMA_CLOUD_BASE_URL}/projects`;
+    const response = await fetch(url, { method: "GET", headers: this.headers });
+    const data = (await response.json()) as LLamaCloudProject[];
+    return data;
+  }
+
+  private static async getAllPipelines(): Promise<LLamaCloudPipeline[]> {
+    const url = `${LLAMA_CLOUD_BASE_URL}/pipelines`;
+    const response = await fetch(url, { method: "GET", headers: this.headers });
+    const data = (await response.json()) as LLamaCloudPipeline[];
+    return data;
+  }
 }

From 39282dc8551f649ce82d44c1dd0c17c3ad5e7898 Mon Sep 17 00:00:00 2001
From: Thuc Pham <51660321+thucpn@users.noreply.github.com>
Date: Thu, 1 Aug 2024 09:31:54 +0700
Subject: [PATCH 02/38] feat: update chat config API to return llamacloud config

---
 .../streaming/nextjs/app/api/chat/config/route.ts | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git
a/templates/types/streaming/nextjs/app/api/chat/config/route.ts b/templates/types/streaming/nextjs/app/api/chat/config/route.ts index 8d875e67b..8892f071c 100644 --- a/templates/types/streaming/nextjs/app/api/chat/config/route.ts +++ b/templates/types/streaming/nextjs/app/api/chat/config/route.ts @@ -1,4 +1,5 @@ import { NextResponse } from "next/server"; +import { LLamaCloudFileService } from "../llamaindex/streaming/service"; /** * This API is to get config from the backend envs and expose them to the frontend @@ -6,6 +7,15 @@ import { NextResponse } from "next/server"; export async function GET() { const config = { starterQuestions: process.env.CONVERSATION_STARTERS?.trim().split("\n"), + llamaCloud: await getLLamaCloudConfig(), }; return NextResponse.json(config, { status: 200 }); } + +async function getLLamaCloudConfig() { + if (!process.env.LLAMA_CLOUD_API_KEY) return undefined; + const projects = await LLamaCloudFileService.getAllProjectsAndPipelines(); + return { + projects, + }; +} From eac3d74989f0fa71b2543c1d5802aa73e47bc56c Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Thu, 1 Aug 2024 09:33:13 +0700 Subject: [PATCH 03/38] feat: add shadcn select component --- .../nextjs/app/components/ui/select.tsx | 159 ++++++++++++++++++ templates/types/streaming/nextjs/package.json | 1 + 2 files changed, 160 insertions(+) create mode 100644 templates/types/streaming/nextjs/app/components/ui/select.tsx diff --git a/templates/types/streaming/nextjs/app/components/ui/select.tsx b/templates/types/streaming/nextjs/app/components/ui/select.tsx new file mode 100644 index 000000000..c01b068ba --- /dev/null +++ b/templates/types/streaming/nextjs/app/components/ui/select.tsx @@ -0,0 +1,159 @@ +"use client"; + +import * as SelectPrimitive from "@radix-ui/react-select"; +import { Check, ChevronDown, ChevronUp } from "lucide-react"; +import * as React from "react"; +import { cn } from "./lib/utils"; + +const Select = SelectPrimitive.Root; + +const SelectGroup = SelectPrimitive.Group; + +const SelectValue = SelectPrimitive.Value; + +const SelectTrigger = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + span]:line-clamp-1", + className, + )} + {...props} + > + {children} + + + + +)); +SelectTrigger.displayName = SelectPrimitive.Trigger.displayName; + +const SelectScrollUpButton = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + +)); +SelectScrollUpButton.displayName = SelectPrimitive.ScrollUpButton.displayName; + +const SelectScrollDownButton = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + +)); +SelectScrollDownButton.displayName = + SelectPrimitive.ScrollDownButton.displayName; + +const SelectContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, position = "popper", ...props }, ref) => ( + + + + + {children} + + + + +)); +SelectContent.displayName = SelectPrimitive.Content.displayName; + +const SelectLabel = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +SelectLabel.displayName = SelectPrimitive.Label.displayName; + +const SelectItem = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + + + + + + + {children} + +)); +SelectItem.displayName = 
SelectPrimitive.Item.displayName; + +const SelectSeparator = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +SelectSeparator.displayName = SelectPrimitive.Separator.displayName; + +export { + Select, + SelectContent, + SelectGroup, + SelectItem, + SelectLabel, + SelectScrollDownButton, + SelectScrollUpButton, + SelectSeparator, + SelectTrigger, + SelectValue, +}; diff --git a/templates/types/streaming/nextjs/package.json b/templates/types/streaming/nextjs/package.json index 5d429a417..b0b8bd577 100644 --- a/templates/types/streaming/nextjs/package.json +++ b/templates/types/streaming/nextjs/package.json @@ -15,6 +15,7 @@ "@llamaindex/pdf-viewer": "^1.1.3", "@radix-ui/react-collapsible": "^1.0.3", "@radix-ui/react-hover-card": "^1.0.7", + "@radix-ui/react-select": "^2.1.1", "@radix-ui/react-slot": "^1.0.2", "ai": "^3.0.21", "ajv": "^8.12.0", From d39ffda4e056b4214afb4b0a0e0674fcb44caec5 Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Thu, 1 Aug 2024 09:34:59 +0700 Subject: [PATCH 04/38] feat: render llamacloud pipeline options --- .../app/components/ui/chat/chat-input.tsx | 2 ++ .../components/ui/chat/hooks/use-config.ts | 21 +++++++++++ .../ui/chat/widgets/LlamaCloudSelector.tsx | 36 +++++++++++++++++++ 3 files changed, 59 insertions(+) create mode 100644 templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx b/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx index 01c7c0b1b..9f5173f6f 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx +++ b/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx @@ -6,6 +6,7 @@ import { Input } from "../input"; import UploadImagePreview from "../upload-image-preview"; import { ChatHandler } from "./chat.interface"; import { useFile } from "./hooks/use-file"; +import { LlamaCloudSelector } from "./widgets/LlamaCloudSelector"; const ALLOWED_EXTENSIONS = ["png", "jpg", "jpeg", "csv", "pdf", "txt", "docx"]; @@ -109,6 +110,7 @@ export default function ChatInput( disabled: props.isLoading, }} /> + diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts b/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts index 05de32a8e..1ea22beb5 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts +++ b/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts @@ -2,9 +2,29 @@ import { useEffect, useMemo, useState } from "react"; +export interface LLamaCloudProject { + id: string; + organization_id: string; + name: string; + is_default: boolean; +} + +export interface LLamaCloudPipeline { + id: string; + name: string; + project_id: string; +} + export interface ChatConfig { backend?: string; starterQuestions?: string[]; + llamaCloud?: { + projects: Array< + LLamaCloudProject & { + pipelines: LLamaCloudPipeline[]; + } + >; + }; } export function useClientConfig(): ChatConfig { @@ -27,5 +47,6 @@ export function useClientConfig(): ChatConfig { return { backend: backendOrigin, starterQuestions: config?.starterQuestions, + llamaCloud: config?.llamaCloud, }; } diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx b/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx new file mode 100644 index 
000000000..6c3ad5ab0 --- /dev/null +++ b/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx @@ -0,0 +1,36 @@ +import { + Select, + SelectContent, + SelectGroup, + SelectItem, + SelectLabel, + SelectTrigger, + SelectValue, +} from "../../select"; +import { useClientConfig } from "../hooks/use-config"; + +export function LlamaCloudSelector() { + const { llamaCloud } = useClientConfig(); + if (!llamaCloud?.projects.length) return null; + return ( + + ); +} From 9e0a3034a9941223f5352c3e717c84965834c6fc Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Thu, 1 Aug 2024 12:35:21 +0700 Subject: [PATCH 05/38] feat: update llamacloud service to manage config --- .../typescript/streaming/service.ts | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/templates/components/llamaindex/typescript/streaming/service.ts b/templates/components/llamaindex/typescript/streaming/service.ts index 469257684..ec17ec585 100644 --- a/templates/components/llamaindex/typescript/streaming/service.ts +++ b/templates/components/llamaindex/typescript/streaming/service.ts @@ -6,6 +6,7 @@ import path from "node:path"; const LLAMA_CLOUD_OUTPUT_DIR = "output/llamacloud"; const LLAMA_CLOUD_BASE_URL = "https://cloud.llamaindex.ai/api/v1"; const FILE_DELIMITER = "$"; // delimiter between pipelineId and filename +const LLAMA_CLOUD_CONFIG_FILE = path.join("config", "llamacloud.json"); interface LlamaCloudFile { name: string; @@ -26,6 +27,11 @@ interface LLamaCloudPipeline { project_id: string; } +interface LLamaCloudConfig { + project: string; + pipeline: string; +} + export class LLamaCloudFileService { private static readonly headers = { Accept: "application/json", @@ -46,6 +52,26 @@ export class LLamaCloudFileService { } } + public static getConfig(): LLamaCloudConfig | undefined { + try { + return JSON.parse(fs.readFileSync(LLAMA_CLOUD_CONFIG_FILE, "utf-8")); + } catch (error) { + // If file doesn't exist, return undefined + return undefined; + } + } + + public static async updateConfig(config: LLamaCloudConfig) { + try { + await fs.promises.writeFile( + LLAMA_CLOUD_CONFIG_FILE, + JSON.stringify(config, null, 2), + ); + } catch (error) { + throw new Error(`Error updating LlamaCloud config: ${error}`); + } + } + public static async downloadFiles(nodes: NodeWithScore[]) { const files = this.nodesToDownloadFiles(nodes); if (!files.length) return; From 8c99b76988674ab4a86729c48760f8eafc208b75 Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Thu, 1 Aug 2024 12:37:34 +0700 Subject: [PATCH 06/38] feat: update chat config API to get and update llamacloud config --- .../nextjs/app/api/chat/config/route.ts | 37 +++++++++++++++---- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/templates/types/streaming/nextjs/app/api/chat/config/route.ts b/templates/types/streaming/nextjs/app/api/chat/config/route.ts index 8892f071c..f15dcfd67 100644 --- a/templates/types/streaming/nextjs/app/api/chat/config/route.ts +++ b/templates/types/streaming/nextjs/app/api/chat/config/route.ts @@ -1,4 +1,4 @@ -import { NextResponse } from "next/server"; +import { NextRequest, NextResponse } from "next/server"; import { LLamaCloudFileService } from "../llamaindex/streaming/service"; /** @@ -7,15 +7,36 @@ import { LLamaCloudFileService } from "../llamaindex/streaming/service"; export async function GET() { const config = { starterQuestions: process.env.CONVERSATION_STARTERS?.trim().split("\n"), - llamaCloud: await 
getLLamaCloudConfig(),
+    llamaCloud: {
+      config: LLamaCloudFileService.getConfig(),
+      projects: await LLamaCloudFileService.getAllProjectsAndPipelines(),
+    },
   };
   return NextResponse.json(config, { status: 200 });
 }
 
-async function getLLamaCloudConfig() {
-  if (!process.env.LLAMA_CLOUD_API_KEY) return undefined;
-  const projects = await LLamaCloudFileService.getAllProjectsAndPipelines();
-  return {
-    projects,
-  };
+export async function POST(request: NextRequest) {
+  const body = await request.json();
+  const { project, pipeline }: { project: string; pipeline: string } = body;
+
+  if (!project || !pipeline) {
+    return NextResponse.json(
+      { message: "Please provide project and pipeline names" },
+      { status: 400 },
+    );
+  }
+
+  try {
+    await LLamaCloudFileService.updateConfig({ project, pipeline });
+    return NextResponse.json(
+      { message: "Successfully updated LlamaCloud configs" },
+      { status: 201 },
+    );
+  } catch (error) {
+    console.error(error);
+    return NextResponse.json(
+      { message: "Failed to update LlamaCloud configs" },
+      { status: 500 },
+    );
+  }
 }

From 27a2a0555a5d0f1ecbb444f0d16a6f8825d74cf7 Mon Sep 17 00:00:00 2001
From: Thuc Pham <51660321+thucpn@users.noreply.github.com>
Date: Thu, 1 Aug 2024 12:38:15 +0700
Subject: [PATCH 07/38] feat: update llamacloud datasource to use config json

---
 .../vectordbs/typescript/llamacloud/index.ts | 18 +++++++++++++-----
 1 file changed, 13 insertions(+), 5 deletions(-)

diff --git a/templates/components/vectordbs/typescript/llamacloud/index.ts b/templates/components/vectordbs/typescript/llamacloud/index.ts
index 3f0875ccd..c5539c6d7 100644
--- a/templates/components/vectordbs/typescript/llamacloud/index.ts
+++ b/templates/components/vectordbs/typescript/llamacloud/index.ts
@@ -1,12 +1,20 @@
 import { LlamaCloudIndex } from "llamaindex/cloud/LlamaCloudIndex";
-import { checkRequiredEnvVars } from "./shared";
+import { LLamaCloudFileService } from "../llamaindex/streaming/service";
 export async function getDataSource() {
-  checkRequiredEnvVars();
+  const { project, pipeline } = LLamaCloudFileService.getConfig() || {};
+  const projectName = project || process.env.LLAMA_CLOUD_PROJECT_NAME;
+  const pipelineName = pipeline || process.env.LLAMA_CLOUD_INDEX_NAME;
+  const apiKey = process.env.LLAMA_CLOUD_API_KEY;
+  if (!projectName || !pipelineName || !apiKey) {
+    throw new Error(
+      "Set project, pipeline, and api key in the config file or as environment variables.",
+    );
+  }
   const index = new LlamaCloudIndex({
-    name: process.env.LLAMA_CLOUD_INDEX_NAME!,
-    projectName: process.env.LLAMA_CLOUD_PROJECT_NAME!,
-    apiKey: process.env.LLAMA_CLOUD_API_KEY,
+    name: pipelineName,
+    projectName,
+    apiKey,
     baseUrl: process.env.LLAMA_CLOUD_BASE_URL,
   });
   return index;
 }

From a759447ca54c094413b828760545f7b529f2f29b Mon Sep 17 00:00:00 2001
From: Thuc Pham <51660321+thucpn@users.noreply.github.com>
Date: Thu, 1 Aug 2024 12:39:47 +0700
Subject: [PATCH 08/38] feat: allow changing config from UI

---
 .../components/ui/chat/hooks/use-config.ts | 55 ++++++++++++++-----
 .../ui/chat/widgets/LlamaCloudSelector.tsx | 41 ++++++++++++--
 2 files changed, 75 insertions(+), 21 deletions(-)

diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts b/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts
index 1ea22beb5..f9af9d09f 100644
--- a/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts
+++ b/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts
@@ -2,32 +2,38 @@ import {
useEffect, useMemo, useState } from "react"; +export interface LLamaCloudPipeline { + id: string; + name: string; + project_id: string; +} + export interface LLamaCloudProject { id: string; organization_id: string; name: string; is_default: boolean; + pipelines: LLamaCloudPipeline[]; } -export interface LLamaCloudPipeline { - id: string; - name: string; - project_id: string; +export interface LlamaCloudConfig { + project: string; // project name + pipeline: string; // pipeline name } export interface ChatConfig { backend?: string; starterQuestions?: string[]; llamaCloud?: { - projects: Array< - LLamaCloudProject & { - pipelines: LLamaCloudPipeline[]; - } - >; + projects: LLamaCloudProject[]; + config?: LlamaCloudConfig; }; + updateLlamaCloudConfig: (config: LlamaCloudConfig) => Promise; } -export function useClientConfig(): ChatConfig { +export function useClientConfig(opts?: { + shouldFetchConfig: boolean; +}): ChatConfig { const chatAPI = process.env.NEXT_PUBLIC_CHAT_API; const [config, setConfig] = useState(); @@ -37,16 +43,35 @@ export function useClientConfig(): ChatConfig { const configAPI = `${backendOrigin}/api/chat/config`; + // control whether to call the config API to reduce unnecessary requests + const shouldFetchConfig = opts?.shouldFetchConfig ?? false; + useEffect(() => { - fetch(configAPI) - .then((response) => response.json()) - .then((data) => setConfig({ ...data, chatAPI })) - .catch((error) => console.error("Error fetching config", error)); - }, [chatAPI, configAPI]); + if (shouldFetchConfig) { + fetch(configAPI) + .then((response) => response.json()) + .then((data) => setConfig({ ...data, chatAPI })) + .catch((error) => console.error("Error fetching config", error)); + } + }, [chatAPI, configAPI, shouldFetchConfig]); + + const updateLlamaCloudConfig = async (config: LlamaCloudConfig) => { + const response = await fetch(configAPI, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(config), + }); + if (!response.ok) { + throw new Error("Failed to update LlamaCloud config"); + } + }; return { backend: backendOrigin, starterQuestions: config?.starterQuestions, llamaCloud: config?.llamaCloud, + updateLlamaCloudConfig, }; } diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx b/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx index 6c3ad5ab0..9c09e744a 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx +++ b/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx @@ -7,15 +7,36 @@ import { SelectTrigger, SelectValue, } from "../../select"; -import { useClientConfig } from "../hooks/use-config"; +import { LlamaCloudConfig, useClientConfig } from "../hooks/use-config"; + +// stringify the config to store in the select value +const toSelectValue = (llamaCloudConfig?: LlamaCloudConfig) => { + if (!llamaCloudConfig) return undefined; + return JSON.stringify(llamaCloudConfig); +}; export function LlamaCloudSelector() { - const { llamaCloud } = useClientConfig(); + const { llamaCloud, updateLlamaCloudConfig } = useClientConfig({ + shouldFetchConfig: true, + }); if (!llamaCloud?.projects.length) return null; + + const handlePipelineSelect = async (value: string) => { + try { + const { project, pipeline } = JSON.parse(value) as LlamaCloudConfig; + await updateLlamaCloudConfig({ project, pipeline }); + } catch (error) { + console.error("Failed to update LlamaCloud config", 
error); + } + }; + return ( - + + {llamaCloud.projects.map((project) => ( @@ -24,7 +45,15 @@ export function LlamaCloudSelector() { Project: {project.name} {project.pipelines.map((pipeline) => ( - + {pipeline.name} ))} From b5581d59af66ee009a87a85175a73937868f2c51 Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Thu, 1 Aug 2024 12:40:32 +0700 Subject: [PATCH 09/38] feat: add should fetch options to reduce requests --- .../streaming/nextjs/app/components/ui/chat/chat-messages.tsx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/chat-messages.tsx b/templates/types/streaming/nextjs/app/components/ui/chat/chat-messages.tsx index e0afd8b5d..d4ba5416a 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/chat-messages.tsx +++ b/templates/types/streaming/nextjs/app/components/ui/chat/chat-messages.tsx @@ -13,7 +13,9 @@ export default function ChatMessages( "messages" | "isLoading" | "reload" | "stop" | "append" >, ) { - const { starterQuestions } = useClientConfig(); + const { starterQuestions } = useClientConfig({ + shouldFetchConfig: true, + }); const scrollableChatContainerRef = useRef(null); const messageLength = props.messages.length; const lastMessage = props.messages[messageLength - 1]; From 6f694b619f709841db2fa5321f8dc8512ba74237 Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Thu, 1 Aug 2024 12:54:25 +0700 Subject: [PATCH 10/38] fix: update style of selector --- .../app/components/ui/chat/widgets/LlamaCloudSelector.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx b/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx index 9c09e744a..3b60de4ba 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx +++ b/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx @@ -35,8 +35,8 @@ export function LlamaCloudSelector() { onValueChange={handlePipelineSelect} defaultValue={toSelectValue(llamaCloud.config)} > - - + + {llamaCloud.projects.map((project) => ( From d11d71eb4de5b51300fad209610b5e95fa9b1e2e Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Thu, 1 Aug 2024 12:57:54 +0700 Subject: [PATCH 11/38] Create quick-walls-switch.md --- .changeset/quick-walls-switch.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .changeset/quick-walls-switch.md diff --git a/.changeset/quick-walls-switch.md b/.changeset/quick-walls-switch.md new file mode 100644 index 000000000..81455fde6 --- /dev/null +++ b/.changeset/quick-walls-switch.md @@ -0,0 +1,5 @@ +--- +"create-llama": patch +--- + +Implement a selector to configure LlamaCloud parameters From 90c3ca185cc94e82b0b4a74cc2d9d44d1c3a87d0 Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Fri, 2 Aug 2024 14:43:36 +0700 Subject: [PATCH 12/38] feat: update UI to send pipeline config via request body --- .../app/components/ui/chat/chat-input.tsx | 26 ++++++++---- .../components/ui/chat/hooks/use-config.ts | 32 ++------------- .../ui/chat/hooks/use-llama-cloud.ts | 20 ++++++++++ .../ui/chat/widgets/LlamaCloudSelector.tsx | 40 ++++++++++++------- 4 files changed, 67 insertions(+), 51 deletions(-) create mode 100644 
templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-llama-cloud.ts diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx b/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx index 9f5173f6f..7c8702a89 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx +++ b/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx @@ -6,6 +6,7 @@ import { Input } from "../input"; import UploadImagePreview from "../upload-image-preview"; import { ChatHandler } from "./chat.interface"; import { useFile } from "./hooks/use-file"; +import { useLlamaCloud } from "./hooks/use-llama-cloud"; import { LlamaCloudSelector } from "./widgets/LlamaCloudSelector"; const ALLOWED_EXTENSIONS = ["png", "jpg", "jpeg", "csv", "pdf", "txt", "docx"]; @@ -35,6 +36,12 @@ export default function ChatInput( reset, getAnnotations, } = useFile(); + const { projects, pipeline, setPipeline } = useLlamaCloud(); + + // Additional data to be sent to the API endpoint. + const requestAdditionalData = { + llamaCloudPipeline: pipeline, + }; // default submit function does not handle including annotations in the message // so we need to use append function to submit new message with annotations @@ -43,12 +50,15 @@ export default function ChatInput( annotations: JSONValue[] | undefined, ) => { e.preventDefault(); - props.append!({ - content: props.input, - role: "user", - createdAt: new Date(), - annotations, - }); + props.append!( + { + content: props.input, + role: "user", + createdAt: new Date(), + annotations, + }, + { data: requestAdditionalData }, + ); props.setInput!(""); }; @@ -58,7 +68,7 @@ export default function ChatInput( handleSubmitWithAnnotations(e, annotations); return reset(); } - props.handleSubmit(e); + props.handleSubmit(e, { data: requestAdditionalData }); }; const handleUploadFile = async (file: File) => { @@ -110,7 +120,7 @@ export default function ChatInput( disabled: props.isLoading, }} /> - + diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts b/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts index f9af9d09f..801a60533 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts +++ b/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-config.ts @@ -2,23 +2,15 @@ import { useEffect, useMemo, useState } from "react"; -export interface LLamaCloudPipeline { - id: string; - name: string; - project_id: string; -} - export interface LLamaCloudProject { id: string; organization_id: string; name: string; is_default: boolean; - pipelines: LLamaCloudPipeline[]; -} - -export interface LlamaCloudConfig { - project: string; // project name - pipeline: string; // pipeline name + pipelines: Array<{ + id: string; + name: string; + }>; } export interface ChatConfig { @@ -26,9 +18,7 @@ export interface ChatConfig { starterQuestions?: string[]; llamaCloud?: { projects: LLamaCloudProject[]; - config?: LlamaCloudConfig; }; - updateLlamaCloudConfig: (config: LlamaCloudConfig) => Promise; } export function useClientConfig(opts?: { @@ -55,23 +45,9 @@ export function useClientConfig(opts?: { } }, [chatAPI, configAPI, shouldFetchConfig]); - const updateLlamaCloudConfig = async (config: LlamaCloudConfig) => { - const response = await fetch(configAPI, { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify(config), - }); - if (!response.ok) { - throw new Error("Failed to update LlamaCloud 
config"); - } - }; - return { backend: backendOrigin, starterQuestions: config?.starterQuestions, llamaCloud: config?.llamaCloud, - updateLlamaCloudConfig, }; } diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-llama-cloud.ts b/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-llama-cloud.ts new file mode 100644 index 000000000..2a18cd0c1 --- /dev/null +++ b/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-llama-cloud.ts @@ -0,0 +1,20 @@ +"use client"; + +import { useState } from "react"; +import { useClientConfig } from "./use-config"; + +export interface PipelineConfig { + project: string; // project name + pipeline: string; // pipeline name +} + +export function useLlamaCloud() { + const { llamaCloud } = useClientConfig({ shouldFetchConfig: true }); + const [pipeline, setPipeline] = useState(); + + return { + projects: llamaCloud?.projects ?? [], + pipeline, + setPipeline, + }; +} diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx b/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx index 3b60de4ba..1c05e0ecf 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx +++ b/templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx @@ -7,39 +7,49 @@ import { SelectTrigger, SelectValue, } from "../../select"; -import { LlamaCloudConfig, useClientConfig } from "../hooks/use-config"; +import { LLamaCloudProject } from "../hooks/use-config"; +import { PipelineConfig } from "../hooks/use-llama-cloud"; // stringify the config to store in the select value -const toSelectValue = (llamaCloudConfig?: LlamaCloudConfig) => { +const toSelectValue = (llamaCloudConfig?: PipelineConfig) => { if (!llamaCloudConfig) return undefined; return JSON.stringify(llamaCloudConfig); }; -export function LlamaCloudSelector() { - const { llamaCloud, updateLlamaCloudConfig } = useClientConfig({ - shouldFetchConfig: true, - }); - if (!llamaCloud?.projects.length) return null; +const DEFAULT_SELECT_VALUE = "default_env"; + +export interface LlamaCloudSelectorProps { + projects: LLamaCloudProject[]; + setPipeline: (pipelineConfig: PipelineConfig | undefined) => void; +} + +export function LlamaCloudSelector({ + projects, + setPipeline, +}: LlamaCloudSelectorProps) { + if (!projects.length) return null; const handlePipelineSelect = async (value: string) => { - try { - const { project, pipeline } = JSON.parse(value) as LlamaCloudConfig; - await updateLlamaCloudConfig({ project, pipeline }); - } catch (error) { - console.error("Failed to update LlamaCloud config", error); - } + if (value === DEFAULT_SELECT_VALUE) return setPipeline(undefined); + setPipeline(JSON.parse(value) as PipelineConfig); }; return ( - - Environment - - Use default pipeline - - {projects.map((project) => ( From 042fee249082ecc2c772a4d11eaae2745ca6ecec Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Mon, 5 Aug 2024 16:48:58 +0700 Subject: [PATCH 29/38] refactor: clean up --- .../streaming/fastapi/app/api/routers/models.py | 8 +------- .../nextjs/app/components/ui/chat/chat-input.tsx | 13 +++++-------- .../app/components/ui/chat/hooks/use-config.ts | 2 ++ .../app/components/ui/chat/hooks/use-llama-cloud.ts | 6 +++--- 4 files changed, 11 insertions(+), 18 deletions(-) diff --git a/templates/types/streaming/fastapi/app/api/routers/models.py b/templates/types/streaming/fastapi/app/api/routers/models.py 
index b8f7d148e..ebe5b6a78 100644 --- a/templates/types/streaming/fastapi/app/api/routers/models.py +++ b/templates/types/streaming/fastapi/app/api/routers/models.py @@ -85,13 +85,7 @@ class Config: "role": "user", "content": "What standards for letters exist?", } - ], - "data": { - "llamaCloudPipeline": { - "project": "Default", - "pipeline": "jonas" - } - } + ] } } diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx b/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx index 0e18272dc..5584ae54c 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx +++ b/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx @@ -36,13 +36,10 @@ export default function ChatInput( reset, getAnnotations, } = useFile(); - const { isUsingLLamaCloud, projects, pipeline, setPipeline } = - useLlamaCloud(); + const { projects, pipeline, setPipeline } = useLlamaCloud(); // Additional data to be sent to the API endpoint. - const requestAdditionalData = { - llamaCloudPipeline: pipeline, - }; + const requestData = { llamaCloudPipeline: pipeline }; // default submit function does not handle including annotations in the message // so we need to use append function to submit new message with annotations @@ -58,7 +55,7 @@ export default function ChatInput( createdAt: new Date(), annotations, }, - { data: requestAdditionalData }, + { data: requestData }, ); props.setInput!(""); }; @@ -69,7 +66,7 @@ export default function ChatInput( handleSubmitWithAnnotations(e, annotations); return reset(); } - props.handleSubmit(e, { data: requestAdditionalData }); + props.handleSubmit(e, { data: requestData }); }; const handleUploadFile = async (file: File) => { @@ -121,7 +118,7 @@ export default function ChatInput( disabled: props.isLoading, }} /> - {isUsingLLamaCloud && ( + {process.env.NEXT_PUBLIC_USE_LLAMACLOUD === "true" && ( (); - const isUsingLLamaCloud = process.env.NEXT_PUBLIC_USE_LLAMACLOUD === "true"; useEffect(() => { - if (isUsingLLamaCloud && !config) { + if (process.env.NEXT_PUBLIC_USE_LLAMACLOUD === "true" && !config) { fetch(`${backend}/api/chat/config/llamacloud`) .then((response) => response.json()) .then((data) => setConfig(data)) @@ -41,7 +42,6 @@ export function useLlamaCloud() { }; return { - isUsingLLamaCloud, projects: config?.projects ?? 
[], pipeline: config?.pipeline, setPipeline, From a3f921d1036c532351dcd33c17e1ffa5709e092b Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Mon, 5 Aug 2024 17:05:17 +0700 Subject: [PATCH 30/38] fix: using llamacloud from env --- .../nextjs/app/components/ui/chat/hooks/use-llama-cloud.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-llama-cloud.ts b/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-llama-cloud.ts index 706716fa6..446fc374e 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-llama-cloud.ts +++ b/templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-llama-cloud.ts @@ -35,7 +35,7 @@ export function useLlamaCloud() { .then((data) => setConfig(data)) .catch((error) => console.error("Error fetching config", error)); } - }, [backend, config, isUsingLLamaCloud]); + }, [backend, config]); const setPipeline = (pipelineConfig?: PipelineConfig) => { setConfig({ ...config, pipeline: pipelineConfig }); From 8fe83cfad696b70d3a34584c62934ef5b15e03c1 Mon Sep 17 00:00:00 2001 From: Thuc Pham <51660321+thucpn@users.noreply.github.com> Date: Mon, 5 Aug 2024 17:16:16 +0700 Subject: [PATCH 31/38] refactor: remove usellamacloud --- .../app/components/ui/chat/chat-input.tsx | 19 +++--- .../ui/chat/hooks/use-llama-cloud.ts | 49 -------------- .../ui/chat/widgets/LlamaCloudSelector.tsx | 64 ++++++++++++++++--- 3 files changed, 67 insertions(+), 65 deletions(-) delete mode 100644 templates/types/streaming/nextjs/app/components/ui/chat/hooks/use-llama-cloud.ts diff --git a/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx b/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx index 5584ae54c..1b4765b56 100644 --- a/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx +++ b/templates/types/streaming/nextjs/app/components/ui/chat/chat-input.tsx @@ -1,4 +1,5 @@ import { JSONValue } from "ai"; +import { useState } from "react"; import { Button } from "../button"; import { DocumentPreview } from "../document-preview"; import FileUploader from "../file-uploader"; @@ -6,11 +7,17 @@ import { Input } from "../input"; import UploadImagePreview from "../upload-image-preview"; import { ChatHandler } from "./chat.interface"; import { useFile } from "./hooks/use-file"; -import { useLlamaCloud } from "./hooks/use-llama-cloud"; import { LlamaCloudSelector } from "./widgets/LlamaCloudSelector"; const ALLOWED_EXTENSIONS = ["png", "jpg", "jpeg", "csv", "pdf", "txt", "docx"]; +export type RequestData = { + llamaCloudPipeline?: { + project: string; + pipeline: string; + }; +}; + export default function ChatInput( props: Pick< ChatHandler, @@ -36,10 +43,7 @@ export default function ChatInput( reset, getAnnotations, } = useFile(); - const { projects, pipeline, setPipeline } = useLlamaCloud(); - - // Additional data to be sent to the API endpoint. - const requestData = { llamaCloudPipeline: pipeline }; + const [requestData, setRequestData] = useState(); // default submit function does not handle including annotations in the message // so we need to use append function to submit new message with annotations @@ -120,9 +124,8 @@ export default function ChatInput( /> {process.env.NEXT_PUBLIC_USE_LLAMACLOUD === "true" && ( )}