From 3356a8f1d5a606395961ed57b2643ca5c879486c Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Mon, 22 Sep 2025 17:40:48 -0400 Subject: [PATCH 01/18] wip hist --- pyproject.toml.jinja | 2 +- src/{{ project_name_snake }}/qa_workflows.py | 114 +++++++++++-------- test-proj/pyproject.toml | 2 +- test-proj/src/test_proj/qa_workflows.py | 114 +++++++++++-------- 4 files changed, 140 insertions(+), 92 deletions(-) diff --git a/pyproject.toml.jinja b/pyproject.toml.jinja index 9d9e42a..63d506b 100644 --- a/pyproject.toml.jinja +++ b/pyproject.toml.jinja @@ -1,5 +1,5 @@ [project] -name = "{{ project_name_snake }}" +name = "{{ project_name }}" version = "0.1.0" description = "Add your description here" readme = "README.md" diff --git a/src/{{ project_name_snake }}/qa_workflows.py b/src/{{ project_name_snake }}/qa_workflows.py index a6a5878..5d30c5f 100644 --- a/src/{{ project_name_snake }}/qa_workflows.py +++ b/src/{{ project_name_snake }}/qa_workflows.py @@ -1,13 +1,17 @@ +from __future__ import annotations import logging import os import tempfile +from typing import Any, Literal import httpx from dotenv import load_dotenv +from llama_cloud import ChatMessage from llama_cloud.types import RetrievalMode from llama_index.core import Settings from llama_index.core.chat_engine.types import BaseChatEngine, ChatMode from llama_index.core.memory import ChatMemoryBuffer +from pydantic import BaseModel, Field from llama_index.embeddings.openai import OpenAIEmbedding from llama_index.llms.openai import OpenAI from llama_cloud_services import LlamaCloudIndex @@ -164,12 +168,10 @@ async def parse_document(self, ev: FileDownloadedEvent, ctx: Context) -> StopEve ) -class ChatResponseEvent(Event): - """Event emitted when chat engine generates a response""" +class AppendChatMessage(Event): + """Event emitted when chat engine appends a message to the conversation history""" - response: str - sources: list - query: str + message: ConversationMessage class ChatDeltaEvent(Event): @@ -178,7 +180,25 @@ class ChatDeltaEvent(Event): delta: str -class ChatWorkflow(Workflow): +class ChatWorkflowState(BaseModel): + conversation_history: list[ChatMessage] = Field(default_factory=list) + session_id: str | None = None + index_name: str | None = None + + +class SourceMessage(BaseModel): + text: str + score: float + metadata: dict[str, Any] + + +class ConversationMessage(BaseModel): + role: Literal["user", "assistant"] + text: str + sources: list[SourceMessage] = Field(default_factory=list) + + +class ChatWorkflow(Workflow[ChatWorkflowState]): """Workflow to handle continuous chat queries against indexed documents""" def __init__(self, **kwargs): @@ -188,7 +208,9 @@ def __init__(self, **kwargs): ] = {} # Cache chat engines per index @step - async def initialize_chat(self, ev: ChatEvent, ctx: Context) -> InputRequiredEvent: + async def initialize_chat( + self, ev: ChatEvent, ctx: Context[ChatWorkflowState] + ) -> InputRequiredEvent: """Initialize the chat session and request first input""" try: logger.info(f"Initializing chat {ev.index_name}") @@ -198,7 +220,8 @@ async def initialize_chat(self, ev: ChatEvent, ctx: Context) -> InputRequiredEve # Store session info in context await ctx.store.set("index_name", index_name) await ctx.store.set("session_id", session_id) - await ctx.store.set("conversation_history", []) + if ctx.store.get("conversation_history", None) is None: + await ctx.store.set("conversation_history", []) # Create cache key for chat engine cache_key = f"{index_name}_{session_id}" @@ -216,7 +239,12 @@ async def 
initialize_chat(self, ev: ChatEvent, ctx: Context) -> InputRequiredEve ) # Create chat engine with memory - memory = ChatMemoryBuffer.from_defaults(token_limit=3900) + memory = ChatMemoryBuffer.from_defaults( + token_limit=3900, + chat_history=await ctx.store.get( + "conversation_history", default=[] + ), + ) self.chat_engines[cache_key] = index.as_chat_engine( chat_mode=ChatMode.CONTEXT, memory=memory, @@ -230,10 +258,20 @@ async def initialize_chat(self, ev: ChatEvent, ctx: Context) -> InputRequiredEve retriever_mode=RetrievalMode.CHUNKS, ) + history = await ctx.store.get("conversation_history", default=[]) + if len(history) == 0: + ctx.write_event_to_stream( + ConversationMessage( + role="assistant", + text="Chat initialized. Ask a question (or type 'exit' to quit): ", + ) + ) + else: + for item in history: + item: ConversationMessage = item + ctx.write_event_to_stream(item) # Request first user input - return InputRequiredEvent( - prefix="Chat initialized. Ask a question (or type 'exit' to quit): " - ) + return InputRequiredEvent(prefix="[waiting for user message]") except Exception as e: return StopEvent( @@ -251,6 +289,10 @@ async def process_user_response( try: logger.info(f"Processing user response {ev.response}") user_input = ev.response.strip() + with ctx.store.edit_state() as state: + messages = state.get("conversation_history", default=[]) + messages.append(ConversationMessage(role="user", text=user_input)) + state.set("conversation_history", messages) logger.info(f"User input: {user_input}") @@ -287,48 +329,30 @@ async def process_user_response( # Extract source nodes for citations sources = [] - if hasattr(stream_response, "source_nodes"): + if stream_response.source_nodes: for node in stream_response.source_nodes: sources.append( - { - "text": node.text[:200] + "..." - if len(node.text) > 200 + SourceMessage( + text=node.text[:197] + "..." 
+ if len(node.text) >= 200 else node.text, - "score": node.score if hasattr(node, "score") else None, - "metadata": node.metadata - if hasattr(node, "metadata") - else {}, - } + score=node.score, + metadata=node.metadata, + ) ) # Update conversation history - conversation_history = await ctx.store.get( - "conversation_history", default=[] - ) - conversation_history.append( - { - "query": user_input, - "response": full_text.strip() - if full_text - else str(stream_response), - "sources": sources, - } + response = ConversationMessage( + role="assistant", text=full_text.strip(), sources=sources ) - await ctx.store.set("conversation_history", conversation_history) + with ctx.store.edit_state() as state: + messages = state.get("conversation_history", default=[]) + messages.append(response) + state.set("conversation_history", messages) # After streaming completes, emit a summary response event to stream for frontend/main printing - ctx.write_event_to_stream( - ChatResponseEvent( - response=full_text.strip() if full_text else str(stream_response), - sources=sources, - query=user_input, - ) - ) - - # Prompt for next input - return InputRequiredEvent( - prefix="\nAsk another question (or type 'exit' to quit): " - ) + ctx.write_event_to_stream(AppendChatMessage(message=response)) + return InputRequiredEvent(prefix="[waiting for user message]") except Exception as e: return StopEvent( diff --git a/test-proj/pyproject.toml b/test-proj/pyproject.toml index f7706af..e5abba3 100644 --- a/test-proj/pyproject.toml +++ b/test-proj/pyproject.toml @@ -1,5 +1,5 @@ [project] -name = "test_proj" +name = "test-proj" version = "0.1.0" description = "Add your description here" readme = "README.md" diff --git a/test-proj/src/test_proj/qa_workflows.py b/test-proj/src/test_proj/qa_workflows.py index a6a5878..5d30c5f 100644 --- a/test-proj/src/test_proj/qa_workflows.py +++ b/test-proj/src/test_proj/qa_workflows.py @@ -1,13 +1,17 @@ +from __future__ import annotations import logging import os import tempfile +from typing import Any, Literal import httpx from dotenv import load_dotenv +from llama_cloud import ChatMessage from llama_cloud.types import RetrievalMode from llama_index.core import Settings from llama_index.core.chat_engine.types import BaseChatEngine, ChatMode from llama_index.core.memory import ChatMemoryBuffer +from pydantic import BaseModel, Field from llama_index.embeddings.openai import OpenAIEmbedding from llama_index.llms.openai import OpenAI from llama_cloud_services import LlamaCloudIndex @@ -164,12 +168,10 @@ async def parse_document(self, ev: FileDownloadedEvent, ctx: Context) -> StopEve ) -class ChatResponseEvent(Event): - """Event emitted when chat engine generates a response""" +class AppendChatMessage(Event): + """Event emitted when chat engine appends a message to the conversation history""" - response: str - sources: list - query: str + message: ConversationMessage class ChatDeltaEvent(Event): @@ -178,7 +180,25 @@ class ChatDeltaEvent(Event): delta: str -class ChatWorkflow(Workflow): +class ChatWorkflowState(BaseModel): + conversation_history: list[ChatMessage] = Field(default_factory=list) + session_id: str | None = None + index_name: str | None = None + + +class SourceMessage(BaseModel): + text: str + score: float + metadata: dict[str, Any] + + +class ConversationMessage(BaseModel): + role: Literal["user", "assistant"] + text: str + sources: list[SourceMessage] = Field(default_factory=list) + + +class ChatWorkflow(Workflow[ChatWorkflowState]): """Workflow to handle continuous chat queries 
against indexed documents""" def __init__(self, **kwargs): @@ -188,7 +208,9 @@ def __init__(self, **kwargs): ] = {} # Cache chat engines per index @step - async def initialize_chat(self, ev: ChatEvent, ctx: Context) -> InputRequiredEvent: + async def initialize_chat( + self, ev: ChatEvent, ctx: Context[ChatWorkflowState] + ) -> InputRequiredEvent: """Initialize the chat session and request first input""" try: logger.info(f"Initializing chat {ev.index_name}") @@ -198,7 +220,8 @@ async def initialize_chat(self, ev: ChatEvent, ctx: Context) -> InputRequiredEve # Store session info in context await ctx.store.set("index_name", index_name) await ctx.store.set("session_id", session_id) - await ctx.store.set("conversation_history", []) + if ctx.store.get("conversation_history", None) is None: + await ctx.store.set("conversation_history", []) # Create cache key for chat engine cache_key = f"{index_name}_{session_id}" @@ -216,7 +239,12 @@ async def initialize_chat(self, ev: ChatEvent, ctx: Context) -> InputRequiredEve ) # Create chat engine with memory - memory = ChatMemoryBuffer.from_defaults(token_limit=3900) + memory = ChatMemoryBuffer.from_defaults( + token_limit=3900, + chat_history=await ctx.store.get( + "conversation_history", default=[] + ), + ) self.chat_engines[cache_key] = index.as_chat_engine( chat_mode=ChatMode.CONTEXT, memory=memory, @@ -230,10 +258,20 @@ async def initialize_chat(self, ev: ChatEvent, ctx: Context) -> InputRequiredEve retriever_mode=RetrievalMode.CHUNKS, ) + history = await ctx.store.get("conversation_history", default=[]) + if len(history) == 0: + ctx.write_event_to_stream( + ConversationMessage( + role="assistant", + text="Chat initialized. Ask a question (or type 'exit' to quit): ", + ) + ) + else: + for item in history: + item: ConversationMessage = item + ctx.write_event_to_stream(item) # Request first user input - return InputRequiredEvent( - prefix="Chat initialized. Ask a question (or type 'exit' to quit): " - ) + return InputRequiredEvent(prefix="[waiting for user message]") except Exception as e: return StopEvent( @@ -251,6 +289,10 @@ async def process_user_response( try: logger.info(f"Processing user response {ev.response}") user_input = ev.response.strip() + with ctx.store.edit_state() as state: + messages = state.get("conversation_history", default=[]) + messages.append(ConversationMessage(role="user", text=user_input)) + state.set("conversation_history", messages) logger.info(f"User input: {user_input}") @@ -287,48 +329,30 @@ async def process_user_response( # Extract source nodes for citations sources = [] - if hasattr(stream_response, "source_nodes"): + if stream_response.source_nodes: for node in stream_response.source_nodes: sources.append( - { - "text": node.text[:200] + "..." - if len(node.text) > 200 + SourceMessage( + text=node.text[:197] + "..." 
+ if len(node.text) >= 200 else node.text, - "score": node.score if hasattr(node, "score") else None, - "metadata": node.metadata - if hasattr(node, "metadata") - else {}, - } + score=node.score, + metadata=node.metadata, + ) ) # Update conversation history - conversation_history = await ctx.store.get( - "conversation_history", default=[] - ) - conversation_history.append( - { - "query": user_input, - "response": full_text.strip() - if full_text - else str(stream_response), - "sources": sources, - } + response = ConversationMessage( + role="assistant", text=full_text.strip(), sources=sources ) - await ctx.store.set("conversation_history", conversation_history) + with ctx.store.edit_state() as state: + messages = state.get("conversation_history", default=[]) + messages.append(response) + state.set("conversation_history", messages) # After streaming completes, emit a summary response event to stream for frontend/main printing - ctx.write_event_to_stream( - ChatResponseEvent( - response=full_text.strip() if full_text else str(stream_response), - sources=sources, - query=user_input, - ) - ) - - # Prompt for next input - return InputRequiredEvent( - prefix="\nAsk another question (or type 'exit' to quit): " - ) + ctx.write_event_to_stream(AppendChatMessage(message=response)) + return InputRequiredEvent(prefix="[waiting for user message]") except Exception as e: return StopEvent( From ee3dc11d099ebe4566e8f2df59b6a8958c3c3ad3 Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Mon, 22 Sep 2025 22:56:23 -0400 Subject: [PATCH 02/18] guh --- src/{{ project_name_snake }}/qa_workflows.py | 2 + test-proj/pyproject.toml | 1 + test-proj/src/test_proj/clients.py | 21 +- test-proj/src/test_proj/qa_workflows.py | 195 +++++++--------- test-proj/ui/package.json | 1 + test-proj/ui/src/components/ChatBot.tsx | 230 +++++++++++-------- test-proj/ui/src/components/Sidebar.tsx | 125 ++++++++++ test-proj/ui/src/libs/chatHistory.ts | 159 +++++++++++++ test-proj/ui/src/libs/chatWorkflowHandler.ts | 35 +++ test-proj/ui/src/pages/Home.tsx | 36 +-- 10 files changed, 582 insertions(+), 223 deletions(-) create mode 100644 test-proj/ui/src/components/Sidebar.tsx create mode 100644 test-proj/ui/src/libs/chatHistory.ts create mode 100644 test-proj/ui/src/libs/chatWorkflowHandler.ts diff --git a/src/{{ project_name_snake }}/qa_workflows.py b/src/{{ project_name_snake }}/qa_workflows.py index 5d30c5f..20b4828 100644 --- a/src/{{ project_name_snake }}/qa_workflows.py +++ b/src/{{ project_name_snake }}/qa_workflows.py @@ -1,4 +1,5 @@ from __future__ import annotations +from datetime import datetime import logging import os import tempfile @@ -196,6 +197,7 @@ class ConversationMessage(BaseModel): role: Literal["user", "assistant"] text: str sources: list[SourceMessage] = Field(default_factory=list) + timestamp: str = Field(default_factory=lambda: datetime.now().isoformat()) class ChatWorkflow(Workflow[ChatWorkflowState]): diff --git a/test-proj/pyproject.toml b/test-proj/pyproject.toml index e5abba3..7a43179 100644 --- a/test-proj/pyproject.toml +++ b/test-proj/pyproject.toml @@ -12,6 +12,7 @@ dependencies = [ "llama-index-llms-openai>=0.5.6", "llama-index-embeddings-openai>=0.5.1", "python-dotenv>=1.1.1", + "pydantic>=2.11.9", ] [build-system] diff --git a/test-proj/src/test_proj/clients.py b/test-proj/src/test_proj/clients.py index 9f8d0d8..2fe3766 100644 --- a/test-proj/src/test_proj/clients.py +++ b/test-proj/src/test_proj/clients.py @@ -3,7 +3,7 @@ import httpx from llama_cloud.client import AsyncLlamaCloud -from 
llama_cloud_services import LlamaParse +from llama_cloud_services import LlamaCloudIndex, LlamaParse # deployed agents may infer their name from the deployment name # Note: Make sure that an agent deployment with this name actually exists @@ -18,7 +18,8 @@ INDEX_NAME = "document_qa_index" -def get_custom_client() -> httpx.AsyncClient: +@functools.cache +def get_base_cloud_client() -> httpx.AsyncClient: return httpx.AsyncClient( timeout=60, headers={"Project-Id": LLAMA_CLOUD_PROJECT_ID} @@ -32,7 +33,7 @@ def get_llama_cloud_client() -> AsyncLlamaCloud: return AsyncLlamaCloud( base_url=LLAMA_CLOUD_BASE_URL, token=LLAMA_CLOUD_API_KEY, - httpx_client=get_custom_client(), + httpx_client=get_base_cloud_client(), ) @@ -48,5 +49,17 @@ def get_llama_parse_client() -> LlamaParse: result_type="markdown", api_key=LLAMA_CLOUD_API_KEY, project_id=LLAMA_CLOUD_PROJECT_ID, - custom_client=get_custom_client(), + custom_client=get_base_cloud_client(), + ) + + +@functools.lru_cache(maxsize=None) +def get_index(index_name: str) -> LlamaCloudIndex: + return LlamaCloudIndex.create_index( + name=index_name, + project_id=LLAMA_CLOUD_PROJECT_ID, + api_key=LLAMA_CLOUD_API_KEY, + base_url=LLAMA_CLOUD_BASE_URL, + show_progress=True, + custom_client=get_base_cloud_client(), ) diff --git a/test-proj/src/test_proj/qa_workflows.py b/test-proj/src/test_proj/qa_workflows.py index 5d30c5f..bfb950c 100644 --- a/test-proj/src/test_proj/qa_workflows.py +++ b/test-proj/src/test_proj/qa_workflows.py @@ -1,20 +1,23 @@ from __future__ import annotations +from collections.abc import AsyncGenerator +from datetime import datetime import logging import os import tempfile from typing import Any, Literal import httpx -from dotenv import load_dotenv -from llama_cloud import ChatMessage -from llama_cloud.types import RetrievalMode from llama_index.core import Settings -from llama_index.core.chat_engine.types import BaseChatEngine, ChatMode -from llama_index.core.memory import ChatMemoryBuffer -from pydantic import BaseModel, Field +from llama_index.core.chat_engine.types import ( + BaseChatEngine, + ChatMode, + StreamingAgentChatResponse, +) +from llama_index.core.llms import ChatMessage +import asyncio from llama_index.embeddings.openai import OpenAIEmbedding from llama_index.llms.openai import OpenAI -from llama_cloud_services import LlamaCloudIndex +from pydantic import BaseModel, Field from workflows import Workflow, step, Context from workflows.events import ( StartEvent, @@ -26,15 +29,12 @@ from workflows.retry_policy import ConstantDelayRetryPolicy from .clients import ( - LLAMA_CLOUD_API_KEY, - LLAMA_CLOUD_BASE_URL, - get_custom_client, + get_index, get_llama_cloud_client, get_llama_parse_client, LLAMA_CLOUD_PROJECT_ID, ) - -load_dotenv() +from llama_index.core.memory import Memory logger = logging.getLogger(__name__) @@ -57,15 +57,12 @@ class FileDownloadedEvent(Event): class ChatEvent(StartEvent): index_name: str - session_id: str # Configure LLM and embedding model Settings.llm = OpenAI(model="gpt-4", temperature=0.1) Settings.embed_model = OpenAIEmbedding(model="text-embedding-3-small") -custom_client = get_custom_client() - class DocumentUploadWorkflow(Workflow): """Workflow to upload and index documents using LlamaParse and LlamaCloud Index""" @@ -135,15 +132,7 @@ async def parse_document(self, ev: FileDownloadedEvent, ctx: Context) -> StopEve documents = result.get_text_documents() # Create or connect to LlamaCloud Index - index = LlamaCloudIndex.create_index( - documents=documents, - name=index_name, - 
project_id=LLAMA_CLOUD_PROJECT_ID, - api_key=LLAMA_CLOUD_API_KEY, - base_url=LLAMA_CLOUD_BASE_URL, - show_progress=True, - custom_client=custom_client, - ) + index = get_index(index_name) # Insert documents to index logger.info(f"Inserting {len(documents)} documents to {index_name}") @@ -181,9 +170,14 @@ class ChatDeltaEvent(Event): class ChatWorkflowState(BaseModel): - conversation_history: list[ChatMessage] = Field(default_factory=list) - session_id: str | None = None index_name: str | None = None + conversation_history: list[ConversationMessage] = Field(default_factory=list) + + def chat_messages(self) -> list[ChatMessage]: + return [ + ChatMessage(role=message.role, content=message.text) + for message in self.conversation_history + ] class SourceMessage(BaseModel): @@ -193,19 +187,31 @@ class SourceMessage(BaseModel): class ConversationMessage(BaseModel): + """ + Mostly just a wrapper for a ChatMessage with extra context for UI. Includes a timestamp and source references. + """ + role: Literal["user", "assistant"] text: str sources: list[SourceMessage] = Field(default_factory=list) + timestamp: str = Field(default_factory=lambda: datetime.now().isoformat()) -class ChatWorkflow(Workflow[ChatWorkflowState]): - """Workflow to handle continuous chat queries against indexed documents""" +def get_chat_engine(index_name: str) -> BaseChatEngine: + index = get_index(index_name) + return index.as_chat_engine( + chat_mode=ChatMode.CONTEXT, + llm=Settings.llm, + context_prompt=( + "You are a helpful assistant that answers questions based on the provided documents. " + "Always cite specific information from the documents when answering. " + "If you cannot find the answer in the documents, say so clearly." + ), + ) - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.chat_engines: dict[ - str, BaseChatEngine - ] = {} # Cache chat engines per index + +class ChatWorkflow(Workflow): + """Workflow to handle continuous chat queries against indexed documents""" @step async def initialize_chat( @@ -215,61 +221,23 @@ async def initialize_chat( try: logger.info(f"Initializing chat {ev.index_name}") index_name = ev.index_name - session_id = ev.session_id + initial_state = await ctx.store.get_state() # Store session info in context await ctx.store.set("index_name", index_name) - await ctx.store.set("session_id", session_id) - if ctx.store.get("conversation_history", None) is None: - await ctx.store.set("conversation_history", []) - - # Create cache key for chat engine - cache_key = f"{index_name}_{session_id}" - - # Initialize chat engine if not exists - if cache_key not in self.chat_engines: - logger.info(f"Initializing chat engine {cache_key}") - # Connect to LlamaCloud Index - index = LlamaCloudIndex( - name=index_name, - project_id=LLAMA_CLOUD_PROJECT_ID, - api_key=LLAMA_CLOUD_API_KEY, - base_url=LLAMA_CLOUD_BASE_URL, - async_httpx_client=custom_client, - ) - - # Create chat engine with memory - memory = ChatMemoryBuffer.from_defaults( - token_limit=3900, - chat_history=await ctx.store.get( - "conversation_history", default=[] - ), - ) - self.chat_engines[cache_key] = index.as_chat_engine( - chat_mode=ChatMode.CONTEXT, - memory=memory, - llm=Settings.llm, - context_prompt=( - "You are a helpful assistant that answers questions based on the provided documents. " - "Always cite specific information from the documents when answering. " - "If you cannot find the answer in the documents, say so clearly." 
- ), - verbose=False, - retriever_mode=RetrievalMode.CHUNKS, - ) - - history = await ctx.store.get("conversation_history", default=[]) - if len(history) == 0: + messages = await initial_state.memory.aget_all() + if len(messages) == 0: ctx.write_event_to_stream( - ConversationMessage( - role="assistant", - text="Chat initialized. Ask a question (or type 'exit' to quit): ", + AppendChatMessage( + message=ConversationMessage( + role="assistant", + text="Chat initialized. Ask a question (or type 'exit' to quit): ", + ) ) ) else: - for item in history: - item: ConversationMessage = item - ctx.write_event_to_stream(item) + for item in messages: + ctx.write_event_to_stream(AppendChatMessage(message=item)) # Request first user input return InputRequiredEvent(prefix="[waiting for user message]") @@ -283,43 +251,52 @@ async def initialize_chat( @step async def process_user_response( - self, ev: HumanResponseEvent, ctx: Context + self, ev: HumanResponseEvent, ctx: Context[ChatWorkflowState] ) -> InputRequiredEvent | HumanResponseEvent | StopEvent | None: """Process user input and generate response""" try: logger.info(f"Processing user response {ev.response}") user_input = ev.response.strip() - with ctx.store.edit_state() as state: - messages = state.get("conversation_history", default=[]) - messages.append(ConversationMessage(role="user", text=user_input)) - state.set("conversation_history", messages) + + initial_state = await ctx.store.get_state() + memory = initial_state.memory + index_name = initial_state.index_name logger.info(f"User input: {user_input}") # Check for exit command if user_input.lower() == "exit": logger.info("User input is exit") - conversation_history = await ctx.store.get( - "conversation_history", default=[] - ) return StopEvent( result={ "success": True, "message": "Chat session ended.", - "conversation_history": conversation_history, + "conversation_history": await memory.aget_all(), } ) - # Get session info from context - index_name = await ctx.store.get("index_name") - session_id = await ctx.store.get("session_id") - cache_key = f"{index_name}_{session_id}" - - # Get chat engine - chat_engine = self.chat_engines[cache_key] + chat_engine = get_chat_engine(index_name) # Process query with chat engine (streaming) - stream_response = await chat_engine.astream_chat(user_input) + async def _fake_stream_chat() -> AsyncGenerator[str, None]: + for token in ["Hel", "lo, ", "how ", "are ", "you?"]: + yield token + await asyncio.sleep(0.1) + + async def _fake_chat() -> StreamingAgentChatResponse: + class MockStreamResponse: + def __init__(self): + self.source_nodes = [] + + def async_response_gen(self): + return _fake_stream_chat() + + return MockStreamResponse() + + # stream_response = await _fake_chat() + stream_response = await chat_engine.astream_chat( + user_input, chat_history=initial_state.chat_messages() + ) full_text = "" # Emit streaming deltas to the event stream @@ -341,20 +318,22 @@ async def process_user_response( ) ) - # Update conversation history - response = ConversationMessage( - role="assistant", text=full_text.strip(), sources=sources - ) - with ctx.store.edit_state() as state: - messages = state.get("conversation_history", default=[]) - messages.append(response) - state.set("conversation_history", messages) - # After streaming completes, emit a summary response event to stream for frontend/main printing - ctx.write_event_to_stream(AppendChatMessage(message=response)) + assistant_response = ConversationMessage( + role="assistant", text=full_text, sources=sources + 
) + ctx.write_event_to_stream(AppendChatMessage(message=assistant_response)) + async with ctx.store.edit_state() as state: + state.conversation_history.extend( + [ + ConversationMessage(role="user", text=user_input), + assistant_response, + ] + ) return InputRequiredEvent(prefix="[waiting for user message]") except Exception as e: + logger.error(f"Error processing query: {str(e)}", exc_info=True) return StopEvent( result={"success": False, "error": f"Error processing query: {str(e)}"} ) diff --git a/test-proj/ui/package.json b/test-proj/ui/package.json index 8cbfa1e..514717c 100644 --- a/test-proj/ui/package.json +++ b/test-proj/ui/package.json @@ -17,6 +17,7 @@ "@llamaindex/ui": "^2.1.1", "@llamaindex/workflows-client": "^1.2.0", "@radix-ui/themes": "^3.2.1", + "idb": "^8.0.3", "llama-cloud-services": "^0.3.6", "lucide-react": "^0.544.0", "react": "^19.0.0", diff --git a/test-proj/ui/src/components/ChatBot.tsx b/test-proj/ui/src/components/ChatBot.tsx index e65f929..fdc7ebb 100644 --- a/test-proj/ui/src/components/ChatBot.tsx +++ b/test-proj/ui/src/components/ChatBot.tsx @@ -19,26 +19,36 @@ import { cn, useWorkflowRun, useWorkflowHandler, + WorkflowEvent, } from "@llamaindex/ui"; import { AGENT_NAME } from "../libs/config"; import { toHumanResponseRawEvent } from "@/libs/utils"; +import { useChatWorkflowHandler } from "@/libs/chatWorkflowHandler"; type Role = "user" | "assistant"; interface Message { - id: string; role: Role; + isPartial?: boolean; content: string; timestamp: Date; error?: boolean; } -export default function ChatBot() { - const { runWorkflow } = useWorkflowRun(); +export default function ChatBot({ + handlerId, + onHandlerCreated, +}: { + handlerId?: string; + onHandlerCreated?: (handlerId: string) => void; +}) { + const workflowHandler = useChatWorkflowHandler({ + handlerId, + onHandlerCreated, + }); const messagesEndRef = useRef(null); const inputRef = useRef(null); const [messages, setMessages] = useState([]); const [input, setInput] = useState(""); const [isLoading, setIsLoading] = useState(false); - const [handlerId, setHandlerId] = useState(null); const lastProcessedEventIndexRef = useRef(0); const [canSend, setCanSend] = useState(false); const streamingMessageIndexRef = useRef(null); @@ -54,7 +64,7 @@ export default function ChatBot() { const defaultIndexName = (import.meta as any).env?.VITE_DEFAULT_INDEX_NAME || "document_qa_index"; const sessionIdRef = useRef( - `chat-${Math.random().toString(36).slice(2)}-${Date.now()}`, + `chat-${Math.random().toString(36).slice(2)}-${Date.now()}` ); // UI text defaults @@ -64,7 +74,11 @@ export default function ChatBot() { "Welcome! 
👋 Upload a document with the control above, then ask questions here."; // Helper functions for message management - const appendMessage = (role: Role, msg: string): void => { + const appendMessage = ( + role: Role, + msg: string, + isPartial: boolean = false + ): void => { setMessages((prev) => { const id = `${role}-stream-${Date.now()}`; const idx = prev.length; @@ -75,27 +89,17 @@ export default function ChatBot() { id, role, content: msg, + isPartial, timestamp: new Date(), }, ]; }); }; - const updateMessage = (index: number, message: string) => { - setMessages((prev) => { - if (index < 0 || index >= prev.length) return prev; - const copy = [...prev]; - const existing = copy[index]; - copy[index] = { ...existing, content: message }; - return copy; - }); - }; - // Initialize with welcome message useEffect(() => { if (messages.length === 0) { const welcomeMsg: Message = { - id: "welcome", role: "assistant", content: welcomeMessage, timestamp: new Date(), @@ -104,19 +108,6 @@ export default function ChatBot() { } }, []); - // Create chat task on init - useEffect(() => { - (async () => { - if (!handlerId) { - const handler = await runWorkflow("chat", { - index_name: defaultIndexName, - session_id: sessionIdRef.current, - }); - setHandlerId(handler.handler_id); - } - })(); - }, []); - // Subscribe to task/events using hook (auto stream when handler exists) const { events } = useWorkflowHandler(handlerId ?? "", Boolean(handlerId)); @@ -127,36 +118,20 @@ export default function ChatBot() { if (startIdx < 0) startIdx = 0; if (startIdx >= events.length) return; - for (let i = startIdx; i < events.length; i++) { - const ev: any = events[i]; - const type = ev?.type as string | undefined; - const rawData = ev?.data as any; + const eventsToProcess = events.slice(startIdx); + const newMessages = toMessages(eventsToProcess); + if (newMessages.length > 0) { + setMessages((prev) => mergeMessages(prev, newMessages)); + } + for (const ev of eventsToProcess) { + const type = ev.type; if (!type) continue; - const data = (rawData && (rawData._data ?? rawData)) as any; - - if (type.includes("ChatDeltaEvent")) { - const delta: string = data?.delta ?? ""; - if (!delta) continue; - if (streamingMessageIndexRef.current === null) { - appendMessage("assistant", delta); - } else { - const idx = streamingMessageIndexRef.current; - const current = messages[idx!]?.content ?? ""; - if (current === "Thinking...") { - updateMessage(idx!, delta); - } else { - updateMessage(idx!, current + delta); - } - } - } else if (type.includes("ChatResponseEvent")) { - // finalize current stream - streamingMessageIndexRef.current = null; - } else if (type.includes("InputRequiredEvent")) { + if (type.endsWith(".InputRequiredEvent")) { // ready for next user input; enable send setCanSend(true); setIsLoading(false); inputRef.current?.focus(); - } else if (type.includes("StopEvent")) { + } else if (type.endsWith(".StopEvent")) { // finished; no summary bubble needed (chat response already streamed) } } @@ -178,16 +153,6 @@ export default function ChatBot() { ...(projectId ? 
{ "Project-Id": projectId } : {}), }); - const startChatIfNeeded = async (): Promise => { - if (handlerId) return handlerId; - const handler = await runWorkflow("chat", { - index_name: defaultIndexName, - session_id: sessionIdRef.current, - }); - setHandlerId(handler.handler_id); - return handler.handler_id; - }; - // Removed manual SSE ensureEventStream; hook handles streaming const handleSubmit = async (e: FormEvent) => { @@ -198,7 +163,6 @@ export default function ChatBot() { // Add user message const userMessage: Message = { - id: `user-${Date.now()}`, role: "user", content: trimmedInput, timestamp: new Date(), @@ -212,37 +176,21 @@ export default function ChatBot() { // Immediately create an assistant placeholder to avoid visual gap before deltas if (streamingMessageIndexRef.current === null) { - appendMessage("assistant", "Thinking..."); + appendMessage("assistant", "Thinking...", true); } try { - // Ensure chat handler exists (created on init) - const hid = await startChatIfNeeded(); - // Send user input as HumanResponseEvent - const postRes = await fetch(`/deployments/${deployment}/events/${hid}`, { - method: "POST", - headers: { - "Content-Type": "application/json", - ...getCommonHeaders(), - }, - body: JSON.stringify({ - event: JSON.stringify(toHumanResponseRawEvent(trimmedInput)), - }), + await workflowHandler.sendEvent({ + data: { _data: { response: trimmedInput } }, + type: "workflows.events.HumanResponseEvent", }); - if (!postRes.ok) { - throw new Error( - `Failed to send message: ${postRes.status} ${postRes.statusText}`, - ); - } - // The assistant reply will be streamed by useWorkflowTask and appended incrementally } catch (err) { console.error("Chat error:", err); // Add error message const errorMessage: Message = { - id: `error-${Date.now()}`, role: "assistant", content: `Sorry, I encountered an error: ${err instanceof Error ? err.message : "Unknown error"}. Please try again.`, timestamp: new Date(), @@ -268,7 +216,6 @@ export default function ChatBot() { const clearChat = () => { setMessages([ { - id: "welcome", role: "assistant" as const, content: welcomeMessage, timestamp: new Date(), @@ -294,7 +241,7 @@ export default function ChatBot() { return (
{/* Header */} @@ -350,12 +297,12 @@ export default function ChatBot() {
) : (
- {messages.map((message) => ( + {messages.map((message, i) => (
{message.role !== "user" && ( @@ -364,7 +311,7 @@ export default function ChatBot() { "w-8 h-8 rounded-full flex items-center justify-center flex-shrink-0", message.error ? "bg-red-100 dark:bg-red-900" - : "bg-blue-100 dark:bg-blue-900", + : "bg-blue-100 dark:bg-blue-900" )} >
@@ -380,7 +327,7 @@ export default function ChatBot() {

{message.content} @@ -409,7 +356,7 @@ export default function ChatBot() { ? "text-blue-100" : message.error ? "text-red-500 dark:text-red-400" - : "text-gray-500 dark:text-gray-400", + : "text-gray-500 dark:text-gray-400" )} > {message.timestamp.toLocaleTimeString()} @@ -490,3 +437,92 @@ export default function ChatBot() {

); } + +interface _Message { + role: "assistant" | "user"; + content: string; + isPartial?: boolean; + timestamp: string; +} + +interface AppendChatMessageData { + message: ChatMessage; +} +interface ChatMessage { + role: "user" | "assistant"; + text: string; + sources: { + text: string; + score: number; + metadata: Record; + }[]; + timestamp: string; +} + +function mergeMessages(previous: Message[], current: Message[]): Message[] { + const lastPreviousMessage = previous[previous.length - 1]; + const restPrevious = previous.slice(0, -1); + const firstCurrentMessage = current[0]; + const restCurrent = current.slice(1); + if (!lastPreviousMessage || !firstCurrentMessage) { + return [...previous, ...current]; + } + if (lastPreviousMessage.isPartial && firstCurrentMessage.isPartial) { + const lastContent = + lastPreviousMessage.content === "Thinking..." + ? "" + : lastPreviousMessage.content; + const merged = { + ...lastPreviousMessage, + content: lastContent + firstCurrentMessage.content, + }; + return [...restPrevious, merged, ...restCurrent]; + } else if ( + lastPreviousMessage.isPartial && + firstCurrentMessage.role === lastPreviousMessage.role + ) { + return [...restPrevious, firstCurrentMessage, ...restCurrent]; + } else { + return [...previous, ...current]; + } +} + +function toMessages(events: WorkflowEvent[]): Message[] { + const messages: Message[] = []; + for (const ev of events) { + const type = ev.type; + const data = ev.data as any; + const lastMessage = messages[messages.length - 1]; + if (type.endsWith(".ChatDeltaEvent")) { + const delta: string = data?.delta ?? ""; + if (!delta) continue; + if (!lastMessage || !lastMessage.isPartial) { + messages.push({ + role: "assistant", + content: delta, + isPartial: true, + timestamp: new Date(), + }); + } else { + lastMessage.content += delta; + } + } else if (type.endsWith(".AppendChatMessage")) { + if ( + lastMessage && + lastMessage.isPartial && + lastMessage.role === "assistant" + ) { + messages.pop(); + } + const content = ev.data as unknown as AppendChatMessageData; + console.log("AppendChatMessage", content); + messages.push({ + role: content.message.role, + content: content.message.text, + timestamp: new Date(content.message.timestamp), + isPartial: false, + }); + } + } + return messages; +} diff --git a/test-proj/ui/src/components/Sidebar.tsx b/test-proj/ui/src/components/Sidebar.tsx new file mode 100644 index 0000000..dae9ad4 --- /dev/null +++ b/test-proj/ui/src/components/Sidebar.tsx @@ -0,0 +1,125 @@ +import { MessageSquare, Clock, Loader2 } from "lucide-react"; +import { + ScrollArea, + Card, + CardContent, + cn, +} from "@llamaindex/ui"; +import { useChatHistory, ChatHistory } from "../libs/chatHistory"; + +interface SidebarProps { + className?: string; +} + +export default function Sidebar({ className }: SidebarProps) { + const { loading, getChats, selectedChat, setSelectedChat } = useChatHistory(); + const chats = getChats(); + + const formatTimestamp = (timestamp: string): string => { + const date = new Date(timestamp); + const now = new Date(); + const diffInHours = (now.getTime() - date.getTime()) / (1000 * 60 * 60); + + if (diffInHours < 1) { + return "Just now"; + } else if (diffInHours < 24) { + return `${Math.floor(diffInHours)}h ago`; + } else if (diffInHours < 24 * 7) { + return `${Math.floor(diffInHours / 24)}d ago`; + } else { + return date.toLocaleDateString(); + } + }; + + const handleChatSelect = (chat: ChatHistory): void => { + setSelectedChat(chat); + }; + + return ( +
+ {/* Header */} +
+
+ +

+ Chat History +

+ {loading && ( + + )} +
+
+ + {/* Chat List */} + + {loading ? ( +
+
+ +

+ Loading chat history... +

+
+
+ ) : chats.length === 0 ? ( +
+
+ +

+ No chat history yet +

+

+ Start a conversation to see it here +

+
+
+ ) : ( +
+ {chats.map((chat) => ( + handleChatSelect(chat)} + > + +
+
+
+ +

+ Chat {chat.handlerId.slice(-8)} +

+
+
+ + {formatTimestamp(chat.timestamp)} +
+
+ {selectedChat?.handlerId === chat.handlerId && ( +
+ )} +
+ + + ))} +
+ )} + +
+ ); +} diff --git a/test-proj/ui/src/libs/chatHistory.ts b/test-proj/ui/src/libs/chatHistory.ts new file mode 100644 index 0000000..fe7e906 --- /dev/null +++ b/test-proj/ui/src/libs/chatHistory.ts @@ -0,0 +1,159 @@ +import { IDBPDatabase, openDB } from "idb"; +import { useEffect, useState } from "react"; + +export interface ChatHistory { + handlerId: string; + timestamp: string; +} + +export interface UseChatHistory { + loading: boolean; + addChat(handlerId: string): void; + deleteChat(handlerId: string): void; + getChats(): ChatHistory[]; + selectedChat: ChatHistory | null; + setSelectedChat(chat: ChatHistory | null): void; +} + +const DB_NAME = "chat-history"; +const DB_VERSION = 1; +const STORE_NAME = "chats"; + +/** + * Hook that tracks workflow handler ids, to use as markers of a chat conversation that can be reloaded. + * Stores chats in IndexedDB + * @returns + */ +export function useChatHistory(): UseChatHistory { + const [loading, setLoading] = useState(true); + const [chatHistory, setChatHistory] = useState([]); + const [selectedChat, setSelectedChat] = useState(null); + const [db, setDb] = useState | null>(null); + + // Initialize database + useEffect(() => { + let thisDb: IDBPDatabase | null = null; + + const initDb = async () => { + try { + thisDb = await openDB(DB_NAME, DB_VERSION, { + upgrade(db) { + if (!db.objectStoreNames.contains(STORE_NAME)) { + const store = db.createObjectStore(STORE_NAME, { + keyPath: "handlerId", + }); + store.createIndex("timestamp", "timestamp"); + } + }, + }); + setDb(thisDb); + } catch (error) { + console.error("Failed to initialize database:", error); + setLoading(false); + } + }; + + initDb(); + + return () => { + thisDb?.close(); + }; + }, []); + + // Load chat history when database is ready + useEffect(() => { + if (!db) return; + + const loadChats = async () => { + try { + setLoading(true); + const chats = await getChatsFromDb(); + setChatHistory(chats); + + // Initialize selectedChat to the latest chat (first in sorted array) + if (chats.length > 0 && !selectedChat) { + setSelectedChat(chats[0]); + } + } catch (error) { + console.error("Failed to load chat history:", error); + } finally { + setLoading(false); + } + }; + + loadChats(); + }, [db]); + + const getChatsFromDb = async (): Promise => { + if (!db) return []; + + try { + const transaction = db.transaction(STORE_NAME, "readonly"); + const store = transaction.objectStore(STORE_NAME); + const index = store.index("timestamp"); + const chats = await index.getAll(); + + // Sort by timestamp descending (most recent first) + return chats.sort( + (a, b) => + new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime() + ); + } catch (error) { + console.error("Failed to get chats from database:", error); + return []; + } + }; + + const addChat = async (handlerId: string): Promise => { + if (!db) return; + + try { + const chat: ChatHistory = { + handlerId, + timestamp: new Date().toISOString(), + }; + + const transaction = db.transaction(STORE_NAME, "readwrite"); + const store = transaction.objectStore(STORE_NAME); + await store.put(chat); + + // Update local state + setChatHistory((prev) => [ + chat, + ...prev.filter((c) => c.handlerId !== handlerId), + ]); + + // Set as selected chat if it's the first chat or if no chat is currently selected + if (!selectedChat) { + setSelectedChat(chat); + } + } catch (error) { + console.error("Failed to add chat to database:", error); + } + }; + + const deleteChat = async (handlerId: string): Promise => { + if (!db) return; + + try { + const 
transaction = db.transaction(STORE_NAME, "readwrite"); + const store = transaction.objectStore(STORE_NAME); + await store.delete(handlerId); + } catch (error) { + console.error("Failed to delete chat from database:", error); + } + }; + + const getChats = (): ChatHistory[] => { + return chatHistory; + }; + + return { + loading, + addChat, + getChats, + selectedChat, + setSelectedChat, + deleteChat, + }; +} diff --git a/test-proj/ui/src/libs/chatWorkflowHandler.ts b/test-proj/ui/src/libs/chatWorkflowHandler.ts new file mode 100644 index 0000000..9debbaa --- /dev/null +++ b/test-proj/ui/src/libs/chatWorkflowHandler.ts @@ -0,0 +1,35 @@ +import { useWorkflowHandler, useWorkflowRun } from "@llamaindex/ui"; +import { useEffect, useState } from "react"; +import { INDEX_NAME } from "./config"; + +/** + * Creates a new chat conversation if no handlerId is provided + */ +export function useChatWorkflowHandler({ + handlerId, + onHandlerCreated, +}: { + handlerId?: string; + onHandlerCreated?: (handlerId: string) => void; +}): ReturnType { + const create = useWorkflowRun(); + const [thisHandlerId, setThisHandlerId] = useState( + handlerId + ); + const workflowHandler = useWorkflowHandler(thisHandlerId ?? ""); + + const createHandler = async () => { + const handler = await create.runWorkflow("chat", { + index_name: INDEX_NAME, + }); + setThisHandlerId(handler.handler_id); + onHandlerCreated?.(handler.handler_id); + }; + useEffect(() => { + if (!handlerId) { + createHandler(); + } + }, [handlerId]); + + return workflowHandler; +} diff --git a/test-proj/ui/src/pages/Home.tsx b/test-proj/ui/src/pages/Home.tsx index c8d54b8..157e29b 100644 --- a/test-proj/ui/src/pages/Home.tsx +++ b/test-proj/ui/src/pages/Home.tsx @@ -1,8 +1,11 @@ import ChatBot from "../components/ChatBot"; import { WorkflowTrigger } from "@llamaindex/ui"; import { APP_TITLE, INDEX_NAME } from "../libs/config"; +import { useChatHistory } from "@/libs/chatHistory"; +import Sidebar from "@/components/Sidebar"; export default function Home() { + const chatHistory = useChatHistory(); return (
@@ -16,21 +19,26 @@ export default function Home() {

-
-
- { - return { - file_id: files[0].fileId, - index_name: INDEX_NAME, - }; - }} - /> -
+
+
-
- +
+ { + return { + file_id: files[0].fileId, + index_name: INDEX_NAME, + }; + }} + /> +
+
+
+ {!chatHistory.loading && ( + + )} +
From 3fb646ae7908621143f3c802173c70a8b6bc7646 Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Mon, 22 Sep 2025 23:20:34 -0400 Subject: [PATCH 03/18] wip --- test-proj/ui/src/components/ChatBot.tsx | 16 ++-- test-proj/ui/src/components/Sidebar.tsx | 114 +++++++++--------------- test-proj/ui/src/libs/chatHistory.ts | 35 ++++++-- test-proj/ui/src/pages/Home.tsx | 12 ++- 4 files changed, 86 insertions(+), 91 deletions(-) diff --git a/test-proj/ui/src/components/ChatBot.tsx b/test-proj/ui/src/components/ChatBot.tsx index fdc7ebb..7d1863f 100644 --- a/test-proj/ui/src/components/ChatBot.tsx +++ b/test-proj/ui/src/components/ChatBot.tsx @@ -17,12 +17,10 @@ import { Card, CardContent, cn, - useWorkflowRun, useWorkflowHandler, WorkflowEvent, } from "@llamaindex/ui"; import { AGENT_NAME } from "../libs/config"; -import { toHumanResponseRawEvent } from "@/libs/utils"; import { useChatWorkflowHandler } from "@/libs/chatWorkflowHandler"; type Role = "user" | "assistant"; @@ -52,21 +50,21 @@ export default function ChatBot({ const lastProcessedEventIndexRef = useRef(0); const [canSend, setCanSend] = useState(false); const streamingMessageIndexRef = useRef(null); + useEffect(() => { + if (handlerId) { + setIsLoading(false); // whenever handler becomes defined and changed, stop loading + setCanSend(true); + } + }, [handlerId]); // Deployment + auth setup - const deployment = AGENT_NAME || "document-qa"; const platformToken = (import.meta as any).env?.VITE_LLAMA_CLOUD_API_KEY as | string | undefined; const projectId = (import.meta as any).env?.VITE_LLAMA_DEPLOY_PROJECT_ID as | string | undefined; - const defaultIndexName = - (import.meta as any).env?.VITE_DEFAULT_INDEX_NAME || "document_qa_index"; - const sessionIdRef = useRef( - `chat-${Math.random().toString(36).slice(2)}-${Date.now()}` - ); - + // UI text defaults const title = "AI Document Assistant"; const placeholder = "Ask me anything about your documents..."; diff --git a/test-proj/ui/src/components/Sidebar.tsx b/test-proj/ui/src/components/Sidebar.tsx index dae9ad4..744a378 100644 --- a/test-proj/ui/src/components/Sidebar.tsx +++ b/test-proj/ui/src/components/Sidebar.tsx @@ -1,10 +1,5 @@ -import { MessageSquare, Clock, Loader2 } from "lucide-react"; -import { - ScrollArea, - Card, - CardContent, - cn, -} from "@llamaindex/ui"; +import { X } from "lucide-react"; +import { ScrollArea, cn } from "@llamaindex/ui"; import { useChatHistory, ChatHistory } from "../libs/chatHistory"; interface SidebarProps { @@ -12,7 +7,8 @@ interface SidebarProps { } export default function Sidebar({ className }: SidebarProps) { - const { loading, getChats, selectedChat, setSelectedChat } = useChatHistory(); + const { loading, getChats, selectedChatId, setSelectedChatId, deleteChat } = + useChatHistory(); const chats = getChats(); const formatTimestamp = (timestamp: string): string => { @@ -32,90 +28,68 @@ export default function Sidebar({ className }: SidebarProps) { }; const handleChatSelect = (chat: ChatHistory): void => { - setSelectedChat(chat); + setSelectedChatId(chat.handlerId); + }; + + const handleDeleteChat = (e: React.MouseEvent, handlerId: string): void => { + e.stopPropagation(); + deleteChat(handlerId); }; return ( -
+
{/* Header */} -
-
- -

- Chat History -

- {loading && ( - - )} -
+
+

+ Chats +

{/* Chat List */} {loading ? ( -
-
- -

- Loading chat history... -

+
+
+ Loading...
) : chats.length === 0 ? ( -
-
- -

- No chat history yet -

-

- Start a conversation to see it here -

+
+
+ No chats yet
) : ( -
+
{chats.map((chat) => ( - handleChatSelect(chat)} > - -
-
-
- -

- Chat {chat.handlerId.slice(-8)} -

-
-
- - {formatTimestamp(chat.timestamp)} -
-
- {selectedChat?.handlerId === chat.handlerId && ( -
- )} +
+
+ {formatTimestamp(chat.timestamp)}
- - +
+ +
))}
)} diff --git a/test-proj/ui/src/libs/chatHistory.ts b/test-proj/ui/src/libs/chatHistory.ts index fe7e906..fde969d 100644 --- a/test-proj/ui/src/libs/chatHistory.ts +++ b/test-proj/ui/src/libs/chatHistory.ts @@ -11,8 +11,8 @@ export interface UseChatHistory { addChat(handlerId: string): void; deleteChat(handlerId: string): void; getChats(): ChatHistory[]; - selectedChat: ChatHistory | null; - setSelectedChat(chat: ChatHistory | null): void; + selectedChatId: string | null; + setSelectedChatId(handlerId: string): void; } const DB_NAME = "chat-history"; @@ -27,7 +27,9 @@ const STORE_NAME = "chats"; export function useChatHistory(): UseChatHistory { const [loading, setLoading] = useState(true); const [chatHistory, setChatHistory] = useState([]); - const [selectedChat, setSelectedChat] = useState(null); + const [selectedChatHandlerId, setSelectedChatHandlerId] = useState< + string | null + >(null); const [db, setDb] = useState | null>(null); // Initialize database @@ -71,8 +73,8 @@ export function useChatHistory(): UseChatHistory { setChatHistory(chats); // Initialize selectedChat to the latest chat (first in sorted array) - if (chats.length > 0 && !selectedChat) { - setSelectedChat(chats[0]); + if (chats.length > 0 && !selectedChatHandlerId) { + setSelectedChatHandlerId(chats[0].handlerId); } } catch (error) { console.error("Failed to load chat history:", error); @@ -124,8 +126,8 @@ export function useChatHistory(): UseChatHistory { ]); // Set as selected chat if it's the first chat or if no chat is currently selected - if (!selectedChat) { - setSelectedChat(chat); + if (!selectedChatHandlerId) { + setSelectedChatHandlerId(chat.handlerId); } } catch (error) { console.error("Failed to add chat to database:", error); @@ -139,6 +141,21 @@ export function useChatHistory(): UseChatHistory { const transaction = db.transaction(STORE_NAME, "readwrite"); const store = transaction.objectStore(STORE_NAME); await store.delete(handlerId); + + // Update local state + setChatHistory((prev) => prev.filter((c) => c.handlerId !== handlerId)); + + // If the deleted chat was selected, select the next available chat or clear selection + if (selectedChatHandlerId === handlerId) { + const remainingChats = chatHistory.filter( + (c) => c.handlerId !== handlerId + ); + if (remainingChats.length > 0) { + setSelectedChatHandlerId(remainingChats[0].handlerId); + } else { + setSelectedChatHandlerId(null); + } + } } catch (error) { console.error("Failed to delete chat from database:", error); } @@ -152,8 +169,8 @@ export function useChatHistory(): UseChatHistory { loading, addChat, getChats, - selectedChat, - setSelectedChat, + selectedChatId: selectedChatHandlerId, + setSelectedChatId: setSelectedChatHandlerId, deleteChat, }; } diff --git a/test-proj/ui/src/pages/Home.tsx b/test-proj/ui/src/pages/Home.tsx index 157e29b..53fd093 100644 --- a/test-proj/ui/src/pages/Home.tsx +++ b/test-proj/ui/src/pages/Home.tsx @@ -19,9 +19,9 @@ export default function Home() {

-
+
-
+
{!chatHistory.loading && ( - + { + chatHistory.addChat(handler); + chatHistory.setSelectedChatId(handler); + }} + /> )}
From 35b4df4e6a2673ca5980bcd9f1eb82af9d125ec9 Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Tue, 23 Sep 2025 10:22:48 -0400 Subject: [PATCH 04/18] wip --- test-proj/src/test_proj/qa_workflows.py | 29 +- test-proj/ui/src/components/ChatBot.tsx | 333 ++---------------- test-proj/ui/src/components/Sidebar.tsx | 57 ++- .../{chatHistory.ts => useChatHistory.ts} | 11 + ...owHandler.ts => useChatWorkflowHandler.ts} | 0 test-proj/ui/src/libs/useChatbot.ts | 293 +++++++++++++++ test-proj/ui/src/pages/Home.tsx | 5 +- 7 files changed, 394 insertions(+), 334 deletions(-) rename test-proj/ui/src/libs/{chatHistory.ts => useChatHistory.ts} (94%) rename test-proj/ui/src/libs/{chatWorkflowHandler.ts => useChatWorkflowHandler.ts} (100%) create mode 100644 test-proj/ui/src/libs/useChatbot.ts diff --git a/test-proj/src/test_proj/qa_workflows.py b/test-proj/src/test_proj/qa_workflows.py index bfb950c..5d922f6 100644 --- a/test-proj/src/test_proj/qa_workflows.py +++ b/test-proj/src/test_proj/qa_workflows.py @@ -34,8 +34,6 @@ get_llama_parse_client, LLAMA_CLOUD_PROJECT_ID, ) -from llama_index.core.memory import Memory - logger = logging.getLogger(__name__) @@ -57,6 +55,7 @@ class FileDownloadedEvent(Event): class ChatEvent(StartEvent): index_name: str + conversation_history: list[ConversationMessage] = Field(default_factory=list) # Configure LLM and embedding model @@ -225,23 +224,19 @@ async def initialize_chat( initial_state = await ctx.store.get_state() # Store session info in context await ctx.store.set("index_name", index_name) - messages = await initial_state.memory.aget_all() - if len(messages) == 0: - ctx.write_event_to_stream( - AppendChatMessage( - message=ConversationMessage( - role="assistant", - text="Chat initialized. Ask a question (or type 'exit' to quit): ", - ) - ) - ) - else: - for item in messages: - ctx.write_event_to_stream(AppendChatMessage(message=item)) + messages = initial_state.conversation_history + + for item in messages: + ctx.write_event_to_stream(AppendChatMessage(message=item)) + + if ev.conversation_history: + async with ctx.store.edit_state() as state: + state.conversation_history.extend(ev.conversation_history) # Request first user input return InputRequiredEvent(prefix="[waiting for user message]") except Exception as e: + logger.error(f"Error initializing chat: {str(e)}", exc_info=True) return StopEvent( result={ "success": False, @@ -259,7 +254,7 @@ async def process_user_response( user_input = ev.response.strip() initial_state = await ctx.store.get_state() - memory = initial_state.memory + conversation_history = initial_state.conversation_history index_name = initial_state.index_name logger.info(f"User input: {user_input}") @@ -271,7 +266,7 @@ async def process_user_response( result={ "success": True, "message": "Chat session ended.", - "conversation_history": await memory.aget_all(), + "conversation_history": conversation_history, } ) diff --git a/test-proj/ui/src/components/ChatBot.tsx b/test-proj/ui/src/components/ChatBot.tsx index 7d1863f..e69854e 100644 --- a/test-proj/ui/src/components/ChatBot.tsx +++ b/test-proj/ui/src/components/ChatBot.tsx @@ -1,36 +1,25 @@ // This is a temporary chatbot component that is used to test the chatbot functionality. // LlamaIndex will replace it with better chatbot component. 
-import { useState, useRef, useEffect, FormEvent, KeyboardEvent } from "react"; -import { - Send, - Loader2, - Bot, - User, - MessageSquare, - Trash2, - RefreshCw, -} from "lucide-react"; +import { useChatbot } from "@/libs/useChatbot"; import { Button, - Input, - ScrollArea, Card, CardContent, cn, - useWorkflowHandler, - WorkflowEvent, + Input, + ScrollArea, } from "@llamaindex/ui"; -import { AGENT_NAME } from "../libs/config"; -import { useChatWorkflowHandler } from "@/libs/chatWorkflowHandler"; +import { + Bot, + Loader2, + MessageSquare, + RefreshCw, + Send, + Trash2, + User, +} from "lucide-react"; +import { FormEvent, KeyboardEvent, useEffect, useRef } from "react"; -type Role = "user" | "assistant"; -interface Message { - role: Role; - isPartial?: boolean; - content: string; - timestamp: Date; - error?: boolean; -} export default function ChatBot({ handlerId, onHandlerCreated, @@ -38,103 +27,19 @@ export default function ChatBot({ handlerId?: string; onHandlerCreated?: (handlerId: string) => void; }) { - const workflowHandler = useChatWorkflowHandler({ + const inputRef = useRef(null); + const messagesEndRef = useRef(null); + const chatbot = useChatbot({ handlerId, onHandlerCreated, + focusInput: () => { + inputRef.current?.focus(); + }, }); - const messagesEndRef = useRef(null); - const inputRef = useRef(null); - const [messages, setMessages] = useState([]); - const [input, setInput] = useState(""); - const [isLoading, setIsLoading] = useState(false); - const lastProcessedEventIndexRef = useRef(0); - const [canSend, setCanSend] = useState(false); - const streamingMessageIndexRef = useRef(null); - useEffect(() => { - if (handlerId) { - setIsLoading(false); // whenever handler becomes defined and changed, stop loading - setCanSend(true); - } - }, [handlerId]); - // Deployment + auth setup - const platformToken = (import.meta as any).env?.VITE_LLAMA_CLOUD_API_KEY as - | string - | undefined; - const projectId = (import.meta as any).env?.VITE_LLAMA_DEPLOY_PROJECT_ID as - | string - | undefined; - // UI text defaults const title = "AI Document Assistant"; const placeholder = "Ask me anything about your documents..."; - const welcomeMessage = - "Welcome! 👋 Upload a document with the control above, then ask questions here."; - - // Helper functions for message management - const appendMessage = ( - role: Role, - msg: string, - isPartial: boolean = false - ): void => { - setMessages((prev) => { - const id = `${role}-stream-${Date.now()}`; - const idx = prev.length; - streamingMessageIndexRef.current = idx; - return [ - ...prev, - { - id, - role, - content: msg, - isPartial, - timestamp: new Date(), - }, - ]; - }); - }; - - // Initialize with welcome message - useEffect(() => { - if (messages.length === 0) { - const welcomeMsg: Message = { - role: "assistant", - content: welcomeMessage, - timestamp: new Date(), - }; - setMessages([welcomeMsg]); - } - }, []); - - // Subscribe to task/events using hook (auto stream when handler exists) - const { events } = useWorkflowHandler(handlerId ?? 
"", Boolean(handlerId)); - - // Process streamed events into messages - useEffect(() => { - if (!events || events.length === 0) return; - let startIdx = lastProcessedEventIndexRef.current; - if (startIdx < 0) startIdx = 0; - if (startIdx >= events.length) return; - - const eventsToProcess = events.slice(startIdx); - const newMessages = toMessages(eventsToProcess); - if (newMessages.length > 0) { - setMessages((prev) => mergeMessages(prev, newMessages)); - } - for (const ev of eventsToProcess) { - const type = ev.type; - if (!type) continue; - if (type.endsWith(".InputRequiredEvent")) { - // ready for next user input; enable send - setCanSend(true); - setIsLoading(false); - inputRef.current?.focus(); - } else if (type.endsWith(".StopEvent")) { - // finished; no summary bubble needed (chat response already streamed) - } - } - lastProcessedEventIndexRef.current = events.length; - }, [events, messages]); const scrollToBottom = () => { messagesEndRef.current?.scrollIntoView({ behavior: "smooth" }); @@ -142,65 +47,11 @@ export default function ChatBot({ useEffect(() => { scrollToBottom(); - }, [messages]); - - // No manual SSE cleanup needed - - const getCommonHeaders = () => ({ - ...(platformToken ? { authorization: `Bearer ${platformToken}` } : {}), - ...(projectId ? { "Project-Id": projectId } : {}), - }); - - // Removed manual SSE ensureEventStream; hook handles streaming + }, [chatbot.messages]); const handleSubmit = async (e: FormEvent) => { e.preventDefault(); - - const trimmedInput = input.trim(); - if (!trimmedInput || isLoading || !canSend) return; - - // Add user message - const userMessage: Message = { - role: "user", - content: trimmedInput, - timestamp: new Date(), - }; - - const newMessages = [...messages, userMessage]; - setMessages(newMessages); - setInput(""); - setIsLoading(true); - setCanSend(false); - - // Immediately create an assistant placeholder to avoid visual gap before deltas - if (streamingMessageIndexRef.current === null) { - appendMessage("assistant", "Thinking...", true); - } - - try { - // Send user input as HumanResponseEvent - await workflowHandler.sendEvent({ - data: { _data: { response: trimmedInput } }, - type: "workflows.events.HumanResponseEvent", - }); - // The assistant reply will be streamed by useWorkflowTask and appended incrementally - } catch (err) { - console.error("Chat error:", err); - - // Add error message - const errorMessage: Message = { - role: "assistant", - content: `Sorry, I encountered an error: ${err instanceof Error ? err.message : "Unknown error"}. Please try again.`, - timestamp: new Date(), - error: true, - }; - - setMessages((prev) => [...prev, errorMessage]); - } finally { - setIsLoading(false); - // Focus back on input - inputRef.current?.focus(); - } + await chatbot.submit(); }; const handleKeyDown = (e: KeyboardEvent) => { @@ -211,31 +62,6 @@ export default function ChatBot({ } }; - const clearChat = () => { - setMessages([ - { - role: "assistant" as const, - content: welcomeMessage, - timestamp: new Date(), - }, - ]); - setInput(""); - inputRef.current?.focus(); - }; - - const retryLastMessage = () => { - const lastUserMessage = messages.filter((m) => m.role === "user").pop(); - if (lastUserMessage) { - // Remove the last assistant message if it was an error - const lastMessage = messages[messages.length - 1]; - if (lastMessage.role === "assistant" && lastMessage.error) { - setMessages((prev) => prev.slice(0, -1)); - } - setInput(lastUserMessage.content); - inputRef.current?.focus(); - } - }; - return (
                {title}
[garbled hunks: JSX tags were stripped in extraction. The recoverable changes switch the header and action buttons from local state to the hook:
  - {isLoading && ( ... Thinking... )}            +  {chatbot.isLoading && ( ... Thinking... )}
  - {messages.some((m) => m.error) && ( ... )}    +  {chatbot.messages.some((m) => m.error) && ( ... )}
  - {messages.length > 0 && ( ... )}              +  {chatbot.messages.length > 0 && ( ... )}]
); } - -interface _Message { - role: "assistant" | "user"; - content: string; - isPartial?: boolean; - timestamp: string; -} - -interface AppendChatMessageData { - message: ChatMessage; -} -interface ChatMessage { - role: "user" | "assistant"; - text: string; - sources: { - text: string; - score: number; - metadata: Record; - }[]; - timestamp: string; -} - -function mergeMessages(previous: Message[], current: Message[]): Message[] { - const lastPreviousMessage = previous[previous.length - 1]; - const restPrevious = previous.slice(0, -1); - const firstCurrentMessage = current[0]; - const restCurrent = current.slice(1); - if (!lastPreviousMessage || !firstCurrentMessage) { - return [...previous, ...current]; - } - if (lastPreviousMessage.isPartial && firstCurrentMessage.isPartial) { - const lastContent = - lastPreviousMessage.content === "Thinking..." - ? "" - : lastPreviousMessage.content; - const merged = { - ...lastPreviousMessage, - content: lastContent + firstCurrentMessage.content, - }; - return [...restPrevious, merged, ...restCurrent]; - } else if ( - lastPreviousMessage.isPartial && - firstCurrentMessage.role === lastPreviousMessage.role - ) { - return [...restPrevious, firstCurrentMessage, ...restCurrent]; - } else { - return [...previous, ...current]; - } -} - -function toMessages(events: WorkflowEvent[]): Message[] { - const messages: Message[] = []; - for (const ev of events) { - const type = ev.type; - const data = ev.data as any; - const lastMessage = messages[messages.length - 1]; - if (type.endsWith(".ChatDeltaEvent")) { - const delta: string = data?.delta ?? ""; - if (!delta) continue; - if (!lastMessage || !lastMessage.isPartial) { - messages.push({ - role: "assistant", - content: delta, - isPartial: true, - timestamp: new Date(), - }); - } else { - lastMessage.content += delta; - } - } else if (type.endsWith(".AppendChatMessage")) { - if ( - lastMessage && - lastMessage.isPartial && - lastMessage.role === "assistant" - ) { - messages.pop(); - } - const content = ev.data as unknown as AppendChatMessageData; - console.log("AppendChatMessage", content); - messages.push({ - role: content.message.role, - content: content.message.text, - timestamp: new Date(content.message.timestamp), - isPartial: false, - }); - } - } - return messages; -} diff --git a/test-proj/ui/src/components/Sidebar.tsx b/test-proj/ui/src/components/Sidebar.tsx index 744a378..874240e 100644 --- a/test-proj/ui/src/components/Sidebar.tsx +++ b/test-proj/ui/src/components/Sidebar.tsx @@ -1,29 +1,39 @@ -import { X } from "lucide-react"; -import { ScrollArea, cn } from "@llamaindex/ui"; -import { useChatHistory, ChatHistory } from "../libs/chatHistory"; +import { Plus, X } from "lucide-react"; +import { Button, ScrollArea, cn } from "@llamaindex/ui"; +import { ChatHistory, UseChatHistory } from "../libs/useChatHistory"; interface SidebarProps { className?: string; + chatHistory: UseChatHistory; } -export default function Sidebar({ className }: SidebarProps) { - const { loading, getChats, selectedChatId, setSelectedChatId, deleteChat } = - useChatHistory(); +export default function Sidebar({ className, chatHistory }: SidebarProps) { + const { + loading, + getChats, + selectedChatId, + setSelectedChatId, + deleteChat, + createNewChat, + } = chatHistory; const chats = getChats(); const formatTimestamp = (timestamp: string): string => { const date = new Date(timestamp); const now = new Date(); - const diffInHours = (now.getTime() - date.getTime()) / (1000 * 60 * 60); - - if (diffInHours < 1) { - return "Just now"; 
- } else if (diffInHours < 24) { - return `${Math.floor(diffInHours)}h ago`; - } else if (diffInHours < 24 * 7) { - return `${Math.floor(diffInHours / 24)}d ago`; + const isToday = date.toDateString() === now.toDateString(); + + const timeString = date.toLocaleTimeString([], { + hour: '2-digit', + minute: '2-digit', + second: '2-digit' + }); + + if (isToday) { + return timeString; } else { - return date.toLocaleDateString(); + const dateString = date.toLocaleDateString(); + return `${dateString} ${timeString}`; } }; @@ -45,9 +55,20 @@ export default function Sidebar({ className }: SidebarProps) { > {/* Header */}
[garbled hunk: JSX tags were stripped. The sidebar header row (MessageSquare icon plus the "Chats" heading) is wrapped in a flex container, and a Button with a Plus icon is added that calls createNewChat()]
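        {/* formatTimestamp (above) now renders a wall-clock time for today's
            chats and "date time" for older ones, e.g. "09:15:32 AM" today vs.
            "9/22/2025 09:15:32 AM" yesterday (exact strings are locale-dependent). */}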
{/* Chat List */} diff --git a/test-proj/ui/src/libs/chatHistory.ts b/test-proj/ui/src/libs/useChatHistory.ts similarity index 94% rename from test-proj/ui/src/libs/chatHistory.ts rename to test-proj/ui/src/libs/useChatHistory.ts index fde969d..eb9cd4a 100644 --- a/test-proj/ui/src/libs/chatHistory.ts +++ b/test-proj/ui/src/libs/useChatHistory.ts @@ -13,6 +13,9 @@ export interface UseChatHistory { getChats(): ChatHistory[]; selectedChatId: string | null; setSelectedChatId(handlerId: string): void; + createNewChat(): void; + // forces a new chat + chatCounter: number; } const DB_NAME = "chat-history"; @@ -31,6 +34,7 @@ export function useChatHistory(): UseChatHistory { string | null >(null); const [db, setDb] = useState | null>(null); + const [chatCounter, setChatCounter] = useState(0); // Initialize database useEffect(() => { @@ -165,6 +169,11 @@ export function useChatHistory(): UseChatHistory { return chatHistory; }; + const createNewChat = (): void => { + setSelectedChatHandlerId(null); + setChatCounter(prev => prev + 1); + }; + return { loading, addChat, @@ -172,5 +181,7 @@ export function useChatHistory(): UseChatHistory { selectedChatId: selectedChatHandlerId, setSelectedChatId: setSelectedChatHandlerId, deleteChat, + createNewChat, + chatCounter, }; } diff --git a/test-proj/ui/src/libs/chatWorkflowHandler.ts b/test-proj/ui/src/libs/useChatWorkflowHandler.ts similarity index 100% rename from test-proj/ui/src/libs/chatWorkflowHandler.ts rename to test-proj/ui/src/libs/useChatWorkflowHandler.ts diff --git a/test-proj/ui/src/libs/useChatbot.ts b/test-proj/ui/src/libs/useChatbot.ts new file mode 100644 index 0000000..f36c002 --- /dev/null +++ b/test-proj/ui/src/libs/useChatbot.ts @@ -0,0 +1,293 @@ +// This is a temporary chatbot component that is used to test the chatbot functionality. +// LlamaIndex will replace it with better chatbot component. 
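// useChatbot owns the session state that ChatBot.tsx previously held inline:
// it obtains a workflow handler via useChatWorkflowHandler, folds streamed
// ChatDeltaEvent / AppendChatMessage events into Message objects, and exposes
// submit(), retryLastMessage(), clearChat(), and a canSend gate to the view.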
+import { + useWorkflowHandler, + WorkflowEvent +} from "@llamaindex/ui"; +import { + useEffect, + useRef, + useState +} from "react"; +import { useChatWorkflowHandler } from "./useChatWorkflowHandler"; + +export type Role = "user" | "assistant"; +export interface Message { + role: Role; + isPartial?: boolean; + content: string; + timestamp: Date; + error?: boolean; +} + +export interface ChatbotState { + submit(): Promise; + retryLastMessage: () => void; + clearChat: () => void; + setInput: (input: string) => void; + + messages: Message[]; + input: string; + isLoading: boolean; + canSend: boolean; +} + +export function useChatbot({ + handlerId, + onHandlerCreated, + focusInput: focusInput, +}: { + handlerId?: string, + onHandlerCreated?: (handlerId: string) => void, + focusInput?: () => void +}): ChatbotState { + const workflowHandler = useChatWorkflowHandler({ + handlerId, + onHandlerCreated, + }); + const [messages, setMessages] = useState([]); + const [input, setInput] = useState(""); + const [isLoading, setIsLoading] = useState(false); + const lastProcessedEventIndexRef = useRef(0); + const streamingMessageIndexRef = useRef(null); + const [canSend, setCanSend] = useState(false); + + // Whenever handler becomes defined and changed, stop loading + useEffect(() => { + if (handlerId) { + setIsLoading(false); + setCanSend(true); + } + }, [handlerId]); + + // Helper functions for message management + const appendMessage = ( + role: Role, + msg: string, + isPartial: boolean = false + ): void => { + setMessages((prev) => { + const idx = prev.length; + streamingMessageIndexRef.current = idx; + return [ + ...prev, + { + role, + content: msg, + isPartial, + timestamp: new Date(), + }, + ]; + }); + }; + + const welcomeMessage = + "Welcome! 👋 Upload a document with the control above, then ask questions here."; + + // Initialize with welcome message + useEffect(() => { + if (messages.length === 0) { + const welcomeMsg: Message = { + role: "assistant", + content: welcomeMessage, + timestamp: new Date(), + }; + setMessages([welcomeMsg]); + } + }, []); + + // Subscribe to task/events using hook (auto stream when handler exists) + const { events } = useWorkflowHandler(handlerId ?? 
"", Boolean(handlerId)); + + // Process streamed events into messages + useEffect(() => { + if (!events || events.length === 0) return; + let startIdx = lastProcessedEventIndexRef.current; + if (startIdx < 0) startIdx = 0; + if (startIdx >= events.length) return; + + const eventsToProcess = events.slice(startIdx); + const newMessages = toMessages(eventsToProcess); + if (newMessages.length > 0) { + setMessages((prev) => mergeMessages(prev, newMessages)); + } + for (const ev of eventsToProcess) { + const type = ev.type; + if (!type) continue; + if (type.endsWith(".InputRequiredEvent")) { + // ready for next user input; enable send + setCanSend(true); + setIsLoading(false); + } else if (type.endsWith(".StopEvent")) { + // finished; no summary bubble needed (chat response already streamed) + } + } + lastProcessedEventIndexRef.current = events.length; + }, [events, messages]); + + const clearChat = () => { + setMessages([ + { + role: "assistant" as const, + content: welcomeMessage, + timestamp: new Date(), + }, + ]); + setInput(""); + focusInput?.(); + }; + + const retryLastMessage = () => { + const lastUserMessage = messages.filter((m) => m.role === "user").pop(); + if (lastUserMessage) { + // Remove the last assistant message if it was an error + const lastMessage = messages[messages.length - 1]; + if (lastMessage.role === "assistant" && lastMessage.error) { + setMessages((prev) => prev.slice(0, -1)); + } + setInput(lastUserMessage.content); + focusInput?.(); + } + }; + + const submit = async () => { + const trimmedInput = input.trim(); + if (!trimmedInput || isLoading || !canSend) return; + + // Add user message + const userMessage: Message = { + role: "user", + content: trimmedInput, + timestamp: new Date(), + }; + + const newMessages = [...messages, userMessage]; + setMessages(newMessages); + setInput(""); + setIsLoading(true); + setCanSend(false); + + // Immediately create an assistant placeholder to avoid visual gap before deltas + if (streamingMessageIndexRef.current === null) { + appendMessage("assistant", "Thinking...", true); + } + + try { + // Send user input as HumanResponseEvent + await workflowHandler.sendEvent({ + data: { _data: { response: trimmedInput } }, + type: "workflows.events.HumanResponseEvent", + }); + // The assistant reply will be streamed by useWorkflowTask and appended incrementally + } catch (err) { + console.error("Chat error:", err); + + // Add error message + const errorMessage: Message = { + role: "assistant", + content: `Sorry, I encountered an error: ${err instanceof Error ? err.message : "Unknown error"}. 
Please try again.`, + timestamp: new Date(), + error: true, + }; + + setMessages((prev) => [...prev, errorMessage]); + } finally { + setIsLoading(false); + // Focus back on input + focusInput?.(); + } + }; + + return { + submit, + retryLastMessage, + messages, + input, + setInput, + isLoading, + canSend, + clearChat, + }; +} + +interface AppendChatMessageData { + message: ChatMessage; +} +interface ChatMessage { + role: "user" | "assistant"; + text: string; + sources: { + text: string; + score: number; + metadata: Record; + }[]; + timestamp: string; +} + +function mergeMessages(previous: Message[], current: Message[]): Message[] { + const lastPreviousMessage = previous[previous.length - 1]; + const restPrevious = previous.slice(0, -1); + const firstCurrentMessage = current[0]; + const restCurrent = current.slice(1); + if (!lastPreviousMessage || !firstCurrentMessage) { + return [...previous, ...current]; + } + if (lastPreviousMessage.isPartial && firstCurrentMessage.isPartial) { + const lastContent = + lastPreviousMessage.content === "Thinking..." + ? "" + : lastPreviousMessage.content; + const merged = { + ...lastPreviousMessage, + content: lastContent + firstCurrentMessage.content, + }; + return [...restPrevious, merged, ...restCurrent]; + } else if ( + lastPreviousMessage.isPartial && + firstCurrentMessage.role === lastPreviousMessage.role + ) { + return [...restPrevious, firstCurrentMessage, ...restCurrent]; + } else { + return [...previous, ...current]; + } +} + +function toMessages(events: WorkflowEvent[]): Message[] { + const messages: Message[] = []; + for (const ev of events) { + const type = ev.type; + const data = ev.data as any; + const lastMessage = messages[messages.length - 1]; + if (type.endsWith(".ChatDeltaEvent")) { + const delta: string = data?.delta ?? ""; + if (!delta) continue; + if (!lastMessage || !lastMessage.isPartial) { + messages.push({ + role: "assistant", + content: delta, + isPartial: true, + timestamp: new Date(), + }); + } else { + lastMessage.content += delta; + } + } else if (type.endsWith(".AppendChatMessage")) { + if ( + lastMessage && + lastMessage.isPartial && + lastMessage.role === "assistant" + ) { + messages.pop(); + } + const content = ev.data as unknown as AppendChatMessageData; + console.log("AppendChatMessage", content); + messages.push({ + role: content.message.role, + content: content.message.text, + timestamp: new Date(content.message.timestamp), + isPartial: false, + }); + } + } + return messages; +} diff --git a/test-proj/ui/src/pages/Home.tsx b/test-proj/ui/src/pages/Home.tsx index 53fd093..f3e4b29 100644 --- a/test-proj/ui/src/pages/Home.tsx +++ b/test-proj/ui/src/pages/Home.tsx @@ -1,7 +1,7 @@ import ChatBot from "../components/ChatBot"; import { WorkflowTrigger } from "@llamaindex/ui"; import { APP_TITLE, INDEX_NAME } from "../libs/config"; -import { useChatHistory } from "@/libs/chatHistory"; +import { useChatHistory } from "@/libs/useChatHistory"; import Sidebar from "@/components/Sidebar"; export default function Home() { @@ -20,7 +20,7 @@ export default function Home() {
[garbled one-line JSX change, tags stripped; given the new SidebarProps in this patch, most likely the Sidebar element now receives chatHistory={chatHistory}]
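        {/* The chat area below mounts ChatBot only once the IndexedDB-backed
            history has loaded, so the restored selectedChatId can seed the
            handlerId prop; createNewChat() bumps chatHistory.chatCounter, which
            callers can use as a React key to force a fresh mount (the exact
            keying is an assumption). */}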
{!chatHistory.loading && ( { chatHistory.addChat(handler); From d0f4a882dd83d8868b544d12ff4827238c19275f Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Tue, 23 Sep 2025 11:08:10 -0400 Subject: [PATCH 05/18] wip --- test-proj/ui/src/components/Sidebar.tsx | 3 +-- test-proj/ui/src/libs/useChatHistory.ts | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/test-proj/ui/src/components/Sidebar.tsx b/test-proj/ui/src/components/Sidebar.tsx index 874240e..ff347d1 100644 --- a/test-proj/ui/src/components/Sidebar.tsx +++ b/test-proj/ui/src/components/Sidebar.tsx @@ -10,13 +10,12 @@ interface SidebarProps { export default function Sidebar({ className, chatHistory }: SidebarProps) { const { loading, - getChats, + chats, selectedChatId, setSelectedChatId, deleteChat, createNewChat, } = chatHistory; - const chats = getChats(); const formatTimestamp = (timestamp: string): string => { const date = new Date(timestamp); diff --git a/test-proj/ui/src/libs/useChatHistory.ts b/test-proj/ui/src/libs/useChatHistory.ts index eb9cd4a..66d2663 100644 --- a/test-proj/ui/src/libs/useChatHistory.ts +++ b/test-proj/ui/src/libs/useChatHistory.ts @@ -10,7 +10,7 @@ export interface UseChatHistory { loading: boolean; addChat(handlerId: string): void; deleteChat(handlerId: string): void; - getChats(): ChatHistory[]; + chats: ChatHistory[]; selectedChatId: string | null; setSelectedChatId(handlerId: string): void; createNewChat(): void; @@ -177,7 +177,7 @@ export function useChatHistory(): UseChatHistory { return { loading, addChat, - getChats, + chats: chatHistory, selectedChatId: selectedChatHandlerId, setSelectedChatId: setSelectedChatHandlerId, deleteChat, From 58670a86ce27ddcb7d8485220030c58034942023 Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Tue, 23 Sep 2025 13:04:50 -0400 Subject: [PATCH 06/18] functional, not pretty --- test-proj/src/test_proj/qa_workflows.py | 16 ++++ test-proj/ui/src/components/ChatBot.tsx | 86 ++++++++----------- test-proj/ui/src/libs/events.ts | 15 ++++ .../ui/src/libs/useChatWorkflowHandler.ts | 54 +++++++++--- test-proj/ui/src/libs/useChatbot.ts | 78 ++++------------- 5 files changed, 125 insertions(+), 124 deletions(-) create mode 100644 test-proj/ui/src/libs/events.ts diff --git a/test-proj/src/test_proj/qa_workflows.py b/test-proj/src/test_proj/qa_workflows.py index 5d922f6..720661d 100644 --- a/test-proj/src/test_proj/qa_workflows.py +++ b/test-proj/src/test_proj/qa_workflows.py @@ -168,6 +168,12 @@ class ChatDeltaEvent(Event): delta: str +class QueryConversationHistoryEvent(HumanResponseEvent): + """Client can call this to trigger replaying AppendChatMessage events""" + + pass + + class ChatWorkflowState(BaseModel): index_name: str | None = None conversation_history: list[ConversationMessage] = Field(default_factory=list) @@ -244,6 +250,15 @@ async def initialize_chat( } ) + @step + async def get_conversation_history( + self, ev: QueryConversationHistoryEvent, ctx: Context[ChatWorkflowState] + ) -> None: + """Get the conversation history from the database""" + hist = (await ctx.store.get_state()).conversation_history + for item in hist: + ctx.write_event_to_stream(AppendChatMessage(message=item)) + @step async def process_user_response( self, ev: HumanResponseEvent, ctx: Context[ChatWorkflowState] @@ -298,6 +313,7 @@ def async_response_gen(self): async for token in stream_response.async_response_gen(): full_text += token ctx.write_event_to_stream(ChatDeltaEvent(delta=token)) + await asyncio.sleep(0) # Temp workaround. 
Some sort of bug in the server drops events without flushing the event loop # Extract source nodes for citations sources = [] diff --git a/test-proj/ui/src/components/ChatBot.tsx b/test-proj/ui/src/components/ChatBot.tsx index e69854e..050fbe9 100644 --- a/test-proj/ui/src/components/ChatBot.tsx +++ b/test-proj/ui/src/components/ChatBot.tsx @@ -15,7 +15,6 @@ import { MessageSquare, RefreshCw, Send, - Trash2, User, } from "lucide-react"; import { FormEvent, KeyboardEvent, useEffect, useRef } from "react"; @@ -76,11 +75,6 @@ export default function ChatBot({

                {title}
[The remainder of this ChatBot.tsx diff was garbled in extraction; the JSX tags were stripped. Recoverable changes, hunk by hunk:
  @@ -76,11 +75,6 @@    removes the header's "Thinking..." indicator: - {chatbot.isLoading && ( ... Thinking... )}
  @@ -92,15 +86,6 @@    keeps the error-retry button ({chatbot.messages.some((m) => m.error) && ...}) and removes the
                        clear-chat button block: - {chatbot.messages.length > 0 && ( ... )}
  @@ -165,14 +150,20 @@ shows a loading placeholder while a streamed reply is still empty,
                        + {message.isPartial && !message.content ? ( presumably <LoadingDots /> ) : ( <> ... {message.content} ... </> )}
                        and removes the trailing standalone {chatbot.isLoading && ( ... )} typing bubble after the message list
  @@ -263,3 +226,24 @@  appends the LoadingDots component:]
+
+const LoadingDots = () => {
+  return (
+    [garbled JSX: an animated loading indicator]
+ ); +}; diff --git a/test-proj/ui/src/libs/events.ts b/test-proj/ui/src/libs/events.ts new file mode 100644 index 0000000..da87726 --- /dev/null +++ b/test-proj/ui/src/libs/events.ts @@ -0,0 +1,15 @@ +import { WorkflowEvent } from "@llamaindex/ui"; + +export function createQueryConversationHistoryEvent(): WorkflowEvent { + return { + data: {}, + type: "test_proj.qa_workflows.QueryConversationHistoryEvent", + }; +} + +export function createHumanResponseEvent(response: string): WorkflowEvent { + return { + data: { _data: { response } }, + type: "test_proj.qa_workflows.HumanResponseEvent", + }; +} \ No newline at end of file diff --git a/test-proj/ui/src/libs/useChatWorkflowHandler.ts b/test-proj/ui/src/libs/useChatWorkflowHandler.ts index 9debbaa..4d23431 100644 --- a/test-proj/ui/src/libs/useChatWorkflowHandler.ts +++ b/test-proj/ui/src/libs/useChatWorkflowHandler.ts @@ -1,6 +1,11 @@ -import { useWorkflowHandler, useWorkflowRun } from "@llamaindex/ui"; -import { useEffect, useState } from "react"; +import { + useWorkflowHandler, + useWorkflowRun, + useHandlerStore, +} from "@llamaindex/ui"; +import { useEffect, useRef, useState } from "react"; import { INDEX_NAME } from "./config"; +import { createQueryConversationHistoryEvent } from "./events"; /** * Creates a new chat conversation if no handlerId is provided @@ -13,23 +18,52 @@ export function useChatWorkflowHandler({ onHandlerCreated?: (handlerId: string) => void; }): ReturnType { const create = useWorkflowRun(); + const isQueryingWorkflow = useRef(false); const [thisHandlerId, setThisHandlerId] = useState( handlerId ); - const workflowHandler = useWorkflowHandler(thisHandlerId ?? ""); + const workflowHandler = useWorkflowHandler(thisHandlerId ?? "", true); + const store = useHandlerStore(); const createHandler = async () => { - const handler = await create.runWorkflow("chat", { - index_name: INDEX_NAME, - }); - setThisHandlerId(handler.handler_id); - onHandlerCreated?.(handler.handler_id); + if (isQueryingWorkflow.current) return; + isQueryingWorkflow.current = true; + try { + const handler = await create.runWorkflow("chat", { + index_name: INDEX_NAME, + }); + setThisHandlerId(handler.handler_id); + onHandlerCreated?.(handler.handler_id); + } finally { + isQueryingWorkflow.current = false; + } + }; + const replayHandler = async () => { + if (isQueryingWorkflow.current) return; + isQueryingWorkflow.current = true; + try { + await workflowHandler.sendEvent(createQueryConversationHistoryEvent()); + } finally { + isQueryingWorkflow.current = false; + } }; + useEffect(() => { - if (!handlerId) { + if (!thisHandlerId) { createHandler(); + } else { + // kick it. This is a temp workaround for a bug + store.sync().then(() => { + store.subscribe(thisHandlerId); + }); + } + }, [thisHandlerId]); + + useEffect(() => { + if (thisHandlerId && workflowHandler.isStreaming) { + replayHandler(); } - }, [handlerId]); + }, [thisHandlerId, workflowHandler.isStreaming]); return workflowHandler; } diff --git a/test-proj/ui/src/libs/useChatbot.ts b/test-proj/ui/src/libs/useChatbot.ts index f36c002..2fa88dc 100644 --- a/test-proj/ui/src/libs/useChatbot.ts +++ b/test-proj/ui/src/libs/useChatbot.ts @@ -1,15 +1,9 @@ // This is a temporary chatbot component that is used to test the chatbot functionality. // LlamaIndex will replace it with better chatbot component. 
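// Reconnect sketch: for a pre-existing handler, useChatWorkflowHandler (above)
// sends a QueryConversationHistoryEvent once the event stream is live; the
// workflow's get_conversation_history step then replays one AppendChatMessage
// per stored message, and toMessages() below rebuilds the transcript from them:
//
//   await workflowHandler.sendEvent(createQueryConversationHistoryEvent());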
-import { - useWorkflowHandler, - WorkflowEvent -} from "@llamaindex/ui"; -import { - useEffect, - useRef, - useState -} from "react"; +import { WorkflowEvent } from "@llamaindex/ui"; +import { useEffect, useRef, useState } from "react"; import { useChatWorkflowHandler } from "./useChatWorkflowHandler"; +import { createHumanResponseEvent } from "./events"; export type Role = "user" | "assistant"; export interface Message { @@ -23,7 +17,6 @@ export interface Message { export interface ChatbotState { submit(): Promise; retryLastMessage: () => void; - clearChat: () => void; setInput: (input: string) => void; messages: Message[]; @@ -37,19 +30,19 @@ export function useChatbot({ onHandlerCreated, focusInput: focusInput, }: { - handlerId?: string, - onHandlerCreated?: (handlerId: string) => void, - focusInput?: () => void + handlerId?: string; + onHandlerCreated?: (handlerId: string) => void; + focusInput?: () => void; }): ChatbotState { const workflowHandler = useChatWorkflowHandler({ handlerId, onHandlerCreated, }); + const { events } = workflowHandler; const [messages, setMessages] = useState([]); const [input, setInput] = useState(""); const [isLoading, setIsLoading] = useState(false); const lastProcessedEventIndexRef = useRef(0); - const streamingMessageIndexRef = useRef(null); const [canSend, setCanSend] = useState(false); // Whenever handler becomes defined and changed, stop loading @@ -60,27 +53,6 @@ export function useChatbot({ } }, [handlerId]); - // Helper functions for message management - const appendMessage = ( - role: Role, - msg: string, - isPartial: boolean = false - ): void => { - setMessages((prev) => { - const idx = prev.length; - streamingMessageIndexRef.current = idx; - return [ - ...prev, - { - role, - content: msg, - isPartial, - timestamp: new Date(), - }, - ]; - }); - }; - const welcomeMessage = "Welcome! 👋 Upload a document with the control above, then ask questions here."; @@ -96,9 +68,6 @@ export function useChatbot({ } }, []); - // Subscribe to task/events using hook (auto stream when handler exists) - const { events } = useWorkflowHandler(handlerId ?? 
"", Boolean(handlerId)); - // Process streamed events into messages useEffect(() => { if (!events || events.length === 0) return; @@ -125,18 +94,6 @@ export function useChatbot({ lastProcessedEventIndexRef.current = events.length; }, [events, messages]); - const clearChat = () => { - setMessages([ - { - role: "assistant" as const, - content: welcomeMessage, - timestamp: new Date(), - }, - ]); - setInput(""); - focusInput?.(); - }; - const retryLastMessage = () => { const lastUserMessage = messages.filter((m) => m.role === "user").pop(); if (lastUserMessage) { @@ -160,25 +117,22 @@ export function useChatbot({ content: trimmedInput, timestamp: new Date(), }; + const placeHolderMessage: Message = { + role: "assistant", + content: "", + timestamp: new Date(), + isPartial: true, + }; - const newMessages = [...messages, userMessage]; + const newMessages = [...messages, userMessage, placeHolderMessage]; setMessages(newMessages); setInput(""); setIsLoading(true); setCanSend(false); - // Immediately create an assistant placeholder to avoid visual gap before deltas - if (streamingMessageIndexRef.current === null) { - appendMessage("assistant", "Thinking...", true); - } - try { // Send user input as HumanResponseEvent - await workflowHandler.sendEvent({ - data: { _data: { response: trimmedInput } }, - type: "workflows.events.HumanResponseEvent", - }); - // The assistant reply will be streamed by useWorkflowTask and appended incrementally + await workflowHandler.sendEvent(createHumanResponseEvent(trimmedInput)); } catch (err) { console.error("Chat error:", err); @@ -206,7 +160,6 @@ export function useChatbot({ setInput, isLoading, canSend, - clearChat, }; } @@ -280,7 +233,6 @@ function toMessages(events: WorkflowEvent[]): Message[] { messages.pop(); } const content = ev.data as unknown as AppendChatMessageData; - console.log("AppendChatMessage", content); messages.push({ role: content.message.role, content: content.message.text, From 3fdb64a1723db265604fb932e1467b5a13d41c6a Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Tue, 23 Sep 2025 13:23:56 -0400 Subject: [PATCH 07/18] wip --- test-proj/ui/src/pages/Home.tsx | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/test-proj/ui/src/pages/Home.tsx b/test-proj/ui/src/pages/Home.tsx index f3e4b29..77db1cf 100644 --- a/test-proj/ui/src/pages/Home.tsx +++ b/test-proj/ui/src/pages/Home.tsx @@ -1,11 +1,23 @@ import ChatBot from "../components/ChatBot"; -import { WorkflowTrigger } from "@llamaindex/ui"; +import { + useWorkflowHandlerList, + WorkflowProgressBar, + WorkflowTrigger, +} from "@llamaindex/ui"; import { APP_TITLE, INDEX_NAME } from "../libs/config"; import { useChatHistory } from "@/libs/useChatHistory"; import Sidebar from "@/components/Sidebar"; +import { Loader } from "lucide-react"; export default function Home() { const chatHistory = useChatHistory(); + const handlers = useWorkflowHandlerList("upload"); + const activeHandlers = handlers.handlers.filter( + (h) => h.status === "running" && h.workflowName === "upload" + ); + const anyActiveHandlers = activeHandlers.length > 0; + console.log("activeHandlers", activeHandlers); + console.log("anyActiveHandlers", anyActiveHandlers); return (
@@ -32,14 +44,19 @@ export default function Home() { }; }} /> + {anyActiveHandlers && ( +
+          [garbled JSX, tags stripped: renders a WorkflowProgressBar for the active upload handlers]
+        )}
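        {/* Upload progress: useWorkflowHandlerList("upload") above tracks the
            deployment's upload handlers; the bar stays mounted while any of
            them still reports status === "running". */}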
{!chatHistory.loading && ( { + onHandlerCreated={(handler) => { chatHistory.addChat(handler); chatHistory.setSelectedChatId(handler); }} From 1ffe4a8044ce0f4301a0a3f322e08f6c487a7aa5 Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Tue, 23 Sep 2025 13:24:11 -0400 Subject: [PATCH 08/18] reformat --- test-proj/ui/src/components/ChatBot.tsx | 16 ++++++++-------- test-proj/ui/src/components/Sidebar.tsx | 16 ++++++++-------- test-proj/ui/src/libs/events.ts | 2 +- test-proj/ui/src/libs/useChatHistory.ts | 6 +++--- test-proj/ui/src/libs/useChatWorkflowHandler.ts | 2 +- test-proj/ui/src/pages/Home.tsx | 2 +- 6 files changed, 22 insertions(+), 22 deletions(-) diff --git a/test-proj/ui/src/components/ChatBot.tsx b/test-proj/ui/src/components/ChatBot.tsx index 050fbe9..af93053 100644 --- a/test-proj/ui/src/components/ChatBot.tsx +++ b/test-proj/ui/src/components/ChatBot.tsx @@ -64,7 +64,7 @@ export default function ChatBot({ return (
{/* Header */} @@ -111,7 +111,7 @@ export default function ChatBot({ key={i} className={cn( "flex gap-3", - message.role === "user" ? "justify-end" : "justify-start" + message.role === "user" ? "justify-end" : "justify-start", )} > {message.role !== "user" && ( @@ -120,7 +120,7 @@ export default function ChatBot({ "w-8 h-8 rounded-full flex items-center justify-center flex-shrink-0", message.error ? "bg-red-100 dark:bg-red-900" - : "bg-blue-100 dark:bg-blue-900" + : "bg-blue-100 dark:bg-blue-900", )} >
@@ -136,7 +136,7 @@ export default function ChatBot({
@@ -157,7 +157,7 @@ export default function ChatBot({

{message.content} @@ -171,7 +171,7 @@ export default function ChatBot({ ? "text-blue-100" : message.error ? "text-red-500 dark:text-red-400" - : "text-gray-500 dark:text-gray-400" + : "text-gray-500 dark:text-gray-400", )} > {message.timestamp.toLocaleTimeString()} diff --git a/test-proj/ui/src/components/Sidebar.tsx b/test-proj/ui/src/components/Sidebar.tsx index ff347d1..ec6bcc4 100644 --- a/test-proj/ui/src/components/Sidebar.tsx +++ b/test-proj/ui/src/components/Sidebar.tsx @@ -21,13 +21,13 @@ export default function Sidebar({ className, chatHistory }: SidebarProps) { const date = new Date(timestamp); const now = new Date(); const isToday = date.toDateString() === now.toDateString(); - - const timeString = date.toLocaleTimeString([], { - hour: '2-digit', - minute: '2-digit', - second: '2-digit' + + const timeString = date.toLocaleTimeString([], { + hour: "2-digit", + minute: "2-digit", + second: "2-digit", }); - + if (isToday) { return timeString; } else { @@ -49,7 +49,7 @@ export default function Sidebar({ className, chatHistory }: SidebarProps) {

{/* Header */} @@ -93,7 +93,7 @@ export default function Sidebar({ className, chatHistory }: SidebarProps) { "flex items-center justify-between px-4 py-3 cursor-pointer hover:bg-gray-50 dark:hover:bg-gray-800 transition-colors", selectedChatId === chat.handlerId ? "bg-blue-50 dark:bg-blue-900/20" - : "" + : "", )} onClick={() => handleChatSelect(chat)} > diff --git a/test-proj/ui/src/libs/events.ts b/test-proj/ui/src/libs/events.ts index da87726..1466812 100644 --- a/test-proj/ui/src/libs/events.ts +++ b/test-proj/ui/src/libs/events.ts @@ -12,4 +12,4 @@ export function createHumanResponseEvent(response: string): WorkflowEvent { data: { _data: { response } }, type: "test_proj.qa_workflows.HumanResponseEvent", }; -} \ No newline at end of file +} diff --git a/test-proj/ui/src/libs/useChatHistory.ts b/test-proj/ui/src/libs/useChatHistory.ts index 66d2663..2ff9a0f 100644 --- a/test-proj/ui/src/libs/useChatHistory.ts +++ b/test-proj/ui/src/libs/useChatHistory.ts @@ -102,7 +102,7 @@ export function useChatHistory(): UseChatHistory { // Sort by timestamp descending (most recent first) return chats.sort( (a, b) => - new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime() + new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime(), ); } catch (error) { console.error("Failed to get chats from database:", error); @@ -152,7 +152,7 @@ export function useChatHistory(): UseChatHistory { // If the deleted chat was selected, select the next available chat or clear selection if (selectedChatHandlerId === handlerId) { const remainingChats = chatHistory.filter( - (c) => c.handlerId !== handlerId + (c) => c.handlerId !== handlerId, ); if (remainingChats.length > 0) { setSelectedChatHandlerId(remainingChats[0].handlerId); @@ -171,7 +171,7 @@ export function useChatHistory(): UseChatHistory { const createNewChat = (): void => { setSelectedChatHandlerId(null); - setChatCounter(prev => prev + 1); + setChatCounter((prev) => prev + 1); }; return { diff --git a/test-proj/ui/src/libs/useChatWorkflowHandler.ts b/test-proj/ui/src/libs/useChatWorkflowHandler.ts index 4d23431..1ad6019 100644 --- a/test-proj/ui/src/libs/useChatWorkflowHandler.ts +++ b/test-proj/ui/src/libs/useChatWorkflowHandler.ts @@ -20,7 +20,7 @@ export function useChatWorkflowHandler({ const create = useWorkflowRun(); const isQueryingWorkflow = useRef(false); const [thisHandlerId, setThisHandlerId] = useState( - handlerId + handlerId, ); const workflowHandler = useWorkflowHandler(thisHandlerId ?? 
"", true); const store = useHandlerStore(); diff --git a/test-proj/ui/src/pages/Home.tsx b/test-proj/ui/src/pages/Home.tsx index 77db1cf..1d7d2d8 100644 --- a/test-proj/ui/src/pages/Home.tsx +++ b/test-proj/ui/src/pages/Home.tsx @@ -13,7 +13,7 @@ export default function Home() { const chatHistory = useChatHistory(); const handlers = useWorkflowHandlerList("upload"); const activeHandlers = handlers.handlers.filter( - (h) => h.status === "running" && h.workflowName === "upload" + (h) => h.status === "running" && h.workflowName === "upload", ); const anyActiveHandlers = activeHandlers.length > 0; console.log("activeHandlers", activeHandlers); From a79780e8e0aa5c3cb43bf5c3d0c1beda17d2b105 Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Tue, 23 Sep 2025 13:28:42 -0400 Subject: [PATCH 09/18] wip --- test-proj/src/test_proj/clients.py | 3 +- test-proj/src/test_proj/qa_workflows.py | 37 ++++++++----------------- 2 files changed, 13 insertions(+), 27 deletions(-) diff --git a/test-proj/src/test_proj/clients.py b/test-proj/src/test_proj/clients.py index 2fe3766..ee67170 100644 --- a/test-proj/src/test_proj/clients.py +++ b/test-proj/src/test_proj/clients.py @@ -4,6 +4,7 @@ from llama_cloud.client import AsyncLlamaCloud from llama_cloud_services import LlamaCloudIndex, LlamaParse +from llama_cloud_services.parse import ResultType # deployed agents may infer their name from the deployment name # Note: Make sure that an agent deployment with this name actually exists @@ -46,7 +47,7 @@ def get_llama_parse_client() -> LlamaParse: adaptive_long_table=True, outlined_table_extraction=True, output_tables_as_HTML=True, - result_type="markdown", + result_type=ResultType.MD, api_key=LLAMA_CLOUD_API_KEY, project_id=LLAMA_CLOUD_PROJECT_ID, custom_client=get_base_cloud_client(), diff --git a/test-proj/src/test_proj/qa_workflows.py b/test-proj/src/test_proj/qa_workflows.py index 720661d..ed05b8d 100644 --- a/test-proj/src/test_proj/qa_workflows.py +++ b/test-proj/src/test_proj/qa_workflows.py @@ -150,10 +150,8 @@ async def parse_document(self, ev: FileDownloadedEvent, ctx: Context) -> StopEve ) except Exception as e: - logger.error(e.stack_trace) - return StopEvent( - result={"success": False, "error": str(e), "stack_trace": e.stack_trace} - ) + logger.error(f"Error parsing document {ev.file_id}: {e}", exc_info=True) + return StopEvent(result={"success": False, "error": str(e)}) class AppendChatMessage(Event): @@ -221,7 +219,7 @@ class ChatWorkflow(Workflow): @step async def initialize_chat( self, ev: ChatEvent, ctx: Context[ChatWorkflowState] - ) -> InputRequiredEvent: + ) -> InputRequiredEvent | StopEvent: """Initialize the chat session and request first input""" try: logger.info(f"Initializing chat {ev.index_name}") @@ -239,7 +237,7 @@ async def initialize_chat( async with ctx.store.edit_state() as state: state.conversation_history.extend(ev.conversation_history) # Request first user input - return InputRequiredEvent(prefix="[waiting for user message]") + return InputRequiredEvent() except Exception as e: logger.error(f"Error initializing chat: {str(e)}", exc_info=True) @@ -271,6 +269,8 @@ async def process_user_response( initial_state = await ctx.store.get_state() conversation_history = initial_state.conversation_history index_name = initial_state.index_name + if not index_name: + raise ValueError("Index name not found in context") logger.info(f"User input: {user_input}") @@ -287,23 +287,6 @@ async def process_user_response( chat_engine = get_chat_engine(index_name) - # Process query with chat engine 
(streaming) - async def _fake_stream_chat() -> AsyncGenerator[str, None]: - for token in ["Hel", "lo, ", "how ", "are ", "you?"]: - yield token - await asyncio.sleep(0.1) - - async def _fake_chat() -> StreamingAgentChatResponse: - class MockStreamResponse: - def __init__(self): - self.source_nodes = [] - - def async_response_gen(self): - return _fake_stream_chat() - - return MockStreamResponse() - - # stream_response = await _fake_chat() stream_response = await chat_engine.astream_chat( user_input, chat_history=initial_state.chat_messages() ) @@ -313,7 +296,9 @@ def async_response_gen(self): async for token in stream_response.async_response_gen(): full_text += token ctx.write_event_to_stream(ChatDeltaEvent(delta=token)) - await asyncio.sleep(0) # Temp workaround. Some sort of bug in the server drops events without flushing the event loop + await asyncio.sleep( + 0 + ) # Temp workaround. Some sort of bug in the server drops events without flushing the event loop # Extract source nodes for citations sources = [] @@ -324,7 +309,7 @@ def async_response_gen(self): text=node.text[:197] + "..." if len(node.text) >= 200 else node.text, - score=node.score, + score=float(node.score) if node.score else 0.0, metadata=node.metadata, ) ) @@ -341,7 +326,7 @@ def async_response_gen(self): assistant_response, ] ) - return InputRequiredEvent(prefix="[waiting for user message]") + return InputRequiredEvent() except Exception as e: logger.error(f"Error processing query: {str(e)}", exc_info=True) From 924a94d5e72d392694f33420a19172ea8db96fc8 Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Tue, 23 Sep 2025 13:31:21 -0400 Subject: [PATCH 10/18] update template --- pyproject.toml.jinja | 1 + src/{{ project_name_snake }}/clients.py | 24 +- src/{{ project_name_snake }}/qa_workflows.py | 211 +++++----- ui/package.json.jinja | 1 + ui/src/components/ChatBot.tsx | 397 ++++--------------- ui/src/components/Sidebar.tsx | 119 ++++++ ui/src/libs/events.ts.jinja | 15 + ui/src/libs/useChatHistory.ts | 187 +++++++++ ui/src/libs/useChatWorkflowHandler.ts | 69 ++++ ui/src/libs/useChatbot.ts | 245 ++++++++++++ ui/src/pages/Home.tsx | 64 ++- 11 files changed, 873 insertions(+), 460 deletions(-) create mode 100644 ui/src/components/Sidebar.tsx create mode 100644 ui/src/libs/events.ts.jinja create mode 100644 ui/src/libs/useChatHistory.ts create mode 100644 ui/src/libs/useChatWorkflowHandler.ts create mode 100644 ui/src/libs/useChatbot.ts diff --git a/pyproject.toml.jinja b/pyproject.toml.jinja index 63d506b..52fa84a 100644 --- a/pyproject.toml.jinja +++ b/pyproject.toml.jinja @@ -12,6 +12,7 @@ dependencies = [ "llama-index-llms-openai>=0.5.6", "llama-index-embeddings-openai>=0.5.1", "python-dotenv>=1.1.1", + "pydantic>=2.11.9", ] [build-system] diff --git a/src/{{ project_name_snake }}/clients.py b/src/{{ project_name_snake }}/clients.py index 9f8d0d8..ee67170 100644 --- a/src/{{ project_name_snake }}/clients.py +++ b/src/{{ project_name_snake }}/clients.py @@ -3,7 +3,8 @@ import httpx from llama_cloud.client import AsyncLlamaCloud -from llama_cloud_services import LlamaParse +from llama_cloud_services import LlamaCloudIndex, LlamaParse +from llama_cloud_services.parse import ResultType # deployed agents may infer their name from the deployment name # Note: Make sure that an agent deployment with this name actually exists @@ -18,7 +19,8 @@ INDEX_NAME = "document_qa_index" -def get_custom_client() -> httpx.AsyncClient: +@functools.cache +def get_base_cloud_client() -> httpx.AsyncClient: return httpx.AsyncClient( timeout=60, 
headers={"Project-Id": LLAMA_CLOUD_PROJECT_ID} @@ -32,7 +34,7 @@ def get_llama_cloud_client() -> AsyncLlamaCloud: return AsyncLlamaCloud( base_url=LLAMA_CLOUD_BASE_URL, token=LLAMA_CLOUD_API_KEY, - httpx_client=get_custom_client(), + httpx_client=get_base_cloud_client(), ) @@ -45,8 +47,20 @@ def get_llama_parse_client() -> LlamaParse: adaptive_long_table=True, outlined_table_extraction=True, output_tables_as_HTML=True, - result_type="markdown", + result_type=ResultType.MD, api_key=LLAMA_CLOUD_API_KEY, project_id=LLAMA_CLOUD_PROJECT_ID, - custom_client=get_custom_client(), + custom_client=get_base_cloud_client(), + ) + + +@functools.lru_cache(maxsize=None) +def get_index(index_name: str) -> LlamaCloudIndex: + return LlamaCloudIndex.create_index( + name=index_name, + project_id=LLAMA_CLOUD_PROJECT_ID, + api_key=LLAMA_CLOUD_API_KEY, + base_url=LLAMA_CLOUD_BASE_URL, + show_progress=True, + custom_client=get_base_cloud_client(), ) diff --git a/src/{{ project_name_snake }}/qa_workflows.py b/src/{{ project_name_snake }}/qa_workflows.py index 20b4828..ed05b8d 100644 --- a/src/{{ project_name_snake }}/qa_workflows.py +++ b/src/{{ project_name_snake }}/qa_workflows.py @@ -1,4 +1,5 @@ from __future__ import annotations +from collections.abc import AsyncGenerator from datetime import datetime import logging import os @@ -6,16 +7,17 @@ from typing import Any, Literal import httpx -from dotenv import load_dotenv -from llama_cloud import ChatMessage -from llama_cloud.types import RetrievalMode from llama_index.core import Settings -from llama_index.core.chat_engine.types import BaseChatEngine, ChatMode -from llama_index.core.memory import ChatMemoryBuffer -from pydantic import BaseModel, Field +from llama_index.core.chat_engine.types import ( + BaseChatEngine, + ChatMode, + StreamingAgentChatResponse, +) +from llama_index.core.llms import ChatMessage +import asyncio from llama_index.embeddings.openai import OpenAIEmbedding from llama_index.llms.openai import OpenAI -from llama_cloud_services import LlamaCloudIndex +from pydantic import BaseModel, Field from workflows import Workflow, step, Context from workflows.events import ( StartEvent, @@ -27,17 +29,12 @@ from workflows.retry_policy import ConstantDelayRetryPolicy from .clients import ( - LLAMA_CLOUD_API_KEY, - LLAMA_CLOUD_BASE_URL, - get_custom_client, + get_index, get_llama_cloud_client, get_llama_parse_client, LLAMA_CLOUD_PROJECT_ID, ) -load_dotenv() - - logger = logging.getLogger(__name__) @@ -58,15 +55,13 @@ class FileDownloadedEvent(Event): class ChatEvent(StartEvent): index_name: str - session_id: str + conversation_history: list[ConversationMessage] = Field(default_factory=list) # Configure LLM and embedding model Settings.llm = OpenAI(model="gpt-4", temperature=0.1) Settings.embed_model = OpenAIEmbedding(model="text-embedding-3-small") -custom_client = get_custom_client() - class DocumentUploadWorkflow(Workflow): """Workflow to upload and index documents using LlamaParse and LlamaCloud Index""" @@ -136,15 +131,7 @@ async def parse_document(self, ev: FileDownloadedEvent, ctx: Context) -> StopEve documents = result.get_text_documents() # Create or connect to LlamaCloud Index - index = LlamaCloudIndex.create_index( - documents=documents, - name=index_name, - project_id=LLAMA_CLOUD_PROJECT_ID, - api_key=LLAMA_CLOUD_API_KEY, - base_url=LLAMA_CLOUD_BASE_URL, - show_progress=True, - custom_client=custom_client, - ) + index = get_index(index_name) # Insert documents to index logger.info(f"Inserting {len(documents)} documents to {index_name}") @@ 
-163,10 +150,8 @@ async def parse_document(self, ev: FileDownloadedEvent, ctx: Context) -> StopEve ) except Exception as e: - logger.error(e.stack_trace) - return StopEvent( - result={"success": False, "error": str(e), "stack_trace": e.stack_trace} - ) + logger.error(f"Error parsing document {ev.file_id}: {e}", exc_info=True) + return StopEvent(result={"success": False, "error": str(e)}) class AppendChatMessage(Event): @@ -181,10 +166,21 @@ class ChatDeltaEvent(Event): delta: str +class QueryConversationHistoryEvent(HumanResponseEvent): + """Client can call this to trigger replaying AppendChatMessage events""" + + pass + + class ChatWorkflowState(BaseModel): - conversation_history: list[ChatMessage] = Field(default_factory=list) - session_id: str | None = None index_name: str | None = None + conversation_history: list[ConversationMessage] = Field(default_factory=list) + + def chat_messages(self) -> list[ChatMessage]: + return [ + ChatMessage(role=message.role, content=message.text) + for message in self.conversation_history + ] class SourceMessage(BaseModel): @@ -194,88 +190,57 @@ class SourceMessage(BaseModel): class ConversationMessage(BaseModel): + """ + Mostly just a wrapper for a ChatMessage with extra context for UI. Includes a timestamp and source references. + """ + role: Literal["user", "assistant"] text: str sources: list[SourceMessage] = Field(default_factory=list) timestamp: str = Field(default_factory=lambda: datetime.now().isoformat()) -class ChatWorkflow(Workflow[ChatWorkflowState]): - """Workflow to handle continuous chat queries against indexed documents""" +def get_chat_engine(index_name: str) -> BaseChatEngine: + index = get_index(index_name) + return index.as_chat_engine( + chat_mode=ChatMode.CONTEXT, + llm=Settings.llm, + context_prompt=( + "You are a helpful assistant that answers questions based on the provided documents. " + "Always cite specific information from the documents when answering. " + "If you cannot find the answer in the documents, say so clearly." 
+ ), + ) - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.chat_engines: dict[ - str, BaseChatEngine - ] = {} # Cache chat engines per index + +class ChatWorkflow(Workflow): + """Workflow to handle continuous chat queries against indexed documents""" @step async def initialize_chat( self, ev: ChatEvent, ctx: Context[ChatWorkflowState] - ) -> InputRequiredEvent: + ) -> InputRequiredEvent | StopEvent: """Initialize the chat session and request first input""" try: logger.info(f"Initializing chat {ev.index_name}") index_name = ev.index_name - session_id = ev.session_id + initial_state = await ctx.store.get_state() # Store session info in context await ctx.store.set("index_name", index_name) - await ctx.store.set("session_id", session_id) - if ctx.store.get("conversation_history", None) is None: - await ctx.store.set("conversation_history", []) - - # Create cache key for chat engine - cache_key = f"{index_name}_{session_id}" - - # Initialize chat engine if not exists - if cache_key not in self.chat_engines: - logger.info(f"Initializing chat engine {cache_key}") - # Connect to LlamaCloud Index - index = LlamaCloudIndex( - name=index_name, - project_id=LLAMA_CLOUD_PROJECT_ID, - api_key=LLAMA_CLOUD_API_KEY, - base_url=LLAMA_CLOUD_BASE_URL, - async_httpx_client=custom_client, - ) + messages = initial_state.conversation_history - # Create chat engine with memory - memory = ChatMemoryBuffer.from_defaults( - token_limit=3900, - chat_history=await ctx.store.get( - "conversation_history", default=[] - ), - ) - self.chat_engines[cache_key] = index.as_chat_engine( - chat_mode=ChatMode.CONTEXT, - memory=memory, - llm=Settings.llm, - context_prompt=( - "You are a helpful assistant that answers questions based on the provided documents. " - "Always cite specific information from the documents when answering. " - "If you cannot find the answer in the documents, say so clearly." - ), - verbose=False, - retriever_mode=RetrievalMode.CHUNKS, - ) + for item in messages: + ctx.write_event_to_stream(AppendChatMessage(message=item)) - history = await ctx.store.get("conversation_history", default=[]) - if len(history) == 0: - ctx.write_event_to_stream( - ConversationMessage( - role="assistant", - text="Chat initialized. 
Ask a question (or type 'exit' to quit): ", - ) - ) - else: - for item in history: - item: ConversationMessage = item - ctx.write_event_to_stream(item) + if ev.conversation_history: + async with ctx.store.edit_state() as state: + state.conversation_history.extend(ev.conversation_history) # Request first user input - return InputRequiredEvent(prefix="[waiting for user message]") + return InputRequiredEvent() except Exception as e: + logger.error(f"Error initializing chat: {str(e)}", exc_info=True) return StopEvent( result={ "success": False, @@ -283,27 +248,35 @@ async def initialize_chat( } ) + @step + async def get_conversation_history( + self, ev: QueryConversationHistoryEvent, ctx: Context[ChatWorkflowState] + ) -> None: + """Get the conversation history from the database""" + hist = (await ctx.store.get_state()).conversation_history + for item in hist: + ctx.write_event_to_stream(AppendChatMessage(message=item)) + @step async def process_user_response( - self, ev: HumanResponseEvent, ctx: Context + self, ev: HumanResponseEvent, ctx: Context[ChatWorkflowState] ) -> InputRequiredEvent | HumanResponseEvent | StopEvent | None: """Process user input and generate response""" try: logger.info(f"Processing user response {ev.response}") user_input = ev.response.strip() - with ctx.store.edit_state() as state: - messages = state.get("conversation_history", default=[]) - messages.append(ConversationMessage(role="user", text=user_input)) - state.set("conversation_history", messages) + + initial_state = await ctx.store.get_state() + conversation_history = initial_state.conversation_history + index_name = initial_state.index_name + if not index_name: + raise ValueError("Index name not found in context") logger.info(f"User input: {user_input}") # Check for exit command if user_input.lower() == "exit": logger.info("User input is exit") - conversation_history = await ctx.store.get( - "conversation_history", default=[] - ) return StopEvent( result={ "success": True, @@ -312,22 +285,20 @@ async def process_user_response( } ) - # Get session info from context - index_name = await ctx.store.get("index_name") - session_id = await ctx.store.get("session_id") - cache_key = f"{index_name}_{session_id}" - - # Get chat engine - chat_engine = self.chat_engines[cache_key] + chat_engine = get_chat_engine(index_name) - # Process query with chat engine (streaming) - stream_response = await chat_engine.astream_chat(user_input) + stream_response = await chat_engine.astream_chat( + user_input, chat_history=initial_state.chat_messages() + ) full_text = "" # Emit streaming deltas to the event stream async for token in stream_response.async_response_gen(): full_text += token ctx.write_event_to_stream(ChatDeltaEvent(delta=token)) + await asyncio.sleep( + 0 + ) # Temp workaround. Some sort of bug in the server drops events without flushing the event loop # Extract source nodes for citations sources = [] @@ -338,25 +309,27 @@ async def process_user_response( text=node.text[:197] + "..." 
if len(node.text) >= 200 else node.text, - score=node.score, + score=float(node.score) if node.score else 0.0, metadata=node.metadata, ) ) - # Update conversation history - response = ConversationMessage( - role="assistant", text=full_text.strip(), sources=sources - ) - with ctx.store.edit_state() as state: - messages = state.get("conversation_history", default=[]) - messages.append(response) - state.set("conversation_history", messages) - # After streaming completes, emit a summary response event to stream for frontend/main printing - ctx.write_event_to_stream(AppendChatMessage(message=response)) - return InputRequiredEvent(prefix="[waiting for user message]") + assistant_response = ConversationMessage( + role="assistant", text=full_text, sources=sources + ) + ctx.write_event_to_stream(AppendChatMessage(message=assistant_response)) + async with ctx.store.edit_state() as state: + state.conversation_history.extend( + [ + ConversationMessage(role="user", text=user_input), + assistant_response, + ] + ) + return InputRequiredEvent() except Exception as e: + logger.error(f"Error processing query: {str(e)}", exc_info=True) return StopEvent( result={"success": False, "error": f"Error processing query: {str(e)}"} ) diff --git a/ui/package.json.jinja b/ui/package.json.jinja index 2369ee0..c6382a4 100644 --- a/ui/package.json.jinja +++ b/ui/package.json.jinja @@ -17,6 +17,7 @@ "@llamaindex/ui": "^2.1.1", "@llamaindex/workflows-client": "^1.2.0", "@radix-ui/themes": "^3.2.1", + "idb": "^8.0.3", "llama-cloud-services": "^0.3.6", "lucide-react": "^0.544.0", "react": "^19.0.0", diff --git a/ui/src/components/ChatBot.tsx b/ui/src/components/ChatBot.tsx index e65f929..af93053 100644 --- a/ui/src/components/ChatBot.tsx +++ b/ui/src/components/ChatBot.tsx @@ -1,167 +1,44 @@ // This is a temporary chatbot component that is used to test the chatbot functionality. // LlamaIndex will replace it with better chatbot component. 
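// Streaming model used by the replacement hook (ui/src/libs/useChatbot.ts,
// added in this patch): ChatDeltaEvent tokens accumulate onto a trailing
// isPartial message, and the final AppendChatMessage replaces that partial
// bubble with the stored message plus its sources. A condensed sketch of the
// fold that toMessages() implements:
//
//   if (type.endsWith(".ChatDeltaEvent")) last.content += data?.delta ?? "";
//   else if (type.endsWith(".AppendChatMessage")) { /* pop partial, push final */ }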
-import { useState, useRef, useEffect, FormEvent, KeyboardEvent } from "react"; -import { - Send, - Loader2, - Bot, - User, - MessageSquare, - Trash2, - RefreshCw, -} from "lucide-react"; +import { useChatbot } from "@/libs/useChatbot"; import { Button, - Input, - ScrollArea, Card, CardContent, cn, - useWorkflowRun, - useWorkflowHandler, + Input, + ScrollArea, } from "@llamaindex/ui"; -import { AGENT_NAME } from "../libs/config"; -import { toHumanResponseRawEvent } from "@/libs/utils"; - -type Role = "user" | "assistant"; -interface Message { - id: string; - role: Role; - content: string; - timestamp: Date; - error?: boolean; -} -export default function ChatBot() { - const { runWorkflow } = useWorkflowRun(); - const messagesEndRef = useRef(null); +import { + Bot, + Loader2, + MessageSquare, + RefreshCw, + Send, + User, +} from "lucide-react"; +import { FormEvent, KeyboardEvent, useEffect, useRef } from "react"; + +export default function ChatBot({ + handlerId, + onHandlerCreated, +}: { + handlerId?: string; + onHandlerCreated?: (handlerId: string) => void; +}) { const inputRef = useRef(null); - const [messages, setMessages] = useState([]); - const [input, setInput] = useState(""); - const [isLoading, setIsLoading] = useState(false); - const [handlerId, setHandlerId] = useState(null); - const lastProcessedEventIndexRef = useRef(0); - const [canSend, setCanSend] = useState(false); - const streamingMessageIndexRef = useRef(null); - - // Deployment + auth setup - const deployment = AGENT_NAME || "document-qa"; - const platformToken = (import.meta as any).env?.VITE_LLAMA_CLOUD_API_KEY as - | string - | undefined; - const projectId = (import.meta as any).env?.VITE_LLAMA_DEPLOY_PROJECT_ID as - | string - | undefined; - const defaultIndexName = - (import.meta as any).env?.VITE_DEFAULT_INDEX_NAME || "document_qa_index"; - const sessionIdRef = useRef( - `chat-${Math.random().toString(36).slice(2)}-${Date.now()}`, - ); + const messagesEndRef = useRef(null); + const chatbot = useChatbot({ + handlerId, + onHandlerCreated, + focusInput: () => { + inputRef.current?.focus(); + }, + }); // UI text defaults const title = "AI Document Assistant"; const placeholder = "Ask me anything about your documents..."; - const welcomeMessage = - "Welcome! 
👋 Upload a document with the control above, then ask questions here."; - - // Helper functions for message management - const appendMessage = (role: Role, msg: string): void => { - setMessages((prev) => { - const id = `${role}-stream-${Date.now()}`; - const idx = prev.length; - streamingMessageIndexRef.current = idx; - return [ - ...prev, - { - id, - role, - content: msg, - timestamp: new Date(), - }, - ]; - }); - }; - - const updateMessage = (index: number, message: string) => { - setMessages((prev) => { - if (index < 0 || index >= prev.length) return prev; - const copy = [...prev]; - const existing = copy[index]; - copy[index] = { ...existing, content: message }; - return copy; - }); - }; - - // Initialize with welcome message - useEffect(() => { - if (messages.length === 0) { - const welcomeMsg: Message = { - id: "welcome", - role: "assistant", - content: welcomeMessage, - timestamp: new Date(), - }; - setMessages([welcomeMsg]); - } - }, []); - - // Create chat task on init - useEffect(() => { - (async () => { - if (!handlerId) { - const handler = await runWorkflow("chat", { - index_name: defaultIndexName, - session_id: sessionIdRef.current, - }); - setHandlerId(handler.handler_id); - } - })(); - }, []); - - // Subscribe to task/events using hook (auto stream when handler exists) - const { events } = useWorkflowHandler(handlerId ?? "", Boolean(handlerId)); - - // Process streamed events into messages - useEffect(() => { - if (!events || events.length === 0) return; - let startIdx = lastProcessedEventIndexRef.current; - if (startIdx < 0) startIdx = 0; - if (startIdx >= events.length) return; - - for (let i = startIdx; i < events.length; i++) { - const ev: any = events[i]; - const type = ev?.type as string | undefined; - const rawData = ev?.data as any; - if (!type) continue; - const data = (rawData && (rawData._data ?? rawData)) as any; - - if (type.includes("ChatDeltaEvent")) { - const delta: string = data?.delta ?? ""; - if (!delta) continue; - if (streamingMessageIndexRef.current === null) { - appendMessage("assistant", delta); - } else { - const idx = streamingMessageIndexRef.current; - const current = messages[idx!]?.content ?? ""; - if (current === "Thinking...") { - updateMessage(idx!, delta); - } else { - updateMessage(idx!, current + delta); - } - } - } else if (type.includes("ChatResponseEvent")) { - // finalize current stream - streamingMessageIndexRef.current = null; - } else if (type.includes("InputRequiredEvent")) { - // ready for next user input; enable send - setCanSend(true); - setIsLoading(false); - inputRef.current?.focus(); - } else if (type.includes("StopEvent")) { - // finished; no summary bubble needed (chat response already streamed) - } - } - lastProcessedEventIndexRef.current = events.length; - }, [events, messages]); const scrollToBottom = () => { messagesEndRef.current?.scrollIntoView({ behavior: "smooth" }); @@ -169,92 +46,11 @@ export default function ChatBot() { useEffect(() => { scrollToBottom(); - }, [messages]); - - // No manual SSE cleanup needed - - const getCommonHeaders = () => ({ - ...(platformToken ? { authorization: `Bearer ${platformToken}` } : {}), - ...(projectId ? 
{ "Project-Id": projectId } : {}), - }); - - const startChatIfNeeded = async (): Promise => { - if (handlerId) return handlerId; - const handler = await runWorkflow("chat", { - index_name: defaultIndexName, - session_id: sessionIdRef.current, - }); - setHandlerId(handler.handler_id); - return handler.handler_id; - }; - - // Removed manual SSE ensureEventStream; hook handles streaming + }, [chatbot.messages]); const handleSubmit = async (e: FormEvent) => { e.preventDefault(); - - const trimmedInput = input.trim(); - if (!trimmedInput || isLoading || !canSend) return; - - // Add user message - const userMessage: Message = { - id: `user-${Date.now()}`, - role: "user", - content: trimmedInput, - timestamp: new Date(), - }; - - const newMessages = [...messages, userMessage]; - setMessages(newMessages); - setInput(""); - setIsLoading(true); - setCanSend(false); - - // Immediately create an assistant placeholder to avoid visual gap before deltas - if (streamingMessageIndexRef.current === null) { - appendMessage("assistant", "Thinking..."); - } - - try { - // Ensure chat handler exists (created on init) - const hid = await startChatIfNeeded(); - - // Send user input as HumanResponseEvent - const postRes = await fetch(`/deployments/${deployment}/events/${hid}`, { - method: "POST", - headers: { - "Content-Type": "application/json", - ...getCommonHeaders(), - }, - body: JSON.stringify({ - event: JSON.stringify(toHumanResponseRawEvent(trimmedInput)), - }), - }); - if (!postRes.ok) { - throw new Error( - `Failed to send message: ${postRes.status} ${postRes.statusText}`, - ); - } - - // The assistant reply will be streamed by useWorkflowTask and appended incrementally - } catch (err) { - console.error("Chat error:", err); - - // Add error message - const errorMessage: Message = { - id: `error-${Date.now()}`, - role: "assistant", - content: `Sorry, I encountered an error: ${err instanceof Error ? err.message : "Unknown error"}. Please try again.`, - timestamp: new Date(), - error: true, - }; - - setMessages((prev) => [...prev, errorMessage]); - } finally { - setIsLoading(false); - // Focus back on input - inputRef.current?.focus(); - } + await chatbot.submit(); }; const handleKeyDown = (e: KeyboardEvent) => { @@ -265,32 +61,6 @@ export default function ChatBot() { } }; - const clearChat = () => { - setMessages([ - { - id: "welcome", - role: "assistant" as const, - content: welcomeMessage, - timestamp: new Date(), - }, - ]); - setInput(""); - inputRef.current?.focus(); - }; - - const retryLastMessage = () => { - const lastUserMessage = messages.filter((m) => m.role === "user").pop(); - if (lastUserMessage) { - // Remove the last assistant message if it was an error - const lastMessage = messages[messages.length - 1]; - if (lastMessage.role === "assistant" && lastMessage.error) { - setMessages((prev) => prev.slice(0, -1)); - } - setInput(lastUserMessage.content); - inputRef.current?.focus(); - } - }; - return (
{title} - {isLoading && ( - - Thinking... - - )}
- {messages.some((m) => m.error) && ( + {chatbot.messages.some((m) => m.error) && ( )} - {messages.length > 0 && ( - - )}
{/* Messages */} - {messages.length === 0 ? ( + {chatbot.messages.length === 0 ? (
@@ -350,9 +106,9 @@ export default function ChatBot() {
) : (
- {messages.map((message) => ( + {chatbot.messages.map((message, i) => (
-

- {message.content} -

+ {message.isPartial && !message.content ? ( + + ) : ( + <> +

+ {message.content} +

+ + )}

))} - - {isLoading && ( -

-
- -
- - -
-
- - - -
-
-
-
-
- )}
)} @@ -462,21 +196,23 @@ export default function ChatBot() { setInput(e.target.value)} + value={chatbot.input} + onChange={(e) => chatbot.setInput(e.target.value)} onKeyDown={handleKeyDown} placeholder={placeholder} - disabled={isLoading} + disabled={chatbot.isLoading} className="flex-1" autoFocus />
); } + +const LoadingDots = () => { + return ( +
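+    /* Three dots with staggered bounce delays; rendered while an assistant
+       message is partial and has no content yet (the "thinking" placeholder). */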
+
+ + + +
+
+ ); +}; diff --git a/ui/src/components/Sidebar.tsx b/ui/src/components/Sidebar.tsx new file mode 100644 index 0000000..ec6bcc4 --- /dev/null +++ b/ui/src/components/Sidebar.tsx @@ -0,0 +1,119 @@ +import { Plus, X } from "lucide-react"; +import { Button, ScrollArea, cn } from "@llamaindex/ui"; +import { ChatHistory, UseChatHistory } from "../libs/useChatHistory"; + +interface SidebarProps { + className?: string; + chatHistory: UseChatHistory; +} + +export default function Sidebar({ className, chatHistory }: SidebarProps) { + const { + loading, + chats, + selectedChatId, + setSelectedChatId, + deleteChat, + createNewChat, + } = chatHistory; + + const formatTimestamp = (timestamp: string): string => { + const date = new Date(timestamp); + const now = new Date(); + const isToday = date.toDateString() === now.toDateString(); + + const timeString = date.toLocaleTimeString([], { + hour: "2-digit", + minute: "2-digit", + second: "2-digit", + }); + + if (isToday) { + return timeString; + } else { + const dateString = date.toLocaleDateString(); + return `${dateString} ${timeString}`; + } + }; + + const handleChatSelect = (chat: ChatHistory): void => { + setSelectedChatId(chat.handlerId); + }; + + const handleDeleteChat = (e: React.MouseEvent, handlerId: string): void => { + e.stopPropagation(); + deleteChat(handlerId); + }; + + return ( +
+ {/* Header */} +
+
+

+ Chats +

+ +
+
+ + {/* Chat List */} + + {loading ? ( +
+
+ Loading... +
+
+ ) : chats.length === 0 ? ( +
+
+ No chats yet +
+
+ ) : ( +
+ {chats.map((chat) => ( +
handleChatSelect(chat)} + > +
+
+ {formatTimestamp(chat.timestamp)} +
+
+ +
+ ))} +
+ )} +
+
+ ); +} diff --git a/ui/src/libs/events.ts.jinja b/ui/src/libs/events.ts.jinja new file mode 100644 index 0000000..8af2a1d --- /dev/null +++ b/ui/src/libs/events.ts.jinja @@ -0,0 +1,15 @@ +import { WorkflowEvent } from "@llamaindex/ui"; + +export function createQueryConversationHistoryEvent(): WorkflowEvent { + return { + data: {}, + type: "{{ project_name_snake }}.qa_workflows.QueryConversationHistoryEvent", + }; +} + +export function createHumanResponseEvent(response: string): WorkflowEvent { + return { + data: { _data: { response } }, + type: "{{ project_name_snake }}.qa_workflows.HumanResponseEvent", + }; +} diff --git a/ui/src/libs/useChatHistory.ts b/ui/src/libs/useChatHistory.ts new file mode 100644 index 0000000..2ff9a0f --- /dev/null +++ b/ui/src/libs/useChatHistory.ts @@ -0,0 +1,187 @@ +import { IDBPDatabase, openDB } from "idb"; +import { useEffect, useState } from "react"; + +export interface ChatHistory { + handlerId: string; + timestamp: string; +} + +export interface UseChatHistory { + loading: boolean; + addChat(handlerId: string): void; + deleteChat(handlerId: string): void; + chats: ChatHistory[]; + selectedChatId: string | null; + setSelectedChatId(handlerId: string): void; + createNewChat(): void; + // forces a new chat + chatCounter: number; +} + +const DB_NAME = "chat-history"; +const DB_VERSION = 1; +const STORE_NAME = "chats"; + +/** + * Hook that tracks workflow handler ids, to use as markers of a chat conversation that can be reloaded. + * Stores chats in IndexedDB + * @returns + */ +export function useChatHistory(): UseChatHistory { + const [loading, setLoading] = useState(true); + const [chatHistory, setChatHistory] = useState([]); + const [selectedChatHandlerId, setSelectedChatHandlerId] = useState< + string | null + >(null); + const [db, setDb] = useState | null>(null); + const [chatCounter, setChatCounter] = useState(0); + + // Initialize database + useEffect(() => { + let thisDb: IDBPDatabase | null = null; + + const initDb = async () => { + try { + thisDb = await openDB(DB_NAME, DB_VERSION, { + upgrade(db) { + if (!db.objectStoreNames.contains(STORE_NAME)) { + const store = db.createObjectStore(STORE_NAME, { + keyPath: "handlerId", + }); + store.createIndex("timestamp", "timestamp"); + } + }, + }); + setDb(thisDb); + } catch (error) { + console.error("Failed to initialize database:", error); + setLoading(false); + } + }; + + initDb(); + + return () => { + thisDb?.close(); + }; + }, []); + + // Load chat history when database is ready + useEffect(() => { + if (!db) return; + + const loadChats = async () => { + try { + setLoading(true); + const chats = await getChatsFromDb(); + setChatHistory(chats); + + // Initialize selectedChat to the latest chat (first in sorted array) + if (chats.length > 0 && !selectedChatHandlerId) { + setSelectedChatHandlerId(chats[0].handlerId); + } + } catch (error) { + console.error("Failed to load chat history:", error); + } finally { + setLoading(false); + } + }; + + loadChats(); + }, [db]); + + const getChatsFromDb = async (): Promise => { + if (!db) return []; + + try { + const transaction = db.transaction(STORE_NAME, "readonly"); + const store = transaction.objectStore(STORE_NAME); + const index = store.index("timestamp"); + const chats = await index.getAll(); + + // Sort by timestamp descending (most recent first) + return chats.sort( + (a, b) => + new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime(), + ); + } catch (error) { + console.error("Failed to get chats from database:", error); + return []; + } + }; + + 
const addChat = async (handlerId: string): Promise => { + if (!db) return; + + try { + const chat: ChatHistory = { + handlerId, + timestamp: new Date().toISOString(), + }; + + const transaction = db.transaction(STORE_NAME, "readwrite"); + const store = transaction.objectStore(STORE_NAME); + await store.put(chat); + + // Update local state + setChatHistory((prev) => [ + chat, + ...prev.filter((c) => c.handlerId !== handlerId), + ]); + + // Set as selected chat if it's the first chat or if no chat is currently selected + if (!selectedChatHandlerId) { + setSelectedChatHandlerId(chat.handlerId); + } + } catch (error) { + console.error("Failed to add chat to database:", error); + } + }; + + const deleteChat = async (handlerId: string): Promise => { + if (!db) return; + + try { + const transaction = db.transaction(STORE_NAME, "readwrite"); + const store = transaction.objectStore(STORE_NAME); + await store.delete(handlerId); + + // Update local state + setChatHistory((prev) => prev.filter((c) => c.handlerId !== handlerId)); + + // If the deleted chat was selected, select the next available chat or clear selection + if (selectedChatHandlerId === handlerId) { + const remainingChats = chatHistory.filter( + (c) => c.handlerId !== handlerId, + ); + if (remainingChats.length > 0) { + setSelectedChatHandlerId(remainingChats[0].handlerId); + } else { + setSelectedChatHandlerId(null); + } + } + } catch (error) { + console.error("Failed to delete chat from database:", error); + } + }; + + const getChats = (): ChatHistory[] => { + return chatHistory; + }; + + const createNewChat = (): void => { + setSelectedChatHandlerId(null); + setChatCounter((prev) => prev + 1); + }; + + return { + loading, + addChat, + chats: chatHistory, + selectedChatId: selectedChatHandlerId, + setSelectedChatId: setSelectedChatHandlerId, + deleteChat, + createNewChat, + chatCounter, + }; +} diff --git a/ui/src/libs/useChatWorkflowHandler.ts b/ui/src/libs/useChatWorkflowHandler.ts new file mode 100644 index 0000000..1ad6019 --- /dev/null +++ b/ui/src/libs/useChatWorkflowHandler.ts @@ -0,0 +1,69 @@ +import { + useWorkflowHandler, + useWorkflowRun, + useHandlerStore, +} from "@llamaindex/ui"; +import { useEffect, useRef, useState } from "react"; +import { INDEX_NAME } from "./config"; +import { createQueryConversationHistoryEvent } from "./events"; + +/** + * Creates a new chat conversation if no handlerId is provided + */ +export function useChatWorkflowHandler({ + handlerId, + onHandlerCreated, +}: { + handlerId?: string; + onHandlerCreated?: (handlerId: string) => void; +}): ReturnType { + const create = useWorkflowRun(); + const isQueryingWorkflow = useRef(false); + const [thisHandlerId, setThisHandlerId] = useState( + handlerId, + ); + const workflowHandler = useWorkflowHandler(thisHandlerId ?? "", true); + const store = useHandlerStore(); + + const createHandler = async () => { + if (isQueryingWorkflow.current) return; + isQueryingWorkflow.current = true; + try { + const handler = await create.runWorkflow("chat", { + index_name: INDEX_NAME, + }); + setThisHandlerId(handler.handler_id); + onHandlerCreated?.(handler.handler_id); + } finally { + isQueryingWorkflow.current = false; + } + }; + const replayHandler = async () => { + if (isQueryingWorkflow.current) return; + isQueryingWorkflow.current = true; + try { + await workflowHandler.sendEvent(createQueryConversationHistoryEvent()); + } finally { + isQueryingWorkflow.current = false; + } + }; + + useEffect(() => { + if (!thisHandlerId) { + createHandler(); + } else { + // kick it. 
This is a temp workaround for a bug: existing handlers do not appear to be auto-subscribed, so sync the store and subscribe explicitly.
      store.sync().then(() => {
        store.subscribe(thisHandlerId);
      });
    }
  }, [thisHandlerId]);

  useEffect(() => {
    if (thisHandlerId && workflowHandler.isStreaming) {
      replayHandler();
    }
  }, [thisHandlerId, workflowHandler.isStreaming]);

  return workflowHandler;
}
diff --git a/ui/src/libs/useChatbot.ts b/ui/src/libs/useChatbot.ts
new file mode 100644
index 0000000..2fa88dc
--- /dev/null
+++ b/ui/src/libs/useChatbot.ts
@@ -0,0 +1,245 @@
// This is a temporary chatbot component that is used to test the chatbot functionality.
// LlamaIndex will replace it with a better chatbot component.
import { WorkflowEvent } from "@llamaindex/ui";
import { useEffect, useRef, useState } from "react";
import { useChatWorkflowHandler } from "./useChatWorkflowHandler";
import { createHumanResponseEvent } from "./events";

export type Role = "user" | "assistant";
export interface Message {
  role: Role;
  isPartial?: boolean;
  content: string;
  timestamp: Date;
  error?: boolean;
}

export interface ChatbotState {
  submit(): Promise<void>;
  retryLastMessage: () => void;
  setInput: (input: string) => void;

  messages: Message[];
  input: string;
  isLoading: boolean;
  canSend: boolean;
}

export function useChatbot({
  handlerId,
  onHandlerCreated,
  focusInput,
}: {
  handlerId?: string;
  onHandlerCreated?: (handlerId: string) => void;
  focusInput?: () => void;
}): ChatbotState {
  const workflowHandler = useChatWorkflowHandler({
    handlerId,
    onHandlerCreated,
  });
  const { events } = workflowHandler;
  const [messages, setMessages] = useState<Message[]>([]);
  const [input, setInput] = useState("");
  const [isLoading, setIsLoading] = useState(false);
  const lastProcessedEventIndexRef = useRef(0);
  const [canSend, setCanSend] = useState(false);

  // Whenever the handler id becomes defined or changes, stop loading and
  // allow the next message to be sent
  useEffect(() => {
    if (handlerId) {
      setIsLoading(false);
      setCanSend(true);
    }
  }, [handlerId]);

  const welcomeMessage =
    "Welcome! 
👋 Upload a document with the control above, then ask questions here."; + + // Initialize with welcome message + useEffect(() => { + if (messages.length === 0) { + const welcomeMsg: Message = { + role: "assistant", + content: welcomeMessage, + timestamp: new Date(), + }; + setMessages([welcomeMsg]); + } + }, []); + + // Process streamed events into messages + useEffect(() => { + if (!events || events.length === 0) return; + let startIdx = lastProcessedEventIndexRef.current; + if (startIdx < 0) startIdx = 0; + if (startIdx >= events.length) return; + + const eventsToProcess = events.slice(startIdx); + const newMessages = toMessages(eventsToProcess); + if (newMessages.length > 0) { + setMessages((prev) => mergeMessages(prev, newMessages)); + } + for (const ev of eventsToProcess) { + const type = ev.type; + if (!type) continue; + if (type.endsWith(".InputRequiredEvent")) { + // ready for next user input; enable send + setCanSend(true); + setIsLoading(false); + } else if (type.endsWith(".StopEvent")) { + // finished; no summary bubble needed (chat response already streamed) + } + } + lastProcessedEventIndexRef.current = events.length; + }, [events, messages]); + + const retryLastMessage = () => { + const lastUserMessage = messages.filter((m) => m.role === "user").pop(); + if (lastUserMessage) { + // Remove the last assistant message if it was an error + const lastMessage = messages[messages.length - 1]; + if (lastMessage.role === "assistant" && lastMessage.error) { + setMessages((prev) => prev.slice(0, -1)); + } + setInput(lastUserMessage.content); + focusInput?.(); + } + }; + + const submit = async () => { + const trimmedInput = input.trim(); + if (!trimmedInput || isLoading || !canSend) return; + + // Add user message + const userMessage: Message = { + role: "user", + content: trimmedInput, + timestamp: new Date(), + }; + const placeHolderMessage: Message = { + role: "assistant", + content: "", + timestamp: new Date(), + isPartial: true, + }; + + const newMessages = [...messages, userMessage, placeHolderMessage]; + setMessages(newMessages); + setInput(""); + setIsLoading(true); + setCanSend(false); + + try { + // Send user input as HumanResponseEvent + await workflowHandler.sendEvent(createHumanResponseEvent(trimmedInput)); + } catch (err) { + console.error("Chat error:", err); + + // Add error message + const errorMessage: Message = { + role: "assistant", + content: `Sorry, I encountered an error: ${err instanceof Error ? err.message : "Unknown error"}. Please try again.`, + timestamp: new Date(), + error: true, + }; + + setMessages((prev) => [...prev, errorMessage]); + } finally { + setIsLoading(false); + // Focus back on input + focusInput?.(); + } + }; + + return { + submit, + retryLastMessage, + messages, + input, + setInput, + isLoading, + canSend, + }; +} + +interface AppendChatMessageData { + message: ChatMessage; +} +interface ChatMessage { + role: "user" | "assistant"; + text: string; + sources: { + text: string; + score: number; + metadata: Record; + }[]; + timestamp: string; +} + +function mergeMessages(previous: Message[], current: Message[]): Message[] { + const lastPreviousMessage = previous[previous.length - 1]; + const restPrevious = previous.slice(0, -1); + const firstCurrentMessage = current[0]; + const restCurrent = current.slice(1); + if (!lastPreviousMessage || !firstCurrentMessage) { + return [...previous, ...current]; + } + if (lastPreviousMessage.isPartial && firstCurrentMessage.isPartial) { + const lastContent = + lastPreviousMessage.content === "Thinking..." + ? 
"" + : lastPreviousMessage.content; + const merged = { + ...lastPreviousMessage, + content: lastContent + firstCurrentMessage.content, + }; + return [...restPrevious, merged, ...restCurrent]; + } else if ( + lastPreviousMessage.isPartial && + firstCurrentMessage.role === lastPreviousMessage.role + ) { + return [...restPrevious, firstCurrentMessage, ...restCurrent]; + } else { + return [...previous, ...current]; + } +} + +function toMessages(events: WorkflowEvent[]): Message[] { + const messages: Message[] = []; + for (const ev of events) { + const type = ev.type; + const data = ev.data as any; + const lastMessage = messages[messages.length - 1]; + if (type.endsWith(".ChatDeltaEvent")) { + const delta: string = data?.delta ?? ""; + if (!delta) continue; + if (!lastMessage || !lastMessage.isPartial) { + messages.push({ + role: "assistant", + content: delta, + isPartial: true, + timestamp: new Date(), + }); + } else { + lastMessage.content += delta; + } + } else if (type.endsWith(".AppendChatMessage")) { + if ( + lastMessage && + lastMessage.isPartial && + lastMessage.role === "assistant" + ) { + messages.pop(); + } + const content = ev.data as unknown as AppendChatMessageData; + messages.push({ + role: content.message.role, + content: content.message.text, + timestamp: new Date(content.message.timestamp), + isPartial: false, + }); + } + } + return messages; +} diff --git a/ui/src/pages/Home.tsx b/ui/src/pages/Home.tsx index c8d54b8..1d7d2d8 100644 --- a/ui/src/pages/Home.tsx +++ b/ui/src/pages/Home.tsx @@ -1,8 +1,23 @@ import ChatBot from "../components/ChatBot"; -import { WorkflowTrigger } from "@llamaindex/ui"; +import { + useWorkflowHandlerList, + WorkflowProgressBar, + WorkflowTrigger, +} from "@llamaindex/ui"; import { APP_TITLE, INDEX_NAME } from "../libs/config"; +import { useChatHistory } from "@/libs/useChatHistory"; +import Sidebar from "@/components/Sidebar"; +import { Loader } from "lucide-react"; export default function Home() { + const chatHistory = useChatHistory(); + const handlers = useWorkflowHandlerList("upload"); + const activeHandlers = handlers.handlers.filter( + (h) => h.status === "running" && h.workflowName === "upload", + ); + const anyActiveHandlers = activeHandlers.length > 0; + console.log("activeHandlers", activeHandlers); + console.log("anyActiveHandlers", anyActiveHandlers); return (
@@ -16,21 +31,38 @@ export default function Home() {

-
-
- { - return { - file_id: files[0].fileId, - index_name: INDEX_NAME, - }; - }} - /> -
-
-
- +
+ +
+
+ { + return { + file_id: files[0].fileId, + index_name: INDEX_NAME, + }; + }} + /> + {anyActiveHandlers && ( +
+ +
+ )} +
+
+
+ {!chatHistory.loading && ( + { + chatHistory.addChat(handler); + chatHistory.setSelectedChatId(handler); + }} + /> + )} +
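{/* Keying ChatBot on selectedChatId, with a counter-based fallback for brand
    new chats, forces a full remount when the user switches conversations or
    starts a new one, so no streaming state carries over between sessions. */}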
From 400371c340b3a50d33e6f38795dd88a9731688df Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Tue, 23 Sep 2025 13:38:41 -0400 Subject: [PATCH 11/18] prettier --- test-proj/ui/src/components/ChatBot.tsx | 157 ++++++++++-------------- test-proj/ui/src/components/Sidebar.tsx | 31 +++-- test-proj/ui/src/pages/Home.tsx | 67 +++++----- 3 files changed, 112 insertions(+), 143 deletions(-) diff --git a/test-proj/ui/src/components/ChatBot.tsx b/test-proj/ui/src/components/ChatBot.tsx index af93053..51b9458 100644 --- a/test-proj/ui/src/components/ChatBot.tsx +++ b/test-proj/ui/src/components/ChatBot.tsx @@ -3,8 +3,6 @@ import { useChatbot } from "@/libs/useChatbot"; import { Button, - Card, - CardContent, cn, Input, ScrollArea, @@ -12,7 +10,6 @@ import { import { Bot, Loader2, - MessageSquare, RefreshCw, Send, User, @@ -62,126 +59,97 @@ export default function ChatBot({ }; return ( -
- {/* Header */} -
-
-
- -

- {title} -

-
-
- {chatbot.messages.some((m) => m.error) && ( - - )} -
+
+ {/* Simplified header - only show retry button when needed */} + {chatbot.messages.some((m) => m.error) && ( +
+
-
+ )} {/* Messages */} - + {chatbot.messages.length === 0 ? ( -
+
- -

- No messages yet + +

+ Welcome! 👋 Upload a document with the control above, then ask questions here.

-

- Start a conversation! +

+ Start by uploading a document to begin your conversation

) : ( -
+
{chatbot.messages.map((message, i) => (
{message.role !== "user" && (
- +
)}
- - - {message.isPartial && !message.content ? ( - - ) : ( - <> -

- {message.content} -

- - )} -

- {message.timestamp.toLocaleTimeString()} + {message.isPartial && !message.content ? ( + + ) : ( +

+ {message.content}

-
-
+ )} +

+ {message.timestamp.toLocaleTimeString()} +

+
{message.role === "user" && ( -
- +
+
)}
@@ -192,8 +160,8 @@ export default function ChatBot({ {/* Input */} -
- +
+ -

+

Press Enter to send • Shift+Enter for new line

@@ -232,15 +201,15 @@ const LoadingDots = () => {
diff --git a/test-proj/ui/src/components/Sidebar.tsx b/test-proj/ui/src/components/Sidebar.tsx index ec6bcc4..d2c9262 100644 --- a/test-proj/ui/src/components/Sidebar.tsx +++ b/test-proj/ui/src/components/Sidebar.tsx @@ -48,24 +48,24 @@ export default function Sidebar({ className, chatHistory }: SidebarProps) { return (
{/* Header */} -
+
-

+

Chats

@@ -74,37 +74,42 @@ export default function Sidebar({ className, chatHistory }: SidebarProps) { {loading ? (
-
+
Loading...
) : chats.length === 0 ? (
-
+
No chats yet
) : ( -
+
{chats.map((chat) => (
handleChatSelect(chat)} >
-
+
{formatTimestamp(chat.timestamp)}
+
+
+ +
)} {/* Messages */} - + +
{chatbot.messages.length === 0 ? (
@@ -157,10 +160,12 @@ export default function ChatBot({
)} +
{/* Input */} -
+
+
Press Enter to send • Shift+Enter for new line

+
); diff --git a/test-proj/ui/src/components/Sidebar.tsx b/test-proj/ui/src/components/Sidebar.tsx index d2c9262..b05b3c7 100644 --- a/test-proj/ui/src/components/Sidebar.tsx +++ b/test-proj/ui/src/components/Sidebar.tsx @@ -1,6 +1,7 @@ -import { Plus, X } from "lucide-react"; +import { Plus, X, ChevronLeft, ChevronRight } from "lucide-react"; import { Button, ScrollArea, cn } from "@llamaindex/ui"; import { ChatHistory, UseChatHistory } from "../libs/useChatHistory"; +import { useState } from "react"; interface SidebarProps { className?: string; @@ -8,6 +9,7 @@ interface SidebarProps { } export default function Sidebar({ className, chatHistory }: SidebarProps) { + const [isCollapsed, setIsCollapsed] = useState(false); const { loading, chats, @@ -48,75 +50,132 @@ export default function Sidebar({ className, chatHistory }: SidebarProps) { return (
{/* Header */}
-

- Chats -

- + {!isCollapsed && ( +

+ Chats +

+ )} +
+ {!isCollapsed && ( + + )} + +
{/* Chat List */} - {loading ? ( -
-
- Loading... -
-
- ) : chats.length === 0 ? ( -
-
- No chats yet -
+ {isCollapsed ? ( + // Collapsed state - show dots for each chat +
+ {!loading && chats.length > 0 && ( + <> + {chats.map((chat) => ( +
handleChatSelect(chat)} + title={formatTimestamp(chat.timestamp)} + > +
+
+ ))} + + + )}
) : ( -
- {chats.map((chat) => ( -
handleChatSelect(chat)} - > -
-
- {formatTimestamp(chat.timestamp)} -
+ // Expanded state + <> + {loading ? ( +
+
+ Loading...
-
- ))} -
+ ) : chats.length === 0 ? ( +
+
+ No chats yet +
+
+ ) : ( +
+ {chats.map((chat) => ( +
handleChatSelect(chat)} + > +
+
+ {formatTimestamp(chat.timestamp)} +
+
+ +
+ ))} +
+ )} + )}
diff --git a/test-proj/ui/src/libs/useChatHistory.ts b/test-proj/ui/src/libs/useChatHistory.ts index 2ff9a0f..92a094c 100644 --- a/test-proj/ui/src/libs/useChatHistory.ts +++ b/test-proj/ui/src/libs/useChatHistory.ts @@ -156,8 +156,10 @@ export function useChatHistory(): UseChatHistory { ); if (remainingChats.length > 0) { setSelectedChatHandlerId(remainingChats[0].handlerId); + setChatCounter((prev) => prev + 1); } else { setSelectedChatHandlerId(null); + setChatCounter((prev) => prev + 1); } } } catch (error) { @@ -165,10 +167,6 @@ export function useChatHistory(): UseChatHistory { } }; - const getChats = (): ChatHistory[] => { - return chatHistory; - }; - const createNewChat = (): void => { setSelectedChatHandlerId(null); setChatCounter((prev) => prev + 1); diff --git a/test-proj/ui/src/pages/Home.tsx b/test-proj/ui/src/pages/Home.tsx index 9fc7b08..3ba3940 100644 --- a/test-proj/ui/src/pages/Home.tsx +++ b/test-proj/ui/src/pages/Home.tsx @@ -1,9 +1,5 @@ import ChatBot from "../components/ChatBot"; -import { - useWorkflowHandlerList, - WorkflowProgressBar, - WorkflowTrigger, -} from "@llamaindex/ui"; +import { useWorkflowHandlerList, WorkflowTrigger } from "@llamaindex/ui"; import { APP_TITLE, INDEX_NAME } from "../libs/config"; import { useChatHistory } from "@/libs/useChatHistory"; import Sidebar from "@/components/Sidebar"; @@ -16,6 +12,7 @@ export default function Home() { (h) => h.status === "running" && h.workflowName === "upload" ); const anyActiveHandlers = activeHandlers.length > 0; + return (
From cba35796c517693e28cd20c5210bd57e74e52932 Mon Sep 17 00:00:00 2001 From: Adrian Lyjak Date: Tue, 23 Sep 2025 14:06:29 -0400 Subject: [PATCH 13/18] fix shift+enter --- test-proj/ui/src/components/ChatBot.tsx | 31 +++- ui/index.html | 6 + ui/src/App.tsx | 21 +++ ui/src/components/ChatBot.tsx | 190 ++++++++++++------------ ui/src/components/Sidebar.tsx | 166 ++++++++++++++------- ui/src/libs/useChatHistory.ts | 6 +- ui/src/pages/Home.tsx | 74 ++++----- 7 files changed, 294 insertions(+), 200 deletions(-) diff --git a/test-proj/ui/src/components/ChatBot.tsx b/test-proj/ui/src/components/ChatBot.tsx index a31e3d2..029cdfc 100644 --- a/test-proj/ui/src/components/ChatBot.tsx +++ b/test-proj/ui/src/components/ChatBot.tsx @@ -4,8 +4,8 @@ import { useChatbot } from "@/libs/useChatbot"; import { Button, cn, - Input, ScrollArea, + Textarea, } from "@llamaindex/ui"; import { Bot, @@ -23,7 +23,7 @@ export default function ChatBot({ handlerId?: string; onHandlerCreated?: (handlerId: string) => void; }) { - const inputRef = useRef(null); + const inputRef = useRef(null); const messagesEndRef = useRef(null); const chatbot = useChatbot({ handlerId, @@ -45,17 +45,35 @@ export default function ChatBot({ scrollToBottom(); }, [chatbot.messages]); + // Reset textarea height when input is cleared + useEffect(() => { + if (!chatbot.input && inputRef.current) { + inputRef.current.style.height = '48px'; // Reset to initial height + } + }, [chatbot.input]); + const handleSubmit = async (e: FormEvent) => { e.preventDefault(); await chatbot.submit(); }; - const handleKeyDown = (e: KeyboardEvent) => { + const handleKeyDown = (e: KeyboardEvent) => { // Submit on Enter (without Shift) if (e.key === "Enter" && !e.shiftKey) { e.preventDefault(); handleSubmit(e as any); } + // Allow Shift+Enter to create new line (default behavior) + }; + + const adjustTextareaHeight = (textarea: HTMLTextAreaElement) => { + textarea.style.height = 'auto'; + textarea.style.height = Math.min(textarea.scrollHeight, 128) + 'px'; // 128px = max-h-32 + }; + + const handleInputChange = (e: React.ChangeEvent) => { + chatbot.setInput(e.target.value); + adjustTextareaHeight(e.target); }; return ( @@ -167,15 +185,16 @@ export default function ChatBot({
- chatbot.setInput(e.target.value)} + onChange={handleInputChange} onKeyDown={handleKeyDown} placeholder={placeholder} disabled={chatbot.isLoading} - className="flex-1 h-12 rounded-xl border-2 focus:border-primary" + className="flex-1 min-h-12 max-h-32 rounded-xl border-2 focus:border-primary resize-none overflow-hidden" autoFocus + style={{ height: '48px' }} // Initial height (min-h-12) /> - )} +
+ {/* Simplified header - only show retry button when needed */} + {chatbot.messages.some((m) => m.error) && ( +
+
+
-
+ )} {/* Messages */} - + +
{chatbot.messages.length === 0 ? ( -
+
- -

- No messages yet + +

+ Welcome! 👋 Upload a document with the control above, then ask questions here.

-

- Start a conversation! +

+ Start by uploading a document to begin your conversation

) : ( -
+
{chatbot.messages.map((message, i) => (
{message.role !== "user" && (
- +
)}
- - - {message.isPartial && !message.content ? ( - - ) : ( - <> -

- {message.content} -

- - )} -

- {message.timestamp.toLocaleTimeString()} + {message.isPartial && !message.content ? ( + + ) : ( +

+ {message.content}

-
-
+ )} +

+ {message.timestamp.toLocaleTimeString()} +

+
{message.role === "user" && ( -
- +
+
)}
@@ -189,20 +178,23 @@ export default function ChatBot({
)} +
{/* Input */} -
- - +
+ +