Merged
Changes from all commits
21 commits
55f731d
fix: update GraphQL mutation input type and clean up comments and add…
NarwhalChen Feb 27, 2025
f9b51a5
fix: fixing some bugs
NarwhalChen Mar 9, 2025
96eef24
fix: fix typewritter bug
NarwhalChen Mar 9, 2025
8df0262
fix: update nullable fields in chat models and improve stream handlin…
NarwhalChen Mar 9, 2025
8912aa8
Merge branch 'main' into feat-frontend-interactive-chat
NarwhalChen Mar 14, 2025
f9daff6
Merge branch 'main' into feat-frontend-interactive-chat
NarwhalChen Mar 17, 2025
e135500
feat: adding interactive chat and update chatstream
NarwhalChen Mar 17, 2025
0090d22
refactor(backend): remove redundant message saving in chat controller
NarwhalChen Mar 17, 2025
fddbe0b
feat(frontend): add refreshProjects function to managerAgent and useC…
NarwhalChen Mar 17, 2025
51728e2
feat(frontend): implement saveThinkingProcess helper and update messa…
NarwhalChen Mar 17, 2025
75af9cb
feat(frontend): enhance managerAgent to accumulate and save thoughts …
NarwhalChen Mar 18, 2025
cc4366a
feat(frontend): enhance saveThinkingProcess to support typewriter eff…
NarwhalChen Mar 18, 2025
38bb87f
feat(frontend): update saveThinkingProcess to append newlines for con…
NarwhalChen Mar 18, 2025
2306999
feat(frontend): add editorRef and setFilePath to context and update r…
NarwhalChen Mar 18, 2025
f56ce7d
feat(frontend): enhance editFileTool to parse JSON strings and store …
NarwhalChen Mar 18, 2025
90df595
feat(frontend): update CodeEngine to send unescaped content in newCon…
NarwhalChen Mar 18, 2025
e4f66ec
feat(frontend): add UNRELATED task type to categorize non-technical r…
NarwhalChen Mar 18, 2025
464d0ea
feat(frontend): refactor task type handling in managerAgent and agent…
NarwhalChen Mar 18, 2025
57e4b23
feat(frontend): enhance task analysis and validation in managerAgent …
NarwhalChen Mar 18, 2025
e7b2935
refactor(frontend): remove console logs for cleaner code and improved…
NarwhalChen Mar 21, 2025
f26d1d6
feat(frontend): add error handling with toast notifications in manage…
NarwhalChen Mar 21, 2025
24 changes: 2 additions & 22 deletions backend/src/chat/chat.controller.ts
@@ -22,13 +22,6 @@ export class ChatController {
@GetAuthToken() userId: string,
) {
try {
// Save user's message first
await this.chatService.saveMessage(
chatDto.chatId,
chatDto.message,
MessageRole.User,
);

if (chatDto.stream) {
// Streaming response
res.setHeader('Content-Type', 'text/event-stream');
@@ -39,6 +32,7 @@
chatId: chatDto.chatId,
message: chatDto.message,
model: chatDto.model,
role: MessageRole.User,
});

let fullResponse = '';
@@ -51,13 +45,6 @@
}
}

// Save the complete message
await this.chatService.saveMessage(
chatDto.chatId,
fullResponse,
MessageRole.Assistant,
);

res.write('data: [DONE]\n\n');
res.end();
} else {
@@ -66,15 +53,8 @@
chatId: chatDto.chatId,
message: chatDto.message,
model: chatDto.model,
role: MessageRole.User,
});

// Save the complete message
await this.chatService.saveMessage(
chatDto.chatId,
response,
MessageRole.Assistant,
);

res.json({ content: response });
}
} catch (error) {
20 changes: 10 additions & 10 deletions backend/src/chat/chat.model.ts
@@ -75,11 +75,11 @@ class ChatCompletionDelta {

@ObjectType('ChatCompletionChoiceType')
class ChatCompletionChoice {
@Field()
index: number;
@Field({ nullable: true })
index: number | null;

@Field(() => ChatCompletionDelta)
delta: ChatCompletionDelta;
@Field(() => ChatCompletionDelta, { nullable: true })
delta: ChatCompletionDelta | null;

@Field({ nullable: true })
finishReason: string | null;
@@ -90,14 +90,14 @@ export class ChatCompletionChunk {
@Field()
id: string;

@Field()
object: string;
@Field({ nullable: true })
object: string | null;

@Field()
created: number;
@Field({ nullable: true })
created: number | null;

@Field()
model: string;
@Field({ nullable: true })
model: string | null;

@Field({ nullable: true })
systemFingerprint: string | null;
82 changes: 58 additions & 24 deletions backend/src/chat/chat.resolver.ts
@@ -1,8 +1,8 @@
import { Resolver, Subscription, Args, Query, Mutation } from '@nestjs/graphql';
import { Chat, ChatCompletionChunk } from './chat.model';
import { Chat, ChatCompletionChunk, StreamStatus } from './chat.model';
import { ChatProxyService, ChatService } from './chat.service';
import { UserService } from 'src/user/user.service';
import { Message, MessageRole } from './message.model';
import { Message } from './message.model';
import {
ChatInput,
NewChatInput,
@@ -12,6 +12,7 @@ import { GetUserIdFromToken } from 'src/decorator/get-auth-token.decorator';
import { Inject, Logger } from '@nestjs/common';
import { JWTAuth } from 'src/decorator/jwt-auth.decorator';
import { PubSubEngine } from 'graphql-subscriptions';
import { Project } from 'src/project/project.model';
@Resolver('Chat')
export class ChatResolver {
private readonly logger = new Logger('ChatResolver');
@@ -31,45 +32,65 @@ export class ChatResolver {
resolve: (payload) => payload.chatStream,
})
async chatStream(@Args('input') input: ChatInput) {
return this.pubSub.asyncIterator(`chat_stream_${input.chatId}`);
const asyncIterator = this.pubSub.asyncIterator(
`chat_stream_${input.chatId}`,
);
return asyncIterator;
}

@Mutation(() => Boolean)
@JWTAuth()
async triggerChatStream(@Args('input') input: ChatInput): Promise<boolean> {
async saveMessage(@Args('input') input: ChatInput): Promise<boolean> {
try {
await this.chatService.saveMessage(
input.chatId,
input.message,
MessageRole.User,
input.role,
);

return true;
} catch (error) {
this.logger.error('Error in saveMessage:', error);
throw error;
}
}
@Mutation(() => Boolean)
@JWTAuth()
async triggerChatStream(@Args('input') input: ChatInput): Promise<boolean> {
try {
const iterator = this.chatProxyService.streamChat(input);
let accumulatedContent = '';

for await (const chunk of iterator) {
if (chunk) {
const enhancedChunk = {
...chunk,
chatId: input.chatId,
};
try {
for await (const chunk of iterator) {
console.log('received chunk:', chunk);
if (chunk) {
const enhancedChunk = {
...chunk,
chatId: input.chatId,
};

await this.pubSub.publish(`chat_stream_${input.chatId}`, {
chatStream: enhancedChunk,
});

if (chunk.choices?.[0]?.delta?.content) {
accumulatedContent += chunk.choices[0].delta.content;
}
}
}
} finally {
const finalChunk = await iterator.return();
console.log('finalChunk:', finalChunk);

if (finalChunk.value?.status === StreamStatus.DONE) {
await this.pubSub.publish(`chat_stream_${input.chatId}`, {
chatStream: enhancedChunk,
chatStream: {
...finalChunk.value,
chatId: input.chatId,
},
});

if (chunk.choices[0]?.delta?.content) {
accumulatedContent += chunk.choices[0].delta.content;
}
}
}

await this.chatService.saveMessage(
input.chatId,
accumulatedContent,
MessageRole.Assistant,
);

return true;
} catch (error) {
this.logger.error('Error in triggerChatStream:', error);
@@ -108,6 +129,19 @@ export class ChatResolver {
return this.chatService.getChatDetails(chatId);
}

@JWTAuth()
@Query(() => Project, { nullable: true })
async getCurProject(@Args('chatId') chatId: string): Promise<Project> {
try {
const response = await this.chatService.getProjectByChatId(chatId);
this.logger.log('Loaded project:', response);
return response;
} catch (error) {
this.logger.error('Failed to fetch project:', error);
throw new Error('Failed to fetch project');
}
}

@Mutation(() => Chat)
@JWTAuth()
async createChat(
10 changes: 10 additions & 0 deletions backend/src/chat/chat.service.ts
@@ -11,6 +11,7 @@ import {
} from 'src/chat/dto/chat.input';
import { CustomAsyncIterableIterator } from 'src/common/model-provider/types';
import { OpenAIModelProvider } from 'src/common/model-provider/openai-model-provider';
import { Project } from 'src/project/project.model';

@Injectable()
export class ChatProxyService {
@@ -98,6 +99,15 @@ export class ChatService {
return chat;
}

async getProjectByChatId(chatId: string): Promise<Project> {
const chat = await this.chatRepository.findOne({
where: { id: chatId, isDeleted: false },
relations: ['project'],
});

return chat ? chat.project : null;
}

async createChat(userId: string, newChatInput: NewChatInput): Promise<Chat> {
const user = await this.userRepository.findOne({ where: { id: userId } });
if (!user) {
3 changes: 3 additions & 0 deletions backend/src/chat/dto/chat.input.ts
@@ -1,5 +1,6 @@
// DTOs for Project APIs
import { InputType, Field } from '@nestjs/graphql';
import { MessageRole } from '../message.model';

@InputType()
export class NewChatInput {
@@ -26,4 +27,6 @@ export class ChatInput {

@Field()
model: string;
@Field()
role: MessageRole;
}
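
Taken together, the resolver and DTO changes above split the old flow into three GraphQL operations: saveMessage persists the user turn (now carrying the new role field), triggerChatStream starts the model stream, and the chatStream subscription delivers chunks until a terminal chunk with status DONE arrives. A rough client-side sketch of that sequence follows; the Apollo client instance, the operation documents, and the string value used for role are assumptions for illustration, not code from this PR.

import { ApolloClient, NormalizedCacheObject, gql } from '@apollo/client';

// Hypothetical operation documents; field names mirror the ChatInput DTO in this PR.
const SAVE_MESSAGE = gql`
  mutation SaveMessage($input: ChatInputType!) {
    saveMessage(input: $input)
  }
`;
const TRIGGER_CHAT_STREAM = gql`
  mutation TriggerChatStream($input: ChatInputType!) {
    triggerChatStream(input: $input)
  }
`;
const CHAT_STREAM = gql`
  subscription ChatStream($input: ChatInputType!) {
    chatStream(input: $input) {
      status
      choices { delta { content } }
    }
  }
`;

async function runChatTurn(
  client: ApolloClient<NormalizedCacheObject>,
  input: { chatId: string; message: string; model: string; role: string },
): Promise<string> {
  // 1. Persist the user's message explicitly (the controller no longer saves it implicitly).
  await client.mutate({ mutation: SAVE_MESSAGE, variables: { input } });

  // 2. Subscribe before triggering so no chunks are missed, then accumulate
  //    delta content until the terminal chunk tagged DONE arrives.
  const finished = new Promise<string>((resolve, reject) => {
    let accumulated = '';
    const sub = client
      .subscribe({ query: CHAT_STREAM, variables: { input } })
      .subscribe({
        next: ({ data }) => {
          const chunk = data?.chatStream;
          accumulated += chunk?.choices?.[0]?.delta?.content ?? '';
          if (chunk?.status === 'DONE') {
            sub.unsubscribe();
            resolve(accumulated);
          }
        },
        error: reject,
      });
  });

  // 3. Kick off the backend stream; the resolver publishes each chunk to chat_stream_<chatId>.
  await client.mutate({ mutation: TRIGGER_CHAT_STREAM, variables: { input } });
  return finished;
}

Subscribing before calling triggerChatStream matters here: the resolver publishes to chat_stream_${chatId} as soon as chunks arrive, so a late subscriber would silently miss earlier deltas.
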
21 changes: 19 additions & 2 deletions backend/src/common/model-provider/openai-model-provider.ts
@@ -118,7 +118,7 @@ export class OpenAIModelProvider implements IModelProvider {
let streamIterator: AsyncIterator<OpenAIChatCompletionChunk> | null = null;
const modelName = model || input.model;
const queue = this.getQueueForModel(modelName);

let oldStreamValue: OpenAIChatCompletionChunk | null = null;
const createStream = async () => {
if (!stream) {
const result = await queue.add(async () => {
@@ -145,6 +145,9 @@
const currentIterator = await createStream();
const chunk = await currentIterator.next();
const chunkValue = chunk.value as OpenAIChatCompletionChunk;
console.log('isDone:', chunk.done);
console.log('chunk:', chunk);
if (!chunk.done) oldStreamValue = chunkValue;
Comment on lines +148 to +150
🛠️ Refactor suggestion

Remove debugging console logs before merging

These console logs appear to be debugging statements that should be removed before merging to production. They could potentially log sensitive information and will clutter the console.

-          console.log('isDone:', chunk.done);
-          console.log('chunk:', chunk);

return {
done: chunk.done,
value: {
@@ -159,9 +162,23 @@
}
},
async return() {
console.log(stream);
console.log(streamIterator);
console.log('return() called');
Comment on lines +165 to +167
🛠️ Refactor suggestion

Remove debugging console logs before merging

These console logs should be removed before merging to production.

-        console.log(stream);
-        console.log(streamIterator);
-        console.log('return() called');

stream = null;
streamIterator = null;
return { done: true, value: undefined };
return {
done: true,
value: {
...oldStreamValue,
status: StreamStatus.DONE,
choices: [
{
finishReason: 'stop',
},
],
},
};
},
async throw(error) {
stream = null;
71 changes: 71 additions & 0 deletions frontend/src/api/ChatStreamAPI.ts
@@ -0,0 +1,71 @@
import { ChatInputType } from '@/graphql/type';

export const startChatStream = async (
input: ChatInputType,
token: string,
stream: boolean = false // Default to non-streaming for better performance
): Promise<string> => {
if (!token) {
throw new Error('Not authenticated');
}
const { chatId, message, model } = input;
const response = await fetch('/api/chat', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${token}`,
},
body: JSON.stringify({
chatId,
message,
model,
stream,
}),
});
Comment on lines +12 to +24
⚠️ Potential issue

Missing role parameter in API request.

The function doesn't include the role field from ChatInputType in the request body, although it's part of the type definition.

  body: JSON.stringify({
    chatId,
    message,
    model,
+   role: input.role,
    stream,
  }),


if (!response.ok) {
throw new Error(
`Network response was not ok: ${response.status} ${response.statusText}`
);
}
// TODO: Handle streaming responses properly
// if (stream) {
// // For streaming responses, aggregate the streamed content
// let fullContent = '';
// const reader = response.body?.getReader();
// if (!reader) {
// throw new Error('No reader available');
// }

// while (true) {
// const { done, value } = await reader.read();
// if (done) break;

// const text = new TextDecoder().decode(value);
// const lines = text.split('\n\n');

// for (const line of lines) {
// if (line.startsWith('data: ')) {
// const data = line.slice(5);
// if (data === '[DONE]') break;
// try {
// const { content } = JSON.parse(data);
// if (content) {
// fullContent += content;
// }
// } catch (e) {
// console.error('Error parsing SSE data:', e);
// }
// }
// }
// }
// return fullContent;
// } else {
// // For non-streaming responses, return the content directly
// const data = await response.json();
// return data.content;
// }

const data = await response.json();
return data.content;
};
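
For orientation, a minimal call site for this helper might look like the sketch below; how the auth token is obtained and where the input comes from are left out, and the error handling is illustrative rather than taken from this PR.

import { startChatStream } from '@/api/ChatStreamAPI';
import { ChatInputType } from '@/graphql/type';

// Hypothetical call site: send one non-streaming chat request (stream stays at its
// default of false, since the streaming branch above is still a TODO) and surface errors.
async function sendOnce(input: ChatInputType, token: string): Promise<string | null> {
  try {
    return await startChatStream(input, token);
  } catch (err) {
    console.error('Chat request failed:', err);
    return null;
  }
}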