Add backend controller and client for Gen AI #57944

Merged · 23 commits · Apr 13, 2024
Changes from 12 commits
94 changes: 94 additions & 0 deletions apps/src/aichat/aichatCompletionApi.ts
@@ -0,0 +1,94 @@
import {
Role,
ChatCompletionMessage,
AiCustomizations,
ChatContext,
} from './types';
import HttpClient from '@cdo/apps/util/HttpClient';
import Lab2Registry from '@cdo/apps/lab2/Lab2Registry';

const CHAT_COMPLETION_URL = '/aichat/chat_completion';

/**
 * Sends a POST request to the aichat chat completion backend controller and returns
 * the parsed response, or null if the backend responded with an error status.
 */
export async function postAichatCompletionMessage(
messagesToSend: AichatCompletionMessage[],
temperature: number,
chatContext: ChatContext
): Promise<AichatCompletionMessage | null> {
const payload = {
inputs: messagesToSend,
temperature,
chatContext,
};
const response = await HttpClient.post(
CHAT_COMPLETION_URL,
JSON.stringify(payload),
true,
{
'Content-Type': 'application/json; charset=UTF-8',
}
);
// For now, response will be null if there was an error.
if (response.ok) {
return await response.json();
} else {
return null;
}
}
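As a quick illustration of the new client (not part of this diff), a caller could invoke the low-level request function directly; the import paths, the temperature value, and the `ChatContext` contents below are assumptions for the sketch.

```typescript
// Hypothetical direct usage of postAichatCompletionMessage (illustration only).
import {postAichatCompletionMessage} from '@cdo/apps/aichat/aichatCompletionApi';
import {Role, ChatContext} from '@cdo/apps/aichat/types';

async function askAssistant(userMessage: string, chatContext: ChatContext) {
  const response = await postAichatCompletionMessage(
    [{role: Role.USER, content: userMessage}],
    0.5, // assumed temperature; the real value comes from AiCustomizations
    chatContext
  );
  // response is null when the backend responds with an error status.
  return response?.content ?? null;
}
```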

/**
 * Formats the chat completion messages (system prompt, retrieval contexts, stored
 * messages, and the new user message), passes them to `postAichatCompletionMessage`,
 * and returns the assistant's response message if the request succeeds.
 * TODO: Awaiting details on how to format input for endpoint.
 */
export async function getAichatCompletionMessage(
aiCustomizations: AiCustomizations,
storedMessages: ChatCompletionMessage[],
chatContext: ChatContext
) {
const {systemPrompt, temperature, retrievalContexts} = aiCustomizations;
const messagesToSend = [
{role: Role.SYSTEM, content: systemPrompt},
...formatRetrievalContextsForAichatCompletion(retrievalContexts),
...formatMessagesForAichatCompletion(storedMessages),
{role: Role.USER, content: chatContext['userMessage']},
];
let response;
try {
response = await postAichatCompletionMessage(
messagesToSend,
temperature,
chatContext
);
} catch (error) {
Lab2Registry.getInstance()
.getMetricsReporter()
.logError('Error in aichat completion request', error as Error);
}
return response;
}

const formatRetrievalContextsForAichatCompletion = (
retrievalContexts: string[]
): AichatCompletionMessage[] => {
return retrievalContexts.map(context => {
return {role: Role.USER, content: context};
});
};

const formatMessagesForAichatCompletion = (
chatMessages: ChatCompletionMessage[]
): AichatCompletionMessage[] => {
return chatMessages.map(message => {
return {role: message.role, content: message.chatMessageText};
});
};

type AichatCompletionMessage = {
role: Role;
content: string;
};
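For reference, a sketch of the request body these helpers assemble for `POST /aichat/chat_completion`: the system prompt first, each retrieval context and stored message mapped to `{role, content}` pairs, then the new user message. All concrete values below are made up for illustration.

```typescript
// Illustrative payload only – field names match the code above, values are invented.
const examplePayload = {
  inputs: [
    {role: 'system', content: 'You are a friendly tutor for a biology unit.'},
    {role: 'user', content: 'Photosynthesis converts light into chemical energy.'}, // retrieval context
    {role: 'user', content: 'What do plants need to grow?'}, // stored user message
    {role: 'assistant', content: 'Plants need light, water, and nutrients.'}, // stored assistant message
    {role: 'user', content: 'Why do they need light?'}, // new message from chatContext.userMessage
  ],
  temperature: 0.5,
  chatContext: {
    userMessage: 'Why do they need light?',
    userId: 123,
    currentLevelId: '42',
    scriptId: 7,
    channelId: undefined,
  },
};
```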
2 changes: 1 addition & 1 deletion apps/src/aichat/constants.ts
@@ -2,7 +2,7 @@ export const CHAT_COMPLETION_URL = '/openai/chat_completion';
import {
ChatCompletionMessage,
Role,
AITutorInteractionStatus as Status,
AichatInteractionStatus as Status,
} from './types';

const initialChatMessages: ChatCompletionMessage[] = [
97 changes: 39 additions & 58 deletions apps/src/aichat/redux/aichatRedux.ts
@@ -10,17 +10,16 @@ import {
AI_CUSTOMIZATIONS_LABELS,
} from '../views/modelCustomization/constants';
import {initialChatMessages} from '../constants';
import {getChatCompletionMessage} from '../chatApi';
import {getAichatCompletionMessage} from '../aichatCompletionApi';
import {
ChatCompletionMessage,
AichatLevelProperties,
Role,
AITutorInteractionStatus as Status,
AITutorInteractionStatusType,
AichatInteractionStatus as Status,
AiCustomizations,
ModelCardInfo,
Visibility,
LevelAichatSettings,
ChatContext,
} from '../types';
import {RootState} from '@cdo/apps/types/redux';

@@ -68,7 +67,7 @@ export interface AichatState {
// Denotes if there is an error with the chat completion response
chatMessageError: boolean;
currentAiCustomizations: AiCustomizations;
previouslySavedAiCustomizations?: AiCustomizations;
savedAiCustomizations: AiCustomizations;
fieldVisibilities: {[key in keyof AiCustomizations]: Visibility};
}

@@ -78,6 +77,7 @@ const initialState: AichatState = {
showWarningModal: true,
chatMessageError: false,
currentAiCustomizations: EMPTY_AI_CUSTOMIZATIONS,
savedAiCustomizations: EMPTY_AI_CUSTOMIZATIONS,
fieldVisibilities: DEFAULT_VISIBILITIES,
};

@@ -90,8 +90,7 @@ export const updateAiCustomization = createAsyncThunk(
'aichat/updateAiCustomization',
async (_, thunkAPI) => {
const state = thunkAPI.getState() as RootState;
const {currentAiCustomizations, previouslySavedAiCustomizations} =
state.aichat;
const {currentAiCustomizations, savedAiCustomizations} = state.aichat;

// Remove any empty example topics on save
const trimmedExampleTopics =
@@ -117,12 +116,10 @@
.getProjectManager()
?.save({source: JSON.stringify(trimmedCurrentAiCustomizations)}, true);

thunkAPI.dispatch(
setPreviouslySavedAiCustomizations(trimmedCurrentAiCustomizations)
);
thunkAPI.dispatch(setSavedAiCustomizations(trimmedCurrentAiCustomizations));

const changedProperties = findChangedProperties(
previouslySavedAiCustomizations,
savedAiCustomizations,
trimmedCurrentAiCustomizations
);
changedProperties.forEach(property => {
@@ -140,66 +137,50 @@
}
);
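`findChangedProperties` comes from elsewhere in the codebase and is not shown in this diff; purely to illustrate what the analytics step above relies on, here is a rough sketch under the assumption that it reports top-level keys whose values differ between the saved and newly trimmed customizations.

```typescript
// Hypothetical sketch only – the real helper may be implemented differently.
function findChangedPropertiesSketch(
  previous: AiCustomizations | undefined,
  next: AiCustomizations
): (keyof AiCustomizations)[] {
  const keys = Object.keys(next) as (keyof AiCustomizations)[];
  if (!previous) {
    return keys;
  }
  // Deep-compare each top-level field via JSON serialization.
  return keys.filter(
    key => JSON.stringify(previous[key]) !== JSON.stringify(next[key])
  );
}
```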

// This thunk's callback function submits a user chat message to the chat completion endpoint,
// waits for a chat completion response, and updates the user message state.
export const submitChatMessage = createAsyncThunk(
'aichat/submitChatMessage',
async (message: string, thunkAPI) => {
// This thunk's callback function submits the user's chat content and saved AI customizations
// to the chat completion endpoint, waits for the chat completion response, and updates
// the stored chat messages.
export const submitChatContents = createAsyncThunk(
'aichat/submitChatContents',
async (chatContext: ChatContext, thunkAPI) => {
const state = thunkAPI.getState() as {lab: LabState; aichat: AichatState};
const systemPrompt = (state.lab.levelProperties as AichatLevelProperties)
?.systemPrompt;
// TODO: move a check for undefined systemPrompt to AIchatView and throw an error dialog
// there if systemPrompt is undefined.
if (systemPrompt === undefined) {
throw new Error('systemPrompt is undefined');
}
const aiCustomizations = state.aichat.savedAiCustomizations;
const storedMessages = state.aichat.chatMessages;
const newMessageText = chatContext['userMessage'];
const newMessageId =
storedMessages.length === 0
? 1
: storedMessages[storedMessages.length - 1].id + 1;
const appropriateChatMessages = storedMessages.filter(
msg => msg.status === Status.OK
);

// Create the new user ChatCompleteMessage and add to chatMessages.
const newMessage: ChatCompletionMessage = {
id: newMessageId,
role: Role.USER,
status: Status.UNKNOWN,
chatMessageText: message,
status: Status.OK,
chatMessageText: newMessageText,
timestamp: getCurrentTimestamp(),
};
thunkAPI.dispatch(addChatMessage(newMessage));

// Send user message to backend and retrieve assistant response.
const chatApiResponse = await getChatCompletionMessage(
systemPrompt,
newMessageId,
message,
appropriateChatMessages
// Post user content and messages to backend and retrieve assistant response.
const chatApiResponse = await getAichatCompletionMessage(
aiCustomizations,
storedMessages,
chatContext
);

// Find message in chatMessages and update status.
thunkAPI.dispatch(
updateChatMessageStatus({
id: chatApiResponse.id,
status: chatApiResponse.status,
})
);

// Add assistant chat messages to chatMessages.
if (chatApiResponse.assistantResponse) {
console.log('chatApiResponse', chatApiResponse);
if (chatApiResponse?.role === Role.ASSISTANT) {
const assistantChatMessage: ChatCompletionMessage = {
id: chatApiResponse.id + 1,
id: newMessageId + 1,
role: Role.ASSISTANT,
status: Status.OK,
chatMessageText: chatApiResponse.assistantResponse,
// The accuracy of this timestamp is debatable since it's not when our backend
// issued the message, but it's good enough for user testing.
timestamp: getCurrentTimestamp(),
chatMessageText: chatApiResponse.content,
};
thunkAPI.dispatch(addChatMessage(assistantChatMessage));
} else {
// TODO: Update the most recent user message's status if there was a PII or profanity violation;
// the latest message's id is stored in `newMessageId`.
console.log('Did not receive assistant response.');
}
}
);
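As a usage sketch (not part of this diff), a caller now dispatches `submitChatContents` with a full `ChatContext` instead of a bare message string; the field values below are placeholders that a real call site would read from application state.

```typescript
// Hypothetical call site – values are illustrative, not from this PR.
import {submitChatContents} from '@cdo/apps/aichat/redux/aichatRedux';
import {ChatContext} from '@cdo/apps/aichat/types';

function onUserSendsMessage(
  dispatch: (action: unknown) => unknown,
  userMessage: string
) {
  const chatContext: ChatContext = {
    userMessage,
    userId: 123,
    currentLevelId: null,
    scriptId: null,
    channelId: undefined,
  };
  dispatch(submitChatContents(chatContext));
}
```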
@@ -236,7 +217,7 @@ const aichatSlice = createSlice({
},
updateChatMessageStatus: (
state,
action: PayloadAction<{id: number; status: AITutorInteractionStatusType}>
action: PayloadAction<{id: number; status: Status}>
) => {
const {id, status} = action.payload;
const chatMessage = state.chatMessages.find(msg => msg.id === id);
@@ -274,16 +255,16 @@ const aichatSlice = createSlice({
}
}

state.previouslySavedAiCustomizations = reconciledAiCustomizations;
state.savedAiCustomizations = reconciledAiCustomizations;
state.currentAiCustomizations = reconciledAiCustomizations;
state.fieldVisibilities =
levelAichatSettings?.visibilities || DEFAULT_VISIBILITIES;
},
setPreviouslySavedAiCustomizations: (
setSavedAiCustomizations: (
state,
action: PayloadAction<AiCustomizations>
) => {
state.previouslySavedAiCustomizations = action.payload;
state.savedAiCustomizations = action.payload;
},
setAiCustomizationProperty: (
state,
@@ -316,15 +297,15 @@ const aichatSlice = createSlice({
},
},
extraReducers: builder => {
builder.addCase(submitChatMessage.fulfilled, state => {
builder.addCase(submitChatContents.fulfilled, state => {
state.isWaitingForChatResponse = false;
});
builder.addCase(submitChatMessage.rejected, (state, action) => {
builder.addCase(submitChatContents.rejected, (state, action) => {
state.isWaitingForChatResponse = false;
state.chatMessageError = true;
console.error(action.error);
});
builder.addCase(submitChatMessage.pending, state => {
builder.addCase(submitChatContents.pending, state => {
state.isWaitingForChatResponse = true;
});
},
@@ -339,7 +320,7 @@ export const {
setShowWarningModal,
updateChatMessageStatus,
setStartingAiCustomizations,
setPreviouslySavedAiCustomizations,
setSavedAiCustomizations,
setAiCustomizationProperty,
setModelCardProperty,
} = aichatSlice.actions;
33 changes: 23 additions & 10 deletions apps/src/aichat/types.ts
@@ -1,22 +1,35 @@
import {LevelProperties} from '@cdo/apps/lab2/types';
import {
AiTutorInteractionStatus as AITutorInteractionStatus,
PiiTypes as PII,
} from '@cdo/apps/util/sharedConstants';

// TODO: Update this once https://codedotorg.atlassian.net/browse/CT-471 is resolved
export type AITutorInteractionStatusType = string;

export {PII, AITutorInteractionStatus};

export type ChatCompletionMessage = {
id: number;
role: Role;
chatMessageText: string;
status: AITutorInteractionStatusType;
status: AichatInteractionStatus;
timestamp?: string;
};

export type ChatContext = {
userMessage: string;
userId: number;
currentLevelId: string | null;
scriptId: number | null;
channelId: string | undefined;
};

export enum AichatInteractionStatus {
ERROR = 'error',
PII_VIOLATION = 'pii_violation',
PROFANITY_VIOLATION = 'profanity_violation',
OK = 'ok',
UNKNOWN = 'unknown',
}
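A small sketch of how the new status enum might be consumed, mirroring the `Status.OK` filter in the old thunk; this is an illustration, not code added by this PR.

```typescript
import {
  AichatInteractionStatus as Status,
  ChatCompletionMessage,
} from '@cdo/apps/aichat/types';

// Keep only messages that are safe to resend to the model (illustration only).
function filterResendableMessages(
  messages: ChatCompletionMessage[]
): ChatCompletionMessage[] {
  return messages.filter(message => message.status === Status.OK);
}
```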

export enum PiiTypes {
EMAIL = 'email',
PHONE = 'phone',
ADDRESS = 'address',
}

export enum Role {
ASSISTANT = 'assistant',
USER = 'user',
2 changes: 1 addition & 1 deletion apps/src/aichat/views/ChatMessage.tsx
@@ -14,7 +14,7 @@ import {
AichatLevelProperties,
ChatCompletionMessage,
Role,
AITutorInteractionStatus as Status,
AichatInteractionStatus as Status,
} from '../types';
import aichatI18n from '../locale';
import moduleStyles from './chatMessage.module.scss';