Commit

fix: toast error

Iwamoto Yo committed Aug 30, 2023
1 parent 13141f9 commit 93af1f7
Showing 3 changed files with 23 additions and 4 deletions.
8 changes: 7 additions & 1 deletion components/Chat/Chat.tsx
@@ -29,10 +29,10 @@ import Spinner from '../Spinner';
import { ChatInput } from './ChatInput';
import { ChatLoader } from './ChatLoader';
import { ErrorMessageDiv } from './ErrorMessageDiv';
import { MemoizedChatMessage } from './MemoizedChatMessage';
import { ModelSelect } from './ModelSelect';
import { SystemPrompt } from './SystemPrompt';
import { TemperatureSlider } from './Temperature';
import { MemoizedChatMessage } from './MemoizedChatMessage';

interface Props {
stopConversationRef: MutableRefObject<boolean>;
@@ -127,6 +127,12 @@ export const Chat = memo(({ stopConversationRef }: Props) => {
if (!response.ok) {
homeDispatch({ field: 'loading', value: false });
homeDispatch({ field: 'messageIsStreaming', value: false });
if (response.status === 429) {
toast.error(
'利用量上限に達しました。制限は月初にリセットされます。',
);
return;
}
toast.error(response.statusText);
return;
}
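
Note on the Chat.tsx change (not part of the commit itself): the new branch short-circuits on HTTP 429 from /api/chat and shows a Japanese toast that reads, roughly, "You have reached the usage limit. The limit resets at the beginning of the month." Below is a minimal sketch of the same handling pulled into a helper; the helper name is hypothetical, and `toast` is assumed to be react-hot-toast's default export as already used in Chat.tsx.

import toast from 'react-hot-toast';

// Hypothetical helper mirroring the branch added above; not part of this commit.
const toastChatError = (response: Response): void => {
  if (response.status === 429) {
    // "You have reached the usage limit. The limit resets at the beginning of the month."
    toast.error('利用量上限に達しました。制限は月初にリセットされます。');
    return;
  }
  // Any other non-OK status falls back to the raw status text, as before.
  toast.error(response.statusText);
};
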
4 changes: 4 additions & 0 deletions middleware.ts
@@ -6,6 +6,10 @@ export const config = {

// Validate via the IP whitelist → fall back to BASIC auth if the IP is not allowed
export default async function middleware(req: NextRequest) {
if (process.env.NODE_ENV === 'development') {
return NextResponse.next();
}

// IP whitelist check
const ipWhitelist = process.env.IP_WHITELIST?.split(',');
const reqIps = req.headers.get('x-forwarded-for')?.split(', ');
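
For context on the guard added above: per the comment, requests are first validated against an IP whitelist and fall back to BASIC auth when the IP is not allowed, and the new early return disables both checks during local development. The rest of the middleware is not shown in this hunk; the sketch below is a hypothetical reconstruction of that flow, where the BASIC_AUTH_USER / BASIC_AUTH_PASSWORD variable names and the 401 response are invented for illustration, and only IP_WHITELIST and the x-forwarded-for parsing appear in the diff.

import { NextRequest, NextResponse } from 'next/server';

// Hypothetical sketch of an "IP whitelist, then BASIC auth" middleware; the
// repository's actual implementation beyond this hunk may differ.
export function ipThenBasicAuth(req: NextRequest): NextResponse {
  // Skip all checks while developing locally, as the added guard does.
  if (process.env.NODE_ENV === 'development') {
    return NextResponse.next();
  }

  // 1) IP whitelist: compare client IPs from x-forwarded-for against a
  //    comma-separated IP_WHITELIST env var.
  const ipWhitelist = process.env.IP_WHITELIST?.split(',') ?? [];
  const reqIps = req.headers.get('x-forwarded-for')?.split(', ') ?? [];
  if (reqIps.some((ip) => ipWhitelist.includes(ip))) {
    return NextResponse.next();
  }

  // 2) Otherwise require BASIC auth (env var names are assumptions).
  const auth = req.headers.get('authorization');
  if (auth?.startsWith('Basic ')) {
    const [user, password] = atob(auth.slice('Basic '.length)).split(':');
    if (
      user === process.env.BASIC_AUTH_USER &&
      password === process.env.BASIC_AUTH_PASSWORD
    ) {
      return NextResponse.next();
    }
  }

  return new NextResponse('Authentication required', {
    status: 401,
    headers: { 'WWW-Authenticate': 'Basic realm="Restricted"' },
  });
}
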
15 changes: 12 additions & 3 deletions pages/api/chat.ts
@@ -15,7 +15,8 @@ export const config = {

const handler = async (req: Request): Promise<Response> => {
try {
const { model, messages, key, prompt, temperature } = (await req.json()) as ChatBody;
const { model, messages, key, prompt, temperature } =
(await req.json()) as ChatBody;

await init((imports) => WebAssembly.instantiate(wasm, imports));
const encoding = new Tiktoken(
@@ -52,12 +53,20 @@ const handler = async (req: Request): Promise<Response> => {

encoding.free();

const stream = await OpenAIStream(model, promptToSend, temperatureToUse, key, messagesToSend);
const stream = await OpenAIStream(
model,
promptToSend,
temperatureToUse,
key,
messagesToSend,
);

return new Response(stream);
} catch (error) {
console.error(error);
if (error instanceof OpenAIError) {
if (error.code === 'insufficient_quota') {
return new Response('Error', { status: 429 });
}
return new Response('Error', { status: 500, statusText: error.message });
} else {
return new Response('Error', { status: 500 });
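
The new branch in the catch block assumes OpenAIError carries the OpenAI API's error code, with 'insufficient_quota' reported when the account's quota is exhausted; the handler maps that case to HTTP 429 so Chat.tsx can show the usage-limit toast instead of a generic error. The OpenAIError class itself is not part of this diff; the following is a sketch of the shape the check relies on, under that assumption.

// Hypothetical sketch, not the repository's actual class: an error type that
// keeps the `code` from an OpenAI API error body so callers can branch on it.
export class OpenAIError extends Error {
  constructor(
    message: string,
    public readonly type?: string,
    public readonly param?: string,
    public readonly code?: string,
  ) {
    super(message);
    this.name = 'OpenAIError';
  }
}

// OpenAI API error responses typically look like
// { "error": { "message": ..., "type": ..., "param": ..., "code": ... } },
// with code === 'insufficient_quota' when the account has run out of quota.
export const toOpenAIError = async (res: Response): Promise<OpenAIError> => {
  const body = await res.json();
  const e = body?.error ?? {};
  return new OpenAIError(e.message ?? res.statusText, e.type, e.param, e.code);
};
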
