Skip to content

Commit

Permalink
support multiple AI models
Browse files Browse the repository at this point in the history
  • Loading branch information
nus-rick committed Mar 8, 2024
1 parent 69ca8fc commit 76950e4
Show file tree
Hide file tree
Showing 7 changed files with 95 additions and 6 deletions.
2 changes: 2 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -18,3 +18,5 @@ KV_REST_API_URL=XXXXXXXX
KV_REST_API_TOKEN=XXXXXXXX
KV_REST_API_READ_ONLY_TOKEN=XXXXXXXX

# Sign up for an account on OpenRouter and get an API key: https://openrouter.ai/keys
OPENROUTER_API_KEY=XXXXXXXXX
7 changes: 4 additions & 3 deletions app/api/chat/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,13 @@ import { nanoid } from '@/lib/utils'
export const runtime = 'edge'

const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY
baseURL: "https://openrouter.ai/api/v1",
apiKey: process.env.OPENROUTER_API_KEY
})

export async function POST(req: Request) {
const json = await req.json()
const { messages, previewToken } = json
const { messages, previewToken, model } = json
const userId = (await auth())?.user.id

if (!userId) {
Expand All @@ -27,7 +28,7 @@ export async function POST(req: Request) {
}

const res = await openai.chat.completions.create({
model: 'gpt-3.5-turbo',
model: model,
messages,
temperature: 0.7,
stream: true
Expand Down
7 changes: 5 additions & 2 deletions app/layout.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import { cn } from '@/lib/utils'
import { TailwindIndicator } from '@/components/tailwind-indicator'
import { Providers } from '@/components/providers'
import { Header } from '@/components/header'
import { ModelProvider } from '@/lib/hooks/use-model'

export const metadata = {
metadataBase: new URL(`https://${process.env.VERCEL_URL}`),
Expand Down Expand Up @@ -51,8 +52,10 @@ export default function RootLayout({ children }: RootLayoutProps) {
disableTransitionOnChange
>
<div className="flex flex-col min-h-screen">
<Header />
<main className="flex flex-col flex-1 bg-muted/50">{children}</main>
<ModelProvider>
<Header />
<main className="flex flex-col flex-1 bg-muted/50">{children}</main>
</ModelProvider>
</div>
<TailwindIndicator />
</Providers>
Expand Down
35 changes: 35 additions & 0 deletions components/ai-model-selection.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
'use client';

import * as React from 'react'
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from './ui/select'
import { useModelContext } from '@/lib/hooks/use-model';

// Models offered in the dropdown. Hoisted to module scope so the array is
// not re-created on every render; values are OpenRouter model identifiers.
const MODEL_OPTIONS = [
  { value: 'openchat/openchat-7b:free', label: 'OpenChat 3.5' },
  { value: 'nousresearch/nous-capybara-7b:free', label: 'Nous: Capybara 7B' },
  { value: 'mistralai/mistral-7b-instruct:free', label: 'Mistral 7B Instruct' },
  { value: 'gryphe/mythomist-7b:free', label: 'MythoMist 7B' }
] as const

/**
 * Dropdown for choosing which AI model the chat should use.
 *
 * Reads the current selection from the model context and writes changes back
 * through `onSelectModel`, so every consumer of the context stays in sync.
 * The redundant wrapper around `onSelectModel` was removed — the callback is
 * passed to the Select directly.
 */
export function AiModelSelection() {
  const { selectedModel, onSelectModel } = useModelContext()

  return (
    <Select value={selectedModel} onValueChange={onSelectModel}>
      <SelectTrigger className="w-[280px]">
        <SelectValue placeholder="Model" />
      </SelectTrigger>
      <SelectContent>
        {MODEL_OPTIONS.map(option => (
          <SelectItem key={option.value} value={option.value}>
            {option.label}
          </SelectItem>
        ))}
      </SelectContent>
    </Select>
  )
}
7 changes: 6 additions & 1 deletion components/chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import { Button } from './ui/button'
import { Input } from './ui/input'
import { toast } from 'react-hot-toast'
import { usePathname, useRouter } from 'next/navigation'
import { useModelContext } from '@/lib/hooks/use-model'

const IS_PREVIEW = process.env.VERCEL_ENV === 'preview'
export interface ChatProps extends React.ComponentProps<'div'> {
Expand All @@ -31,6 +32,9 @@ export interface ChatProps extends React.ComponentProps<'div'> {
export function Chat({ id, initialMessages, className }: ChatProps) {
const router = useRouter()
const path = usePathname()
const [model, _] = useLocalStorage<string>('ai-model', 'openchat/openchat-7b:free')
const { selectedModel } = useModelContext();

const [previewToken, setPreviewToken] = useLocalStorage<string | null>(
'ai-token',
null
Expand All @@ -43,7 +47,8 @@ export function Chat({ id, initialMessages, className }: ChatProps) {
id,
body: {
id,
previewToken
previewToken,
model: selectedModel
},
onResponse(response) {
if (response.status === 401) {
Expand Down
4 changes: 4 additions & 0 deletions components/header.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import { UserMenu } from '@/components/user-menu'
import { SidebarMobile } from './sidebar-mobile'
import { SidebarToggle } from './sidebar-toggle'
import { ChatHistory } from './chat-history'
import { AiModelSelection } from './ai-model-selection'

async function UserOrLogin() {
const session = await auth()
Expand Down Expand Up @@ -54,6 +55,9 @@ export function Header() {
<UserOrLogin />
</React.Suspense>
</div>
<div>
<AiModelSelection />
</div>
<div className="flex items-center justify-end space-x-2">
<a
target="_blank"
Expand Down
39 changes: 39 additions & 0 deletions lib/hooks/use-model.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
'use client'

import { createContext, useContext } from 'react';
import { useLocalStorage } from './use-local-storage';

// Shape of the value shared through the model context: the currently
// selected model id plus a callback for changing it.
interface ModelContext {
selectedModel: string
onSelectModel: (newModel: string) => void
}

// Default is `undefined` so useModelContext can detect (and throw on)
// usage outside of a ModelProvider.
const ModelContext = createContext<ModelContext | undefined>(undefined);

/**
 * Hook exposing the model-selection context.
 *
 * @returns the current `selectedModel` and the `onSelectModel` setter.
 * @throws Error when called outside a `ModelProvider`, so misuse fails fast.
 */
export const useModelContext = () => {
  const context = useContext(ModelContext)

  if (!context) {
    // Fixed: previous message was copy-pasted from the sidebar hook and
    // referred to useSidebarContext/SidebarProvider.
    throw new Error('useModelContext must be used within a ModelProvider')
  }
  return context
}


// Props for ModelProvider: just the subtree that should see the context.
interface ModelProviderProps {
children: React.ReactNode
}

/**
 * Provides the currently selected AI model to the component tree.
 *
 * The selection is persisted under the 'ai-model' localStorage key and
 * defaults to 'openchat/openchat-7b:free'.
 */
export const ModelProvider = ({ children }: ModelProviderProps) => {
  const [selectedModel, setSelectedModel] = useLocalStorage(
    'ai-model',
    'openchat/openchat-7b:free'
  )

  const contextValue = {
    selectedModel,
    onSelectModel: (newModel: string) => setSelectedModel(newModel)
  }

  return (
    <ModelContext.Provider value={contextValue}>
      {children}
    </ModelContext.Provider>
  )
}

0 comments on commit 76950e4

Please sign in to comment.