Commit 2f6184b

chore: cleanup ux, prepare for prompt params tweak (#28)
louisgv committed Jun 12, 2023
1 parent 7adf294 commit 2f6184b
Showing 17 changed files with 53 additions and 40 deletions.
2 changes: 1 addition & 1 deletion apps/desktop/package.json
@@ -1,7 +1,7 @@
 {
   "name": "@localai/desktop",
   "private": true,
-  "version": "0.2.7",
+  "version": "0.2.8",
   "scripts": {
     "dev:next": "next dev -p 1470",
     "build:next": "next build",
2 changes: 2 additions & 0 deletions apps/desktop/src-tauri/src/inference/mod.rs
@@ -0,0 +1,2 @@
+mod stop_handler;
+pub mod thread;
apps/desktop/src-tauri/src/{inference_thread.rs → inference/thread.rs}
@@ -1,3 +1,4 @@
+/// Inference thread as in Machine Physical Thread
 use std::{convert::Infallible, sync::Arc};
 
 use actix_web::web::Bytes;
@@ -15,12 +16,10 @@ use serde::{Deserialize, Serialize};
 use tokio::task::JoinHandle;
 
 use crate::{
-  inference_thread::stop_handler::StopHandler,
+  inference::stop_handler::StopHandler,
   model_pool::{self, get_n_threads},
 };
 
-mod stop_handler;
-
 #[derive(Serialize, Deserialize, Debug)]
 pub struct CompletionRequest {
   prompt: String,
2 changes: 1 addition & 1 deletion apps/desktop/src-tauri/src/inference_server.rs
@@ -15,7 +15,7 @@ use std::sync::{
 
 use crate::abort_stream::AbortStream;
 use crate::config::ConfigKey;
-use crate::inference_thread::{
+use crate::inference::thread::{
   start_inference, CompletionRequest, InferenceThreadRequest,
 };
 use crate::model_pool::{self, spawn_pool};
2 changes: 1 addition & 1 deletion apps/desktop/src-tauri/src/main.rs
@@ -7,8 +7,8 @@ mod abort_stream;
 mod config;
 mod db;
 mod downloader;
+mod inference;
 mod inference_server;
-mod inference_thread;
 mod kv_bucket;
 mod macros;
 mod model_integrity;
2 changes: 1 addition & 1 deletion apps/desktop/src-tauri/src/model_pool.rs
@@ -7,7 +7,7 @@ use llm::{load_progress_callback_stdout, ModelArchitecture, VocabularySource};
 
 use std::path::Path;
 
-use crate::inference_thread::ModelGuard;
+use crate::inference::thread::ModelGuard;
 use std::collections::VecDeque;
 
 pub static LOADED_MODEL_POOL: Lazy<Mutex<VecDeque<Option<ModelGuard>>>> =
1 change: 1 addition & 0 deletions apps/desktop/src-tauri/src/threads_directory.rs
@@ -1,3 +1,4 @@
+/// Thread as in chat/conversation thread
 use chrono::Utc;
 use rand::Rng;
 use serde::{Deserialize, Serialize};
@@ -32,8 +32,8 @@ export const ModelListItem = ({ model }: { model: ModelMetadata }) => {
         "text-gray-11 hover:text-gray-12",
         "transition-colors group",
         activeModel?.path === model.path
-          ? "ring ring-green-7 hover:ring-green-8"
-          : "ring ring-gray-7 hover:ring-gray-8"
+          ? "border border-green-7 hover:border-green-8"
+          : "border border-gray-7 hover:border-gray-8"
       )}>
       <div className="flex items-center justify-between w-full">
         <ModelLabel />
4 changes: 2 additions & 2 deletions apps/desktop/src/features/model-downloader/model-selector.tsx
@@ -17,7 +17,7 @@ import { toGB } from "~features/model-downloader/model-file"
 import { useModelsApi } from "~features/model-downloader/use-models-api"
 import { useGlobal } from "~providers/global"
 
-export const ModelSelector = () => {
+export const ModelSelector = ({ className = "" }) => {
   const {
     modelsDirectoryState: { updateModelsDirectory, modelsMap }
   } = useGlobal()
@@ -34,7 +34,7 @@ export const ModelSelector = () => {
   )
 
   return (
-    <div className="flex gap-2 w-full">
+    <div className={cn("flex gap-2 w-full", className)}>
       <Select value={selectedModelHash} onValueChange={setSelectedModelHash}>
         <SelectTrigger
           className={cn(
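The new className prop exists so a parent can position the selector from outside; model-manager.tsx below passes sticky-header classes through it. Assuming cn is a clsx-style class joiner (as it appears to be throughout this repo), the wrapper simply appends the caller's classes to the base ones:

// Minimal stand-in for cn, assuming it is a clsx-style class joiner.
const cn = (...classes: Array<string | undefined>) =>
  classes.filter(Boolean).join(" ")

// ModelSelector's wrapper merges its base classes with whatever the
// parent passes, e.g. the sticky-header classes from model-manager.tsx:
cn("flex gap-2 w-full", "sticky top-0 z-10 shadow-sm")
// => "flex gap-2 w-full sticky top-0 z-10 shadow-sm"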
2 changes: 1 addition & 1 deletion apps/desktop/src/features/thread/new-thread.tsx
@@ -19,7 +19,7 @@ export const NewThreadButton = ({ className = "" }) => {
         setActiveThread(newThread)
         setCurrentRoute(Route.Chat)
       }}>
-      <FilePlusIcon /> New Thread
+      <FilePlusIcon className="w-4 h-4 shrink-0" /> New Thread
     </Button>
   )
 }
2 changes: 1 addition & 1 deletion apps/desktop/src/features/thread/prompt-textarea.tsx
@@ -76,7 +76,7 @@ export const PromptTextarea = ({
       className={cn(
         "w-12 h-12 justify-center p-0",
         "disabled:cursor-not-allowed",
-        "bg-blue-9 hover:bg-blue-10 text-blue-12 disabled:bg-gray-9 disabled:text-gray-11"
+        "bg-blue-9 hover:bg-blue-10 text-blue-12 disabled:bg-gray-3 disabled:text-gray-9"
       )}
       disabled={!isResponding && !prompt}
       onClick={isResponding ? onStop : submit}>
13 changes: 5 additions & 8 deletions apps/desktop/src/features/thread/side-bar.tsx
@@ -1,17 +1,14 @@
 import { Button } from "@localai/ui/button"
 import { Input } from "@localai/ui/input"
 import {
-  ChatBubbleIcon,
   CheckIcon,
   Cross2Icon,
+  FileTextIcon,
   Pencil1Icon,
   TrashIcon
 } from "@radix-ui/react-icons"
-import {
-  MessageText,
-  PeopleTag,
-  SidebarCollapse,
-  SidebarExpand
-} from "iconoir-react"
+import { PeopleTag, SidebarCollapse, SidebarExpand } from "iconoir-react"
 import { useMemo, useRef } from "react"
 
 import { NavButton } from "~features/layout/nav-button"
@@ -21,7 +18,7 @@ import type { FileInfo } from "~features/model-downloader/model-file"
 import { Route, useGlobal } from "~providers/global"
 
 const iconMap = {
-  chat: MessageText,
+  chat: FileTextIcon,
   agent: PeopleTag
 } as const
 
@@ -30,7 +27,7 @@ export type ChatType = keyof typeof iconMap
 function ChatIcon({ type = undefined as ChatType }) {
   const Icon = useMemo(() => iconMap[type], [type])
 
-  return <Icon className="h-6 w-6 shrink-0" aria-hidden="true" />
+  return <Icon className="h-4 w-4 shrink-0" aria-hidden="true" />
 }
 
 function ThreadItem({ item = null as FileInfo, index = 0 }) {
2 changes: 1 addition & 1 deletion apps/desktop/src/features/thread/use-active-thread.ts
@@ -84,7 +84,7 @@ export const useActiveThread = () => {
       },
       body: JSON.stringify({
         prompt: getQAPrompt(text, systemPrompt),
-        max_tokens: 4200,
+        max_tokens: undefined,
         temperature: 0.9,
         stream: true
       })
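Passing max_tokens: undefined is a tidy way to drop the hard-coded 4200 limit: JSON.stringify omits keys whose value is undefined, so the field never reaches the server, leaving room for a user-configurable value later. A quick check of that behavior:

// JSON.stringify silently drops undefined-valued keys, so the serialized
// request body carries no max_tokens field at all.
const body = JSON.stringify({
  prompt: "Hello",
  max_tokens: undefined,
  temperature: 0.9,
  stream: true
})

console.log(body)
// => {"prompt":"Hello","temperature":0.9,"stream":true}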
11 changes: 11 additions & 0 deletions apps/desktop/src/features/thread/use-thread-config.ts
@@ -0,0 +1,11 @@
+import { useState } from "react"
+
+import { useInit } from "~features/inference-server/use-init"
+
+export const useThreadConfig = () => {
+  const [maxTokens, _setMaxTokens] = useState(-1)
+
+  useInit(async () => {
+    _setMaxTokens(100)
+  }, [])
+}
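The new useThreadConfig hook only stores state for now, matching the "prepare for prompt params tweak" in the commit title: it holds maxTokens but returns nothing yet. A hedged sketch of where this seems headed, assuming the hook will eventually expose its values to the chat view (the returned shape and setter name are hypothetical, not part of this commit):

// Hypothetical follow-up, not in this commit: return the config so a
// consumer like PromptTextarea or ChatView can read and update it.
import { useState } from "react"

export const useThreadConfig = () => {
  // -1 as a sentinel for "no explicit token limit"
  const [maxTokens, setMaxTokens] = useState(-1)

  return { maxTokens, setMaxTokens }
}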
10 changes: 6 additions & 4 deletions apps/desktop/src/views/chat.tsx
@@ -20,6 +20,7 @@ import {
   Pokeball
 } from "iconoir-react"
 import { useMemo } from "react"
+import dedent from "ts-dedent"
 
 import { useToggle } from "~features/layout/use-toggle"
 import { ViewBody, ViewContainer, ViewHeader } from "~features/layout/view"
@@ -120,10 +121,11 @@ export const ChatView = () => {
             <Textarea
               rows={8}
               title="Prompt template (WIP)"
-              defaultValue={`
-<BOT>: {SYSTEM}
-<HUMAN>: {PROMPT}
-<BOT>:`}
+              defaultValue={dedent`
+                <BOT>: {SYSTEM}
+                <HUMAN>: {PROMPT}
+                <BOT>:
+              `}
             />
             <Input placeholder="Temperature (WIP)" defaultValue={0.47} />
             <Input placeholder="Max Tokens (WIP)" defaultValue={0.47} />
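Switching the template literal to ts-dedent is what lets the prompt template sit indented inside the JSX without leaking that indentation into the textarea: dedent strips the common leading whitespace and the surrounding blank lines. For example:

import dedent from "ts-dedent"

// The literal can be indented to match the surrounding code;
// dedent removes the shared indentation and the outer newlines.
const template = dedent`
  <BOT>: {SYSTEM}
  <HUMAN>: {PROMPT}
  <BOT>:
`

console.log(template)
// <BOT>: {SYSTEM}
// <HUMAN>: {PROMPT}
// <BOT>: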
29 changes: 15 additions & 14 deletions apps/desktop/src/views/model-manager.tsx
@@ -90,26 +90,27 @@ export function ModelManagerView() {
 
       <ServerConfig />
     </ViewHeader>
-    <ViewBody className="flex flex-col p-8 gap-6">
-      <ModelSelector />
+    <ViewBody className="flex flex-col p-4 gap-2">
+      <ModelSelector className="sticky top-0 z-10 shadow-sm p-1 rounded-lg bg-gray-1 shadow-gray-6" />
       {models.length === 0 && (
         <p className="text-gray-9 italic pointer-events-none text-center">
           {`To start, download a model or change the models directory by
           clicking the "..." button.`}
         </p>
       )}
 
-      {models
-        .sort((a, b) =>
-          activeModel?.path === a.path
-            ? -1
-            : activeModel?.path === b.path
-            ? 1
-            : 0
-        )
-        .map((model) => (
-          <ModelListItem key={model.name} model={model} />
-        ))}
+      <div className="flex flex-col p-2 gap-6">
+        {models
+          .sort((a, b) =>
+            activeModel?.path === a.path
+              ? -1
+              : activeModel?.path === b.path
+              ? 1
+              : 0
+          )
+          .map((model) => (
+            <ModelListItem key={model.name} model={model} />
+          ))}
+      </div>
     </ViewBody>
   </ViewContainer>
 )
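The comparator that keeps the active model on top relies on two properties of Array.prototype.sort: it returns -1 when a is the active model and 1 when b is, and 0 for every other pair, so a stable sort (guaranteed since ES2019) leaves the remaining models in their original order. Note that sort also mutates the array in place. A standalone check of the pattern:

const activePath = "/models/b.bin"
const models = [
  { name: "a", path: "/models/a.bin" },
  { name: "b", path: "/models/b.bin" },
  { name: "c", path: "/models/c.bin" }
]

// Active model first; all other pairs compare equal, so the stable
// sort preserves their original relative order.
models.sort((a, b) =>
  activePath === a.path ? -1 : activePath === b.path ? 1 : 0
)

console.log(models.map((m) => m.name)) // => [ "b", "a", "c" ]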
