Skip to content

Commit

Permalink
fix: OpenAI setting changes do not take effect within the same project lifecycle (will…
Browse files Browse the repository at this point in the history
… only take effect after reopening the project)

fix: chat window loading indicator is not removed after a request fails (for example, when the OpenAI key is invalid)
  • Loading branch information
iptton committed Aug 10, 2023
1 parent b345322 commit eb74909
Show file tree
Hide file tree
Showing 2 changed files with 45 additions and 41 deletions.
14 changes: 8 additions & 6 deletions src/main/kotlin/cc/unitmesh/devti/gui/chat/ChatCodingPanel.kt
Expand Up @@ -205,11 +205,13 @@ class ChatCodingPanel(private val chatCodingService: ChatCodingService, val disp
myList.add(messageView)

var text = ""
content.collect {
text += it
messageView.updateSourceContent(text)
messageView.updateContent(text)
messageView.scrollToBottom()
runCatching {
content.collect {
text += it
messageView.updateSourceContent(text)
messageView.updateContent(text)
messageView.scrollToBottom()
}
}

messageView.reRenderAssistantOutput()
Expand All @@ -228,4 +230,4 @@ class ChatCodingPanel(private val chatCodingService: ChatCodingService, val disp
myList.removeAll()
updateUI()
}
}
}
72 changes: 37 additions & 35 deletions src/main/kotlin/cc/unitmesh/devti/llms/openai/OpenAIProvider.kt
Expand Up @@ -2,7 +2,6 @@ package cc.unitmesh.devti.llms.openai

import cc.unitmesh.devti.llms.LLMProvider
import cc.unitmesh.devti.settings.AutoDevSettingsState
import cc.unitmesh.devti.settings.OPENAI_MODEL
import com.intellij.openapi.components.Service
import com.intellij.openapi.diagnostic.Logger
import com.intellij.openapi.diagnostic.logger
Expand All @@ -14,11 +13,9 @@ import com.theokanning.openai.completion.chat.ChatMessageRole
import com.theokanning.openai.service.OpenAiService
import com.theokanning.openai.service.OpenAiService.defaultClient
import com.theokanning.openai.service.OpenAiService.defaultObjectMapper
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.callbackFlow
import kotlinx.coroutines.withContext
import retrofit2.Retrofit
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory
import retrofit2.converter.jackson.JacksonConverterFactory
Expand All @@ -27,38 +24,40 @@ import java.time.Duration

@Service(Service.Level.PROJECT)
class OpenAIProvider(val project: Project) : LLMProvider {
private var service: OpenAiService

private val timeout = Duration.ofSeconds(600)
private val openAiVersion: String = AutoDevSettingsState.getInstance()?.openAiModel ?: OPENAI_MODEL[0]
private val openAiKey: String = AutoDevSettingsState.getInstance()?.openAiKey ?: ""
private val maxTokenLength: Int = AutoDevSettingsState.maxTokenLength

init {
private val service: OpenAiService
get() {
if (openAiKey.isEmpty()) {
logger.error("openAiKey is empty")
throw IllegalStateException("openAiKey is empty")
}

if (openAiKey.isEmpty()) {
logger.error("openAiKey is empty")
throw Exception("openAiKey is empty")
val openAiProxy = AutoDevSettingsState.getInstance().customOpenAiHost
return if (openAiProxy.isEmpty()) {
OpenAiService(openAiKey, timeout)
} else {
val mapper = defaultObjectMapper()
val client = defaultClient(openAiKey, timeout)

val retrofit = Retrofit.Builder()
.baseUrl(openAiProxy)
.client(client)
.addConverterFactory(JacksonConverterFactory.create(mapper))
.addCallAdapterFactory(RxJava2CallAdapterFactory.create())
.build()

val api = retrofit.create(OpenAiApi::class.java)
OpenAiService(api)
}
}

val openAiProxy = AutoDevSettingsState.getInstance()?.customOpenAiHost
if (openAiProxy.isNullOrEmpty()) {
service = OpenAiService(openAiKey, timeout)
} else {
val mapper = defaultObjectMapper()
val client = defaultClient(openAiKey, timeout)

val retrofit = Retrofit.Builder()
.baseUrl(openAiProxy)
.client(client)
.addConverterFactory(JacksonConverterFactory.create(mapper))
.addCallAdapterFactory(RxJava2CallAdapterFactory.create())
.build()

val api = retrofit.create(OpenAiApi::class.java)
service = OpenAiService(api)
}
}
private val timeout = Duration.ofSeconds(600)
private val openAiVersion: String
get() = AutoDevSettingsState.getInstance().openAiModel
private val openAiKey: String
get() = AutoDevSettingsState.getInstance().openAiKey

private val maxTokenLength: Int
get() = AutoDevSettingsState.maxTokenLength

private val messages: MutableList<ChatMessage> = ArrayList()
private var historyMessageLength: Int = 0
Expand All @@ -80,7 +79,10 @@ class OpenAIProvider(val project: Project) : LLMProvider {
return callbackFlow {
withContext(Dispatchers.IO) {
service.streamChatCompletion(completionRequest)
.doOnError(Throwable::printStackTrace)
.doOnError{ error ->
logger.error("Error in stream", error)
trySend(error.message ?: "Error occurs")
}
.blockingForEach { response ->
val completion = response.choices[0].message
if (completion != null && completion.content != null) {
Expand Down Expand Up @@ -118,4 +120,4 @@ class OpenAIProvider(val project: Project) : LLMProvider {
companion object {
private val logger: Logger = logger<OpenAIProvider>()
}
}
}

0 comments on commit eb74909

Please sign in to comment.