✨ Add support of response stream

Add gpt-4-turbo model to the list of models. Fix bug when ChatGPT may respond with generated chat name instead of intended answer.
Renset committed Apr 21, 2024
1 parent b622022 commit 08eda4d
Showing 7 changed files with 199 additions and 17 deletions.
8 changes: 4 additions & 4 deletions macai.xcodeproj/project.pbxproj
@@ -611,7 +611,7 @@
CODE_SIGN_IDENTITY = "Apple Development";
"CODE_SIGN_IDENTITY[sdk=macosx*]" = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1.3.1;
CURRENT_PROJECT_VERSION = 1.4.0;
DEVELOPMENT_TEAM = ZRB8WDV435;
ENABLE_HARDENED_RUNTIME = YES;
ENABLE_PREVIEWS = YES;
@@ -630,7 +630,7 @@
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
MACOSX_DEPLOYMENT_TARGET = 12.0;
MARKETING_VERSION = 1.3.1;
MARKETING_VERSION = 1.4.0;
ONLY_ACTIVE_ARCH = YES;
PRODUCT_BUNDLE_IDENTIFIER = notfullin.com.macai;
PRODUCT_NAME = "$(TARGET_NAME)";
@@ -653,7 +653,7 @@
CODE_SIGN_IDENTITY = "Apple Development";
"CODE_SIGN_IDENTITY[sdk=macosx*]" = "Apple Development";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1.3.1;
CURRENT_PROJECT_VERSION = 1.4.0;
DEVELOPMENT_TEAM = ZRB8WDV435;
ENABLE_HARDENED_RUNTIME = YES;
ENABLE_PREVIEWS = YES;
@@ -672,7 +672,7 @@
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
MACOSX_DEPLOYMENT_TARGET = 12.0;
MARKETING_VERSION = 1.3.1;
MARKETING_VERSION = 1.4.0;
ONLY_ACTIVE_ARCH = NO;
PRODUCT_BUNDLE_IDENTIFIER = notfullin.com.macai;
PRODUCT_NAME = "$(TARGET_NAME)";
7 changes: 4 additions & 3 deletions macai.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
@@ -1,4 +1,5 @@
{
"originHash" : "71519be76e607ad4e7e6a1d261eb9bd084df0cad40df3f9ae9eb024341840c59",
"pins" : [
{
"identity" : "attributedtext",
@@ -32,10 +33,10 @@
"kind" : "remoteSourceControl",
"location" : "https://github.com/sparkle-project/Sparkle",
"state" : {
"revision" : "631846cc829f0f0cae327df9bafe5a32b7ddadce",
"version" : "2.4.0"
"revision" : "47d3d90aee3c52b6f61d04ceae426e607df62347",
"version" : "2.5.2"
}
}
],
"version" : 2
"version" : 3
}
1 change: 1 addition & 0 deletions macai/Configuration/AppConstants.swift
@@ -13,6 +13,7 @@ struct AppConstants {
static let chatGptDefaultModel = "gpt-3.5-turbo"
static let chatGptContextSize: Double = 10
static let chatGptSystemMessage: String = String(format: "You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible. Knowledge cutoff: 2021-09-01. Current date: %@", getCurrentFormattedDate())
static let chatGptGenerateChatInstruction: String = "Return a short chat name as summary for this chat based on the previous message content and system message if it's not default. Start chat name with one appropriate emoji. Don't answer to my message, just generate a name."
}

func getCurrentFormattedDate() -> String {
22 changes: 21 additions & 1 deletion macai/UI/Chat/ChatBubbleView.swift
@@ -23,6 +23,7 @@ struct ChatBubbleView: View {
@State var waitingForResponse: Bool?
@State var error = false
@State var initialMessage = false
@State var isStreaming: Bool?
@State private var isPencilIconVisible = false
@State private var wobbleAmount = 0.0
@Environment(\.colorScheme) var colorScheme
Expand Down Expand Up @@ -81,7 +82,6 @@ struct ChatBubbleView: View {
ForEach(0..<elements.count, id: \.self) { index in
switch elements[index] {
case .text(let text):

Text(.init(text))
.textSelection(.enabled)
case .table(let header, let data):
@@ -133,7 +133,27 @@
)
.cornerRadius(16)
if !own {

//if isStreaming ?? false {
// TODO: uncomment when state update is fixed
// VStack {
// Image(systemName: "pencil")
// .foregroundColor(.blue)
// .offset(x: wobbleAmount, y: 0)
// .padding(.top, 8)
// .rotationEffect(.degrees(-wobbleAmount * 0.8))
// .animation(
// .easeIn(duration: 0.3).repeatForever(autoreverses: true),
// value: wobbleAmount
// )
// .onAppear {
// wobbleAmount = 5
// }
// Spacer()
// }
//}
Spacer()

}
}
.contextMenu {
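The animated "typing" indicator above is left commented out in this commit (pending the state-update fix noted in the TODO). As a rough standalone sketch of the intended effect, not the committed code and with an assumed view name, it amounts to a pencil icon that wobbles while a response is streaming:

import SwiftUI

// Minimal sketch: a pencil icon that wobbles while the assistant response streams in.
struct StreamingIndicatorSketch: View {
    @State private var wobbleAmount = 0.0

    var body: some View {
        Image(systemName: "pencil")
            .foregroundColor(.blue)
            .offset(x: wobbleAmount, y: 0)
            .rotationEffect(.degrees(-wobbleAmount * 0.8))
            .animation(
                .easeIn(duration: 0.3).repeatForever(autoreverses: true),
                value: wobbleAmount
            )
            .onAppear { wobbleAmount = 5 }  // kick off the repeating animation
    }
}

Once the isStreaming state propagates reliably to ChatBubbleView, a view along these lines could replace the commented block.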
162 changes: 153 additions & 9 deletions macai/UI/Chat/ChatView.swift
@@ -21,11 +21,13 @@ struct ChatView: View {
@State private var lastMessageError = false
@State private var newMessage: String = ""
@State private var editSystemMessage: Bool = false
@State private var isStreaming: Bool = false
@StateObject private var store = ChatStore(persistenceController: PersistenceController.shared)
@AppStorage("useChatGptForNames") var useChatGptForNames: Bool = false
@AppStorage("useStream") var useStream: Bool = true

let url = URL(string: AppConstants.apiUrlChatCompletions)

#if os(macOS)
var backgroundColor = Color(NSColor.controlBackgroundColor)
#else
@@ -62,7 +64,8 @@
message: messageEntity.body,
index: Int(messageEntity.id),
own: messageEntity.own,
waitingForResponse: false
waitingForResponse: false,
isStreaming: isStreaming
).id(Int64(messageEntity.id))
}
}
@@ -79,7 +82,9 @@
Text("Ignore")
Image(systemName: "multiply")
}
Button(action: {sendMessage(ignoreMessageInput: true)}) {
Button(action: {
sendMessage(ignoreMessageInput: true)}
) {
Text("Retry")
Image(systemName: "arrow.clockwise")
}
@@ -165,6 +170,60 @@
}

extension ChatView {
private func processDeltaResponse(with data: Data?) {
guard let data = data else {
print("No data received.")
return
}

let dataString = String(data: data, encoding: .utf8)
if (dataString == "[DONE]") {
handleChatCompletion()
return
} else {
print(dataString)
}

do {
let jsonResponse = try JSONSerialization.jsonObject(with: data, options: [])
guard let dictionary = jsonResponse as? [String: Any],
let choices = dictionary["choices"] as? [[String: Any]],
let firstChoice = choices.first else {
print("Failed to parse JSON correctly")
return
}

if let delta = firstChoice["delta"] as? [String: String],
let contentPart = delta["content"] {
self.isStreaming = true
DispatchQueue.main.async {
self.updateUIWithResponse(content: contentPart, role: "assistant")
}
}

if let finishReason = firstChoice["finish_reason"] as? String, finishReason == "stop" {
handleChatCompletion()
}
} catch {
print(String(data: data, encoding: .utf8))
print("Error parsing JSON: \(error)")
}
}

private func handleChatCompletion() {
print("Chat interaction completed.")
DispatchQueue.main.async {
self.resetCurrentMessage()
// TODO: force the child view for update to reflect the new state
self.isStreaming = false
generateChatNameIfNeeded()
}
}

func resetCurrentMessage() {
self.viewContext.rollback()
}

func sendMessage(ignoreMessageInput: Bool = false) {
let messageBody = newMessage

@@ -175,21 +234,73 @@
let request = prepareRequest(with: messageBody)
self.waitingForResponse = true

send(using: request) { data, response, error in
processResponse(with: data, response: response, error: error)
generateChatNameIfNeeded()
if useStream {
Task {
do {
Task {
let request = prepareRequest(with: messageBody)
try? await sendAsync(using: request) { data in
self.processDeltaResponse(with: data)
}
}
} catch {
print("An error occurred: \(error)")
}
}
} else {
send(using: request) { data, response, error in
processResponse(with: data, response: response, error: error)
generateChatNameIfNeeded()
}
}
}

private func sendAsync(using request: URLRequest, processLine: @escaping (Data) -> Void) async throws {
let (stream, response) = try await URLSession.shared.bytes(for: request)
if let httpResponse = response as? HTTPURLResponse {
switch httpResponse.statusCode {
case 200...299:
#if DEBUG
print("Got successful response code from server")
#endif
case 400...599:
self.waitingForResponse = false
self.lastMessageError = true
return
default:
print("Unhandled status code: \(httpResponse.statusCode)")
return
}
} else {
throw URLError(.badServerResponse)
}
for try await line in stream.lines {
if let lineData = line.data(using: .utf8) {
let prefix = "data: "
var index = line.startIndex
if line.starts(with: prefix) {
index = line.index(line.startIndex, offsetBy: prefix.count)
}
let jsonData = String(line[index...]).trimmingCharacters(in: .whitespacesAndNewlines)
if let jsonData = jsonData.data(using: .utf8) {
processLine(jsonData) // Process the JSON data
self.waitingForResponse = false
}
}

}
}


private func generateChatNameIfNeeded() {
guard self.chat.name == "", useChatGptForNames, self.chat.messages.count > 0 else {
#if DEBUG
print("Chat name not needed, skipping generation")
#endif
return }

let requestContent = "Return a short chat name as summary for this chat based on the previous message content and system message if it's not default. Start chat name with one appropriate emoji. Don't answer to my message, just generate a name."
let request = prepareRequest(with: requestContent, model: "gpt-3.5-turbo")
let requestContent = AppConstants.chatGptGenerateChatInstruction
let request = prepareRequest(with: requestContent, model: "gpt-3.5-turbo", forceStreamFalse: true)

send(using: request) { data, response, error in
DispatchQueue.main.async {
@@ -203,6 +314,15 @@
let chatName = messageContent.trimmingCharacters(in: .whitespacesAndNewlines)
self.chat.name = chatName
self.viewContext.saveWithRetry(attempts: 3)

// remove 'generate chat name' instruction from requestMessages
#if DEBUG
print("Length of requestMessages before deletion: \(self.chat.requestMessages.count)")
#endif
self.chat.requestMessages = self.chat.requestMessages.filter { $0["content"] != AppConstants.chatGptGenerateChatInstruction }
#if DEBUG
print("Length of requestMessages after deletion: \(self.chat.requestMessages.count)")
#endif
}
}
}
@@ -222,7 +342,7 @@
newMessage = ""
}

private func prepareRequest(with messageBody: String, model: String = "") -> URLRequest {
private func prepareRequest(with messageBody: String, model: String = "", forceStreamFalse: Bool = false) -> URLRequest {
var request = URLRequest(url: url!)
request.httpMethod = "POST"
request.setValue("Bearer \(gptToken)", forHTTPHeaderField: "Authorization")
@@ -242,6 +362,7 @@

let jsonDict: [String: Any] = [
"model": (model != "") ? model : gptModel,
"stream": forceStreamFalse ? false : useStream,
"messages": Array(chat.requestMessages.prefix(1) + chat.requestMessages.suffix(Int(chatContext) > chat.requestMessages.count - 1 ? chat.requestMessages.count - 1 : Int(chatContext)))
]

@@ -320,6 +441,29 @@
}

private func updateUIWithResponse(content: String, role: String) {
if useStream {
let sortedMessages = chat.messages.sorted(by: { $0.timestamp < $1.timestamp })
if let lastMessage = sortedMessages.last {
if lastMessage.own {
addNewMessageToChat(content: content, role: role)
} else {
lastMessage.body += content
lastMessage.own = false
lastMessage.timestamp = Date()
lastMessage.waitingForResponse = false
// Force the view to update
self.chat.objectWillChange.send()
self.viewContext.saveWithRetry(attempts: 3)
}
}

} else {
addNewMessageToChat(content: content, role: role)
}

}

private func addNewMessageToChat(content: String, role: String) {
let receivedMessage = MessageEntity(context: self.viewContext)
receivedMessage.id = Int64(self.chat.messages.count + 1)
receivedMessage.name = "ChatGPT"
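For context on the streaming path added above: with "stream" set to true, the chat completions endpoint returns server-sent events, one "data: " line per chunk, where choices[0].delta.content carries the next text fragment and a final "data: [DONE]" line ends the stream. This is the format the new sendAsync and processDeltaResponse consume. A minimal self-contained sketch of that parsing step, with a sample payload that is illustrative only:

import Foundation

// Extract the text fragment from a single SSE line of a streamed chat completion.
// Returns nil for "[DONE]" and for chunks that carry no content delta.
func contentDelta(fromLine line: String) -> String? {
    let payload = line.hasPrefix("data: ") ? String(line.dropFirst("data: ".count)) : line
    guard payload != "[DONE]", let data = payload.data(using: .utf8) else { return nil }
    guard
        let json = try? JSONSerialization.jsonObject(with: data) as? [String: Any],
        let choices = json["choices"] as? [[String: Any]],
        let delta = choices.first?["delta"] as? [String: Any]
    else { return nil }
    return delta["content"] as? String
}

let sampleChunk = #"data: {"choices":[{"delta":{"content":"Hello"},"finish_reason":null}]}"#
print(contentDelta(fromLine: sampleChunk) ?? "<no content>")  // prints "Hello"

The committed processDeltaResponse(with:) follows the same shape, and additionally finalizes the message when finish_reason equals "stop" or the literal [DONE] marker arrives.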
1 change: 1 addition & 0 deletions macai/UI/Preferences/TabChatSettingsView.swift
@@ -42,6 +42,7 @@ struct ChatSettingsView: View {
Picker("", selection: $selectedGptModel) {
Text("gpt-3.5-turbo").tag("gpt-3.5-turbo")
Text("gpt-3.5-turbo-0301").tag("gpt-3.5-turbo-0301")
Text("gpt-4-turbo").tag("gpt-4-turbo")
Text("gpt-4").tag("gpt-4")
Text("gpt-4-0314").tag("gpt-4-0314")
Text("gpt-4-32k").tag("gpt-4-32k")
15 changes: 15 additions & 0 deletions macai/UI/Preferences/TabGeneralSettingsView.swift
@@ -12,6 +12,7 @@ struct GeneralSettingsView: View {
@AppStorage("apiUrl") var apiUrl: String = AppConstants.apiUrlChatCompletions
@AppStorage("chatContext") var chatContext: Double = AppConstants.chatGptContextSize
@AppStorage("useChatGptForNames") var useChatGptForNames: Bool = false
@AppStorage("useStream") var useStream: Bool = true
@Binding var lampColor: Color
@FocusState private var isFocused: Bool

@@ -107,6 +108,20 @@
.buttonStyle(PlainButtonStyle())
.help("Chat name will be generated based on chat messages. To reduce API costs, model chat-gpt-3.5-turbo will be used for this purpose.")

Spacer()
}
}
Toggle(isOn: $useStream) {
HStack {
Text("Use stream responses")
Button(action: {
}) {
Image(systemName: "questionmark.circle")
.foregroundColor(.blue)
}
.buttonStyle(PlainButtonStyle())
.help("If on, the ChatGPT response will be streamed to the client. This will allow you to see the response in real-time. If off, the response will be sent to the client only after the model has finished processing.")

Spacer()
}
}
