diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift index 7987e0a9..30fcde78 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift @@ -210,9 +210,9 @@ struct AssistantConfigurationDemoView: View { @State private var parameters = AssistantParameters(action: .create(model: Model.gpt41106Preview.value)) @State private var isAvatarLoading = false @State private var showAvatarFlow = false - @State private var fileIDS: [String] = [] + @State private var fileIDS = [String]() /// Used mostly to display already uploaded files if any. - @State private var filePickerInitialActions: [FilePickerAction] = [] + @State private var filePickerInitialActions = [FilePickerAction]() private let service: OpenAIService } @@ -229,7 +229,7 @@ extension Binding where Value == String? { AssistantConfigurationDemoView(service: OpenAIServiceFactory.service(apiKey: "")) } -// MARK: InputView +// MARK: - InputView struct InputView: View { let content: Content @@ -251,6 +251,8 @@ struct InputView: View { @Environment(\.inputViewStyle) private var style: InputViewStyle } +// MARK: - InputViewStyle + struct InputViewStyle { let verticalPadding: CGFloat @@ -259,6 +261,8 @@ struct InputViewStyle { } } +// MARK: - InputViewStyleKey + struct InputViewStyleKey: EnvironmentKey { static let defaultValue = InputViewStyle() } @@ -276,6 +280,8 @@ extension View { } } +// MARK: - CheckboxView + struct CheckboxView: View { @Binding var isChecked: Bool @@ -291,6 +297,8 @@ struct CheckboxView: View { } } +// MARK: - CheckboxRow + struct CheckboxRow: View { let title: String @Binding var isChecked: Bool diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift index e857e1af..4967f020 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift @@ -17,7 +17,7 @@ class AssistantConfigurationProvider { } var assistant: AssistantObject? - var assistants: [AssistantObject] = [] + var assistants = [AssistantObject]() var avatarURL: URL? var assistantDeletionStatus: DeletionStatus? diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatProvider.swift index 7e3e4e49..11c9b9d4 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatProvider.swift @@ -14,7 +14,7 @@ class ChatProvider { self.service = service } - var messages: [String] = [] + var messages = [String]() var errorMessage = "" var message = "" var usage: ChatUsage? 
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift
index 8c6fed14..f15115b4 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift
@@ -46,7 +46,7 @@ class ChatFunctionCallProvider {
   // MARK: - Public Properties

   /// To be used for UI purposes.
-  var chatDisplayMessages: [ChatMessageDisplayModel] = []
+  var chatDisplayMessages = [ChatMessageDisplayModel]()

   @MainActor
   func generateImage(arguments: String) async throws -> String {
@@ -174,8 +174,8 @@ class ChatFunctionCallProvider {
   private let service: OpenAIService
   private var lastDisplayedMessageID: UUID?
   /// To be used for a new request
-  private var chatMessageParameters: [ChatCompletionParameters.Message] = []
-  private var availableFunctions: [FunctionCallDefinition: @MainActor (String) async throws -> String] = [:]
+  private var chatMessageParameters = [ChatCompletionParameters.Message]()
+  private var availableFunctions = [FunctionCallDefinition: @MainActor (String) async throws -> String]()

   // MARK: - Private Methods

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift
index b98e70a4..2ef6d402 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift
@@ -32,7 +32,7 @@ class ChatFunctionsCallStreamProvider {
   // MARK: - Public Properties

   /// To be used for UI purposes.
-  var chatDisplayMessages: [ChatMessageDisplayModel] = []
+  var chatDisplayMessages = [ChatMessageDisplayModel]()

   @MainActor
   func generateImage(arguments: String) async throws -> String {
@@ -163,7 +163,7 @@ class ChatFunctionsCallStreamProvider {
   }

   func createAssistantMessage() -> ChatCompletionParameters.Message {
-    var toolCalls: [ToolCall] = []
+    var toolCalls = [ToolCall]()
     for (_, functionCallStreamedResponse) in functionCallsMap {
       let toolCall = functionCallStreamedResponse.toolCall // Intentionally force unwrapped to catch errors quickly on demo.
       // This should be properly handled.
@@ -178,7 +178,7 @@ class ChatFunctionsCallStreamProvider {

   func createToolsMessages() async throws -> [ChatCompletionParameters.Message] {
-    var toolMessages: [ChatCompletionParameters.Message] = []
+    var toolMessages = [ChatCompletionParameters.Message]()
     for (key, functionCallStreamedResponse) in functionCallsMap {
       let name = functionCallStreamedResponse.name
       let id = functionCallStreamedResponse.id
@@ -222,9 +222,9 @@ class ChatFunctionsCallStreamProvider {
   private let service: OpenAIService
   private var lastDisplayedMessageID: UUID?
   /// To be used for a new request
-  private var chatMessageParameters: [ChatCompletionParameters.Message] = []
-  private var functionCallsMap: [FunctionCallDefinition: FunctionCallStreamedResponse] = [:]
-  private var availableFunctions: [FunctionCallDefinition: @MainActor (String) async throws -> String] = [:]
+  private var chatMessageParameters = [ChatCompletionParameters.Message]()
+  private var functionCallsMap = [FunctionCallDefinition: FunctionCallStreamedResponse]()
+  private var availableFunctions = [FunctionCallDefinition: @MainActor (String) async throws -> String]()

   // MARK: - Private Methods

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift
index 001c55d4..0e7ea307 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift
@@ -19,7 +19,7 @@ class ChatFluidConversationProvider {
   // MARK: - Public Properties

   /// A collection of messages for display in the UI, representing the conversation.
-  var chatMessages: [ChatDisplayMessage] = []
+  var chatMessages = [ChatDisplayMessage]()

   // MARK: - Public Methods

@@ -94,9 +94,9 @@ class ChatFluidConversationProvider {
   /// Tracks the identifier of the last message displayed, enabling updates from the streaming API response.
   private var lastDisplayedMessageID: UUID?
   /// Stores the initial chat message's delta, which uniquely includes metadata like `role`.
-  private var firstChatMessageResponseDelta: [String: ChatCompletionChunkObject.ChatChoice.Delta] = [:]
+  private var firstChatMessageResponseDelta = [String: ChatCompletionChunkObject.ChatChoice.Delta]()
   /// Builds a history of messages sent and received, enhancing the chat's context for future requests.
-  private var parameterMessages: [ChatCompletionParameters.Message] = []
+  private var parameterMessages = [ChatCompletionParameters.Message]()

   // MARK: - Private Methods

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
index 14c16c63..49e274bf 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
@@ -1,15 +1,262 @@
-// Updated content according to SwiftFormat's wrap rule
+//
+//  ChatStructuredOutputToolProvider.swift
+//  SwiftOpenAIExample
+//
+//  Created by James Rochabrun on 8/11/24.
+//

 import Foundation
+import SwiftOpenAI
+import SwiftUI

-// Assuming the original content has long lines that need to be broken up
-// Example of how you might break a long line for readability and formatting:
+// MARK: - StructuredToolCall

-struct ChatStructuredOutputToolProvider {
-  let exampleLongString =
-    "This is an example of a very long string that will need to be broken up according to the SwiftFormat wrap rule so that it is more readable in the codebase."
+enum StructuredToolCall: String, CaseIterable {

-  func someFunction() {
-    print(exampleLongString)
+  case structureUI = "structured_ui"
+
+  var functionTool: ChatCompletionParameters.Tool {
+    switch self {
+    case .structureUI:
+      .init(
+        function: .init(
+          name: rawValue,
+          strict: true,
+          description: "Dynamically generated UI",
+          parameters: structureUISchema))
+    }
+  }
+
+  var structureUISchema: JSONSchema {
+    JSONSchema(
+      type: .object,
+      properties: [
+        "type": JSONSchema(
+          type: .string,
+          description: "The type of the UI component",
+          additionalProperties: false,
+          enum: ["div", "button", "header", "section", "field", "form"]),
+        "label": JSONSchema(
+          type: .string,
+          description: "The label of the UI component, used for buttons or form fields",
+          additionalProperties: false),
+        "children": JSONSchema(
+          type: .array,
+          description: "Nested UI components",
+          items: JSONSchema(ref: "#"),
+          additionalProperties: false),
+        "attributes": JSONSchema(
+          type: .array,
+          description: "Arbitrary attributes for the UI component, suitable for any element",
+          items: JSONSchema(
+            type: .object,
+            properties: [
+              "name": JSONSchema(
+                type: .string,
+                description: "The name of the attribute, for example onClick or className",
+                additionalProperties: false),
+              "value": JSONSchema(
+                type: .string,
+                description: "The value of the attribute",
+                additionalProperties: false),
+            ],
+            required: ["name", "value"],
+            additionalProperties: false),
+          additionalProperties: false),
+      ],
+      required: ["type", "label", "children", "attributes"],
+      additionalProperties: false)
+  }
+}
+
+// MARK: - ChatStructuredOutputToolProvider
+
+@Observable
+final class ChatStructuredOutputToolProvider {
+
+  // MARK: - Init
+
+  init(service: OpenAIService, customModel: String? = nil) {
+    self.service = service
+    self.customModel = customModel
+  }
+
+  let customModel: String?
+
+  var chatDisplayMessages = [ChatMessageDisplayModel]()
+  let systemMessage = ChatCompletionParameters.Message(role: .system, content: .text("You are a math tutor"))
+
+  func startChat(
+    prompt: String)
+    async throws
+  {
+    await startNewUserDisplayMessage(prompt)
+    await startNewAssistantEmptyDisplayMessage()
+
+    let userMessage = createUserMessage(prompt)
+    chatMessageParameters.append(userMessage)
+
+    let model: Model =
+      if let customModel, !customModel.isEmpty {
+        .custom(customModel)
+      } else {
+        .gpt4o20240806
+      }
+
+    let parameters = ChatCompletionParameters(
+      messages: [systemMessage] + chatMessageParameters,
+      model: model,
+      tools: StructuredToolCall.allCases.map(\.functionTool))
+
+    do {
+      let chat = try await service.startChat(parameters: parameters)
+      guard let assistantMessage = chat.choices?.first?.message else { return }
+      let content = assistantMessage.content ?? ""
+      await updateLastAssistantMessage(.init(content: .content(.init(text: content)), origin: .received(.gpt)))
+      if let toolCalls = assistantMessage.toolCalls {
+        availableFunctions = [.structureUI: getStructureOutput(arguments:)]
+        // Append the `assistantMessage` into the `chatMessageParameters` to extend the conversation
+        let parameterAssistantMessage = ChatCompletionParameters.Message(
+          role: .assistant,
+          content: .text(content), toolCalls: assistantMessage.toolCalls)
+
+        chatMessageParameters.append(parameterAssistantMessage)
+
+        /// # Step 4: send the info for each function call and function response to the model
+        for toolCall in toolCalls {
+          let name = toolCall.function.name
+          let id = toolCall.id
+          let functionToCall = availableFunctions[StructuredToolCall(rawValue: name!)!]!
+          let arguments = toolCall.function.arguments
+          let content = functionToCall(arguments)
+          let toolMessage = ChatCompletionParameters.Message(
+            role: .tool,
+            content: .text(content),
+            name: name,
+            toolCallID: id)
+          chatMessageParameters.append(toolMessage)
+        }
+
+        /// # get a new response from the model where it can see the function response
+        await continueChat()
+      }
+
+    } catch let error as APIError {
+      // If an error occurs, update the UI to display the error message.
+      await updateLastAssistantMessage(.init(content: .error("\(error.displayDescription)"), origin: .received(.gpt)))
+    }
+  }
+
+  // MARK: Tool functions
+
+  func getStructureOutput(arguments: String) -> String {
+    arguments
+  }
+
+  private let service: OpenAIService
+  private var lastDisplayedMessageID: UUID?
+  private var chatMessageParameters = [ChatCompletionParameters.Message]()
+  private var availableFunctions = [StructuredToolCall: (String) -> String]()
+
+}
+
+// MARK: UI related
+
+extension ChatStructuredOutputToolProvider {
+
+  func createUserMessage(
+    _ prompt: String)
+    -> ChatCompletionParameters.Message
+  {
+    .init(role: .user, content: .text(prompt))
+  }
+
+  func continueChat() async {
+    let model: Model =
+      if let customModel, !customModel.isEmpty {
+        .custom(customModel)
+      } else {
+        .gpt4o
+      }
+
+    let paramsForChat = ChatCompletionParameters(
+      messages: chatMessageParameters,
+      model: model)
+    do {
+      let chat = try await service.startChat(parameters: paramsForChat)
+      guard let assistantMessage = chat.choices?.first?.message else { return }
+      await updateLastAssistantMessage(.init(content: .content(.init(text: assistantMessage.content)), origin: .received(.gpt)))
+    } catch {
+      // If an error occurs, update the UI to display the error message.
+ await updateLastAssistantMessage(.init(content: .error("\(error)"), origin: .received(.gpt))) + } + } + + // MARK: - Private Methods + + @MainActor + private func startNewUserDisplayMessage(_ prompt: String) { + let startingMessage = ChatMessageDisplayModel( + content: .content(.init(text: prompt)), + origin: .sent) + addMessage(startingMessage) + } + + @MainActor + private func startNewAssistantEmptyDisplayMessage() { + let newMessage = ChatMessageDisplayModel( + content: .content(.init(text: "")), + origin: .received(.gpt)) + addMessage(newMessage) + } + + @MainActor + private func updateLastAssistantMessage( + _ message: ChatMessageDisplayModel) + { + guard let id = lastDisplayedMessageID, let index = chatDisplayMessages.firstIndex(where: { $0.id == id }) else { return } + + var lastMessage = chatDisplayMessages[index] + + switch message.content { + case .content(let newMedia): + switch lastMessage.content { + case .content(let lastMedia): + var updatedMedia = lastMedia + if + let newText = newMedia.text, + var lastMediaText = lastMedia.text + { + lastMediaText += newText + updatedMedia.text = lastMediaText + } else { + updatedMedia.text = "" + } + if let urls = newMedia.urls { + updatedMedia.urls = urls + } + lastMessage.content = .content(updatedMedia) + + case .error: + break + } + + case .error: + lastMessage.content = message.content + } + + chatDisplayMessages[index] = ChatMessageDisplayModel( + id: id, + content: lastMessage.content, + origin: message.origin) + } + + @MainActor + private func addMessage(_ message: ChatMessageDisplayModel) { + let newMessageId = message.id + lastDisplayedMessageID = newMessageId + withAnimation { + chatDisplayMessages.append(message) + } } } diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift index a19b9ca4..4e4851a4 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift @@ -19,7 +19,7 @@ final class ChatStructuredOutputProvider { } var message = "" - var messages: [String] = [] + var messages = [String]() var errorMessage = "" // MARK: - Public Methods diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/EmbeddingsDemo/Embeddingsprovider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/EmbeddingsDemo/Embeddingsprovider.swift index 27df24be..ebb83d74 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/EmbeddingsDemo/Embeddingsprovider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/EmbeddingsDemo/Embeddingsprovider.swift @@ -14,7 +14,7 @@ class EmbeddingsProvider { self.service = service } - var embeddings: [EmbeddingObject] = [] + var embeddings = [EmbeddingObject]() func createEmbeddings( parameters: EmbeddingParameter) diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift index 83af4b17..fa285a3a 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift @@ -115,6 +115,8 @@ extension View { } } +// MARK: - DeletionStatus + @retroactive Equatable + extension DeletionStatus: @retroactive Equatable { public static func ==(lhs: DeletionStatus, rhs: 
DeletionStatus) -> Bool { lhs.id == rhs.id diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift index 93c81e5e..e55d9450 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift @@ -13,11 +13,11 @@ final class FilesPickerProvider { self.service = service } - var files: [FileObject] = [] + var files = [FileObject]() var uploadedFile: FileObject? var deletedStatus: DeletionStatus? var retrievedFile: FileObject? - var fileContent: [[String: Any]] = [] + var fileContent = [[String: Any]]() func listFiles() async throws { files = try await service.listFiles().data diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FilesDemo/FilesProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FilesDemo/FilesProvider.swift index aa8102e5..d3013f09 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FilesDemo/FilesProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FilesDemo/FilesProvider.swift @@ -14,11 +14,11 @@ class FilesProvider { self.service = service } - var files: [FileObject] = [] + var files = [FileObject]() var uploadedFile: FileObject? var deletedStatus: DeletionStatus? var retrievedFile: FileObject? - var fileContent: [[String: Any]] = [] + var fileContent = [[String: Any]]() func listFiles() async throws { files = try await service.listFiles().data diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FineTuningDemo/FineTuningJobProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FineTuningDemo/FineTuningJobProvider.swift index 86b5cbe2..16fbad63 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FineTuningDemo/FineTuningJobProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FineTuningDemo/FineTuningJobProvider.swift @@ -17,8 +17,8 @@ class FineTuningJobProvider { var createdFineTuningJob: FineTuningJobObject? var canceledFineTuningJob: FineTuningJobObject? var retrievedFineTuningJob: FineTuningJobObject? - var fineTunedJobs: [FineTuningJobObject] = [] - var finteTuningEventObjects: [FineTuningJobEventObject] = [] + var fineTunedJobs = [FineTuningJobObject]() + var finteTuningEventObjects = [FineTuningJobEventObject]() func createFineTuningJob( parameters: FineTuningJobParameters) diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ImagesDemo/ImagesProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ImagesDemo/ImagesProvider.swift index a4816cf3..782ed78c 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ImagesDemo/ImagesProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ImagesDemo/ImagesProvider.swift @@ -14,7 +14,7 @@ class ImagesProvider { self.service = service } - var images: [URL] = [] + var images = [URL]() func createImages( parameters: ImageCreateParameters) diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModelsDemo/ModelsProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModelsDemo/ModelsProvider.swift index be6f42e3..9b88f4da 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModelsDemo/ModelsProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModelsDemo/ModelsProvider.swift @@ -14,7 +14,7 @@ class ModelsProvider { self.service = service } - var models: [ModelObject] = [] + var models = [ModelObject]() var retrievedModel: ModelObject? 
   var deletionStatus: DeletionStatus?

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift
index 2ade7dd3..6e5fba76 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift
@@ -33,7 +33,7 @@ class ResponseStreamProvider {
     }
   }

-  var messages: [ResponseMessage] = []
+  var messages = [ResponseMessage]()
   var isStreaming = false
   var currentStreamingMessage: ResponseMessage?
   var error: String?
@@ -103,7 +103,7 @@ class ResponseStreamProvider {

     do {
       // Build input array with conversation history
-      var inputArray: [InputItem] = []
+      var inputArray = [InputItem]()

       // Add conversation history
       for message in messages.dropLast(2) { // Exclude current user message and streaming placeholder
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift
index 504d19fd..8e2a27c2 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift
@@ -96,7 +96,7 @@ struct ChatDisplayMessageView: View {
     }
   }

-  @State private var urls: [URL] = []
+  @State private var urls = [URL]()
 }

 #Preview {
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift
index 2010733e..a8a1c5f1 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift
@@ -126,9 +126,9 @@ struct ChatVisionDemoView: View {
   @State private var chatProvider: ChatVisionProvider
   @State private var isLoading = false
   @State private var prompt = ""
-  @State private var selectedItems: [PhotosPickerItem] = []
-  @State private var selectedImages: [Image] = []
-  @State private var selectedImageURLS: [URL] = []
+  @State private var selectedItems = [PhotosPickerItem]()
+  @State private var selectedImages = [Image]()
+  @State private var selectedImageURLS = [URL]()

   /// Called when the user taps on the send button. Clears the selected images and prompt.
   private func resetInput() {
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift
index 4dd3b8ad..11aa2585 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift
@@ -19,7 +19,7 @@ class ChatVisionProvider {
   // MARK: - Public Properties

   /// A collection of messages for display in the UI, representing the conversation.
-  var chatMessages: [ChatDisplayMessage] = []
+  var chatMessages = [ChatDisplayMessage]()

   // MARK: - Public Methods

@@ -71,7 +71,7 @@ class ChatVisionProvider {
   /// Tracks the identifier of the last message displayed, enabling updates from the streaming API response.
   private var lastDisplayedMessageID: UUID?
   /// Stores the initial chat message's delta, which uniquely includes metadata like `role`.
- private var firstChatMessageResponseDelta: [String: ChatCompletionChunkObject.ChatChoice.Delta] = [:] + private var firstChatMessageResponseDelta = [String: ChatCompletionChunkObject.ChatChoice.Delta]() // MARK: - Private Methods diff --git a/Sources/OpenAI/AIProxy/AIProxyService.swift b/Sources/OpenAI/AIProxy/AIProxyService.swift index f7745539..7fae1e1b 100644 --- a/Sources/OpenAI/AIProxy/AIProxyService.swift +++ b/Sources/OpenAI/AIProxy/AIProxyService.swift @@ -167,7 +167,7 @@ struct AIProxyService: OpenAIService { limit: Int? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let lastJobID, let limit { queryItems = [.init(name: "after", value: lastJobID), .init(name: "limit", value: "\(limit)")] } else if let lastJobID { @@ -218,7 +218,7 @@ struct AIProxyService: OpenAIService { limit: Int? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let lastEventId, let limit { queryItems = [.init(name: "after", value: lastEventId), .init(name: "limit", value: "\(limit)")] } else if let lastEventId { @@ -527,7 +527,7 @@ struct AIProxyService: OpenAIService { before: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -685,7 +685,7 @@ struct AIProxyService: OpenAIService { runID _: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -767,7 +767,7 @@ struct AIProxyService: OpenAIService { before: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -865,7 +865,7 @@ struct AIProxyService: OpenAIService { before: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -990,7 +990,7 @@ struct AIProxyService: OpenAIService { limit: Int? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -1031,7 +1031,7 @@ struct AIProxyService: OpenAIService { before: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -1126,7 +1126,7 @@ struct AIProxyService: OpenAIService { filter: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -1241,7 +1241,7 @@ struct AIProxyService: OpenAIService { filter: String? 
= nil)
    async throws -> OpenAIResponse
  {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
diff --git a/Sources/OpenAI/Private/Networking/HTTPClient.swift b/Sources/OpenAI/Private/Networking/HTTPClient.swift
index 70a17dc1..7778fc1c 100644
--- a/Sources/OpenAI/Private/Networking/HTTPClient.swift
+++ b/Sources/OpenAI/Private/Networking/HTTPClient.swift
@@ -44,7 +44,7 @@ public struct HTTPRequest {
       throw URLError(.unsupportedURL)
     }

-    var headers: [String: String] = [:]
+    var headers = [String: String]()
     if let allHTTPHeaderFields = urlRequest.allHTTPHeaderFields {
       headers = allHTTPHeaderFields
     }
diff --git a/Sources/OpenAI/Public/Parameters/Assistant/AssistantParameters.swift b/Sources/OpenAI/Public/Parameters/Assistant/AssistantParameters.swift
index f31b496e..71632c72 100644
--- a/Sources/OpenAI/Public/Parameters/Assistant/AssistantParameters.swift
+++ b/Sources/OpenAI/Public/Parameters/Assistant/AssistantParameters.swift
@@ -55,7 +55,7 @@ public struct AssistantParameters: Encodable {
   /// The system instructions that the assistant uses. The maximum length is 32768 characters.
   public var instructions: String?
   /// A list of tools enabled on the assistant. There can be a maximum of 128 tools per assistant. Tools can be of types code_interpreter, retrieval, or function. Defaults to []
-  public var tools: [AssistantObject.Tool] = []
+  public var tools = [AssistantObject.Tool]()
   /// A set of resources that are used by the assistant's tools. The resources are specific to the type of tool. For example, the code_interpreter tool requires a list of file IDs, while the file_search tool requires a list of vector store IDs.
   public var toolResources: ToolResources?
   /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long.
diff --git a/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageEditParameters.swift b/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageEditParameters.swift
index 0630e9ea..3bd16b86 100644
--- a/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageEditParameters.swift
+++ b/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageEditParameters.swift
@@ -76,7 +76,7 @@ public struct CreateImageEditParameters: Encodable {
     size: String? = nil,
     user: String?
= nil) { - var imageDataArray: [Data] = [] + var imageDataArray = [Data]() for image in images { #if canImport(UIKit) @@ -223,7 +223,7 @@ public struct CreateImageEditParameters: Encodable { extension CreateImageEditParameters: MultipartFormDataParameters { public func encode(boundary: String) -> Data { - var entries: [MultipartFormDataEntry] = [] + var entries = [MultipartFormDataEntry]() // Add images (possibly multiple for gpt-image-1) for (index, imageData) in image.enumerated() { diff --git a/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageVariationParameters.swift b/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageVariationParameters.swift index c0cb9b8b..96ed5f1b 100644 --- a/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageVariationParameters.swift +++ b/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageVariationParameters.swift @@ -128,7 +128,7 @@ public struct CreateImageVariationParameters: Encodable { extension CreateImageVariationParameters: MultipartFormDataParameters { public func encode(boundary: String) -> Data { - var entries: [MultipartFormDataEntry] = [] + var entries = [MultipartFormDataEntry]() // Add image file entries.append(.file( diff --git a/Sources/OpenAI/Public/Parameters/Model.swift b/Sources/OpenAI/Public/Parameters/Model.swift index 44eca61d..38c93141 100644 --- a/Sources/OpenAI/Public/Parameters/Model.swift +++ b/Sources/OpenAI/Public/Parameters/Model.swift @@ -69,6 +69,8 @@ public enum Model { /// High-throughput tasks, especially simple instruction-following or classification case gpt5Nano + case gpt5Codex + /// Images case dalle2 case dalle3 @@ -103,6 +105,7 @@ public enum Model { case .gpt5: "gpt-5" case .gpt5Mini: "gpt-5-mini" case .gpt5Nano: "gpt-5-nano" + case .gpt5Codex: "gpt-5-codex" case .custom(let model): model } } diff --git a/Sources/OpenAI/Public/ResponseModels/Response/OutputItem.swift b/Sources/OpenAI/Public/ResponseModels/Response/OutputItem.swift index 3b43c282..7f1cbda1 100644 --- a/Sources/OpenAI/Public/ResponseModels/Response/OutputItem.swift +++ b/Sources/OpenAI/Public/ResponseModels/Response/OutputItem.swift @@ -32,6 +32,8 @@ public enum OutputItem: Decodable { case mcpListTools(MCPListTools) /// A request for human approval of a tool invocation case mcpApprovalRequest(MCPApprovalRequest) + /// A custom tool call that returns plain text + case customToolCall(CustomToolCall) public init(from decoder: Decoder) throws { let container = try decoder.container(keyedBy: CodingKeys.self) @@ -86,6 +88,10 @@ public enum OutputItem: Decodable { let mcpApprovalRequest = try MCPApprovalRequest(from: decoder) self = .mcpApprovalRequest(mcpApprovalRequest) + case "custom_tool_call": + let customToolCall = try CustomToolCall(from: decoder) + self = .customToolCall(customToolCall) + default: throw DecodingError.dataCorruptedError( forKey: .type, @@ -210,6 +216,30 @@ public enum OutputItem: Decodable { } } + // MARK: - Custom Tool Call + + /// A custom tool call that returns plain text instead of JSON + public struct CustomToolCall: Decodable { + /// The unique ID of the custom tool call + public let id: String + /// The type of the custom tool call. Always "custom_tool_call" + public let type: String + /// The status of the item. One of "in_progress", "completed", or "incomplete" + public let status: String? 
+ /// The call ID for this custom tool call + public let callId: String + /// The plain text input to the custom tool + public let input: String + /// The name of the custom tool + public let name: String + + enum CodingKeys: String, CodingKey { + case id, type, status + case callId = "call_id" + case input, name + } + } + // MARK: - Web Search Tool Call /// The results of a web search tool call diff --git a/Sources/OpenAI/Public/Service/DefaultOpenAIService.swift b/Sources/OpenAI/Public/Service/DefaultOpenAIService.swift index 933d4c92..08d8c858 100644 --- a/Sources/OpenAI/Public/Service/DefaultOpenAIService.swift +++ b/Sources/OpenAI/Public/Service/DefaultOpenAIService.swift @@ -149,7 +149,7 @@ struct DefaultOpenAIService: OpenAIService { limit: Int? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let lastJobID, let limit { queryItems = [.init(name: "after", value: lastJobID), .init(name: "limit", value: "\(limit)")] } else if let lastJobID { @@ -200,7 +200,7 @@ struct DefaultOpenAIService: OpenAIService { limit: Int? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let lastEventId, let limit { queryItems = [.init(name: "after", value: lastEventId), .init(name: "limit", value: "\(limit)")] } else if let lastEventId { @@ -504,7 +504,7 @@ struct DefaultOpenAIService: OpenAIService { before: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -662,7 +662,7 @@ struct DefaultOpenAIService: OpenAIService { runID: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -747,7 +747,7 @@ struct DefaultOpenAIService: OpenAIService { before: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -845,7 +845,7 @@ struct DefaultOpenAIService: OpenAIService { before: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -970,7 +970,7 @@ struct DefaultOpenAIService: OpenAIService { limit: Int? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -1011,7 +1011,7 @@ struct DefaultOpenAIService: OpenAIService { before: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -1106,7 +1106,7 @@ struct DefaultOpenAIService: OpenAIService { filter: String? = nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } @@ -1221,7 +1221,7 @@ struct DefaultOpenAIService: OpenAIService { filter: String? 
= nil) async throws -> OpenAIResponse { - var queryItems: [URLQueryItem] = [] + var queryItems = [URLQueryItem]() if let limit { queryItems.append(.init(name: "limit", value: "\(limit)")) } diff --git a/Sources/OpenAI/Public/Service/OpenAIService.swift b/Sources/OpenAI/Public/Service/OpenAIService.swift index 9917c319..438f9fda 100644 --- a/Sources/OpenAI/Public/Service/OpenAIService.swift +++ b/Sources/OpenAI/Public/Service/OpenAIService.swift @@ -1033,7 +1033,7 @@ extension OpenAIService { description: errorMessage, statusCode: response.statusCode) } - var content: [[String: Any]] = [] + var content = [[String: Any]]() if let jsonString = String(data: data, encoding: String.Encoding.utf8) { let lines = jsonString.split(separator: "\n") for line in lines { diff --git a/Sources/OpenAI/Public/Shared/Tool.swift b/Sources/OpenAI/Public/Shared/Tool.swift index 83aae462..b7c5c82d 100644 --- a/Sources/OpenAI/Public/Shared/Tool.swift +++ b/Sources/OpenAI/Public/Shared/Tool.swift @@ -21,6 +21,9 @@ public enum Tool: Codable { /// This tool searches the web for relevant results to use in a response case webSearch(WebSearchTool) + /// A custom tool that returns plain text instead of JSON + case custom(CustomTool) + public init(from decoder: Decoder) throws { let container = try decoder.container(keyedBy: CodingKeys.self) let type = try container.decode(String.self, forKey: .type) @@ -36,6 +39,8 @@ public enum Tool: Codable { self = try .computerUse(singleValueContainer.decode(ComputerUseTool.self)) case "web_search_preview", "web_search_preview_2025_03_11": self = try .webSearch(singleValueContainer.decode(WebSearchTool.self)) + case "custom": + self = try .custom(singleValueContainer.decode(CustomTool.self)) default: throw DecodingError.dataCorruptedError( forKey: .type, @@ -412,6 +417,32 @@ public enum Tool: Codable { } } + /// A custom tool that returns plain text instead of JSON + public struct CustomTool: Codable { + public init( + name: String, + description: String? = nil) + { + self.name = name + self.description = description + } + + /// The name of the custom tool + public let name: String + + /// A description of what the custom tool does + public let description: String? + + /// The type of the custom tool. Always custom + public let type = "custom" + + enum CodingKeys: String, CodingKey { + case name + case description + case type + } + } + /// Approximate location parameters for the search public struct UserLocation: Codable { public init( @@ -480,6 +511,8 @@ public enum Tool: Codable { try container.encode(tool) case .webSearch(let tool): try container.encode(tool) + case .custom(let tool): + try container.encode(tool) } } diff --git a/Sources/OpenAI/Public/Shared/ToolChoiceMode.swift b/Sources/OpenAI/Public/Shared/ToolChoiceMode.swift index b4e151bd..523167d7 100644 --- a/Sources/OpenAI/Public/Shared/ToolChoiceMode.swift +++ b/Sources/OpenAI/Public/Shared/ToolChoiceMode.swift @@ -26,6 +26,9 @@ public enum ToolChoiceMode: Codable { /// Use this option to force the model to call a specific function. case functionTool(FunctionTool) + /// Use this option to force the model to call a specific custom tool. + case customTool(CustomToolChoice) + public init(from decoder: Decoder) throws { let container = try decoder.singleValueContainer() @@ -46,6 +49,8 @@ public enum ToolChoiceMode: Codable { self = .hostedTool(hostedTool) } else if let functionTool = try? container.decode(FunctionTool.self) { self = .functionTool(functionTool) + } else if let customTool = try? 
container.decode(CustomToolChoice.self) { + self = .customTool(customTool) } else { throw DecodingError.dataCorruptedError( in: container, @@ -67,6 +72,8 @@ public enum ToolChoiceMode: Codable { try container.encode(toolType) case .functionTool(let tool): try container.encode(tool) + case .customTool(let tool): + try container.encode(tool) } } } @@ -142,3 +149,23 @@ public struct FunctionTool: Codable { case type } } + +// MARK: - CustomToolChoice + +/// Custom tool choice specification +public struct CustomToolChoice: Codable { + /// The name of the custom tool to call + public var name: String + + /// For custom tool calling, the type is always custom + public var type = "custom" + + public init(name: String) { + self.name = name + } + + enum CodingKeys: String, CodingKey { + case name + case type + } +} diff --git a/Tests/OpenAITests/ResponseStreamEventTests.swift b/Tests/OpenAITests/ResponseStreamEventTests.swift index 84310e85..c0d91e0c 100644 --- a/Tests/OpenAITests/ResponseStreamEventTests.swift +++ b/Tests/OpenAITests/ResponseStreamEventTests.swift @@ -545,7 +545,7 @@ final class ResponseStreamEventTests: XCTestCase { ] let decoder = JSONDecoder() - var receivedEvents: [ResponseStreamEvent] = [] + var receivedEvents = [ResponseStreamEvent]() // Decode all events for eventJson in events {
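
Usage note: a minimal sketch of how a client might exercise the custom tool support added in this diff. Tool.custom, CustomToolChoice, and OutputItem.customToolCall are the types introduced above; the surrounding request plumbing (how tools and toolChoice get attached to a Responses API call) and the tool name "code_search" are illustrative assumptions, not part of the change.

import SwiftOpenAI

// Declare a custom tool. Unlike function tools, its calls carry plain text
// instead of a JSON arguments payload.
let tools: [Tool] = [
  .custom(Tool.CustomTool(
    name: "code_search", // hypothetical tool name, for illustration only
    description: "Searches the repository and returns matching lines as plain text")),
]

// Optionally force the model to call that specific custom tool.
let toolChoice = ToolChoiceMode.customTool(CustomToolChoice(name: "code_search"))

// Read the model's call back out of a response output item.
func handle(_ item: OutputItem) {
  if case .customToolCall(let call) = item {
    // call.input is the plain text the model passed to the tool.
    print("tool: \(call.name), call id: \(call.callId), input: \(call.input)")
  }
}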