From 92d5aefc7c8f34b02e8cb313524b2ec9846ed83e Mon Sep 17 00:00:00 2001
From: jamesrochabrun
Date: Mon, 29 Sep 2025 23:58:37 -0700
Subject: [PATCH 1/6] Fix for compilation error

---
 .../ChatStructuredOutputToolProvider.swift | 267 +++++++++++++++++-
 1 file changed, 259 insertions(+), 8 deletions(-)

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
index 14c16c63..cfd9b292 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
@@ -1,15 +1,266 @@
-// Updated content according to SwiftFormat's wrap rule
+//
+// ChatStructuredOutputToolProvider.swift
+// SwiftOpenAIExample
+//
+// Created by James Rochabrun on 8/11/24.
+//
 
 import Foundation
+import SwiftOpenAI
+import SwiftUI
 
-// Assuming the original content has long lines that need to be broken up
-// Example of how you might break a long line for readability and formatting:
+// MARK: - StructuredToolCall
 
-struct ChatStructuredOutputToolProvider {
-  let exampleLongString =
-    "This is an example of a very long string that will need to be broken up according to the SwiftFormat wrap rule so that it is more readable in the codebase."
+enum StructuredToolCall: String, CaseIterable {
 
-  func someFunction() {
-    print(exampleLongString)
+  case structureUI = "structured_ui"
+
+  var functionTool: ChatCompletionParameters.Tool {
+    switch self {
+    case .structureUI:
+      .init(
+        function: .init(
+          name: rawValue,
+          strict: true,
+          description: "Dynamically generated UI",
+          parameters: structureUISchema))
+    }
+  }
+
+  var structureUISchema: JSONSchema {
+    JSONSchema(
+      type: .object,
+      properties: [
+        "type": JSONSchema(
+          type: .string,
+          description: "The type of the UI component",
+          additionalProperties: false,
+          enum: ["div", "button", "header", "section", "field", "form"]
+        ),
+        "label": JSONSchema(
+          type: .string,
+          description: "The label of the UI component, used for buttons or form fields",
+          additionalProperties: false
+        ),
+        "children": JSONSchema(
+          type: .array,
+          description: "Nested UI components",
+          items: JSONSchema(ref: "#"),
+          additionalProperties: false
+        ),
+        "attributes": JSONSchema(
+          type: .array,
+          description: "Arbitrary attributes for the UI component, suitable for any element",
+          items: JSONSchema(
+            type: .object,
+            properties: [
+              "name": JSONSchema(
+                type: .string,
+                description: "The name of the attribute, for example onClick or className",
+                additionalProperties: false
+              ),
+              "value": JSONSchema(
+                type: .string,
+                description: "The value of the attribute",
+                additionalProperties: false
+              ),
+            ],
+            required: ["name", "value"],
+            additionalProperties: false),
+          additionalProperties: false
+        ),
+      ],
+      required: ["type", "label", "children", "attributes"],
+      additionalProperties: false)
+  }
+}
+
+// MARK: - ChatStructuredOutputToolProvider
+
+@Observable
+final class ChatStructuredOutputToolProvider {
+
+  // MARK: - Init
+
+  let customModel: String?
+
+  init(service: OpenAIService, customModel: String? = nil) {
+    self.service = service
+    self.customModel = customModel
+  }
+
+  var chatDisplayMessages: [ChatMessageDisplayModel] = []
+  let systemMessage = ChatCompletionParameters.Message(role: .system, content: .text("You are a math tutor"))
+
+  func startChat(
+    prompt: String)
+    async throws
+  {
+    await startNewUserDisplayMessage(prompt)
+    await startNewAssistantEmptyDisplayMessage()
+
+    let userMessage = createUserMessage(prompt)
+    chatMessageParameters.append(userMessage)
+
+    let model: Model = if let customModel = customModel, !customModel.isEmpty {
+      .custom(customModel)
+    } else {
+      .gpt4o20240806
+    }
+
+    let parameters = ChatCompletionParameters(
+      messages: [systemMessage] + chatMessageParameters,
+      model: model,
+      tools: StructuredToolCall.allCases.map(\.functionTool))
+
+    do {
+      let chat = try await service.startChat(parameters: parameters)
+      guard let assistantMessage = chat.choices?.first?.message else { return }
+      let content = assistantMessage.content ?? ""
+      await updateLastAssistantMessage(.init(content: .content(.init(text: content)), origin: .received(.gpt)))
+      if let toolCalls = assistantMessage.toolCalls {
+        availableFunctions = [.structureUI: getStructureOutput(arguments:)]
+        // Append the `assistantMessage` into the `chatMessageParameters` to extend the conversation
+        let parameterAssistantMessage = ChatCompletionParameters.Message(
+          role: .assistant,
+          content: .text(content), toolCalls: assistantMessage.toolCalls)
+
+        chatMessageParameters.append(parameterAssistantMessage)
+
+        /// # Step 4: send the info for each function call and function response to the model
+        for toolCall in toolCalls {
+          let name = toolCall.function.name
+          let id = toolCall.id
+          let functionToCall = availableFunctions[StructuredToolCall(rawValue: name!)!]!
+          let arguments = toolCall.function.arguments
+          let content = functionToCall(arguments)
+          let toolMessage = ChatCompletionParameters.Message(
+            role: .tool,
+            content: .text(content),
+            name: name,
+            toolCallID: id)
+          chatMessageParameters.append(toolMessage)
+        }
+
+        /// # get a new response from the model where it can see the function response
+        await continueChat()
+      }
+
+    } catch let error as APIError {
+      // If an error occurs, update the UI to display the error message.
+      await updateLastAssistantMessage(.init(content: .error("\(error.displayDescription)"), origin: .received(.gpt)))
+    }
+  }
+
+  // MARK: Tool functions
+
+  func getStructureOutput(arguments: String) -> String {
+    arguments
+  }
+
+  private let service: OpenAIService
+  private var lastDisplayedMessageID: UUID?
+  private var chatMessageParameters: [ChatCompletionParameters.Message] = []
+  private var availableFunctions: [StructuredToolCall: (String) -> String] = [:]
+
+}
+
+// MARK: UI related
+
+extension ChatStructuredOutputToolProvider {
+
+  func createUserMessage(
+    _ prompt: String)
+    -> ChatCompletionParameters.Message
+  {
+    .init(role: .user, content: .text(prompt))
+  }
+
+  func continueChat() async {
+    let model: Model = if let customModel = customModel, !customModel.isEmpty {
+      .custom(customModel)
+    } else {
+      .gpt4o
+    }
+
+    let paramsForChat = ChatCompletionParameters(
+      messages: chatMessageParameters,
+      model: model)
+    do {
+      let chat = try await service.startChat(parameters: paramsForChat)
+      guard let assistantMessage = chat.choices?.first?.message else { return }
+      await updateLastAssistantMessage(.init(content: .content(.init(text: assistantMessage.content)), origin: .received(.gpt)))
+    } catch {
+      // If an error occurs, update the UI to display the error message.
+      await updateLastAssistantMessage(.init(content: .error("\(error)"), origin: .received(.gpt)))
+    }
+  }
+
+  // MARK: - Private Methods
+
+  @MainActor
+  private func startNewUserDisplayMessage(_ prompt: String) {
+    let startingMessage = ChatMessageDisplayModel(
+      content: .content(.init(text: prompt)),
+      origin: .sent)
+    addMessage(startingMessage)
+  }
+
+  @MainActor
+  private func startNewAssistantEmptyDisplayMessage() {
+    let newMessage = ChatMessageDisplayModel(
+      content: .content(.init(text: "")),
+      origin: .received(.gpt))
+    addMessage(newMessage)
+  }
+
+  @MainActor
+  private func updateLastAssistantMessage(
+    _ message: ChatMessageDisplayModel)
+  {
+    guard let id = lastDisplayedMessageID, let index = chatDisplayMessages.firstIndex(where: { $0.id == id }) else { return }
+
+    var lastMessage = chatDisplayMessages[index]
+
+    switch message.content {
+    case .content(let newMedia):
+      switch lastMessage.content {
+      case .content(let lastMedia):
+        var updatedMedia = lastMedia
+        if
+          let newText = newMedia.text,
+          var lastMediaText = lastMedia.text
+        {
+          lastMediaText += newText
+          updatedMedia.text = lastMediaText
+        } else {
+          updatedMedia.text = ""
+        }
+        if let urls = newMedia.urls {
+          updatedMedia.urls = urls
+        }
+        lastMessage.content = .content(updatedMedia)
+
+      case .error:
+        break
+      }
+
+    case .error:
+      lastMessage.content = message.content
+    }
+
+    chatDisplayMessages[index] = ChatMessageDisplayModel(
+      id: id,
+      content: lastMessage.content,
+      origin: message.origin)
+  }
+
+  @MainActor
+  private func addMessage(_ message: ChatMessageDisplayModel) {
+    let newMessageId = message.id
+    lastDisplayedMessageID = newMessageId
+    withAnimation {
+      chatDisplayMessages.append(message)
+    }
   }
 }

From e3d3d3529b61decfd56568b1d95db1b11208f7ee Mon Sep 17 00:00:00 2001
From: jamesrochabrun
Date: Tue, 30 Sep 2025 00:17:39 -0700
Subject: [PATCH 2/6] Add custom tool support to Response API
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add support for custom tools that return plain text instead of JSON
in the Response API.

Changes:
- Add CustomTool struct and .custom case to Tool enum
- Add CustomToolCall struct and .customToolCall case to OutputItem enum
- Add CustomToolChoice struct and .customTool case to ToolChoiceMode enum

Custom tools enable models to return plain text responses without JSON
wrapping, useful for scenarios like code execution, mathematical
expressions, or structured text outputs.
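
A rough usage sketch (the types below are the ones this patch adds; the
surrounding response-handling names are assumptions and may differ from
the final Response API surface):

    // Declare a custom tool that exchanges plain text with the model.
    let codeExec = Tool.custom(.init(
      name: "code_exec",
      description: "Executes the Swift snippet it is given"))

    // A custom tool invocation arrives as a plain-text output item.
    for item in outputItems {
      if case .customToolCall(let call) = item {
        print(call.name)   // "code_exec"
        print(call.input)  // raw text such as "print(1 + 1)", no JSON decoding needed
      }
    }
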
Note: CustomToolCallOutput was intentionally not included as it is not
documented in the OpenAI API documentation for multi-turn conversations
with custom tools.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 .../ResponseModels/Response/OutputItem.swift  | 30 +++++++++++++++++
 Sources/OpenAI/Public/Shared/Tool.swift       | 33 +++++++++++++++++++
 .../OpenAI/Public/Shared/ToolChoiceMode.swift | 27 +++++++++++++++
 3 files changed, 90 insertions(+)

diff --git a/Sources/OpenAI/Public/ResponseModels/Response/OutputItem.swift b/Sources/OpenAI/Public/ResponseModels/Response/OutputItem.swift
index 3b43c282..7f1cbda1 100644
--- a/Sources/OpenAI/Public/ResponseModels/Response/OutputItem.swift
+++ b/Sources/OpenAI/Public/ResponseModels/Response/OutputItem.swift
@@ -32,6 +32,8 @@ public enum OutputItem: Decodable {
   case mcpListTools(MCPListTools)
   /// A request for human approval of a tool invocation
   case mcpApprovalRequest(MCPApprovalRequest)
+  /// A custom tool call that returns plain text
+  case customToolCall(CustomToolCall)
 
   public init(from decoder: Decoder) throws {
     let container = try decoder.container(keyedBy: CodingKeys.self)
@@ -86,6 +88,10 @@ public enum OutputItem: Decodable {
       let mcpApprovalRequest = try MCPApprovalRequest(from: decoder)
       self = .mcpApprovalRequest(mcpApprovalRequest)
 
+    case "custom_tool_call":
+      let customToolCall = try CustomToolCall(from: decoder)
+      self = .customToolCall(customToolCall)
+
     default:
       throw DecodingError.dataCorruptedError(
         forKey: .type,
@@ -210,6 +216,30 @@ public enum OutputItem: Decodable {
     }
   }
 
+  // MARK: - Custom Tool Call
+
+  /// A custom tool call that returns plain text instead of JSON
+  public struct CustomToolCall: Decodable {
+    /// The unique ID of the custom tool call
+    public let id: String
+    /// The type of the custom tool call. Always "custom_tool_call"
+    public let type: String
+    /// The status of the item. One of "in_progress", "completed", or "incomplete"
+    public let status: String?
+    /// The call ID for this custom tool call
+    public let callId: String
+    /// The plain text input to the custom tool
+    public let input: String
+    /// The name of the custom tool
+    public let name: String
+
+    enum CodingKeys: String, CodingKey {
+      case id, type, status
+      case callId = "call_id"
+      case input, name
+    }
+  }
+
   // MARK: - Web Search Tool Call
 
   /// The results of a web search tool call

diff --git a/Sources/OpenAI/Public/Shared/Tool.swift b/Sources/OpenAI/Public/Shared/Tool.swift
index 83aae462..b7c5c82d 100644
--- a/Sources/OpenAI/Public/Shared/Tool.swift
+++ b/Sources/OpenAI/Public/Shared/Tool.swift
@@ -21,6 +21,9 @@ public enum Tool: Codable {
   /// This tool searches the web for relevant results to use in a response
   case webSearch(WebSearchTool)
 
+  /// A custom tool that returns plain text instead of JSON
+  case custom(CustomTool)
+
   public init(from decoder: Decoder) throws {
     let container = try decoder.container(keyedBy: CodingKeys.self)
     let type = try container.decode(String.self, forKey: .type)
@@ -36,6 +39,8 @@ public enum Tool: Codable {
       self = try .computerUse(singleValueContainer.decode(ComputerUseTool.self))
     case "web_search_preview", "web_search_preview_2025_03_11":
       self = try .webSearch(singleValueContainer.decode(WebSearchTool.self))
+    case "custom":
+      self = try .custom(singleValueContainer.decode(CustomTool.self))
     default:
       throw DecodingError.dataCorruptedError(
         forKey: .type,
@@ -412,6 +417,32 @@ public enum Tool: Codable {
     }
   }
 
+  /// A custom tool that returns plain text instead of JSON
+  public struct CustomTool: Codable {
+    public init(
+      name: String,
+      description: String? = nil)
+    {
+      self.name = name
+      self.description = description
+    }
+
+    /// The name of the custom tool
+    public let name: String
+
+    /// A description of what the custom tool does
+    public let description: String?
+
+    /// The type of the custom tool. Always custom
+    public let type = "custom"
+
+    enum CodingKeys: String, CodingKey {
+      case name
+      case description
+      case type
+    }
+  }
+
   /// Approximate location parameters for the search
   public struct UserLocation: Codable {
     public init(
@@ -480,6 +511,8 @@ public enum Tool: Codable {
       try container.encode(tool)
     case .webSearch(let tool):
       try container.encode(tool)
+    case .custom(let tool):
+      try container.encode(tool)
     }
   }

diff --git a/Sources/OpenAI/Public/Shared/ToolChoiceMode.swift b/Sources/OpenAI/Public/Shared/ToolChoiceMode.swift
index b4e151bd..523167d7 100644
--- a/Sources/OpenAI/Public/Shared/ToolChoiceMode.swift
+++ b/Sources/OpenAI/Public/Shared/ToolChoiceMode.swift
@@ -26,6 +26,9 @@ public enum ToolChoiceMode: Codable {
   /// Use this option to force the model to call a specific function.
   case functionTool(FunctionTool)
 
+  /// Use this option to force the model to call a specific custom tool.
+  case customTool(CustomToolChoice)
+
   public init(from decoder: Decoder) throws {
     let container = try decoder.singleValueContainer()
 
@@ -46,6 +49,8 @@ public enum ToolChoiceMode: Codable {
       self = .hostedTool(hostedTool)
     } else if let functionTool = try? container.decode(FunctionTool.self) {
       self = .functionTool(functionTool)
+    } else if let customTool = try? container.decode(CustomToolChoice.self) {
+      self = .customTool(customTool)
     } else {
       throw DecodingError.dataCorruptedError(
         in: container,
@@ -67,6 +72,8 @@ public enum ToolChoiceMode: Codable {
       try container.encode(toolType)
     case .functionTool(let tool):
       try container.encode(tool)
+    case .customTool(let tool):
+      try container.encode(tool)
     }
   }
 }
@@ -142,3 +149,23 @@ public struct FunctionTool: Codable {
     case type
   }
 }
+
+// MARK: - CustomToolChoice
+
+/// Custom tool choice specification
+public struct CustomToolChoice: Codable {
+  /// The name of the custom tool to call
+  public var name: String
+
+  /// For custom tool calling, the type is always custom
+  public var type = "custom"
+
+  public init(name: String) {
+    self.name = name
+  }
+
+  enum CodingKeys: String, CodingKey {
+    case name
+    case type
+  }
+}

From 10478c2473e85151f24da3ced516279776af44e7 Mon Sep 17 00:00:00 2001
From: jamesrochabrun
Date: Tue, 30 Sep 2025 00:26:41 -0700
Subject: [PATCH 3/6] Add gpt5Codex model
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add support for GPT-5 Codex model.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 Sources/OpenAI/Public/Parameters/Model.swift | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/Sources/OpenAI/Public/Parameters/Model.swift b/Sources/OpenAI/Public/Parameters/Model.swift
index 44eca61d..6c043b0f 100644
--- a/Sources/OpenAI/Public/Parameters/Model.swift
+++ b/Sources/OpenAI/Public/Parameters/Model.swift
@@ -68,6 +68,8 @@ public enum Model {
   case gpt5Mini
   /// High-throughput tasks, especially simple instruction-following or classification
   case gpt5Nano
+  
+  case gpt5Codex
 
   /// Images
   case dalle2
@@ -103,6 +105,7 @@ public enum Model {
     case .gpt5: "gpt-5"
     case .gpt5Mini: "gpt-5-mini"
     case .gpt5Nano: "gpt-5-nano"
+    case .gpt5Codex: "gpt-5-codex"
     case .custom(let model): model
     }
   }

From 22ccd35a23a7451cf3c8758e18591b2007c01cfe Mon Sep 17 00:00:00 2001
From: jamesrochabrun
Date: Tue, 30 Sep 2025 00:29:18 -0700
Subject: [PATCH 4/6] Fix swiftformat trailing space in Model.swift

---
 Sources/OpenAI/Public/Parameters/Model.swift | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Sources/OpenAI/Public/Parameters/Model.swift b/Sources/OpenAI/Public/Parameters/Model.swift
index 6c043b0f..38c93141 100644
--- a/Sources/OpenAI/Public/Parameters/Model.swift
+++ b/Sources/OpenAI/Public/Parameters/Model.swift
@@ -68,7 +68,7 @@ public enum Model {
   case gpt5Mini
   /// High-throughput tasks, especially simple instruction-following or classification
   case gpt5Nano
-  
+
   case gpt5Codex
 
   /// Images

From 3d4e9629f9e75c042cc8490c714fdc4458781982 Mon Sep 17 00:00:00 2001
From: jamesrochabrun
Date: Tue, 30 Sep 2025 00:33:40 -0700
Subject: [PATCH 5/6] Fix swiftformat linting issues
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Apply swiftformat fixes to ChatStructuredOutputToolProvider:
- Organize declarations (move init before properties)
- Wrap multiline conditional assignments
- Remove redundant optional binding
- Fix indentation and trailing spaces

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 .../ChatStructuredOutputToolProvider.swift | 34 ++++++++++---------
 1 file changed, 18 insertions(+), 16 deletions(-)

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
index cfd9b292..7312976a 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
@@ -80,15 +80,15 @@ enum StructuredToolCall: String, CaseIterable {
 @Observable
 final class ChatStructuredOutputToolProvider {
 
-  // MARK: - Init
-
-  let customModel: String?
-
   init(service: OpenAIService, customModel: String? = nil) {
     self.service = service
     self.customModel = customModel
   }
 
+  // MARK: - Init
+
+  let customModel: String?
+
   var chatDisplayMessages: [ChatMessageDisplayModel] = []
   let systemMessage = ChatCompletionParameters.Message(role: .system, content: .text("You are a math tutor"))
 
@@ -102,12 +102,13 @@ final class ChatStructuredOutputToolProvider {
     let userMessage = createUserMessage(prompt)
     chatMessageParameters.append(userMessage)
 
-    let model: Model = if let customModel = customModel, !customModel.isEmpty {
-      .custom(customModel)
-    } else {
-      .gpt4o20240806
-    }
-
+    let model: Model =
+      if let customModel, !customModel.isEmpty {
+        .custom(customModel)
+      } else {
+        .gpt4o20240806
+      }
+
     let parameters = ChatCompletionParameters(
       messages: [systemMessage] + chatMessageParameters,
       model: model,
@@ -177,12 +178,13 @@ extension ChatStructuredOutputToolProvider {
   }
 
   func continueChat() async {
-    let model: Model = if let customModel = customModel, !customModel.isEmpty {
-      .custom(customModel)
-    } else {
-      .gpt4o
-    }
-
+    let model: Model =
+      if let customModel, !customModel.isEmpty {
+        .custom(customModel)
+      } else {
+        .gpt4o
+      }
+
     let paramsForChat = ChatCompletionParameters(
       messages: chatMessageParameters,
       model: model)

From 80e5ab1cea66e6067f231cc81f6aa3c00f0cd58e Mon Sep 17 00:00:00 2001
From: jamesrochabrun
Date: Tue, 30 Sep 2025 00:42:45 -0700
Subject: [PATCH 6/6] Apply swiftformat 0.58.2 formatting fixes
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Upgrade swiftformat from 0.56.1 to 0.58.2 to match CI version and fix
all linting issues.

Changes applied:
- Convert property declarations to inferred types (propertyTypes)
- Add MARK comments before top-level types (markTypes)
- Wrap long lines (wrap)
- Organize declarations (organizeDeclarations)
- Remove trailing spaces (trailingSpace)
- Fix indentation (indent)

27 files formatted across Examples, Sources, and Tests directories.
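
For example, the propertyTypes rule rewrites annotated empty-collection
declarations into inferred-type initializers; a representative
before/after pair taken from the diffs below:

    // Before
    var messages: [String] = []
    private var availableFunctions: [StructuredToolCall: (String) -> String] = [:]

    // After
    var messages = [String]()
    private var availableFunctions = [StructuredToolCall: (String) -> String]()
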
🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 .../AssistantConfigurationDemoView.swift      | 14 ++++++++---
 .../AssistantConfigurationProvider.swift      |  2 +-
 .../ChatDemo/ChatProvider.swift               |  2 +-
 .../Completion/ChatFunctionCallProvider.swift |  6 ++---
 .../ChatFunctionsCallStreamProvider.swift     | 12 +++++-----
 .../ChatFluidConversationProvider.swift       |  6 ++---
 .../ChatStructuredOutputToolProvider.swift    | 24 +++++++------------
 .../ChatStructuredOutputProvider.swift        |  2 +-
 .../EmbeddingsDemo/Embeddingsprovider.swift   |  2 +-
 .../Files/FileAttachmentView.swift            |  2 ++
 .../Files/FilesPickerProvider.swift           |  4 ++--
 .../FilesDemo/FilesProvider.swift             |  4 ++--
 .../FineTuningJobProvider.swift               |  4 ++--
 .../ImagesDemo/ImagesProvider.swift           |  2 +-
 .../ModelsDemo/ModelsProvider.swift           |  2 +-
 .../ResponseStreamProvider.swift              |  4 ++--
 .../SharedUI/ChatDisplayMessageView.swift     |  2 +-
 .../Vision/ChatVisionDemoView.swift           |  6 ++---
 .../Vision/ChatVisionProvider.swift           |  4 ++--
 Sources/OpenAI/AIProxy/AIProxyService.swift   | 20 ++++++++--------
 .../Private/Networking/HTTPClient.swift       |  2 +-
 .../Assistant/AssistantParameters.swift       |  2 +-
 .../ImageGen/CreateImageEditParameters.swift  |  4 ++--
 .../CreateImageVariationParameters.swift      |  2 +-
 .../Public/Service/DefaultOpenAIService.swift | 20 ++++++++--------
 .../OpenAI/Public/Service/OpenAIService.swift |  2 +-
 .../ResponseStreamEventTests.swift            |  2 +-
 27 files changed, 81 insertions(+), 77 deletions(-)

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift
index 7987e0a9..30fcde78 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift
@@ -210,9 +210,9 @@ struct AssistantConfigurationDemoView: View {
   @State private var parameters = AssistantParameters(action: .create(model: Model.gpt41106Preview.value))
   @State private var isAvatarLoading = false
   @State private var showAvatarFlow = false
-  @State private var fileIDS: [String] = []
+  @State private var fileIDS = [String]()
   /// Used mostly to display already uploaded files if any.
-  @State private var filePickerInitialActions: [FilePickerAction] = []
+  @State private var filePickerInitialActions = [FilePickerAction]()
 
   private let service: OpenAIService
 }
@@ -229,7 +229,7 @@ extension Binding where Value == String? {
   AssistantConfigurationDemoView(service: OpenAIServiceFactory.service(apiKey: ""))
 }
 
-// MARK: InputView
+// MARK: - InputView
 
 struct InputView: View {
   let content: Content
@@ -251,6 +251,8 @@ struct InputView: View {
   @Environment(\.inputViewStyle) private var style: InputViewStyle
 }
 
+// MARK: - InputViewStyle
+
 struct InputViewStyle {
   let verticalPadding: CGFloat
@@ -259,6 +261,8 @@ struct InputViewStyle {
   }
 }
 
+// MARK: - InputViewStyleKey
+
 struct InputViewStyleKey: EnvironmentKey {
   static let defaultValue = InputViewStyle()
 }
@@ -276,6 +280,8 @@ extension View {
   }
 }
 
+// MARK: - CheckboxView
+
 struct CheckboxView: View {
   @Binding var isChecked: Bool
@@ -291,6 +297,8 @@ struct CheckboxView: View {
   }
 }
 
+// MARK: - CheckboxRow
+
 struct CheckboxRow: View {
   let title: String
   @Binding var isChecked: Bool

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift
index e857e1af..4967f020 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift
@@ -17,7 +17,7 @@ class AssistantConfigurationProvider {
   }
 
   var assistant: AssistantObject?
-  var assistants: [AssistantObject] = []
+  var assistants = [AssistantObject]()
   var avatarURL: URL?
   var assistantDeletionStatus: DeletionStatus?
 

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatProvider.swift
index 7e3e4e49..11c9b9d4 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatProvider.swift
@@ -14,7 +14,7 @@ class ChatProvider {
     self.service = service
   }
 
-  var messages: [String] = []
+  var messages = [String]()
   var errorMessage = ""
   var message = ""
   var usage: ChatUsage?

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift
index 8c6fed14..f15115b4 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift
@@ -46,7 +46,7 @@ class ChatFunctionCallProvider {
   // MARK: - Public Properties
 
   /// To be used for UI purposes.
-  var chatDisplayMessages: [ChatMessageDisplayModel] = []
+  var chatDisplayMessages = [ChatMessageDisplayModel]()
 
   @MainActor
   func generateImage(arguments: String) async throws -> String {
@@ -174,8 +174,8 @@ class ChatFunctionCallProvider {
   private let service: OpenAIService
   private var lastDisplayedMessageID: UUID?
   /// To be used for a new request
-  private var chatMessageParameters: [ChatCompletionParameters.Message] = []
-  private var availableFunctions: [FunctionCallDefinition: @MainActor (String) async throws -> String] = [:]
+  private var chatMessageParameters = [ChatCompletionParameters.Message]()
+  private var availableFunctions = [FunctionCallDefinition: @MainActor (String) async throws -> String]()
 
   // MARK: - Private Methods
 

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift
index b98e70a4..2ef6d402 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift
@@ -32,7 +32,7 @@ class ChatFunctionsCallStreamProvider {
   // MARK: - Public Properties
 
   /// To be used for UI purposes.
-  var chatDisplayMessages: [ChatMessageDisplayModel] = []
+  var chatDisplayMessages = [ChatMessageDisplayModel]()
 
   @MainActor
   func generateImage(arguments: String) async throws -> String {
@@ -163,7 +163,7 @@ class ChatFunctionsCallStreamProvider {
   }
 
   func createAssistantMessage() -> ChatCompletionParameters.Message {
-    var toolCalls: [ToolCall] = []
+    var toolCalls = [ToolCall]()
     for (_, functionCallStreamedResponse) in functionCallsMap {
       let toolCall = functionCallStreamedResponse.toolCall // Intentionally force unwrapped to catch errrors quickly on demo.
       // This should be properly handled.
@@ -178,7 +178,7 @@ class ChatFunctionsCallStreamProvider {
 
   func createToolsMessages() async throws -> [ChatCompletionParameters.Message] {
-    var toolMessages: [ChatCompletionParameters.Message] = []
+    var toolMessages = [ChatCompletionParameters.Message]()
     for (key, functionCallStreamedResponse) in functionCallsMap {
       let name = functionCallStreamedResponse.name
       let id = functionCallStreamedResponse.id
@@ -222,9 +222,9 @@ class ChatFunctionsCallStreamProvider {
   private let service: OpenAIService
   private var lastDisplayedMessageID: UUID?
   /// To be used for a new request
-  private var chatMessageParameters: [ChatCompletionParameters.Message] = []
-  private var functionCallsMap: [FunctionCallDefinition: FunctionCallStreamedResponse] = [:]
-  private var availableFunctions: [FunctionCallDefinition: @MainActor (String) async throws -> String] = [:]
+  private var chatMessageParameters = [ChatCompletionParameters.Message]()
+  private var functionCallsMap = [FunctionCallDefinition: FunctionCallStreamedResponse]()
+  private var availableFunctions = [FunctionCallDefinition: @MainActor (String) async throws -> String]()
 
   // MARK: - Private Methods
 

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift
index 001c55d4..0e7ea307 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift
@@ -19,7 +19,7 @@ class ChatFluidConversationProvider {
   // MARK: - Public Properties
 
   /// A collection of messages for display in the UI, representing the conversation.
-  var chatMessages: [ChatDisplayMessage] = []
+  var chatMessages = [ChatDisplayMessage]()
 
   // MARK: - Public Methods
 
@@ -94,9 +94,9 @@ class ChatFluidConversationProvider {
   /// Tracks the identifier of the last message displayed, enabling updates in the from the streaming API response.
   private var lastDisplayedMessageID: UUID?
   /// Stores the initial chat message's delta, which uniquely includes metadata like `role`.
-  private var firstChatMessageResponseDelta: [String: ChatCompletionChunkObject.ChatChoice.Delta] = [:]
+  private var firstChatMessageResponseDelta = [String: ChatCompletionChunkObject.ChatChoice.Delta]()
   /// Builds a history of messages sent and received, enhancing the chat's context for future requests.
-  private var parameterMessages: [ChatCompletionParameters.Message] = []
+  private var parameterMessages = [ChatCompletionParameters.Message]()
 
   // MARK: - Private Methods
 

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
index 7312976a..49e274bf 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructuredOutputToolProvider.swift
@@ -35,19 +35,16 @@ enum StructuredToolCall: String, CaseIterable {
           type: .string,
           description: "The type of the UI component",
           additionalProperties: false,
-          enum: ["div", "button", "header", "section", "field", "form"]
-        ),
+          enum: ["div", "button", "header", "section", "field", "form"]),
         "label": JSONSchema(
           type: .string,
           description: "The label of the UI component, used for buttons or form fields",
-          additionalProperties: false
-        ),
+          additionalProperties: false),
         "children": JSONSchema(
           type: .array,
           description: "Nested UI components",
           items: JSONSchema(ref: "#"),
-          additionalProperties: false
-        ),
+          additionalProperties: false),
         "attributes": JSONSchema(
           type: .array,
           description: "Arbitrary attributes for the UI component, suitable for any element",
@@ -57,18 +54,15 @@ enum StructuredToolCall: String, CaseIterable {
               "name": JSONSchema(
                 type: .string,
                 description: "The name of the attribute, for example onClick or className",
-                additionalProperties: false
-              ),
+                additionalProperties: false),
               "value": JSONSchema(
                 type: .string,
                 description: "The value of the attribute",
-                additionalProperties: false
-              ),
+                additionalProperties: false),
             ],
             required: ["name", "value"],
             additionalProperties: false),
-          additionalProperties: false
-        ),
+          additionalProperties: false),
       ],
       required: ["type", "label", "children", "attributes"],
       additionalProperties: false)
@@ -89,7 +83,7 @@ final class ChatStructuredOutputToolProvider {
 
   let customModel: String?
 
-  var chatDisplayMessages: [ChatMessageDisplayModel] = []
+  var chatDisplayMessages = [ChatMessageDisplayModel]()
   let systemMessage = ChatCompletionParameters.Message(role: .system, content: .text("You are a math tutor"))
 
   func startChat(
@@ -161,8 +155,8 @@ final class ChatStructuredOutputToolProvider {
   private let service: OpenAIService
   private var lastDisplayedMessageID: UUID?
-  private var chatMessageParameters: [ChatCompletionParameters.Message] = []
-  private var availableFunctions: [StructuredToolCall: (String) -> String] = [:]
+  private var chatMessageParameters = [ChatCompletionParameters.Message]()
+  private var availableFunctions = [StructuredToolCall: (String) -> String]()
 
 }
 

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift
index a19b9ca4..4e4851a4 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift
@@ -19,7 +19,7 @@ final class ChatStructuredOutputProvider {
   }
 
   var message = ""
-  var messages: [String] = []
+  var messages = [String]()
   var errorMessage = ""
 
   // MARK: - Public Methods

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/EmbeddingsDemo/Embeddingsprovider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/EmbeddingsDemo/Embeddingsprovider.swift
index 27df24be..ebb83d74 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/EmbeddingsDemo/Embeddingsprovider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/EmbeddingsDemo/Embeddingsprovider.swift
@@ -14,7 +14,7 @@ class EmbeddingsProvider {
     self.service = service
   }
 
-  var embeddings: [EmbeddingObject] = []
+  var embeddings = [EmbeddingObject]()
 
   func createEmbeddings(
     parameters: EmbeddingParameter)

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift
index 83af4b17..fa285a3a 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift
@@ -115,6 +115,8 @@ extension View {
   }
 }
 
+// MARK: - DeletionStatus + @retroactive Equatable
+
 extension DeletionStatus: @retroactive Equatable {
   public static func ==(lhs: DeletionStatus, rhs: DeletionStatus) -> Bool {
     lhs.id == rhs.id

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift
index 93c81e5e..e55d9450 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift
@@ -13,11 +13,11 @@ final class FilesPickerProvider {
     self.service = service
   }
 
-  var files: [FileObject] = []
+  var files = [FileObject]()
   var uploadedFile: FileObject?
   var deletedStatus: DeletionStatus?
   var retrievedFile: FileObject?
-  var fileContent: [[String: Any]] = []
+  var fileContent = [[String: Any]]()
 
   func listFiles() async throws {
     files = try await service.listFiles().data

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FilesDemo/FilesProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FilesDemo/FilesProvider.swift
index aa8102e5..d3013f09 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FilesDemo/FilesProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FilesDemo/FilesProvider.swift
@@ -14,11 +14,11 @@ class FilesProvider {
     self.service = service
   }
 
-  var files: [FileObject] = []
+  var files = [FileObject]()
   var uploadedFile: FileObject?
   var deletedStatus: DeletionStatus?
   var retrievedFile: FileObject?
-  var fileContent: [[String: Any]] = []
+  var fileContent = [[String: Any]]()
 
   func listFiles() async throws {
     files = try await service.listFiles().data

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FineTuningDemo/FineTuningJobProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FineTuningDemo/FineTuningJobProvider.swift
index 86b5cbe2..16fbad63 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FineTuningDemo/FineTuningJobProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/FineTuningDemo/FineTuningJobProvider.swift
@@ -17,8 +17,8 @@ class FineTuningJobProvider {
   var createdFineTuningJob: FineTuningJobObject?
   var canceledFineTuningJob: FineTuningJobObject?
   var retrievedFineTuningJob: FineTuningJobObject?
-  var fineTunedJobs: [FineTuningJobObject] = []
-  var finteTuningEventObjects: [FineTuningJobEventObject] = []
+  var fineTunedJobs = [FineTuningJobObject]()
+  var finteTuningEventObjects = [FineTuningJobEventObject]()
 
   func createFineTuningJob(
     parameters: FineTuningJobParameters)

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ImagesDemo/ImagesProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ImagesDemo/ImagesProvider.swift
index a4816cf3..782ed78c 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ImagesDemo/ImagesProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ImagesDemo/ImagesProvider.swift
@@ -14,7 +14,7 @@ class ImagesProvider {
     self.service = service
   }
 
-  var images: [URL] = []
+  var images = [URL]()
 
   func createImages(
     parameters: ImageCreateParameters)

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModelsDemo/ModelsProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModelsDemo/ModelsProvider.swift
index be6f42e3..9b88f4da 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModelsDemo/ModelsProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ModelsDemo/ModelsProvider.swift
@@ -14,7 +14,7 @@ class ModelsProvider {
     self.service = service
   }
 
-  var models: [ModelObject] = []
+  var models = [ModelObject]()
   var retrievedModel: ModelObject?
   var deletionStatus: DeletionStatus?
 

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift
index 2ade7dd3..6e5fba76 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift
@@ -33,7 +33,7 @@ class ResponseStreamProvider {
     }
   }
 
-  var messages: [ResponseMessage] = []
+  var messages = [ResponseMessage]()
   var isStreaming = false
   var currentStreamingMessage: ResponseMessage?
   var error: String?
@@ -103,7 +103,7 @@ class ResponseStreamProvider {
 
     do {
       // Build input array with conversation history
-      var inputArray: [InputItem] = []
+      var inputArray = [InputItem]()
 
       // Add conversation history
       for message in messages.dropLast(2) { // Exclude current user message and streaming placeholder

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift
index 504d19fd..8e2a27c2 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift
@@ -96,7 +96,7 @@ struct ChatDisplayMessageView: View {
     }
   }
 
-  @State private var urls: [URL] = []
+  @State private var urls = [URL]()
 }
 
 #Preview {

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift
index 2010733e..a8a1c5f1 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift
@@ -126,9 +126,9 @@ struct ChatVisionDemoView: View {
   @State private var chatProvider: ChatVisionProvider
   @State private var isLoading = false
   @State private var prompt = ""
-  @State private var selectedItems: [PhotosPickerItem] = []
-  @State private var selectedImages: [Image] = []
-  @State private var selectedImageURLS: [URL] = []
+  @State private var selectedItems = [PhotosPickerItem]()
+  @State private var selectedImages = [Image]()
+  @State private var selectedImageURLS = [URL]()
 
   /// Called when the user taps on the send button. Clears the selected images and prompt.
   private func resetInput() {

diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift
index 4dd3b8ad..11aa2585 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift
@@ -19,7 +19,7 @@ class ChatVisionProvider {
   // MARK: - Public Properties
 
   /// A collection of messages for display in the UI, representing the conversation.
-  var chatMessages: [ChatDisplayMessage] = []
+  var chatMessages = [ChatDisplayMessage]()
 
   // MARK: - Public Methods
 
@@ -71,7 +71,7 @@ class ChatVisionProvider {
   /// Tracks the identifier of the last message displayed, enabling updates in the from the streaming API response.
   private var lastDisplayedMessageID: UUID?
   /// Stores the initial chat message's delta, which uniquely includes metadata like `role`.
-  private var firstChatMessageResponseDelta: [String: ChatCompletionChunkObject.ChatChoice.Delta] = [:]
+  private var firstChatMessageResponseDelta = [String: ChatCompletionChunkObject.ChatChoice.Delta]()
 
   // MARK: - Private Methods
 

diff --git a/Sources/OpenAI/AIProxy/AIProxyService.swift b/Sources/OpenAI/AIProxy/AIProxyService.swift
index f7745539..7fae1e1b 100644
--- a/Sources/OpenAI/AIProxy/AIProxyService.swift
+++ b/Sources/OpenAI/AIProxy/AIProxyService.swift
@@ -167,7 +167,7 @@ struct AIProxyService: OpenAIService {
     limit: Int? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let lastJobID, let limit {
       queryItems = [.init(name: "after", value: lastJobID), .init(name: "limit", value: "\(limit)")]
     } else if let lastJobID {
@@ -218,7 +218,7 @@ struct AIProxyService: OpenAIService {
     limit: Int? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let lastEventId, let limit {
       queryItems = [.init(name: "after", value: lastEventId), .init(name: "limit", value: "\(limit)")]
     } else if let lastEventId {
@@ -527,7 +527,7 @@ struct AIProxyService: OpenAIService {
     before: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -685,7 +685,7 @@ struct AIProxyService: OpenAIService {
     runID _: String? = nil)
     async throws -> OpenAIResponse
  {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -767,7 +767,7 @@ struct AIProxyService: OpenAIService {
     before: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -865,7 +865,7 @@ struct AIProxyService: OpenAIService {
     before: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -990,7 +990,7 @@ struct AIProxyService: OpenAIService {
     limit: Int? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -1031,7 +1031,7 @@ struct AIProxyService: OpenAIService {
     before: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -1126,7 +1126,7 @@ struct AIProxyService: OpenAIService {
     filter: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -1241,7 +1241,7 @@ struct AIProxyService: OpenAIService {
     filter: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }

diff --git a/Sources/OpenAI/Private/Networking/HTTPClient.swift b/Sources/OpenAI/Private/Networking/HTTPClient.swift
index 70a17dc1..7778fc1c 100644
--- a/Sources/OpenAI/Private/Networking/HTTPClient.swift
+++ b/Sources/OpenAI/Private/Networking/HTTPClient.swift
@@ -44,7 +44,7 @@ public struct HTTPRequest {
       throw URLError(.unsupportedURL)
     }
 
-    var headers: [String: String] = [:]
+    var headers = [String: String]()
     if let allHTTPHeaderFields = urlRequest.allHTTPHeaderFields {
       headers = allHTTPHeaderFields
     }

diff --git a/Sources/OpenAI/Public/Parameters/Assistant/AssistantParameters.swift b/Sources/OpenAI/Public/Parameters/Assistant/AssistantParameters.swift
index f31b496e..71632c72 100644
--- a/Sources/OpenAI/Public/Parameters/Assistant/AssistantParameters.swift
+++ b/Sources/OpenAI/Public/Parameters/Assistant/AssistantParameters.swift
@@ -55,7 +55,7 @@ public struct AssistantParameters: Encodable {
   /// The system instructions that the assistant uses. The maximum length is 32768 characters.
   public var instructions: String?
   /// A list of tool enabled on the assistant. There can be a maximum of 128 tools per assistant. Tools can be of types code_interpreter, retrieval, or function. Defaults to []
-  public var tools: [AssistantObject.Tool] = []
+  public var tools = [AssistantObject.Tool]()
   /// A set of resources that are used by the assistant's tools. The resources are specific to the type of tool. For example, the code_interpreter tool requires a list of file IDs, while the file_search tool requires a list of vector store IDs.
   public var toolResources: ToolResources?
   /// Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maxium of 512 characters long.

diff --git a/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageEditParameters.swift b/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageEditParameters.swift
index 0630e9ea..3bd16b86 100644
--- a/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageEditParameters.swift
+++ b/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageEditParameters.swift
@@ -76,7 +76,7 @@ public struct CreateImageEditParameters: Encodable {
     size: String? = nil,
     user: String? = nil)
   {
-    var imageDataArray: [Data] = []
+    var imageDataArray = [Data]()
 
     for image in images {
       #if canImport(UIKit)
@@ -223,7 +223,7 @@ public struct CreateImageEditParameters: Encodable {
 extension CreateImageEditParameters: MultipartFormDataParameters {
 
   public func encode(boundary: String) -> Data {
-    var entries: [MultipartFormDataEntry] = []
+    var entries = [MultipartFormDataEntry]()
 
     // Add images (possibly multiple for gpt-image-1)
     for (index, imageData) in image.enumerated() {

diff --git a/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageVariationParameters.swift b/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageVariationParameters.swift
index c0cb9b8b..96ed5f1b 100644
--- a/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageVariationParameters.swift
+++ b/Sources/OpenAI/Public/Parameters/ImageGen/CreateImageVariationParameters.swift
@@ -128,7 +128,7 @@ extension CreateImageVariationParameters: MultipartFormDataParameters {
 
   public func encode(boundary: String) -> Data {
-    var entries: [MultipartFormDataEntry] = []
+    var entries = [MultipartFormDataEntry]()
 
     // Add image file
     entries.append(.file(

diff --git a/Sources/OpenAI/Public/Service/DefaultOpenAIService.swift b/Sources/OpenAI/Public/Service/DefaultOpenAIService.swift
index 933d4c92..08d8c858 100644
--- a/Sources/OpenAI/Public/Service/DefaultOpenAIService.swift
+++ b/Sources/OpenAI/Public/Service/DefaultOpenAIService.swift
@@ -149,7 +149,7 @@ struct DefaultOpenAIService: OpenAIService {
     limit: Int? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let lastJobID, let limit {
       queryItems = [.init(name: "after", value: lastJobID), .init(name: "limit", value: "\(limit)")]
     } else if let lastJobID {
@@ -200,7 +200,7 @@ struct DefaultOpenAIService: OpenAIService {
     limit: Int? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let lastEventId, let limit {
       queryItems = [.init(name: "after", value: lastEventId), .init(name: "limit", value: "\(limit)")]
     } else if let lastEventId {
@@ -504,7 +504,7 @@ struct DefaultOpenAIService: OpenAIService {
     before: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -662,7 +662,7 @@ struct DefaultOpenAIService: OpenAIService {
     runID: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -747,7 +747,7 @@ struct DefaultOpenAIService: OpenAIService {
     before: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -845,7 +845,7 @@ struct DefaultOpenAIService: OpenAIService {
     before: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -970,7 +970,7 @@ struct DefaultOpenAIService: OpenAIService {
     limit: Int? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -1011,7 +1011,7 @@ struct DefaultOpenAIService: OpenAIService {
     before: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -1106,7 +1106,7 @@ struct DefaultOpenAIService: OpenAIService {
     filter: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }
@@ -1221,7 +1221,7 @@ struct DefaultOpenAIService: OpenAIService {
     filter: String? = nil)
     async throws -> OpenAIResponse
   {
-    var queryItems: [URLQueryItem] = []
+    var queryItems = [URLQueryItem]()
     if let limit {
       queryItems.append(.init(name: "limit", value: "\(limit)"))
     }

diff --git a/Sources/OpenAI/Public/Service/OpenAIService.swift b/Sources/OpenAI/Public/Service/OpenAIService.swift
index 9917c319..438f9fda 100644
--- a/Sources/OpenAI/Public/Service/OpenAIService.swift
+++ b/Sources/OpenAI/Public/Service/OpenAIService.swift
@@ -1033,7 +1033,7 @@ extension OpenAIService {
         description: errorMessage,
         statusCode: response.statusCode)
     }
-    var content: [[String: Any]] = []
+    var content = [[String: Any]]()
     if let jsonString = String(data: data, encoding: String.Encoding.utf8) {
       let lines = jsonString.split(separator: "\n")
       for line in lines {

diff --git a/Tests/OpenAITests/ResponseStreamEventTests.swift b/Tests/OpenAITests/ResponseStreamEventTests.swift
index 84310e85..c0d91e0c 100644
--- a/Tests/OpenAITests/ResponseStreamEventTests.swift
+++ b/Tests/OpenAITests/ResponseStreamEventTests.swift
@@ -545,7 +545,7 @@ final class ResponseStreamEventTests: XCTestCase {
     ]
 
     let decoder = JSONDecoder()
-    var receivedEvents: [ResponseStreamEvent] = []
+    var receivedEvents = [ResponseStreamEvent]()
 
     // Decode all events
     for eventJson in events {