diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatDemoView.swift index febe0de6..30dd8b46 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatDemoView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatDemo/ChatDemoView.swift @@ -9,7 +9,8 @@ import SwiftOpenAI import SwiftUI struct ChatDemoView: View { - init(service: OpenAIService) { + init(service: OpenAIService, customModel: String? = nil) { + self.customModel = customModel _chatProvider = State(initialValue: ChatProvider(service: service)) } @@ -18,6 +19,8 @@ struct ChatDemoView: View { case chatCompeltionStream } + let customModel: String? + var body: some View { ScrollView { VStack { @@ -64,11 +67,18 @@ struct ChatDemoView: View { let content = ChatCompletionParameters.Message.ContentType.text(prompt) prompt = "" + let model: Model = + if let customModel, !customModel.isEmpty { + .custom(customModel) + } else { + .gpt4o + } + let parameters = ChatCompletionParameters( messages: [.init( role: .user, content: content)], - model: .custom("claude-3-7-sonnet-20250219")) + model: model) switch selectedSegment { case .chatCompletion: try await chatProvider.startChat(parameters: parameters) diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallDemoView.swift index 3f01d8cf..e6b1edf7 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallDemoView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallDemoView.swift @@ -9,10 +9,13 @@ import SwiftOpenAI import SwiftUI struct ChatFunctionCallDemoView: View { - init(service: OpenAIService) { - _chatProvider = State(initialValue: ChatFunctionCallProvider(service: service)) + init(service: 
OpenAIService, customModel: String? = nil) { + self.customModel = customModel + _chatProvider = State(initialValue: ChatFunctionCallProvider(service: service, customModel: customModel)) } + let customModel: String? + var body: some View { ScrollViewReader { proxy in VStack { diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift index f15115b4..9e2c3225 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Completion/ChatFunctionCallProvider.swift @@ -37,12 +37,15 @@ enum FunctionCallDefinition: String, CaseIterable { @Observable class ChatFunctionCallProvider { - // MARK: - Initializer - - init(service: OpenAIService) { + init(service: OpenAIService, customModel: String? = nil) { self.service = service + self.customModel = customModel } + // MARK: - Initializer + + let customModel: String? + // MARK: - Public Properties /// To be used for UI purposes. @@ -52,14 +55,13 @@ class ChatFunctionCallProvider { func generateImage(arguments: String) async throws -> String { let dictionary = arguments.toDictionary()! let prompt = dictionary["prompt"] as! String - let count = (dictionary["count"] as? Int) ?? 1 let assistantMessage = ChatMessageDisplayModel( content: .content(.init(text: "Generating images...")), origin: .received(.gpt)) updateLastAssistantMessage(assistantMessage) - let urls = try await service.createImages(parameters: .init(prompt: prompt, model: .dallE2)).data?.compactMap(\.url) + let urls = try await service.createImages(parameters: .init(prompt: prompt, model: .dallE3)).data?.compactMap(\.url) .compactMap { URL(string: $0) } ?? 
[] let dalleAssistantMessage = ChatMessageDisplayModel( @@ -90,9 +92,16 @@ class ChatFunctionCallProvider { let tools = FunctionCallDefinition.allCases.map(\.functionTool) + let model: Model = + if let customModel, !customModel.isEmpty { + .custom(customModel) + } else { + .gpt41106Preview + } + let parameters = ChatCompletionParameters( messages: chatMessageParameters, - model: .gpt41106Preview, + model: model, toolChoice: ToolChoice.auto, tools: tools) @@ -149,9 +158,16 @@ class ChatFunctionCallProvider { chatMessageParameters.insert(systemMessage, at: 0) + let model: Model = + if let customModel, !customModel.isEmpty { + .custom(customModel) + } else { + .gpt41106Preview + } + let paramsForChat = ChatCompletionParameters( messages: chatMessageParameters, - model: .gpt41106Preview) + model: model) do { let chat = try await service.startChat(parameters: paramsForChat) guard let assistantMessage = chat.choices?.first?.message else { return } diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift index 2ef6d402..3f0b29ca 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCallStreamProvider.swift @@ -23,12 +23,15 @@ struct FunctionCallStreamedResponse { @Observable class ChatFunctionsCallStreamProvider { - // MARK: - Initializer - - init(service: OpenAIService) { + init(service: OpenAIService, customModel: String? = nil) { self.service = service + self.customModel = customModel } + // MARK: - Initializer + + let customModel: String? + // MARK: - Public Properties /// To be used for UI purposes. 
@@ -84,9 +87,16 @@ class ChatFunctionsCallStreamProvider { let tools = FunctionCallDefinition.allCases.map(\.functionTool) + let model: Model = + if let customModel, !customModel.isEmpty { + .custom(customModel) + } else { + .gpt35Turbo1106 + } + let parameters = ChatCompletionParameters( messages: chatMessageParameters, - model: .gpt35Turbo1106, + model: model, toolChoice: ToolChoice.auto, tools: tools) diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCalllStreamDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCalllStreamDemoView.swift index fae790c5..2e6baadd 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCalllStreamDemoView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatFunctionsCall/Stream/ChatFunctionsCalllStreamDemoView.swift @@ -9,10 +9,13 @@ import SwiftOpenAI import SwiftUI struct ChatFunctionsCalllStreamDemoView: View { - init(service: OpenAIService) { - _chatProvider = State(initialValue: ChatFunctionsCallStreamProvider(service: service)) + init(service: OpenAIService, customModel: String? = nil) { + self.customModel = customModel + _chatProvider = State(initialValue: ChatFunctionsCallStreamProvider(service: service, customModel: customModel)) } + let customModel: String? 
+ var body: some View { ScrollViewReader { proxy in VStack { diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift index 0e7ea307..a8febe34 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatFluidConversationProvider.swift @@ -10,12 +10,15 @@ import SwiftUI @Observable class ChatFluidConversationProvider { - // MARK: - Initializer - - init(service: OpenAIService) { + init(service: OpenAIService, customModel: String? = nil) { self.service = service + self.customModel = customModel } + // MARK: - Initializer + + let customModel: String? + // MARK: - Public Properties /// A collection of messages for display in the UI, representing the conversation. diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatStreamFluidConversationDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatStreamFluidConversationDemoView.swift index bd2b1590..6c7b70ae 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatStreamFluidConversationDemoView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStreamFluidConversationDemo/ChatStreamFluidConversationDemoView.swift @@ -9,8 +9,9 @@ import SwiftOpenAI import SwiftUI struct ChatStreamFluidConversationDemoView: View { - init(service: OpenAIService) { - _chatProvider = State(initialValue: ChatFluidConversationProvider(service: service)) + init(service: OpenAIService, customModel: String? 
= nil) { + self.customModel = customModel + _chatProvider = State(initialValue: ChatFluidConversationProvider(service: service, customModel: customModel)) } enum GPTModel: String, CaseIterable { @@ -18,6 +19,8 @@ struct ChatStreamFluidConversationDemoView: View { case gpt4 = "GPT-4" } + let customModel: String? + var body: some View { ScrollViewReader { proxy in VStack { @@ -74,9 +77,16 @@ struct ChatStreamFluidConversationDemoView: View { prompt = "" } /// Make the request + let model: Model = + if let customModel, !customModel.isEmpty { + .custom(customModel) + } else { + selectedModel == .gpt3dot5 ? .gpt35Turbo : .gpt4 + } + try await chatProvider.startStreamedChat(parameters: .init( messages: [.init(role: .user, content: .text(prompt))], - model: selectedModel == .gpt3dot5 ? .gpt35Turbo : .gpt4), prompt: prompt) + model: model), prompt: prompt) } } label: { Image(systemName: "paperplane") diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructureOutputToolDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructureOutputToolDemoView.swift index f2f078e6..7d0ae0ae 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructureOutputToolDemoView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructureOutputTool/ChatStructureOutputToolDemoView.swift @@ -10,10 +10,13 @@ import SwiftOpenAI import SwiftUI struct ChatStructureOutputToolDemoView: View { - init(service: OpenAIService) { - chatProvider = .init(service: service) + init(service: OpenAIService, customModel: String? = nil) { + self.customModel = customModel + chatProvider = .init(service: service, customModel: customModel) } + let customModel: String? 
+ var body: some View { ScrollViewReader { proxy in VStack { diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputDemoView.swift index 5bd3eb90..d1665b45 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputDemoView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputDemoView.swift @@ -112,8 +112,9 @@ let responseFormatSchema = JSONSchemaResponseFormat( // ) struct ChatStructuredOutputDemoView: View { - init(service: OpenAIService) { - _chatProvider = State(initialValue: ChatStructuredOutputProvider(service: service)) + init(service: OpenAIService, customModel: String? = nil) { + self.customModel = customModel + _chatProvider = State(initialValue: ChatStructuredOutputProvider(service: service, customModel: customModel)) } enum ChatConfig { @@ -121,6 +122,8 @@ struct ChatStructuredOutputDemoView: View { case chatCompeltionStream } + let customModel: String? + var body: some View { ScrollView { VStack { diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift index 4e4851a4..cb702054 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ChatStructuredOutputs/ChatStructuredOutputProvider.swift @@ -12,12 +12,15 @@ import SwiftOpenAI @Observable final class ChatStructuredOutputProvider { - // MARK: - Initializer - - init(service: OpenAIService) { + init(service: OpenAIService, customModel: String? = nil) { self.service = service + self.customModel = customModel } + // MARK: - Initializer + + let customModel: String? 
+ var message = "" var messages = [String]() var errorMessage = "" diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/LocalChatDemo/LocalChatDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/LocalChatDemo/LocalChatDemoView.swift index 08f30d36..7a8851f0 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/LocalChatDemo/LocalChatDemoView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/LocalChatDemo/LocalChatDemoView.swift @@ -33,7 +33,8 @@ import SwiftUI /// }'``` struct LocalChatDemoView: View { - init(service: OpenAIService) { + init(service: OpenAIService, customModel: String? = nil) { + self.customModel = customModel _chatProvider = State(initialValue: ChatProvider(service: service)) } @@ -42,6 +43,8 @@ struct LocalChatDemoView: View { case chatCompeltionStream } + let customModel: String? + var body: some View { ScrollView { VStack { @@ -88,12 +91,19 @@ struct LocalChatDemoView: View { let content = ChatCompletionParameters.Message.ContentType.text(prompt) prompt = "" + let model: Model = + if let customModel, !customModel.isEmpty { + .custom(customModel) + } else { + // Make sure you run `ollama pull llama3` in your terminal to download this model. + .custom("llama3") + } + let parameters = ChatCompletionParameters( messages: [.init( role: .user, content: content)], - // Make sure you run `ollama pull llama3` in your terminal to download this model. 
- model: .custom("llama3")) + model: model) switch selectedSegment { case .chatCompletion: try await chatProvider.startChat(parameters: parameters) diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/OptionsListView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/OptionsListView.swift index 140aeb90..6874015b 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/OptionsListView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/OptionsListView.swift @@ -39,8 +39,21 @@ struct OptionsListView: View { var options: [APIOption] var body: some View { - List(options, id: \.self, selection: $selection) { option in - Text(option.rawValue) + VStack { + // Custom model input field + VStack(alignment: .leading, spacing: 8) { + Text("Custom Model (Optional)") + .font(.caption) + .foregroundColor(.secondary) + TextField("e.g., grok-beta, claude-3-opus, etc.", text: $customModel) + .textFieldStyle(.roundedBorder) + .autocapitalization(.none) + .disableAutocorrection(true) + } + .padding() + List(options, id: \.self, selection: $selection) { option in + Text(option.rawValue) + } } .sheet(item: $selection) { selection in VStack { @@ -51,11 +64,11 @@ struct OptionsListView: View { case .audio: AudioDemoView(service: openAIService) case .chat: - ChatDemoView(service: openAIService) + ChatDemoView(service: openAIService, customModel: customModel) case .chatPredictedOutput: - ChatPredictedOutputDemoView(service: openAIService) + ChatPredictedOutputDemoView(service: openAIService, customModel: customModel) case .vision: - ChatVisionDemoView(service: openAIService) + ChatVisionDemoView(service: openAIService, customModel: customModel) case .embeddings: EmbeddingsDemoView(service: openAIService) case .fineTuning: @@ -65,21 +78,21 @@ struct OptionsListView: View { case .images: ImagesDemoView(service: openAIService) case .localChat: - LocalChatDemoView(service: openAIService) + LocalChatDemoView(service: openAIService, customModel: customModel) case .models: 
ModelsDemoView(service: openAIService) case .moderations: ModerationDemoView(service: openAIService) case .chatHistoryConversation: - ChatStreamFluidConversationDemoView(service: openAIService) + ChatStreamFluidConversationDemoView(service: openAIService, customModel: customModel) case .chatFunctionCall: - ChatFunctionCallDemoView(service: openAIService) + ChatFunctionCallDemoView(service: openAIService, customModel: customModel) case .chatFunctionsCallStream: - ChatFunctionsCalllStreamDemoView(service: openAIService) + ChatFunctionsCalllStreamDemoView(service: openAIService, customModel: customModel) case .chatStructuredOutput: - ChatStructuredOutputDemoView(service: openAIService) + ChatStructuredOutputDemoView(service: openAIService, customModel: customModel) case .chatStructuredOutputTool: - ChatStructureOutputToolDemoView(service: openAIService) + ChatStructureOutputToolDemoView(service: openAIService, customModel: customModel) case .configureAssistant: AssistantConfigurationDemoView(service: openAIService) case .realTimeAPI: @@ -92,4 +105,5 @@ struct OptionsListView: View { } @State private var selection: APIOption? = nil + @State private var customModel = "" } diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/PredictedOutputsDemo/ChatPredictedOutputDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/PredictedOutputsDemo/ChatPredictedOutputDemoView.swift index 24b7e995..05fab7d8 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/PredictedOutputsDemo/ChatPredictedOutputDemoView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/PredictedOutputsDemo/ChatPredictedOutputDemoView.swift @@ -13,10 +13,13 @@ import SwiftOpenAI import SwiftUI /// https://platform.openai.com/docs/guides/predicted-outputs struct ChatPredictedOutputDemoView: View { - init(service: OpenAIService) { + init(service: OpenAIService, customModel: String? = nil) { + self.customModel = customModel chatProvider = ChatProvider(service: service) } + let customModel: String?
+ var body: some View { ScrollView { VStack { @@ -48,13 +51,20 @@ struct ChatPredictedOutputDemoView: View { let content = ChatCompletionParameters.Message.ContentType.text(prompt) prompt = "" + let model: Model = + if let customModel, !customModel.isEmpty { + .custom(customModel) + } else { + .gpt4o + } + let parameters = ChatCompletionParameters( messages: [ .init(role: .system, content: .text(systemMessage)), .init(role: .user, content: content), .init(role: .user, content: .text(predictedCode)), ], // Sending the predicted code as another user message. - model: .gpt4o, + model: model, prediction: .init(content: .text(predictedCode))) try await chatProvider.startChat(parameters: parameters) } diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift index 6e5fba76..b36153a5 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/ResponseAPIDemo/ResponseStreamProvider.swift @@ -124,10 +124,10 @@ class ResponseStreamProvider { let parameters = ModelResponseParameter( input: .array(inputArray), - model: .custom("gpt-4.1"), + model: .gpt5, instructions: "You are a helpful assistant. 
Use the conversation history to provide contextual responses.", maxOutputTokens: 1000, - previousResponseId: previousResponseId, temperature: 0.7) + previousResponseId: previousResponseId) let stream = try await service.responseCreateStream(parameters) var accumulatedText = "" diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift index a8a1c5f1..7fd661e7 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift @@ -10,10 +10,13 @@ import SwiftOpenAI import SwiftUI struct ChatVisionDemoView: View { - init(service: OpenAIService) { - _chatProvider = State(initialValue: ChatVisionProvider(service: service)) + init(service: OpenAIService, customModel: String? = nil) { + self.customModel = customModel + _chatProvider = State(initialValue: ChatVisionProvider(service: service, customModel: customModel)) } + let customModel: String? 
+ var body: some View { ScrollViewReader { proxy in VStack { @@ -78,9 +81,16 @@ struct ChatVisionDemoView: View { .text(prompt), ] + selectedImageURLS.map { .imageUrl(.init(url: $0)) } resetInput() + let model: Model = + if let customModel, !customModel.isEmpty { + .custom(customModel) + } else { + .gpt4o + } + try await chatProvider.startStreamedChat(parameters: .init( messages: [.init(role: .user, content: .contentArray(content))], - model: .gpt4o, maxTokens: 300), content: content) + model: model, maxTokens: 300), content: content) } } label: { Image(systemName: "paperplane") diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift index 11aa2585..2f8349e3 100644 --- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift +++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionProvider.swift @@ -10,12 +10,15 @@ import SwiftUI @Observable class ChatVisionProvider { - // MARK: - Initializer - - init(service: OpenAIService) { + init(service: OpenAIService, customModel: String? = nil) { self.service = service + self.customModel = customModel } + // MARK: - Initializer + + let customModel: String? + // MARK: - Public Properties /// A collection of messages for display in the UI, representing the conversation.