From 38d57cbefa8728b628690fc58f807ee14dfb0d98 Mon Sep 17 00:00:00 2001 From: Haibo Yang Date: Tue, 5 Aug 2025 03:58:28 -0700 Subject: [PATCH 01/25] [FirebaseAI] sync with quickstart-android (#1741) * feat: refactor main menu layout and clean up chat-related components * Some minor layout fixes * iterate over all TextParts * add navRoute in Sample * fix style in light/dark mode * change Hstack to Label for multi-lines * add .inline for navigationTitleMode --- .../project.pbxproj | 511 ++++++++++-------- .../ChatExample/Models/ChatMessage.swift | 18 + .../Screens/ConversationScreen.swift | 24 +- .../ViewModels/ConversationViewModel.swift | 32 +- .../FirebaseAIExample/ContentView.swift | 134 +++-- .../ImagenExample/ImagenScreen.swift | 16 +- .../ImagenExample/ImagenViewModel.swift | 11 +- .../Views/FilterChipView.swift | 55 ++ .../Views/SampleCardView.swift | 124 +++++ .../GenerativeAIUIComponents/Package.swift | 8 +- .../Models/Sample.swift | 218 ++++++++ .../Models/UseCase.swift | 26 + 12 files changed, 878 insertions(+), 299 deletions(-) create mode 100644 firebaseai/FirebaseAIExample/Views/FilterChipView.swift create mode 100644 firebaseai/FirebaseAIExample/Views/SampleCardView.swift create mode 100644 firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift create mode 100644 firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift diff --git a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj index 89084e975..443e12082 100644 --- a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj +++ b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj @@ -3,72 +3,158 @@ archiveVersion = 1; classes = { }; - objectVersion = 77; + objectVersion = 60; objects = { /* Begin PBXBuildFile section */ + 7200F3082E3A054300CDC51C /* GenerativeAIUIComponents in Frameworks */ = {isa = PBXBuildFile; productRef = 7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */; }; + 72DA044F2E385DF3004FED7D /* ChatMessage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 72DA044E2E385DF3004FED7D /* ChatMessage.swift */; }; 869200B32B879C4F00482873 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 869200B22B879C4F00482873 /* GoogleService-Info.plist */; }; - 86A67E8D2E9FECCF00EDFB8A /* cmark-gfm in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E8C2E9FECCF00EDFB8A /* cmark-gfm */; }; - 86A67E8F2E9FECCF00EDFB8A /* cmark-gfm-extensions in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E8E2E9FECCF00EDFB8A /* cmark-gfm-extensions */; }; - 86A67E912E9FED0600EDFB8A /* NetworkImage in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E902E9FED0600EDFB8A /* NetworkImage */; }; - 86A67E932E9FED1700EDFB8A /* NetworkImage in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E922E9FED1700EDFB8A /* NetworkImage */; }; - 86A67E952E9FED2200EDFB8A /* cmark-gfm in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E942E9FED2200EDFB8A /* cmark-gfm */; }; - 86A67E972E9FED2200EDFB8A /* cmark-gfm-extensions in Frameworks */ = {isa = PBXBuildFile; productRef = 86A67E962E9FED2200EDFB8A /* cmark-gfm-extensions */; }; - 86BB55FF2E8B2D6D0054B8B5 /* MarkdownUI in Frameworks */ = {isa = PBXBuildFile; productRef = 86BB55E42E8B2D6D0054B8B5 /* MarkdownUI */; }; - 86BB56002E8B2D6D0054B8B5 /* GenerativeAIUIComponents in Frameworks */ = {isa = PBXBuildFile; productRef = 86BB55E62E8B2D6D0054B8B5 /* GenerativeAIUIComponents */; }; - 86BB56042E8B2D6D0054B8B5 /* 
GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 869200B22B879C4F00482873 /* GoogleService-Info.plist */; }; + 86C1F4832BC726150026816F /* FunctionCallingScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */; }; + 86C1F4842BC726150026816F /* FunctionCallingViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */; }; + 88263BF02B239C09008AB09B /* ErrorView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88263BEE2B239BFE008AB09B /* ErrorView.swift */; }; + 88263BF12B239C11008AB09B /* ErrorDetailsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 889873842B208563005B4896 /* ErrorDetailsView.swift */; }; + 8848C8332B0D04BC007B434F /* FirebaseAIExampleApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8848C8322B0D04BC007B434F /* FirebaseAIExampleApp.swift */; }; + 8848C8352B0D04BC007B434F /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8848C8342B0D04BC007B434F /* ContentView.swift */; }; + 8848C8372B0D04BD007B434F /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8848C8362B0D04BD007B434F /* Assets.xcassets */; }; + 8848C83A2B0D04BD007B434F /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8848C8392B0D04BD007B434F /* Preview Assets.xcassets */; }; + 886F95D52B17BA010036F07A /* GenerateContentScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88209C1B2B0FBDC300F64795 /* GenerateContentScreen.swift */; }; + 886F95D62B17BA010036F07A /* GenerateContentViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88209C1D2B0FBDC300F64795 /* GenerateContentViewModel.swift */; }; 886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */ = {isa = PBXBuildFile; productRef = 886F95D72B17BA420036F07A /* MarkdownUI */; }; - 886F95E32B17D6630036F07A /* GenerativeAIUIComponents in Frameworks */ = {isa = PBXBuildFile; productRef = 886F95E22B17D6630036F07A /* GenerativeAIUIComponents */; }; + 886F95DB2B17BAEF0036F07A /* PhotoReasoningViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */; }; + 886F95DC2B17BAEF0036F07A /* PhotoReasoningScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */; }; + 886F95DD2B17D5010036F07A /* MessageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F5A2B11133E00C08E95 /* MessageView.swift */; }; + 886F95DF2B17D5010036F07A /* BouncingDots.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F5C2B11135000C08E95 /* BouncingDots.swift */; }; + 886F95E02B17D5010036F07A /* ConversationViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F562B1112F600C08E95 /* ConversationViewModel.swift */; }; + 886F95E12B17D5010036F07A /* ConversationScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F542B1112CA00C08E95 /* ConversationScreen.swift */; }; + A5E8E3C92C3B4F388A7A4A19 /* FilterChipView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A5E8E3C52C3B4F388A7A4A15 /* FilterChipView.swift */; }; + A5E8E3CA2C3B4F388A7A4A1A /* SampleCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A5E8E3C62C3B4F388A7A4A16 /* SampleCardView.swift */; }; + AEE793DF2E256D3900708F02 /* GoogleSearchSuggestionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = AEE793DC2E256D3900708F02 /* GoogleSearchSuggestionView.swift */; }; + AEE793E02E256D3900708F02 /* 
GroundedResponseView.swift in Sources */ = {isa = PBXBuildFile; fileRef = AEE793DD2E256D3900708F02 /* GroundedResponseView.swift */; }; DE26D95F2DBB3E9F007E6668 /* FirebaseAI in Frameworks */ = {isa = PBXBuildFile; productRef = DE26D95E2DBB3E9F007E6668 /* FirebaseAI */; }; + DEFECAA92D7B4CCD00EF9621 /* ImagenViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = DEFECAA72D7B4CCD00EF9621 /* ImagenViewModel.swift */; }; + DEFECAAA2D7B4CCD00EF9621 /* ImagenScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = DEFECAA62D7B4CCD00EF9621 /* ImagenScreen.swift */; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ + 726634072E37011C00554974 /* Package.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = Package.swift; path = GenerativeAIUIComponents/Package.swift; sourceTree = ""; }; + 72DA044E2E385DF3004FED7D /* ChatMessage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatMessage.swift; sourceTree = ""; }; 869200B22B879C4F00482873 /* GoogleService-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = ""; }; - 86BB56082E8B2D6D0054B8B5 /* FirebaseAIExampleZip.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FirebaseAIExampleZip.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FunctionCallingScreen.swift; sourceTree = ""; }; + 86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FunctionCallingViewModel.swift; sourceTree = ""; }; + 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoReasoningViewModel.swift; sourceTree = ""; }; + 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoReasoningScreen.swift; sourceTree = ""; }; + 88209C1B2B0FBDC300F64795 /* GenerateContentScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GenerateContentScreen.swift; sourceTree = ""; }; + 88209C1D2B0FBDC300F64795 /* GenerateContentViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GenerateContentViewModel.swift; sourceTree = ""; }; + 88263BEE2B239BFE008AB09B /* ErrorView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ErrorView.swift; sourceTree = ""; }; 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FirebaseAIExample.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 8848C8322B0D04BC007B434F /* FirebaseAIExampleApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FirebaseAIExampleApp.swift; sourceTree = ""; }; + 8848C8342B0D04BC007B434F /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; + 8848C8362B0D04BD007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 8848C8392B0D04BD007B434F /* Preview 
Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 8848C84A2B0D051F007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 8848C84D2B0D051F007B434F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 8848C85C2B0D056D007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 8848C85F2B0D056D007B434F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 889873842B208563005B4896 /* ErrorDetailsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ErrorDetailsView.swift; sourceTree = ""; }; 88B8A9352B0FCBA700424728 /* GenerativeAIUIComponents */ = {isa = PBXFileReference; lastKnownFileType = wrapper; path = GenerativeAIUIComponents; sourceTree = ""; }; + 88E10F482B110D5400C08E95 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 88E10F4B2B110D5400C08E95 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 88E10F542B1112CA00C08E95 /* ConversationScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConversationScreen.swift; sourceTree = ""; }; + 88E10F562B1112F600C08E95 /* ConversationViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConversationViewModel.swift; sourceTree = ""; }; + 88E10F5A2B11133E00C08E95 /* MessageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MessageView.swift; sourceTree = ""; }; + 88E10F5C2B11135000C08E95 /* BouncingDots.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BouncingDots.swift; sourceTree = ""; }; + A5E8E3C52C3B4F388A7A4A15 /* FilterChipView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FilterChipView.swift; sourceTree = ""; }; + A5E8E3C62C3B4F388A7A4A16 /* SampleCardView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SampleCardView.swift; sourceTree = ""; }; + AEE793DC2E256D3900708F02 /* GoogleSearchSuggestionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GoogleSearchSuggestionView.swift; sourceTree = ""; }; + AEE793DD2E256D3900708F02 /* GroundedResponseView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GroundedResponseView.swift; sourceTree = ""; }; + DEFECAA62D7B4CCD00EF9621 /* ImagenScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImagenScreen.swift; sourceTree = ""; }; + DEFECAA72D7B4CCD00EF9621 /* ImagenViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImagenViewModel.swift; sourceTree = ""; }; /* End PBXFileReference section */ -/* Begin PBXFileSystemSynchronizedRootGroup section */ - 863E95812EC7B70200BE4F4E /* FirebaseAIExample */ = { - isa = PBXFileSystemSynchronizedRootGroup; - path = FirebaseAIExample; - sourceTree = ""; - }; -/* End PBXFileSystemSynchronizedRootGroup section */ - /* Begin PBXFrameworksBuildPhase 
section */ - 86BB55FD2E8B2D6D0054B8B5 /* Frameworks */ = { - isa = PBXFrameworksBuildPhase; - buildActionMask = 2147483647; - files = ( - 86A67E932E9FED1700EDFB8A /* NetworkImage in Frameworks */, - 86BB55FF2E8B2D6D0054B8B5 /* MarkdownUI in Frameworks */, - 86A67E952E9FED2200EDFB8A /* cmark-gfm in Frameworks */, - 86A67E972E9FED2200EDFB8A /* cmark-gfm-extensions in Frameworks */, - 86BB56002E8B2D6D0054B8B5 /* GenerativeAIUIComponents in Frameworks */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; 8848C82C2B0D04BC007B434F /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( DE26D95F2DBB3E9F007E6668 /* FirebaseAI in Frameworks */, 886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */, - 86A67E8D2E9FECCF00EDFB8A /* cmark-gfm in Frameworks */, - 86A67E912E9FED0600EDFB8A /* NetworkImage in Frameworks */, - 86A67E8F2E9FECCF00EDFB8A /* cmark-gfm-extensions in Frameworks */, - 886F95E32B17D6630036F07A /* GenerativeAIUIComponents in Frameworks */, + 7200F3082E3A054300CDC51C /* GenerativeAIUIComponents in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 72DA044D2E385DED004FED7D /* Models */ = { + isa = PBXGroup; + children = ( + 72DA044E2E385DF3004FED7D /* ChatMessage.swift */, + ); + path = Models; + sourceTree = ""; + }; + 86C1F47F2BC726150026816F /* Screens */ = { + isa = PBXGroup; + children = ( + 86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */, + ); + path = Screens; + sourceTree = ""; + }; + 86C1F4812BC726150026816F /* ViewModels */ = { + isa = PBXGroup; + children = ( + 86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */, + ); + path = ViewModels; + sourceTree = ""; + }; + 86C1F4822BC726150026816F /* FunctionCallingExample */ = { + isa = PBXGroup; + children = ( + 86C1F4812BC726150026816F /* ViewModels */, + 86C1F47F2BC726150026816F /* Screens */, + ); + path = FunctionCallingExample; + sourceTree = ""; + }; + 8802666E2B0FC39000CF7CB6 /* ViewModels */ = { + isa = PBXGroup; + children = ( + 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */, + ); + path = ViewModels; + sourceTree = ""; + }; + 880266742B0FC39000CF7CB6 /* Screens */ = { + isa = PBXGroup; + children = ( + 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */, + ); + path = Screens; + sourceTree = ""; + }; + 88209C1A2B0FBDC300F64795 /* Screens */ = { + isa = PBXGroup; + children = ( + 88209C1B2B0FBDC300F64795 /* GenerateContentScreen.swift */, + ); + path = Screens; + sourceTree = ""; + }; + 88209C1C2B0FBDC300F64795 /* ViewModels */ = { + isa = PBXGroup; + children = ( + 88209C1D2B0FBDC300F64795 /* GenerateContentViewModel.swift */, + ); + path = ViewModels; + sourceTree = ""; + }; 88209C222B0FBE1700F64795 /* Frameworks */ = { isa = PBXGroup; children = ( + 726634072E37011C00554974 /* Package.swift */, ); name = Frameworks; sourceTree = ""; @@ -76,9 +162,14 @@ 8848C8262B0D04BC007B434F = { isa = PBXGroup; children = ( - 863E95812EC7B70200BE4F4E /* FirebaseAIExample */, + DEFECAA82D7B4CCD00EF9621 /* ImagenScreen */, 88B8A9352B0FCBA700424728 /* GenerativeAIUIComponents */, 869200B22B879C4F00482873 /* GoogleService-Info.plist */, + 8848C8312B0D04BC007B434F /* FirebaseAIExample */, + 8848C8452B0D051E007B434F /* GenerativeAITextExample */, + 8848C8572B0D056C007B434F /* GenerativeAIMultimodalExample */, + 88E10F432B110D5300C08E95 /* ChatExample */, + 86C1F4822BC726150026816F /* FunctionCallingExample */, 8848C8302B0D04BC007B434F /* Products */, 88209C222B0FBE1700F64795 /* 
Frameworks */, ); @@ -88,41 +179,147 @@ isa = PBXGroup; children = ( 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */, - 86BB56082E8B2D6D0054B8B5 /* FirebaseAIExampleZip.app */, ); name = Products; sourceTree = ""; }; -/* End PBXGroup section */ - -/* Begin PBXNativeTarget section */ - 86BB55E32E8B2D6D0054B8B5 /* FirebaseAIExampleZip */ = { - isa = PBXNativeTarget; - buildConfigurationList = 86BB56052E8B2D6D0054B8B5 /* Build configuration list for PBXNativeTarget "FirebaseAIExampleZip" */; - buildPhases = ( - 86BB55E92E8B2D6D0054B8B5 /* Sources */, - 86BB55FD2E8B2D6D0054B8B5 /* Frameworks */, - 86BB56012E8B2D6D0054B8B5 /* Resources */, + 8848C8312B0D04BC007B434F /* FirebaseAIExample */ = { + isa = PBXGroup; + children = ( + 8848C8322B0D04BC007B434F /* FirebaseAIExampleApp.swift */, + 8848C8342B0D04BC007B434F /* ContentView.swift */, + 8848C8362B0D04BD007B434F /* Assets.xcassets */, + 8848C8382B0D04BD007B434F /* Preview Content */, + A5E8E3C22C3B4F388A7A4A12 /* Views */, ); - buildRules = ( + path = FirebaseAIExample; + sourceTree = ""; + }; + 8848C8382B0D04BD007B434F /* Preview Content */ = { + isa = PBXGroup; + children = ( + 8848C8392B0D04BD007B434F /* Preview Assets.xcassets */, ); - dependencies = ( + path = "Preview Content"; + sourceTree = ""; + }; + 8848C8452B0D051E007B434F /* GenerativeAITextExample */ = { + isa = PBXGroup; + children = ( + 88209C1C2B0FBDC300F64795 /* ViewModels */, + 88209C1A2B0FBDC300F64795 /* Screens */, + 8848C84A2B0D051F007B434F /* Assets.xcassets */, + 8848C84C2B0D051F007B434F /* Preview Content */, ); - fileSystemSynchronizedGroups = ( - 863E95812EC7B70200BE4F4E /* FirebaseAIExample */, + path = GenerativeAITextExample; + sourceTree = ""; + }; + 8848C84C2B0D051F007B434F /* Preview Content */ = { + isa = PBXGroup; + children = ( + 8848C84D2B0D051F007B434F /* Preview Assets.xcassets */, ); - name = FirebaseAIExampleZip; - packageProductDependencies = ( - 86BB55E42E8B2D6D0054B8B5 /* MarkdownUI */, - 86BB55E62E8B2D6D0054B8B5 /* GenerativeAIUIComponents */, - 86A67E922E9FED1700EDFB8A /* NetworkImage */, - 86A67E942E9FED2200EDFB8A /* cmark-gfm */, - 86A67E962E9FED2200EDFB8A /* cmark-gfm-extensions */, + path = "Preview Content"; + sourceTree = ""; + }; + 8848C8572B0D056C007B434F /* GenerativeAIMultimodalExample */ = { + isa = PBXGroup; + children = ( + 8802666E2B0FC39000CF7CB6 /* ViewModels */, + 880266742B0FC39000CF7CB6 /* Screens */, + 8848C85C2B0D056D007B434F /* Assets.xcassets */, + 8848C85E2B0D056D007B434F /* Preview Content */, ); - productName = GenerativeAIExample; - productReference = 86BB56082E8B2D6D0054B8B5 /* FirebaseAIExampleZip.app */; - productType = "com.apple.product-type.application"; + path = GenerativeAIMultimodalExample; + sourceTree = ""; + }; + 8848C85E2B0D056D007B434F /* Preview Content */ = { + isa = PBXGroup; + children = ( + 8848C85F2B0D056D007B434F /* Preview Assets.xcassets */, + ); + path = "Preview Content"; + sourceTree = ""; + }; + 88E10F432B110D5300C08E95 /* ChatExample */ = { + isa = PBXGroup; + children = ( + 72DA044D2E385DED004FED7D /* Models */, + 88E10F502B11123600C08E95 /* ViewModels */, + 88E10F512B11124100C08E95 /* Views */, + 88E10F532B1112B900C08E95 /* Screens */, + 88E10F482B110D5400C08E95 /* Assets.xcassets */, + 88E10F4A2B110D5400C08E95 /* Preview Content */, + ); + path = ChatExample; + sourceTree = ""; + }; + 88E10F4A2B110D5400C08E95 /* Preview Content */ = { + isa = PBXGroup; + children = ( + 88E10F4B2B110D5400C08E95 /* Preview Assets.xcassets */, + ); + path = "Preview Content"; + sourceTree = ""; + }; + 
88E10F502B11123600C08E95 /* ViewModels */ = { + isa = PBXGroup; + children = ( + 88E10F562B1112F600C08E95 /* ConversationViewModel.swift */, + ); + path = ViewModels; + sourceTree = ""; + }; + 88E10F512B11124100C08E95 /* Views */ = { + isa = PBXGroup; + children = ( + AEE793DE2E256D3900708F02 /* Grounding */, + 88263BEE2B239BFE008AB09B /* ErrorView.swift */, + 88E10F5A2B11133E00C08E95 /* MessageView.swift */, + 88E10F5C2B11135000C08E95 /* BouncingDots.swift */, + 889873842B208563005B4896 /* ErrorDetailsView.swift */, + ); + path = Views; + sourceTree = ""; + }; + 88E10F532B1112B900C08E95 /* Screens */ = { + isa = PBXGroup; + children = ( + 88E10F542B1112CA00C08E95 /* ConversationScreen.swift */, + ); + path = Screens; + sourceTree = ""; + }; + A5E8E3C22C3B4F388A7A4A12 /* Views */ = { + isa = PBXGroup; + children = ( + A5E8E3C52C3B4F388A7A4A15 /* FilterChipView.swift */, + A5E8E3C62C3B4F388A7A4A16 /* SampleCardView.swift */, + ); + path = Views; + sourceTree = ""; + }; + AEE793DE2E256D3900708F02 /* Grounding */ = { + isa = PBXGroup; + children = ( + AEE793DC2E256D3900708F02 /* GoogleSearchSuggestionView.swift */, + AEE793DD2E256D3900708F02 /* GroundedResponseView.swift */, + ); + path = Grounding; + sourceTree = ""; + }; + DEFECAA82D7B4CCD00EF9621 /* ImagenScreen */ = { + isa = PBXGroup; + children = ( + DEFECAA62D7B4CCD00EF9621 /* ImagenScreen.swift */, + DEFECAA72D7B4CCD00EF9621 /* ImagenViewModel.swift */, + ); + path = ImagenScreen; + sourceTree = ""; }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ 8848C82E2B0D04BC007B434F /* FirebaseAIExample */ = { isa = PBXNativeTarget; buildConfigurationList = 8848C83D2B0D04BD007B434F /* Build configuration list for PBXNativeTarget "FirebaseAIExample" */; @@ -135,17 +332,11 @@ ); dependencies = ( ); - fileSystemSynchronizedGroups = ( - 863E95812EC7B70200BE4F4E /* FirebaseAIExample */, - ); name = FirebaseAIExample; packageProductDependencies = ( 886F95D72B17BA420036F07A /* MarkdownUI */, - 886F95E22B17D6630036F07A /* GenerativeAIUIComponents */, DE26D95E2DBB3E9F007E6668 /* FirebaseAI */, - 86A67E8C2E9FECCF00EDFB8A /* cmark-gfm */, - 86A67E8E2E9FECCF00EDFB8A /* cmark-gfm-extensions */, - 86A67E902E9FED0600EDFB8A /* NetworkImage */, + 7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */, ); productName = GenerativeAIExample; productReference = 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */; @@ -160,7 +351,6 @@ BuildIndependentTargetsInParallel = 1; LastSwiftUpdateCheck = 1510; LastUpgradeCheck = 1510; - ORGANIZATIONNAME = "Google LLC"; TargetAttributes = { 8848C82E2B0D04BC007B434F = { CreatedOnToolsVersion = 15.1; @@ -168,6 +358,7 @@ }; }; buildConfigurationList = 8848C82A2B0D04BC007B434F /* Build configuration list for PBXProject "FirebaseAIExample" */; + compatibilityVersion = "Xcode 14.0"; developmentRegion = en; hasScannedForEncodings = 0; knownRegions = ( @@ -175,37 +366,28 @@ Base, ); mainGroup = 8848C8262B0D04BC007B434F; - minimizedProjectReferenceProxies = 1; packageReferences = ( 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */, DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */, DEFECAAB2D7BB49700EF9621 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */, - 86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */, + 7200F3062E3A054300CDC51C /* XCLocalSwiftPackageReference "GenerativeAIUIComponents" */, ); - preferredProjectObjectVersion = 77; productRefGroup = 8848C8302B0D04BC007B434F /* Products */; projectDirPath = ""; projectRoot = 
""; targets = ( 8848C82E2B0D04BC007B434F /* FirebaseAIExample */, - 86BB55E32E8B2D6D0054B8B5 /* FirebaseAIExampleZip */, ); }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ - 86BB56012E8B2D6D0054B8B5 /* Resources */ = { - isa = PBXResourcesBuildPhase; - buildActionMask = 2147483647; - files = ( - 86BB56042E8B2D6D0054B8B5 /* GoogleService-Info.plist in Resources */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; 8848C82D2B0D04BC007B434F /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( + 8848C83A2B0D04BD007B434F /* Preview Assets.xcassets in Resources */, + 8848C8372B0D04BD007B434F /* Assets.xcassets in Resources */, 869200B32B879C4F00482873 /* GoogleService-Info.plist in Resources */, ); runOnlyForDeploymentPostprocessing = 0; @@ -213,83 +395,37 @@ /* End PBXResourcesBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ - 86BB55E92E8B2D6D0054B8B5 /* Sources */ = { - isa = PBXSourcesBuildPhase; - buildActionMask = 2147483647; - files = ( - ); - runOnlyForDeploymentPostprocessing = 0; - }; 8848C82B2B0D04BC007B434F /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + 86C1F4832BC726150026816F /* FunctionCallingScreen.swift in Sources */, + 886F95DF2B17D5010036F07A /* BouncingDots.swift in Sources */, + 86C1F4842BC726150026816F /* FunctionCallingViewModel.swift in Sources */, + 88263BF12B239C11008AB09B /* ErrorDetailsView.swift in Sources */, + 8848C8352B0D04BC007B434F /* ContentView.swift in Sources */, + 886F95D52B17BA010036F07A /* GenerateContentScreen.swift in Sources */, + 8848C8332B0D04BC007B434F /* FirebaseAIExampleApp.swift in Sources */, + 886F95E02B17D5010036F07A /* ConversationViewModel.swift in Sources */, + 886F95DD2B17D5010036F07A /* MessageView.swift in Sources */, + 886F95DC2B17BAEF0036F07A /* PhotoReasoningScreen.swift in Sources */, + DEFECAA92D7B4CCD00EF9621 /* ImagenViewModel.swift in Sources */, + DEFECAAA2D7B4CCD00EF9621 /* ImagenScreen.swift in Sources */, + 886F95DB2B17BAEF0036F07A /* PhotoReasoningViewModel.swift in Sources */, + 886F95E12B17D5010036F07A /* ConversationScreen.swift in Sources */, + 88263BF02B239C09008AB09B /* ErrorView.swift in Sources */, + 72DA044F2E385DF3004FED7D /* ChatMessage.swift in Sources */, + 886F95D62B17BA010036F07A /* GenerateContentViewModel.swift in Sources */, + A5E8E3C92C3B4F388A7A4A19 /* FilterChipView.swift in Sources */, + A5E8E3CA2C3B4F388A7A4A1A /* SampleCardView.swift in Sources */, + AEE793DF2E256D3900708F02 /* GoogleSearchSuggestionView.swift in Sources */, + AEE793E02E256D3900708F02 /* GroundedResponseView.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXSourcesBuildPhase section */ /* Begin XCBuildConfiguration section */ - 86BB56062E8B2D6D0054B8B5 /* Debug */ = { - isa = XCBuildConfiguration; - buildSettings = { - ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; - CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_ASSET_PATHS = "\"FirebaseAIExample/Preview Content\""; - DEVELOPMENT_TEAM = ""; - ENABLE_PREVIEWS = YES; - ENABLE_USER_SCRIPT_SANDBOXING = NO; - GENERATE_INFOPLIST_FILE = YES; - INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; - INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; - INFOPLIST_KEY_UILaunchScreen_Generation = YES; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown 
UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - LD_RUNPATH_SEARCH_PATHS = ( - "$(inherited)", - "@executable_path/Frameworks", - ); - MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.google.firebase.quickstart.FirebaseAIExample; - PRODUCT_NAME = "$(TARGET_NAME)"; - SWIFT_EMIT_LOC_STRINGS = YES; - SWIFT_VERSION = 5.0; - TARGETED_DEVICE_FAMILY = "1,2"; - }; - name = Debug; - }; - 86BB56072E8B2D6D0054B8B5 /* Release */ = { - isa = XCBuildConfiguration; - buildSettings = { - ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; - ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; - CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_ASSET_PATHS = "\"FirebaseAIExample/Preview Content\""; - DEVELOPMENT_TEAM = ""; - ENABLE_PREVIEWS = YES; - ENABLE_USER_SCRIPT_SANDBOXING = NO; - GENERATE_INFOPLIST_FILE = YES; - INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; - INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; - INFOPLIST_KEY_UILaunchScreen_Generation = YES; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; - LD_RUNPATH_SEARCH_PATHS = ( - "$(inherited)", - "@executable_path/Frameworks", - ); - MARKETING_VERSION = 1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.google.firebase.quickstart.FirebaseAIExample; - PRODUCT_NAME = "$(TARGET_NAME)"; - SWIFT_EMIT_LOC_STRINGS = YES; - SWIFT_VERSION = 5.0; - TARGETED_DEVICE_FAMILY = "1,2"; - }; - name = Release; - }; 8848C83B2B0D04BD007B434F /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { @@ -472,15 +608,6 @@ /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ - 86BB56052E8B2D6D0054B8B5 /* Build configuration list for PBXNativeTarget "FirebaseAIExampleZip" */ = { - isa = XCConfigurationList; - buildConfigurations = ( - 86BB56062E8B2D6D0054B8B5 /* Debug */, - 86BB56072E8B2D6D0054B8B5 /* Release */, - ); - defaultConfigurationIsVisible = 0; - defaultConfigurationName = Release; - }; 8848C82A2B0D04BC007B434F /* Build configuration list for PBXProject "FirebaseAIExample" */ = { isa = XCConfigurationList; buildConfigurations = ( @@ -501,29 +628,20 @@ }; /* End XCConfigurationList section */ -/* Begin XCRemoteSwiftPackageReference section */ - 86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */ = { - isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/swiftlang/swift-cmark"; - requirement = { - kind = revision; - revision = 3ccff77b2dc5b96b77db3da0d68d28068593fa53; - }; - }; - 86BB55E52E8B2D6D0054B8B5 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = { - isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/gonzalezreal/swift-markdown-ui"; - requirement = { - kind = revision; - revision = 55441810c0f678c78ed7e2ebd46dde89228e02fc; - }; +/* Begin XCLocalSwiftPackageReference section */ + 7200F3062E3A054300CDC51C /* XCLocalSwiftPackageReference "GenerativeAIUIComponents" */ = { + isa = XCLocalSwiftPackageReference; + relativePath = GenerativeAIUIComponents; }; +/* End XCLocalSwiftPackageReference section 
*/ + +/* Begin XCRemoteSwiftPackageReference section */ 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = { isa = XCRemoteSwiftPackageReference; repositoryURL = "https://github.com/gonzalezreal/swift-markdown-ui"; requirement = { kind = revision; - revision = 5f613358148239d0292c0cef674a3c2314737f9e; + revision = 55441810c0f678c78ed7e2ebd46dde89228e02fc; }; }; DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */ = { @@ -539,48 +657,13 @@ repositoryURL = "https://github.com/firebase/firebase-ios-sdk.git"; requirement = { kind = upToNextMajorVersion; - minimumVersion = 12.6.0; + minimumVersion = 12.0.0; }; }; /* End XCRemoteSwiftPackageReference section */ /* Begin XCSwiftPackageProductDependency section */ - 86A67E8C2E9FECCF00EDFB8A /* cmark-gfm */ = { - isa = XCSwiftPackageProductDependency; - package = 86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */; - productName = "cmark-gfm"; - }; - 86A67E8E2E9FECCF00EDFB8A /* cmark-gfm-extensions */ = { - isa = XCSwiftPackageProductDependency; - package = 86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */; - productName = "cmark-gfm-extensions"; - }; - 86A67E902E9FED0600EDFB8A /* NetworkImage */ = { - isa = XCSwiftPackageProductDependency; - package = DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */; - productName = NetworkImage; - }; - 86A67E922E9FED1700EDFB8A /* NetworkImage */ = { - isa = XCSwiftPackageProductDependency; - package = DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */; - productName = NetworkImage; - }; - 86A67E942E9FED2200EDFB8A /* cmark-gfm */ = { - isa = XCSwiftPackageProductDependency; - package = 86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */; - productName = "cmark-gfm"; - }; - 86A67E962E9FED2200EDFB8A /* cmark-gfm-extensions */ = { - isa = XCSwiftPackageProductDependency; - package = 86A67E8B2E9FECCF00EDFB8A /* XCRemoteSwiftPackageReference "swift-cmark" */; - productName = "cmark-gfm-extensions"; - }; - 86BB55E42E8B2D6D0054B8B5 /* MarkdownUI */ = { - isa = XCSwiftPackageProductDependency; - package = 86BB55E52E8B2D6D0054B8B5 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */; - productName = MarkdownUI; - }; - 86BB55E62E8B2D6D0054B8B5 /* GenerativeAIUIComponents */ = { + 7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */ = { isa = XCSwiftPackageProductDependency; productName = GenerativeAIUIComponents; }; @@ -589,10 +672,6 @@ package = 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */; productName = MarkdownUI; }; - 886F95E22B17D6630036F07A /* GenerativeAIUIComponents */ = { - isa = XCSwiftPackageProductDependency; - productName = GenerativeAIUIComponents; - }; DE26D95E2DBB3E9F007E6668 /* FirebaseAI */ = { isa = XCSwiftPackageProductDependency; package = DEFECAAB2D7BB49700EF9621 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */; diff --git a/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift b/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift index 79f18c5e4..a974cff7f 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift +++ b/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift @@ -74,3 +74,21 @@ extension ChatMessage { static var sample = samples[0] } + +extension ChatMessage { + static func from(_ modelContent: ModelContent) -> ChatMessage? 
{ + // TODO: add non-text parts to message when multi-model support is added + let text = modelContent.parts.compactMap { ($0 as? TextPart)?.text }.joined() + guard !text.isEmpty else { + return nil + } + + let participant: Participant = (modelContent.role == "user") ? .user : .system + + return ChatMessage(message: text, participant: participant) + } + + static func from(_ modelContents: [ModelContent]) -> [ChatMessage] { + return modelContents.compactMap { from($0) } + } +} diff --git a/firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift b/firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift index d12341dea..8798ee3e4 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift +++ b/firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift @@ -22,22 +22,16 @@ import SwiftUI struct ConversationScreen: View { let firebaseService: FirebaseAI - let title: String @StateObject var viewModel: ConversationViewModel @State private var userPrompt = "" - init(firebaseService: FirebaseAI, title: String, searchGroundingEnabled: Bool = false) { - let model = firebaseService.generativeModel( - modelName: "gemini-2.0-flash-001", - tools: searchGroundingEnabled ? [.googleSearch()] : [] - ) - self.title = title + init(firebaseService: FirebaseAI, sample: Sample? = nil) { self.firebaseService = firebaseService _viewModel = StateObject(wrappedValue: ConversationViewModel(firebaseService: firebaseService, - model: model)) + sample: sample)) } enum FocusedField: Hashable { @@ -99,9 +93,14 @@ struct ConversationScreen: View { } } } - .navigationTitle(title) + .navigationTitle(viewModel.title) + .navigationBarTitleDisplayMode(.inline) .onAppear { focusedField = .message + // Set initial prompt from viewModel if available + if userPrompt.isEmpty && !viewModel.initialPrompt.isEmpty { + userPrompt = viewModel.initialPrompt + } } } @@ -125,16 +124,17 @@ struct ConversationScreen: View { private func newChat() { viewModel.startNewChat() + userPrompt = "" } } struct ConversationScreen_Previews: PreviewProvider { struct ContainerView: View { @StateObject var viewModel = ConversationViewModel(firebaseService: FirebaseAI - .firebaseAI()) // Example service init + .firebaseAI(), sample: nil) // Example service init var body: some View { - ConversationScreen(firebaseService: FirebaseAI.firebaseAI(), title: "Chat sample") + ConversationScreen(firebaseService: FirebaseAI.firebaseAI()) .onAppear { viewModel.messages = ChatMessage.samples } @@ -143,7 +143,7 @@ struct ConversationScreen_Previews: PreviewProvider { static var previews: some View { NavigationStack { - ConversationScreen(firebaseService: FirebaseAI.firebaseAI(), title: "Chat sample") + ConversationScreen(firebaseService: FirebaseAI.firebaseAI()) } } } diff --git a/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ConversationViewModel.swift b/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ConversationViewModel.swift index 1326ac95a..7fdf30d88 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ConversationViewModel.swift +++ b/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ConversationViewModel.swift @@ -19,6 +19,7 @@ #endif import Foundation import UIKit +import GenerativeAIUIComponents @MainActor class ConversationViewModel: ObservableObject { @@ -33,21 +34,37 @@ class ConversationViewModel: ObservableObject { return error != nil } + @Published var initialPrompt: String = "" + @Published var title: String = "" + private var model: GenerativeModel 
private var chat: Chat private var stopGenerating = false private var chatTask: Task? - init(firebaseService: FirebaseAI, model: GenerativeModel? = nil) { - if let model { - self.model = model + private var sample: Sample? + + init(firebaseService: FirebaseAI, sample: Sample? = nil) { + self.sample = sample + + // create a generative model with sample data + model = firebaseService.generativeModel( + modelName: "gemini-2.0-flash-001", + tools: sample?.tools, + systemInstruction: sample?.systemInstruction + ) + + if let chatHistory = sample?.chatHistory, !chatHistory.isEmpty { + // Initialize with sample chat history if it's available + messages = ChatMessage.from(chatHistory) + chat = model.startChat(history: chatHistory) } else { - self.model = firebaseService.generativeModel( - modelName: "gemini-2.0-flash-001" - ) + chat = model.startChat() } - chat = self.model.startChat() + + initialPrompt = sample?.initialPrompt ?? "" + title = sample?.title ?? "" } func sendMessage(_ text: String, streaming: Bool = true) async { @@ -64,6 +81,7 @@ class ConversationViewModel: ObservableObject { error = nil chat = model.startChat() messages.removeAll() + initialPrompt = "" } func stop() { diff --git a/firebaseai/FirebaseAIExample/ContentView.swift b/firebaseai/FirebaseAIExample/ContentView.swift index 73bc64fcf..0cc5b16a5 100644 --- a/firebaseai/FirebaseAIExample/ContentView.swift +++ b/firebaseai/FirebaseAIExample/ContentView.swift @@ -13,11 +13,8 @@ // limitations under the License. import SwiftUI -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif +import FirebaseAI +import GenerativeAIUIComponents enum BackendOption: String, CaseIterable, Identifiable { case googleAI = "Gemini Developer API" @@ -37,73 +34,96 @@ enum BackendOption: String, CaseIterable, Identifiable { struct ContentView: View { @State private var selectedBackend: BackendOption = .googleAI @State private var firebaseService: FirebaseAI = FirebaseAI.firebaseAI(backend: .googleAI()) + @State private var selectedUseCase: UseCase = .text + + var filteredSamples: [Sample] { + Sample.samples.filter { $0.useCases.contains(selectedUseCase) } + } + + let columns = [ + GridItem(.adaptive(minimum: 150)), + ] var body: some View { NavigationStack { - List { - Section("Configuration") { - Picker("Backend", selection: $selectedBackend) { - ForEach(BackendOption.allCases) { option in - Text(option.rawValue).tag(option) + ScrollView { + VStack(alignment: .leading, spacing: 20) { + // Backend Configuration + VStack(alignment: .leading) { + Text("Backend Configuration") + .font(.system(size: 20, weight: .bold)) + .padding(.horizontal) + + Picker("Backend", selection: $selectedBackend) { + ForEach(BackendOption.allCases) { option in + Text(option.rawValue) + .tag(option) + } } + .pickerStyle(SegmentedPickerStyle()) + .padding(.horizontal) } - } - Section("Examples") { - NavigationLink { - GenerateContentScreen(firebaseService: firebaseService) - } label: { - Label("Generate Content", systemImage: "doc.text") - } - NavigationLink { - GenerateContentFromTemplateScreen(firebaseService: firebaseService) - } label: { - Label("Generate Content from Template", systemImage: "doc.text.fill") - } - NavigationLink { - PhotoReasoningScreen(firebaseService: firebaseService) - } label: { - Label("Multi-modal", systemImage: "doc.richtext") - } - NavigationLink { - ConversationScreen(firebaseService: firebaseService, title: "Chat") - } label: { - Label("Chat", systemImage: "ellipsis.message.fill") - } - NavigationLink { - 
ConversationScreen( - firebaseService: firebaseService, - title: "Grounding", - searchGroundingEnabled: true - ) - } label: { - Label("Grounding with Google Search", systemImage: "magnifyingglass") - } - NavigationLink { - FunctionCallingScreen(firebaseService: firebaseService) - } label: { - Label("Function Calling", systemImage: "function") - } - NavigationLink { - ImagenScreen(firebaseService: firebaseService) - } label: { - Label("Imagen", systemImage: "camera.circle") + // Use Case Filter + VStack(alignment: .leading) { + Text("Filter by use case") + .font(.system(size: 20, weight: .bold)) + .padding(.horizontal) + + ScrollView(.horizontal, showsIndicators: false) { + HStack(spacing: 10) { + ForEach(UseCase.allCases) { useCase in + FilterChipView(useCase: useCase, isSelected: selectedUseCase == useCase) { + selectedUseCase = useCase + } + } + } + .padding(.horizontal) + } } - NavigationLink { - ImagenFromTemplateScreen(firebaseService: firebaseService) - } label: { - Label("Imagen from Template", systemImage: "camera.circle.fill") + + // Samples + VStack(alignment: .leading) { + Text("Samples") + .font(.system(size: 20, weight: .bold)) + .padding(.horizontal) + + LazyVGrid(columns: columns, spacing: 20) { + ForEach(filteredSamples) { sample in + NavigationLink(destination: destinationView(for: sample)) { + SampleCardView(sample: sample) + } + .buttonStyle(PlainButtonStyle()) + } + } + .padding(.horizontal) } } + .padding(.vertical) } - .navigationTitle("Generative AI Examples") + .background(Color(.systemGroupedBackground)) + .navigationTitle("Firebase AI Logic") .onChange(of: selectedBackend) { newBackend in firebaseService = newBackend.backendValue - // Note: This might cause views that hold the old service instance to misbehave - // unless they are also correctly updated or recreated. } } } + + @ViewBuilder + private func destinationView(for sample: Sample) -> some View { + switch sample.navRoute { + case "ConversationScreen": + ConversationScreen(firebaseService: firebaseService, sample: sample) + case "ImagenScreen": + ImagenScreen(firebaseService: firebaseService, sample: sample) + case "PhotoReasoningScreen": + PhotoReasoningScreen(firebaseService: firebaseService) + case "FunctionCallingScreen": + FunctionCallingScreen(firebaseService: firebaseService) + default: + EmptyView() + } + } } #Preview { diff --git a/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift b/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift index 830bd3bc7..031660665 100644 --- a/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift +++ b/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift @@ -24,9 +24,14 @@ struct ImagenScreen: View { let firebaseService: FirebaseAI @StateObject var viewModel: ImagenViewModel - init(firebaseService: FirebaseAI) { + @State + private var userPrompt = "" + + init(firebaseService: FirebaseAI, sample: Sample? = nil) { self.firebaseService = firebaseService - _viewModel = StateObject(wrappedValue: ImagenViewModel(firebaseService: firebaseService)) + _viewModel = + StateObject(wrappedValue: ImagenViewModel(firebaseService: firebaseService, + sample: sample)) } enum FocusedField: Hashable { @@ -40,7 +45,7 @@ struct ImagenScreen: View { ZStack { ScrollView { VStack { - InputField("Enter a prompt to generate an image", text: $viewModel.userInput) { + InputField("Enter a prompt to generate an image", text: $userPrompt) { Image( systemName: viewModel.inProgress ? 
"stop.circle.fill" : "paperplane.circle.fill" ) @@ -75,12 +80,15 @@ struct ImagenScreen: View { .navigationTitle("Imagen example") .onAppear { focusedField = .message + if userPrompt.isEmpty && !viewModel.initialPrompt.isEmpty { + userPrompt = viewModel.initialPrompt + } } } private func sendMessage() { Task { - await viewModel.generateImage(prompt: viewModel.userInput) + await viewModel.generateImage(prompt: userPrompt) focusedField = .message } } diff --git a/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift b/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift index 826f88660..a111bca57 100644 --- a/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift +++ b/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift @@ -20,13 +20,14 @@ import Foundation import OSLog import SwiftUI +import GenerativeAIUIComponents @MainActor class ImagenViewModel: ObservableObject { private var logger = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "generative-ai") @Published - var userInput: String = "" + var initialPrompt: String = "" @Published var images = [UIImage]() @@ -41,7 +42,11 @@ class ImagenViewModel: ObservableObject { private var generateImagesTask: Task? - init(firebaseService: FirebaseAI) { + private var sample: Sample? + + init(firebaseService: FirebaseAI, sample: Sample? = nil) { + self.sample = sample + let modelName = "imagen-3.0-generate-002" let safetySettings = ImagenSafetySettings( safetyFilterLevel: .blockLowAndAbove @@ -55,6 +60,8 @@ class ImagenViewModel: ObservableObject { generationConfig: generationConfig, safetySettings: safetySettings ) + + initialPrompt = sample?.initialPrompt ?? "" } func generateImage(prompt: String) async { diff --git a/firebaseai/FirebaseAIExample/Views/FilterChipView.swift b/firebaseai/FirebaseAIExample/Views/FilterChipView.swift new file mode 100644 index 000000000..8c6ad2bf1 --- /dev/null +++ b/firebaseai/FirebaseAIExample/Views/FilterChipView.swift @@ -0,0 +1,55 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +import SwiftUI +import GenerativeAIUIComponents + +struct FilterChipView: View { + let useCase: UseCase + let isSelected: Bool + let action: () -> Void + + var body: some View { + Button(action: action) { + Text(useCase.rawValue) + .padding(.horizontal) + } + .filterChipStyle(isSelected: isSelected) + } +} + +private struct FilterChipStyle: ViewModifier { + let isSelected: Bool + + func body(content: Content) -> some View { + if isSelected { + content.buttonStyle(.borderedProminent) + } else { + content.buttonStyle(.bordered) + } + } +} + +extension View { + func filterChipStyle(isSelected: Bool) -> some View { + modifier(FilterChipStyle(isSelected: isSelected)) + } +} + +#Preview { + VStack(spacing: 16) { + FilterChipView(useCase: .text, isSelected: true) {} + FilterChipView(useCase: .text, isSelected: false) {} + } +} diff --git a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift new file mode 100644 index 000000000..139a391b5 --- /dev/null +++ b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift @@ -0,0 +1,124 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import SwiftUI +import GenerativeAIUIComponents + +struct SampleCardView: View { + let sample: Sample + + var body: some View { + GroupBox { + Text(sample.description) + .font(.system(size: 14)) + .foregroundColor(.secondary) + .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading) + } label: { + if let useCase = sample.useCases.first { + Label(sample.title, systemImage: systemName(for: useCase)) + .font(.system(size: 17, weight: .medium)) + .foregroundColor(color(for: useCase)) + } else { + Text(sample.title) + .font(.system(size: 17, weight: .medium)) + } + } + .groupBoxStyle(CardGroupBoxStyle()) + .frame(maxWidth: .infinity, minHeight: 150, maxHeight: .infinity, alignment: .top) + } + + private func systemName(for useCase: UseCase) -> String { + switch useCase { + case .text: "text.bubble.fill" + case .image: "photo.fill" + case .video: "video.fill" + case .audio: "waveform" + case .document: "doc.fill" + case .functionCalling: "gearshape.2.fill" + } + } + + private func color(for useCase: UseCase) -> Color { + switch useCase { + case .text:.blue + case .image:.purple + case .video:.red + case .audio:.orange + case .document:.gray + case .functionCalling:.green + } + } +} + +public struct CardGroupBoxStyle: GroupBoxStyle { + private var cornerRadius: CGFloat { + if #available(iOS 26.0, *) { + return 28 + } else { + return 12 + } + } + + public func makeBody(configuration: Configuration) -> some View { + VStack(alignment: .leading, spacing: 12) { + configuration.label + configuration.content + } + .padding() + .background(Color(.secondarySystemGroupedBackground)) + .clipShape(RoundedRectangle(cornerRadius: cornerRadius, style: .continuous)) + } +} + +#Preview { + let samples = [ + Sample( + title: "Sample 1", + description: "This is the first sample card.", + useCases: [.text], + navRoute: 
"ConversationScreen" + ), + Sample( + title: "Sample 2", + description: "This is the second sample card.", + useCases: [.image], + navRoute: "PhotoReasoningScreen" + ), + Sample( + title: "Sample 3", + description: "This is the third sample card.", + useCases: [.video], + navRoute: "ConversationScreen" + ), + Sample( + title: "Sample 4", + description: "This is the fourth sample card, which is a bit longer to see how the text wraps and if everything still aligns correctly.", + useCases: [.audio], + navRoute: "ConversationScreen" + ), + ] + + ScrollView { + LazyVGrid(columns: [ + GridItem(.flexible()), + GridItem(.flexible()), + ], spacing: 16) { + ForEach(samples) { sample in + SampleCardView(sample: sample) + } + } + .padding() + } + .background(Color(.systemGroupedBackground)) +} diff --git a/firebaseai/GenerativeAIUIComponents/Package.swift b/firebaseai/GenerativeAIUIComponents/Package.swift index 808f5f42a..b174a6284 100644 --- a/firebaseai/GenerativeAIUIComponents/Package.swift +++ b/firebaseai/GenerativeAIUIComponents/Package.swift @@ -27,9 +27,15 @@ let package = Package( targets: ["GenerativeAIUIComponents"] ), ], + dependencies: [ + .package(url: "https://github.com/firebase/firebase-ios-sdk.git", from: "12.0.0"), + ], targets: [ .target( - name: "GenerativeAIUIComponents" + name: "GenerativeAIUIComponents", + dependencies: [ + .product(name: "FirebaseAI", package: "firebase-ios-sdk"), + ] ), ] ) diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift new file mode 100644 index 000000000..3d7637a77 --- /dev/null +++ b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift @@ -0,0 +1,218 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation +import FirebaseAI + +public struct Sample: Identifiable { + public let id = UUID() + public let title: String + public let description: String + public let useCases: [UseCase] + public let navRoute: String + public let chatHistory: [ModelContent]? + public let initialPrompt: String? + public let systemInstruction: ModelContent? + public let tools: [Tool]? + + public init(title: String, + description: String, + useCases: [UseCase], + navRoute: String, + chatHistory: [ModelContent]? = nil, + initialPrompt: String? = nil, + systemInstruction: ModelContent? = nil, + tools: [Tool]? 
= nil) { + self.title = title + self.description = description + self.useCases = useCases + self.navRoute = navRoute + self.chatHistory = chatHistory + self.initialPrompt = initialPrompt + self.systemInstruction = systemInstruction + self.tools = tools + } +} + +extension Sample { + public static let samples: [Sample] = [ + // Text + Sample( + title: "Travel tips", + description: "The user wants the model to help a new traveler" + + " with travel tips", + useCases: [.text], + navRoute: "ConversationScreen", + chatHistory: [ + ModelContent( + role: "user", + parts: "I have never traveled before. When should I book a flight?" + ), + ModelContent( + role: "model", + parts: "You should book flights a couple of months ahead of time. It will be cheaper and more flexible for you." + ), + ModelContent(role: "user", parts: "Do I need a passport?"), + ModelContent( + role: "model", + parts: "If you are traveling outside your own country, make sure your passport is up-to-date and valid for more than 6 months during your travel." + ), + ], + initialPrompt: "What else is important when traveling?", + systemInstruction: ModelContent(parts: "You are a Travel assistant. You will answer" + + " questions the user asks based on the information listed" + + " in Relevant Information. Do not hallucinate. Do not use" + + " the internet."), + ), + Sample( + title: "Chatbot recommendations for courses", + description: "A chatbot suggests courses for a performing arts program.", + useCases: [.text], + navRoute: "ConversationScreen", + initialPrompt: "I am interested in Performing Arts. I have taken Theater 1A.", + systemInstruction: ModelContent(parts: "You are a chatbot for the county's performing and fine arts" + + " program. You help students decide what course they will" + + " take during the summer."), + ), + // Image + Sample( + title: "Blog post creator", + description: "Create a blog post from an image file stored in Cloud Storage.", + useCases: [.image], + navRoute: "PhotoReasoningScreen", + chatHistory: [ + ModelContent(role: "user", parts: "Can you help me create a blog post about this image?"), + ModelContent( + role: "model", + parts: "I'd be happy to help you create a blog post! Please share the image you'd like me to analyze and write about." + ), + ], + initialPrompt: "Please analyze this image and create an engaging blog post" + ), + Sample( + title: "Imagen 3 - image generation", + description: "Generate images using Imagen 3", + useCases: [.image], + navRoute: "ImagenScreen", + initialPrompt: "A photo of a modern building with water in the background" + ), + Sample( + title: "Gemini 2.0 Flash - image generation", + description: "Generate and/or edit images using Gemini 2.0 Flash", + useCases: [.image], + navRoute: "PhotoReasoningScreen", + chatHistory: [ + ModelContent(role: "user", parts: "Can you edit this image to make it brighter?"), + ModelContent( + role: "model", + parts: "I can help you edit images using Gemini 2.0 Flash. Please share the image you'd like me to modify." + ), + ], + initialPrompt: "" + ), + // Video + Sample( + title: "Hashtags for a video", + description: "Generate hashtags for a video ad stored in Cloud Storage.", + useCases: [.video], + navRoute: "ConversationScreen", + chatHistory: [ + ModelContent(role: "user", parts: "Can you suggest hashtags for my product video?"), + ModelContent( + role: "model", + parts: "I'd be happy to help you generate relevant hashtags! Please share your video or describe what it's about so I can suggest appropriate hashtags." 
+ ), + ], + initialPrompt: "" + ), + Sample( + title: "Summarize video", + description: "Summarize a video and extract important dialogue.", + useCases: [.video], + navRoute: "ConversationScreen", + chatHistory: [ + ModelContent(role: "user", parts: "Can you summarize this video for me?"), + ModelContent( + role: "model", + parts: "I can help you summarize videos and extract key dialogue. Please share the video you'd like me to analyze." + ), + ], + initialPrompt: "" + ), + // Audio + Sample( + title: "Audio Summarization", + description: "Summarize an audio file", + useCases: [.audio], + navRoute: "ConversationScreen", + chatHistory: [ + ModelContent(role: "user", parts: "Can you summarize this audio recording?"), + ModelContent( + role: "model", + parts: "I can help you summarize audio files. Please share the audio recording you'd like me to analyze." + ), + ], + initialPrompt: "" + ), + Sample( + title: "Translation from audio", + description: "Translate an audio file stored in Cloud Storage", + useCases: [.audio], + navRoute: "ConversationScreen", + chatHistory: [ + ModelContent(role: "user", parts: "Can you translate this audio from Spanish to English?"), + ModelContent( + role: "model", + parts: "I can help you translate audio files. Please share the audio file you'd like me to translate." + ), + ], + initialPrompt: "" + ), + // Document + Sample( + title: "Document comparison", + description: "Compare the contents of 2 documents." + + " Only supported by the Vertex AI Gemini API because the documents are stored in Cloud Storage", + useCases: [.document], + navRoute: "ConversationScreen", + chatHistory: [ + ModelContent(role: "user", parts: "Can you compare these two documents for me?"), + ModelContent( + role: "model", + parts: "I can help you compare documents using the Vertex AI Gemini API. Please share the two documents you'd like me to compare." + ), + ], + initialPrompt: "" + ), + // Function Calling + Sample( + title: "Currency conversion", + description: "Use function calling to convert currency", + useCases: [.functionCalling, .text], + navRoute: "FunctionCallingScreen", + initialPrompt: "What is 100 Euros in USD?" + ), + // Grounding + Sample( + title: "Grounding with Google Search", + description: "Use Grounding with Google Search to get responses based on up-to-date information from the web.", + useCases: [.text], + navRoute: "ConversationScreen", + initialPrompt: "What's the weather in Chicago this weekend?", + tools: [.googleSearch()] + ), + ] + + public static var sample = samples[0] +} diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift new file mode 100644 index 000000000..5448dc01b --- /dev/null +++ b/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift @@ -0,0 +1,26 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
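+// A minimal usage sketch (illustrative only; `selectedUseCase`, `filtered`, and
+// `chipTitles` are hypothetical names, not part of this API) showing how `UseCase`
+// is intended to pair with `Sample` to drive the filter chips on the main menu:
+//
+//     let selectedUseCase: UseCase = .image
+//     let filtered = Sample.samples.filter { $0.useCases.contains(selectedUseCase) }
+//     let chipTitles = UseCase.allCases.map(\.rawValue)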
+
+import Foundation
+
+public enum UseCase: String, CaseIterable, Identifiable {
+  case text = "Text"
+  case image = "Image"
+  case video = "Video"
+  case audio = "Audio"
+  case document = "Document"
+  case functionCalling = "Function Calling"
+
+  public var id: String { rawValue }
+}

From 0900fc9ab415d3aca28823f9b054e6c82ac9f440 Mon Sep 17 00:00:00 2001
From: Haibo Yang
Date: Fri, 15 Aug 2025 06:27:29 -0700
Subject: [PATCH 02/25] [FirebaseAI] Integrate ConversationKit (#1745)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* integrate conversationkit to firebaseai

* refactor function calling logic

* remove fatalError

* change preview for screens

* ♻️ Use ConversationKit

* ♻️ Refactor error handling

* ✨ Bring back pre-filled user messages

* 🧹Cleanup

* ⬆️ Use latest ConversationKit version

* fix style check

* add errordetailview for imagenexample

* ✏️ Fix typo

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>

* ✏️ Fix typo

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>

* ✏️ Fix typo

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>

* ✏️ Fix typo

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>

* fix style and change ci

---------

Co-authored-by: Peter Friese
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
---
 .../project.pbxproj                           | 161 +++++----
 .../ChatExample/Models/ChatMessage.swift      |  76 ++--
 .../ChatExample/Screens/ChatScreen.swift      |  69 ++++
 .../Screens/ConversationScreen.swift          | 149 --------
 ...ionViewModel.swift => ChatViewModel.swift} |  30 +-
 .../ChatExample/Views/ErrorView.swift         | 100 -----
 .../ChatExample/Views/MessageView.swift       |  77 ++--
 .../FirebaseAIExample/ContentView.swift       |   7 +-
 .../Screens/FunctionCallingScreen.swift       | 131 ++-----
 .../ViewModels/FunctionCallingViewModel.swift | 341 +++++++++---------
 .../Screens/PhotoReasoningScreen.swift        |   1 -
 .../ImagenExample/ImagenScreen.swift          |  35 +-
 .../ImagenExample/ImagenViewModel.swift       |  10 +-
 .../Views/FilterChipView.swift                |   1 -
 .../Views/SampleCardView.swift                |   1 -
 .../Services/WeatherService.swift             |  27 ++
 .../GenerativeAIUIComponents/Package.swift    |  41 ---
 .../Models/Sample.swift                       |  39 +-
 .../Models/UseCase.swift                      |   0
 .../Views}/InputField.swift                   |   0
 .../Views}/MultimodalInputField.swift         |   0
 21 files changed, 575 insertions(+), 721 deletions(-)
 create mode 100644 firebaseai/FirebaseAIExample/ChatExample/Screens/ChatScreen.swift
 delete mode 100644 firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift
 rename firebaseai/FirebaseAIExample/ChatExample/ViewModels/{ConversationViewModel.swift => ChatViewModel.swift} (78%)
 delete mode 100644 firebaseai/FirebaseAIExample/ChatExample/Views/ErrorView.swift
 create mode 100644 firebaseai/FunctionCallingExample/Services/WeatherService.swift
 delete mode 100644 firebaseai/GenerativeAIUIComponents/Package.swift
 rename firebaseai/{GenerativeAIUIComponents/Sources/GenerativeAIUIComponents => UIComponents}/Models/Sample.swift (87%)
 rename firebaseai/{GenerativeAIUIComponents/Sources/GenerativeAIUIComponents => UIComponents}/Models/UseCase.swift (100%)
 rename firebaseai/{GenerativeAIUIComponents/Sources/GenerativeAIUIComponents => UIComponents/Views}/InputField.swift (100%)
 rename firebaseai/{GenerativeAIUIComponents/Sources/GenerativeAIUIComponents =>
UIComponents/Views}/MultimodalInputField.swift (100%) diff --git a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj index 443e12082..2217a26aa 100644 --- a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj +++ b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj @@ -3,30 +3,33 @@ archiveVersion = 1; classes = { }; - objectVersion = 60; + objectVersion = 56; objects = { /* Begin PBXBuildFile section */ - 7200F3082E3A054300CDC51C /* GenerativeAIUIComponents in Frameworks */ = {isa = PBXBuildFile; productRef = 7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */; }; + 726490D92E3F39E000A92700 /* Sample.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D22E3F39D200A92700 /* Sample.swift */; }; + 726490DA2E3F39E000A92701 /* UseCase.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D32E3F39D200A92700 /* UseCase.swift */; }; + 726490DC2E3F39E000A92703 /* InputField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D72E3F39D900A92700 /* InputField.swift */; }; + 726490DD2E3F39E000A92704 /* MultimodalInputField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D82E3F39DC00A92700 /* MultimodalInputField.swift */; }; 72DA044F2E385DF3004FED7D /* ChatMessage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 72DA044E2E385DF3004FED7D /* ChatMessage.swift */; }; + 72E040752E448731003D4135 /* WeatherService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 72E040742E44872C003D4135 /* WeatherService.swift */; }; 869200B32B879C4F00482873 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 869200B22B879C4F00482873 /* GoogleService-Info.plist */; }; 86C1F4832BC726150026816F /* FunctionCallingScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */; }; 86C1F4842BC726150026816F /* FunctionCallingViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */; }; - 88263BF02B239C09008AB09B /* ErrorView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88263BEE2B239BFE008AB09B /* ErrorView.swift */; }; 88263BF12B239C11008AB09B /* ErrorDetailsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 889873842B208563005B4896 /* ErrorDetailsView.swift */; }; + 884298E12E4B8110005F535F /* ConversationKit in Frameworks */ = {isa = PBXBuildFile; productRef = 884298E02E4B8110005F535F /* ConversationKit */; }; 8848C8332B0D04BC007B434F /* FirebaseAIExampleApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8848C8322B0D04BC007B434F /* FirebaseAIExampleApp.swift */; }; 8848C8352B0D04BC007B434F /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8848C8342B0D04BC007B434F /* ContentView.swift */; }; 8848C8372B0D04BD007B434F /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8848C8362B0D04BD007B434F /* Assets.xcassets */; }; 8848C83A2B0D04BD007B434F /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8848C8392B0D04BD007B434F /* Preview Assets.xcassets */; }; - 886F95D52B17BA010036F07A /* GenerateContentScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88209C1B2B0FBDC300F64795 /* GenerateContentScreen.swift */; }; - 886F95D62B17BA010036F07A /* GenerateContentViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88209C1D2B0FBDC300F64795 /* GenerateContentViewModel.swift */; }; + 885D0CA12E4CB7CD00A217A0 /* ConversationKit in Frameworks */ = {isa = PBXBuildFile; productRef = 
885D0CA02E4CB7CD00A217A0 /* ConversationKit */; }; 886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */ = {isa = PBXBuildFile; productRef = 886F95D72B17BA420036F07A /* MarkdownUI */; }; 886F95DB2B17BAEF0036F07A /* PhotoReasoningViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */; }; 886F95DC2B17BAEF0036F07A /* PhotoReasoningScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */; }; 886F95DD2B17D5010036F07A /* MessageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F5A2B11133E00C08E95 /* MessageView.swift */; }; 886F95DF2B17D5010036F07A /* BouncingDots.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F5C2B11135000C08E95 /* BouncingDots.swift */; }; - 886F95E02B17D5010036F07A /* ConversationViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F562B1112F600C08E95 /* ConversationViewModel.swift */; }; - 886F95E12B17D5010036F07A /* ConversationScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F542B1112CA00C08E95 /* ConversationScreen.swift */; }; + 886F95E02B17D5010036F07A /* ChatViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F562B1112F600C08E95 /* ChatViewModel.swift */; }; + 886F95E12B17D5010036F07A /* ChatScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F542B1112CA00C08E95 /* ChatScreen.swift */; }; A5E8E3C92C3B4F388A7A4A19 /* FilterChipView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A5E8E3C52C3B4F388A7A4A15 /* FilterChipView.swift */; }; A5E8E3CA2C3B4F388A7A4A1A /* SampleCardView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A5E8E3C62C3B4F388A7A4A16 /* SampleCardView.swift */; }; AEE793DF2E256D3900708F02 /* GoogleSearchSuggestionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = AEE793DC2E256D3900708F02 /* GoogleSearchSuggestionView.swift */; }; @@ -37,31 +40,29 @@ /* End PBXBuildFile section */ /* Begin PBXFileReference section */ - 726634072E37011C00554974 /* Package.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = Package.swift; path = GenerativeAIUIComponents/Package.swift; sourceTree = ""; }; + 726490D22E3F39D200A92700 /* Sample.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Sample.swift; sourceTree = ""; }; + 726490D32E3F39D200A92700 /* UseCase.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UseCase.swift; sourceTree = ""; }; + 726490D72E3F39D900A92700 /* InputField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InputField.swift; sourceTree = ""; }; + 726490D82E3F39DC00A92700 /* MultimodalInputField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultimodalInputField.swift; sourceTree = ""; }; 72DA044E2E385DF3004FED7D /* ChatMessage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatMessage.swift; sourceTree = ""; }; + 72E040742E44872C003D4135 /* WeatherService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WeatherService.swift; sourceTree = ""; }; 869200B22B879C4F00482873 /* GoogleService-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = ""; }; 86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = 
FunctionCallingScreen.swift; sourceTree = ""; }; 86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FunctionCallingViewModel.swift; sourceTree = ""; }; 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoReasoningViewModel.swift; sourceTree = ""; }; 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoReasoningScreen.swift; sourceTree = ""; }; - 88209C1B2B0FBDC300F64795 /* GenerateContentScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GenerateContentScreen.swift; sourceTree = ""; }; - 88209C1D2B0FBDC300F64795 /* GenerateContentViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GenerateContentViewModel.swift; sourceTree = ""; }; - 88263BEE2B239BFE008AB09B /* ErrorView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ErrorView.swift; sourceTree = ""; }; 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FirebaseAIExample.app; sourceTree = BUILT_PRODUCTS_DIR; }; 8848C8322B0D04BC007B434F /* FirebaseAIExampleApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FirebaseAIExampleApp.swift; sourceTree = ""; }; 8848C8342B0D04BC007B434F /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; 8848C8362B0D04BD007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 8848C8392B0D04BD007B434F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; - 8848C84A2B0D051F007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; - 8848C84D2B0D051F007B434F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 8848C85C2B0D056D007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 8848C85F2B0D056D007B434F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 889873842B208563005B4896 /* ErrorDetailsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ErrorDetailsView.swift; sourceTree = ""; }; - 88B8A9352B0FCBA700424728 /* GenerativeAIUIComponents */ = {isa = PBXFileReference; lastKnownFileType = wrapper; path = GenerativeAIUIComponents; sourceTree = ""; }; 88E10F482B110D5400C08E95 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 88E10F4B2B110D5400C08E95 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; - 88E10F542B1112CA00C08E95 /* ConversationScreen.swift */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.swift; path = ConversationScreen.swift; sourceTree = ""; }; - 88E10F562B1112F600C08E95 /* ConversationViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConversationViewModel.swift; sourceTree = ""; }; + 88E10F542B1112CA00C08E95 /* ChatScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatScreen.swift; sourceTree = ""; }; + 88E10F562B1112F600C08E95 /* ChatViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatViewModel.swift; sourceTree = ""; }; 88E10F5A2B11133E00C08E95 /* MessageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MessageView.swift; sourceTree = ""; }; 88E10F5C2B11135000C08E95 /* BouncingDots.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BouncingDots.swift; sourceTree = ""; }; A5E8E3C52C3B4F388A7A4A15 /* FilterChipView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FilterChipView.swift; sourceTree = ""; }; @@ -77,15 +78,43 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( + 884298E12E4B8110005F535F /* ConversationKit in Frameworks */, DE26D95F2DBB3E9F007E6668 /* FirebaseAI in Frameworks */, + 885D0CA12E4CB7CD00A217A0 /* ConversationKit in Frameworks */, 886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */, - 7200F3082E3A054300CDC51C /* GenerativeAIUIComponents in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 726490D12E3F39C900A92700 /* UIComponents */ = { + isa = PBXGroup; + children = ( + 726490D42E3F39D200A92700 /* Models */, + 726490D62E3F39D600A92700 /* Views */, + ); + path = UIComponents; + sourceTree = ""; + }; + 726490D42E3F39D200A92700 /* Models */ = { + isa = PBXGroup; + children = ( + 726490D22E3F39D200A92700 /* Sample.swift */, + 726490D32E3F39D200A92700 /* UseCase.swift */, + ); + path = Models; + sourceTree = ""; + }; + 726490D62E3F39D600A92700 /* Views */ = { + isa = PBXGroup; + children = ( + 726490D72E3F39D900A92700 /* InputField.swift */, + 726490D82E3F39DC00A92700 /* MultimodalInputField.swift */, + ); + path = Views; + sourceTree = ""; + }; 72DA044D2E385DED004FED7D /* Models */ = { isa = PBXGroup; children = ( @@ -94,6 +123,14 @@ path = Models; sourceTree = ""; }; + 72E040732E448720003D4135 /* Services */ = { + isa = PBXGroup; + children = ( + 72E040742E44872C003D4135 /* WeatherService.swift */, + ); + path = Services; + sourceTree = ""; + }; 86C1F47F2BC726150026816F /* Screens */ = { isa = PBXGroup; children = ( @@ -113,6 +150,7 @@ 86C1F4822BC726150026816F /* FunctionCallingExample */ = { isa = PBXGroup; children = ( + 72E040732E448720003D4135 /* Services */, 86C1F4812BC726150026816F /* ViewModels */, 86C1F47F2BC726150026816F /* Screens */, ); @@ -135,26 +173,9 @@ path = Screens; sourceTree = ""; }; - 88209C1A2B0FBDC300F64795 /* Screens */ = { - isa = PBXGroup; - children = ( - 88209C1B2B0FBDC300F64795 /* GenerateContentScreen.swift */, - ); - path = Screens; - sourceTree = ""; - }; - 88209C1C2B0FBDC300F64795 /* ViewModels */ = { - isa = PBXGroup; - children = ( - 88209C1D2B0FBDC300F64795 /* GenerateContentViewModel.swift */, - ); - path = ViewModels; - sourceTree = ""; - }; 88209C222B0FBE1700F64795 /* Frameworks */ = { isa = PBXGroup; children = ( - 726634072E37011C00554974 /* Package.swift */, ); name = Frameworks; sourceTree = ""; @@ -162,11 +183,10 @@ 8848C8262B0D04BC007B434F = { isa = 
PBXGroup; children = ( + 726490D12E3F39C900A92700 /* UIComponents */, DEFECAA82D7B4CCD00EF9621 /* ImagenScreen */, - 88B8A9352B0FCBA700424728 /* GenerativeAIUIComponents */, 869200B22B879C4F00482873 /* GoogleService-Info.plist */, 8848C8312B0D04BC007B434F /* FirebaseAIExample */, - 8848C8452B0D051E007B434F /* GenerativeAITextExample */, 8848C8572B0D056C007B434F /* GenerativeAIMultimodalExample */, 88E10F432B110D5300C08E95 /* ChatExample */, 86C1F4822BC726150026816F /* FunctionCallingExample */, @@ -203,25 +223,6 @@ path = "Preview Content"; sourceTree = ""; }; - 8848C8452B0D051E007B434F /* GenerativeAITextExample */ = { - isa = PBXGroup; - children = ( - 88209C1C2B0FBDC300F64795 /* ViewModels */, - 88209C1A2B0FBDC300F64795 /* Screens */, - 8848C84A2B0D051F007B434F /* Assets.xcassets */, - 8848C84C2B0D051F007B434F /* Preview Content */, - ); - path = GenerativeAITextExample; - sourceTree = ""; - }; - 8848C84C2B0D051F007B434F /* Preview Content */ = { - isa = PBXGroup; - children = ( - 8848C84D2B0D051F007B434F /* Preview Assets.xcassets */, - ); - path = "Preview Content"; - sourceTree = ""; - }; 8848C8572B0D056C007B434F /* GenerativeAIMultimodalExample */ = { isa = PBXGroup; children = ( @@ -265,7 +266,7 @@ 88E10F502B11123600C08E95 /* ViewModels */ = { isa = PBXGroup; children = ( - 88E10F562B1112F600C08E95 /* ConversationViewModel.swift */, + 88E10F562B1112F600C08E95 /* ChatViewModel.swift */, ); path = ViewModels; sourceTree = ""; @@ -274,7 +275,6 @@ isa = PBXGroup; children = ( AEE793DE2E256D3900708F02 /* Grounding */, - 88263BEE2B239BFE008AB09B /* ErrorView.swift */, 88E10F5A2B11133E00C08E95 /* MessageView.swift */, 88E10F5C2B11135000C08E95 /* BouncingDots.swift */, 889873842B208563005B4896 /* ErrorDetailsView.swift */, @@ -285,7 +285,7 @@ 88E10F532B1112B900C08E95 /* Screens */ = { isa = PBXGroup; children = ( - 88E10F542B1112CA00C08E95 /* ConversationScreen.swift */, + 88E10F542B1112CA00C08E95 /* ChatScreen.swift */, ); path = Screens; sourceTree = ""; @@ -336,7 +336,8 @@ packageProductDependencies = ( 886F95D72B17BA420036F07A /* MarkdownUI */, DE26D95E2DBB3E9F007E6668 /* FirebaseAI */, - 7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */, + 884298E02E4B8110005F535F /* ConversationKit */, + 885D0CA02E4CB7CD00A217A0 /* ConversationKit */, ); productName = GenerativeAIExample; productReference = 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */; @@ -370,7 +371,7 @@ 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */, DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */, DEFECAAB2D7BB49700EF9621 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */, - 7200F3062E3A054300CDC51C /* XCLocalSwiftPackageReference "GenerativeAIUIComponents" */, + 885D0C9F2E4CB7CD00A217A0 /* XCRemoteSwiftPackageReference "ConversationKit" */, ); productRefGroup = 8848C8302B0D04BC007B434F /* Products */; projectDirPath = ""; @@ -404,22 +405,24 @@ 86C1F4842BC726150026816F /* FunctionCallingViewModel.swift in Sources */, 88263BF12B239C11008AB09B /* ErrorDetailsView.swift in Sources */, 8848C8352B0D04BC007B434F /* ContentView.swift in Sources */, - 886F95D52B17BA010036F07A /* GenerateContentScreen.swift in Sources */, 8848C8332B0D04BC007B434F /* FirebaseAIExampleApp.swift in Sources */, - 886F95E02B17D5010036F07A /* ConversationViewModel.swift in Sources */, + 886F95E02B17D5010036F07A /* ChatViewModel.swift in Sources */, 886F95DD2B17D5010036F07A /* MessageView.swift in Sources */, 886F95DC2B17BAEF0036F07A /* PhotoReasoningScreen.swift in Sources */, 
DEFECAA92D7B4CCD00EF9621 /* ImagenViewModel.swift in Sources */, DEFECAAA2D7B4CCD00EF9621 /* ImagenScreen.swift in Sources */, 886F95DB2B17BAEF0036F07A /* PhotoReasoningViewModel.swift in Sources */, - 886F95E12B17D5010036F07A /* ConversationScreen.swift in Sources */, - 88263BF02B239C09008AB09B /* ErrorView.swift in Sources */, + 72E040752E448731003D4135 /* WeatherService.swift in Sources */, + 886F95E12B17D5010036F07A /* ChatScreen.swift in Sources */, 72DA044F2E385DF3004FED7D /* ChatMessage.swift in Sources */, - 886F95D62B17BA010036F07A /* GenerateContentViewModel.swift in Sources */, A5E8E3C92C3B4F388A7A4A19 /* FilterChipView.swift in Sources */, A5E8E3CA2C3B4F388A7A4A1A /* SampleCardView.swift in Sources */, AEE793DF2E256D3900708F02 /* GoogleSearchSuggestionView.swift in Sources */, AEE793E02E256D3900708F02 /* GroundedResponseView.swift in Sources */, + 726490D92E3F39E000A92700 /* Sample.swift in Sources */, + 726490DA2E3F39E000A92701 /* UseCase.swift in Sources */, + 726490DC2E3F39E000A92703 /* InputField.swift in Sources */, + 726490DD2E3F39E000A92704 /* MultimodalInputField.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -478,7 +481,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 16.0; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; LOCALIZATION_PREFERS_STRING_CATALOGS = YES; MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; MTL_FAST_MATH = YES; @@ -535,7 +538,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 16.0; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; LOCALIZATION_PREFERS_STRING_CATALOGS = YES; MTL_ENABLE_DEBUG_INFO = NO; MTL_FAST_MATH = YES; @@ -628,13 +631,6 @@ }; /* End XCConfigurationList section */ -/* Begin XCLocalSwiftPackageReference section */ - 7200F3062E3A054300CDC51C /* XCLocalSwiftPackageReference "GenerativeAIUIComponents" */ = { - isa = XCLocalSwiftPackageReference; - relativePath = GenerativeAIUIComponents; - }; -/* End XCLocalSwiftPackageReference section */ - /* Begin XCRemoteSwiftPackageReference section */ 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = { isa = XCRemoteSwiftPackageReference; @@ -644,6 +640,14 @@ revision = 55441810c0f678c78ed7e2ebd46dde89228e02fc; }; }; + 885D0C9F2E4CB7CD00A217A0 /* XCRemoteSwiftPackageReference "ConversationKit" */ = { + isa = XCRemoteSwiftPackageReference; + repositoryURL = "https://github.com/peterfriese/ConversationKit"; + requirement = { + kind = upToNextMajorVersion; + minimumVersion = 0.0.2; + }; + }; DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */ = { isa = XCRemoteSwiftPackageReference; repositoryURL = "https://github.com/gonzalezreal/NetworkImage"; @@ -663,9 +667,14 @@ /* End XCRemoteSwiftPackageReference section */ /* Begin XCSwiftPackageProductDependency section */ - 7200F3072E3A054300CDC51C /* GenerativeAIUIComponents */ = { + 884298E02E4B8110005F535F /* ConversationKit */ = { + isa = XCSwiftPackageProductDependency; + productName = ConversationKit; + }; + 885D0CA02E4CB7CD00A217A0 /* ConversationKit */ = { isa = XCSwiftPackageProductDependency; - productName = GenerativeAIUIComponents; + package = 885D0C9F2E4CB7CD00A217A0 /* XCRemoteSwiftPackageReference "ConversationKit" */; + productName = ConversationKit; }; 886F95D72B17BA420036F07A /* MarkdownUI */ = { isa = XCSwiftPackageProductDependency; diff --git 
a/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift b/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift index a974cff7f..829d1d7ab 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift +++ b/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift @@ -18,35 +18,65 @@ import FirebaseAI #endif import Foundation +import ConversationKit -enum Participant { - case system - case user -} +public struct ChatMessage: Message { + public let id: UUID = .init() + public var content: String? + public let imageURL: String? + public let participant: Participant + public let error: (any Error)? + public var pending = false + public var groundingMetadata: GroundingMetadata? -struct ChatMessage: Identifiable, Equatable { - let id = UUID().uuidString - var message: String - var groundingMetadata: GroundingMetadata? - let participant: Participant - var pending = false + public init(content: String? = nil, imageURL: String? = nil, participant: Participant, + error: (any Error)? = nil, pending: Bool = false) { + self.content = content + self.imageURL = imageURL + self.participant = participant + self.error = error + self.pending = pending + } - static func pending(participant: Participant) -> ChatMessage { - Self(message: "", participant: participant, pending: true) + // Protocol-required initializer + public init(content: String?, imageURL: String?, participant: Participant) { + self.content = content + self.imageURL = imageURL + self.participant = participant + error = nil } +} - // TODO(andrewheard): Add Equatable conformance to GroundingMetadata and remove this - static func == (lhs: ChatMessage, rhs: ChatMessage) -> Bool { - lhs.id == rhs.id && lhs.message == rhs.message && lhs.participant == rhs.participant && lhs - .pending == rhs.pending +extension ChatMessage { + public static func pending(participant: Participant) -> ChatMessage { + Self(content: "", participant: participant, pending: true) } } +// Implement Equatable and Hashable for ChatMessage (ignore error) extension ChatMessage { + public static func == (lhs: ChatMessage, rhs: ChatMessage) -> Bool { + lhs.id == rhs.id && + lhs.content == rhs.content && + lhs.imageURL == rhs.imageURL && + lhs.participant == rhs.participant + // intentionally ignore `error` + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(id) + hasher.combine(content) + hasher.combine(imageURL) + hasher.combine(participant) + // intentionally ignore `error` + } +} + +public extension ChatMessage { static var samples: [ChatMessage] = [ - .init(message: "Hello. What can I do for you today?", participant: .system), - .init(message: "Show me a simple loop in Swift.", participant: .user), - .init(message: """ + .init(content: "Hello. What can I do for you today?", participant: .other), + .init(content: "Show me a simple loop in Swift.", participant: .user), + .init(content: """ Sure, here is a simple loop in Swift: # Example 1 @@ -69,13 +99,13 @@ extension ChatMessage { ``` This loop calculates the sum of the numbers from 1 to 100. The variable sum is initialized to 0, and then the for loop iterates over the range of numbers from 1 to 100. The variable i is assigned each number in the range, and the value of i is added to the sum variable. After the loop has finished executing, the value of sum is printed to the console. 
- """, participant: .system), + """, participant: .other), ] static var sample = samples[0] } -extension ChatMessage { +public extension ChatMessage { static func from(_ modelContent: ModelContent) -> ChatMessage? { // TODO: add non-text parts to message when multi-model support is added let text = modelContent.parts.compactMap { ($0 as? TextPart)?.text }.joined() @@ -83,9 +113,9 @@ extension ChatMessage { return nil } - let participant: Participant = (modelContent.role == "user") ? .user : .system + let participant: Participant = (modelContent.role == "user") ? .user : .other - return ChatMessage(message: text, participant: participant) + return ChatMessage(content: text, participant: participant) } static func from(_ modelContents: [ModelContent]) -> [ChatMessage] { diff --git a/firebaseai/FirebaseAIExample/ChatExample/Screens/ChatScreen.swift b/firebaseai/FirebaseAIExample/ChatExample/Screens/ChatScreen.swift new file mode 100644 index 000000000..880a97ce4 --- /dev/null +++ b/firebaseai/FirebaseAIExample/ChatExample/Screens/ChatScreen.swift @@ -0,0 +1,69 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import FirebaseAI +import SwiftUI +import ConversationKit + +struct ChatScreen: View { + let firebaseService: FirebaseAI + @StateObject var viewModel: ChatViewModel + + init(firebaseService: FirebaseAI, sample: Sample? = nil) { + self.firebaseService = firebaseService + _viewModel = + StateObject(wrappedValue: ChatViewModel(firebaseService: firebaseService, + sample: sample)) + } + + var body: some View { + NavigationStack { + ConversationView(messages: $viewModel.messages, + userPrompt: viewModel.initialPrompt) { message in + MessageView(message: message) + } + .disableAttachments() + .onSendMessage { message in + Task { + await viewModel.sendMessage(message.content ?? "", streaming: true) + } + } + .onError { error in + viewModel.presentErrorDetails = true + } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } + .toolbar { + ToolbarItem(placement: .primaryAction) { + Button(action: newChat) { + Image(systemName: "square.and.pencil") + } + } + } + .navigationTitle(viewModel.title) + .navigationBarTitleDisplayMode(.inline) + } + } + + private func newChat() { + viewModel.startNewChat() + } +} + +#Preview { + ChatScreen(firebaseService: FirebaseAI.firebaseAI()) +} diff --git a/firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift b/firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift deleted file mode 100644 index 8798ee3e4..000000000 --- a/firebaseai/FirebaseAIExample/ChatExample/Screens/ConversationScreen.swift +++ /dev/null @@ -1,149 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import GenerativeAIUIComponents -import SwiftUI - -struct ConversationScreen: View { - let firebaseService: FirebaseAI - @StateObject var viewModel: ConversationViewModel - - @State - private var userPrompt = "" - - init(firebaseService: FirebaseAI, sample: Sample? = nil) { - self.firebaseService = firebaseService - _viewModel = - StateObject(wrappedValue: ConversationViewModel(firebaseService: firebaseService, - sample: sample)) - } - - enum FocusedField: Hashable { - case message - } - - @FocusState - var focusedField: FocusedField? - - var body: some View { - VStack { - ScrollViewReader { scrollViewProxy in - List { - ForEach(viewModel.messages) { message in - MessageView(message: message) - } - if let error = viewModel.error { - ErrorView(error: error) - .tag("errorView") - } - } - .listStyle(.plain) - .onChange(of: viewModel.messages, perform: { newValue in - if viewModel.hasError { - // wait for a short moment to make sure we can actually scroll to the bottom - DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { - withAnimation { - scrollViewProxy.scrollTo("errorView", anchor: .bottom) - } - focusedField = .message - } - } else { - guard let lastMessage = viewModel.messages.last else { return } - - // wait for a short moment to make sure we can actually scroll to the bottom - DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { - withAnimation { - scrollViewProxy.scrollTo(lastMessage.id, anchor: .bottom) - } - focusedField = .message - } - } - }) - } - InputField("Message...", text: $userPrompt) { - Image(systemName: viewModel.busy ? 
"stop.circle.fill" : "arrow.up.circle.fill") - .font(.title) - } - .focused($focusedField, equals: .message) - .onSubmit { sendOrStop() } - } - .onTapGesture { - focusedField = nil - } - .toolbar { - ToolbarItem(placement: .primaryAction) { - Button(action: newChat) { - Image(systemName: "square.and.pencil") - } - } - } - .navigationTitle(viewModel.title) - .navigationBarTitleDisplayMode(.inline) - .onAppear { - focusedField = .message - // Set initial prompt from viewModel if available - if userPrompt.isEmpty && !viewModel.initialPrompt.isEmpty { - userPrompt = viewModel.initialPrompt - } - } - } - - private func sendMessage() { - Task { - let prompt = userPrompt - userPrompt = "" - await viewModel.sendMessage(prompt, streaming: true) - } - } - - private func sendOrStop() { - focusedField = nil - - if viewModel.busy { - viewModel.stop() - } else { - sendMessage() - } - } - - private func newChat() { - viewModel.startNewChat() - userPrompt = "" - } -} - -struct ConversationScreen_Previews: PreviewProvider { - struct ContainerView: View { - @StateObject var viewModel = ConversationViewModel(firebaseService: FirebaseAI - .firebaseAI(), sample: nil) // Example service init - - var body: some View { - ConversationScreen(firebaseService: FirebaseAI.firebaseAI()) - .onAppear { - viewModel.messages = ChatMessage.samples - } - } - } - - static var previews: some View { - NavigationStack { - ConversationScreen(firebaseService: FirebaseAI.firebaseAI()) - } - } -} diff --git a/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ConversationViewModel.swift b/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ChatViewModel.swift similarity index 78% rename from firebaseai/FirebaseAIExample/ChatExample/ViewModels/ConversationViewModel.swift rename to firebaseai/FirebaseAIExample/ChatExample/ViewModels/ChatViewModel.swift index 7fdf30d88..c9aead280 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ConversationViewModel.swift +++ b/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ChatViewModel.swift @@ -19,10 +19,9 @@ #endif import Foundation import UIKit -import GenerativeAIUIComponents @MainActor -class ConversationViewModel: ObservableObject { +class ChatViewModel: ObservableObject { /// This array holds both the user's and the system's chat messages @Published var messages = [ChatMessage]() @@ -34,6 +33,8 @@ class ConversationViewModel: ObservableObject { return error != nil } + @Published var presentErrorDetails: Bool = false + @Published var initialPrompt: String = "" @Published var title: String = "" @@ -99,11 +100,11 @@ class ConversationViewModel: ObservableObject { } // first, add the user's message to the chat - let userMessage = ChatMessage(message: text, participant: .user) + let userMessage = ChatMessage(content: text, participant: .user) messages.append(userMessage) // add a pending message while we're waiting for a response from the backend - let systemMessage = ChatMessage.pending(participant: .system) + let systemMessage = ChatMessage.pending(participant: .other) messages.append(systemMessage) do { @@ -111,7 +112,8 @@ class ConversationViewModel: ObservableObject { for try await chunk in responseStream { messages[messages.count - 1].pending = false if let text = chunk.text { - messages[messages.count - 1].message += text + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? 
"") + text } if let candidate = chunk.candidates.first { @@ -124,7 +126,11 @@ class ConversationViewModel: ObservableObject { } catch { self.error = error print(error.localizedDescription) - messages.removeLast() + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage } } } @@ -139,11 +145,11 @@ class ConversationViewModel: ObservableObject { } // first, add the user's message to the chat - let userMessage = ChatMessage(message: text, participant: .user) + let userMessage = ChatMessage(content: text, participant: .user) messages.append(userMessage) // add a pending message while we're waiting for a response from the backend - let systemMessage = ChatMessage.pending(participant: .system) + let systemMessage = ChatMessage.pending(participant: .other) messages.append(systemMessage) do { @@ -152,7 +158,7 @@ class ConversationViewModel: ObservableObject { if let responseText = response?.text { // replace pending message with backend response - messages[messages.count - 1].message = responseText + messages[messages.count - 1].content = responseText messages[messages.count - 1].pending = false if let candidate = response?.candidates.first { @@ -164,7 +170,11 @@ class ConversationViewModel: ObservableObject { } catch { self.error = error print(error.localizedDescription) - messages.removeLast() + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage } } } diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/ErrorView.swift b/firebaseai/FirebaseAIExample/ChatExample/Views/ErrorView.swift deleted file mode 100644 index bd321426c..000000000 --- a/firebaseai/FirebaseAIExample/ChatExample/Views/ErrorView.swift +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import SwiftUI - -struct ErrorView: View { - var error: Error - @State private var isDetailsSheetPresented = false - var body: some View { - HStack { - Text("An error occurred.") - Button(action: { isDetailsSheetPresented.toggle() }) { - Image(systemName: "info.circle") - } - } - .frame(maxWidth: .infinity, alignment: .center) - .listRowSeparator(.hidden) - .sheet(isPresented: $isDetailsSheetPresented) { - ErrorDetailsView(error: error) - } - } -} - -#Preview { - NavigationView { - let errorPromptBlocked = GenerateContentError.promptBlocked( - response: GenerateContentResponse( - candidates: [ - Candidate( - content: ModelContent(role: "model", parts: [ - """ - A _hypothetical_ model response. - Cillum ex aliqua amet aliquip labore amet eiusmod consectetur reprehenderit sit commodo. 
- """, - ]), - safetyRatings: [ - SafetyRating( - category: .dangerousContent, - probability: .high, - probabilityScore: 0.8, - severity: .medium, - severityScore: 0.9, - blocked: true - ), - SafetyRating( - category: .harassment, - probability: .low, - probabilityScore: 0.5, - severity: .low, - severityScore: 0.6, - blocked: false - ), - SafetyRating( - category: .hateSpeech, - probability: .low, - probabilityScore: 0.3, - severity: .medium, - severityScore: 0.2, - blocked: false - ), - SafetyRating( - category: .sexuallyExplicit, - probability: .low, - probabilityScore: 0.2, - severity: .negligible, - severityScore: 0.5, - blocked: false - ), - ], - finishReason: FinishReason.other, - citationMetadata: nil - ), - ] - ) - ) - List { - MessageView(message: ChatMessage.samples[0]) - MessageView(message: ChatMessage.samples[1]) - ErrorView(error: errorPromptBlocked) - } - .listStyle(.plain) - .navigationTitle("Chat example") - } -} diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift b/firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift index f35dd7b35..b0f10ce99 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift +++ b/firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift @@ -41,17 +41,31 @@ extension View { } struct MessageContentView: View { + @Environment(\.presentErrorAction) var presentErrorAction var message: ChatMessage var body: some View { if message.pending { BouncingDots() } else { + // Error Message + if let error = message.error { + HStack { + Text("An error occurred.") + Button("More information", systemImage: "info.circle") { + presentErrorAction?(error) + } + .labelStyle(.iconOnly) + } + } + // Grounded Response - if let groundingMetadata = message.groundingMetadata { + else if let groundingMetadata = message.groundingMetadata { GroundedResponseView(message: message, groundingMetadata: groundingMetadata) - } else { - // Non-grounded response + } + + // Non-grounded response + else { ResponseTextView(message: message) } } @@ -62,11 +76,11 @@ struct ResponseTextView: View { var message: ChatMessage var body: some View { - Markdown(message.message) + Markdown(message.content ?? "") .markdownTextStyle { FontFamilyVariant(.normal) FontSize(.em(0.85)) - ForegroundColor(message.participant == .system ? Color(UIColor.label) : .white) + ForegroundColor(message.participant == .other ? Color(UIColor.label) : .white) } .markdownBlockStyle(\.codeBlock) { configuration in configuration.label @@ -87,24 +101,41 @@ struct ResponseTextView: View { struct MessageView: View { var message: ChatMessage + private var participantLabel: String { + message.participant == .user ? "User" : "Model" + } + var body: some View { - HStack { - if message.participant == .user { - Spacer() - } - MessageContentView(message: message) - .padding(10) - .background(message.participant == .system - ? Color(UIColor.systemFill) - : Color(UIColor.systemBlue)) - .roundedCorner(10, - corners: [ - .topLeft, - .topRight, - message.participant == .system ? .bottomRight : .bottomLeft, - ]) - if message.participant == .system { - Spacer() + VStack(alignment: message.participant == .user ? .trailing : .leading, spacing: 4) { + // Sender label + Text(participantLabel) + .font(.caption2) + .fontWeight(.medium) + .foregroundColor(.secondary) + .textCase(.uppercase) + .padding(.horizontal, 8) + .padding(.vertical, 2) + .frame(maxWidth: .infinity, alignment: message.participant == .user ? 
.trailing : .leading) + + // Message content + HStack { + if message.participant == .user { + Spacer() + } + MessageContentView(message: message) + .padding(10) + .background(message.participant == .other + ? Color(UIColor.systemFill) + : Color(UIColor.systemBlue)) + .roundedCorner(10, + corners: [ + .topLeft, + .topRight, + message.participant == .other ? .bottomRight : .bottomLeft, + ]) + if message.participant == .other { + Spacer() + } } } .listRowSeparator(.hidden) @@ -118,7 +149,7 @@ struct MessageView_Previews: PreviewProvider { MessageView(message: ChatMessage.samples[0]) MessageView(message: ChatMessage.samples[1]) MessageView(message: ChatMessage.samples[2]) - MessageView(message: ChatMessage(message: "Hello!", participant: .system, pending: true)) + MessageView(message: ChatMessage(content: "Hello!", participant: .other, pending: true)) } .listStyle(.plain) .navigationTitle("Chat example") diff --git a/firebaseai/FirebaseAIExample/ContentView.swift b/firebaseai/FirebaseAIExample/ContentView.swift index 0cc5b16a5..5af66fad7 100644 --- a/firebaseai/FirebaseAIExample/ContentView.swift +++ b/firebaseai/FirebaseAIExample/ContentView.swift @@ -14,7 +14,6 @@ import SwiftUI import FirebaseAI -import GenerativeAIUIComponents enum BackendOption: String, CaseIterable, Identifiable { case googleAI = "Gemini Developer API" @@ -112,14 +111,14 @@ struct ContentView: View { @ViewBuilder private func destinationView(for sample: Sample) -> some View { switch sample.navRoute { - case "ConversationScreen": - ConversationScreen(firebaseService: firebaseService, sample: sample) + case "ChatScreen": + ChatScreen(firebaseService: firebaseService, sample: sample) case "ImagenScreen": ImagenScreen(firebaseService: firebaseService, sample: sample) case "PhotoReasoningScreen": PhotoReasoningScreen(firebaseService: firebaseService) case "FunctionCallingScreen": - FunctionCallingScreen(firebaseService: firebaseService) + FunctionCallingScreen(firebaseService: firebaseService, sample: sample) default: EmptyView() } diff --git a/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift b/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift index 8af6b5568..732900985 100644 --- a/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift +++ b/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift @@ -12,106 +12,50 @@ // See the License for the specific language governing permissions and // limitations under the License. -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import GenerativeAIUIComponents +import FirebaseAI import SwiftUI +import ConversationKit struct FunctionCallingScreen: View { let firebaseService: FirebaseAI @StateObject var viewModel: FunctionCallingViewModel - @State - private var userPrompt = "What is 100 Euros in U.S. Dollars?" - - init(firebaseService: FirebaseAI) { + init(firebaseService: FirebaseAI, sample: Sample? = nil) { self.firebaseService = firebaseService _viewModel = - StateObject(wrappedValue: FunctionCallingViewModel(firebaseService: firebaseService)) - } - - enum FocusedField: Hashable { - case message + StateObject(wrappedValue: FunctionCallingViewModel(firebaseService: firebaseService, + sample: sample)) } - @FocusState - var focusedField: FocusedField? 
- var body: some View { - VStack { - ScrollViewReader { scrollViewProxy in - List { - Text("Interact with a currency conversion API using function calling in Gemini.") - ForEach(viewModel.messages) { message in - MessageView(message: message) - } - if let error = viewModel.error { - ErrorView(error: error) - .tag("errorView") - } - } - .listStyle(.plain) - .onChange(of: viewModel.messages, perform: { newValue in - if viewModel.hasError { - // Wait for a short moment to make sure we can actually scroll to the bottom. - DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { - withAnimation { - scrollViewProxy.scrollTo("errorView", anchor: .bottom) - } - focusedField = .message - } - } else { - guard let lastMessage = viewModel.messages.last else { return } - - // Wait for a short moment to make sure we can actually scroll to the bottom. - DispatchQueue.main.asyncAfter(deadline: .now() + 0.05) { - withAnimation { - scrollViewProxy.scrollTo(lastMessage.id, anchor: .bottom) - } - focusedField = .message - } - } - }) - .onTapGesture { - focusedField = nil + NavigationStack { + ConversationView(messages: $viewModel.messages, + userPrompt: viewModel.initialPrompt) { message in + MessageView(message: message) + } + .disableAttachments() + .onSendMessage { message in + Task { + await viewModel.sendMessage(message.content ?? "", streaming: true) } } - InputField("Message...", text: $userPrompt) { - Image(systemName: viewModel.busy ? "stop.circle.fill" : "arrow.up.circle.fill") - .font(.title) + .onError { error in + viewModel.presentErrorDetails = true } - .focused($focusedField, equals: .message) - .onSubmit { sendOrStop() } - } - .toolbar { - ToolbarItem(placement: .primaryAction) { - Button(action: newChat) { - Image(systemName: "square.and.pencil") + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) } } - } - .navigationTitle("Function Calling") - .onAppear { - focusedField = .message - } - } - - private func sendMessage() { - Task { - let prompt = userPrompt - userPrompt = "" - await viewModel.sendMessage(prompt, streaming: true) - } - } - - private func sendOrStop() { - if viewModel.busy { - viewModel.stop() - } else { - sendMessage() + .toolbar { + ToolbarItem(placement: .primaryAction) { + Button(action: newChat) { + Image(systemName: "square.and.pencil") + } + } + } + .navigationTitle(viewModel.title) + .navigationBarTitleDisplayMode(.inline) } } @@ -120,21 +64,6 @@ struct FunctionCallingScreen: View { } } -struct FunctionCallingScreen_Previews: PreviewProvider { - struct ContainerView: View { - @StateObject var viewModel = FunctionCallingViewModel(firebaseService: FirebaseAI.firebaseAI()) - - var body: some View { - FunctionCallingScreen(firebaseService: FirebaseAI.firebaseAI()) - .onAppear { - viewModel.messages = ChatMessage.samples - } - } - } - - static var previews: some View { - NavigationStack { - FunctionCallingScreen(firebaseService: FirebaseAI.firebaseAI()) - } - } +#Preview { + FunctionCallingScreen(firebaseService: FirebaseAI.firebaseAI()) } diff --git a/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift b/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift index a5a4412c9..cc48d50f4 100644 --- a/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift +++ b/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift @@ -33,39 +33,57 @@ class 
FunctionCallingViewModel: ObservableObject { return error != nil } - /// Function calls pending processing - private var functionCalls = [FunctionCallPart]() + @Published var presentErrorDetails: Bool = false + + @Published var initialPrompt: String = "" + @Published var title: String = "" private var model: GenerativeModel private var chat: Chat private var chatTask: Task? - init(firebaseService: FirebaseAI) { // Accept FirebaseAI instance - model = firebaseService.generativeModel( - modelName: "gemini-2.0-flash-001", - tools: [.functionDeclarations([ - FunctionDeclaration( - name: "get_exchange_rate", - description: "Get the exchange rate for currencies between countries", - parameters: [ - "currency_from": .enumeration( - values: ["USD", "EUR", "JPY", "GBP", "AUD", "CAD"], - description: "The currency to convert from in ISO 4217 format" - ), - "currency_to": .enumeration( - values: ["USD", "EUR", "JPY", "GBP", "AUD", "CAD"], - description: "The currency to convert to in ISO 4217 format" - ), - ] - ), - ])] - ) - chat = model.startChat() // Initialize chat with the model from the service + private var sample: Sample? + + init(firebaseService: FirebaseAI, sample: Sample? = nil) { + self.sample = sample + + // create a generative model with sample data + model = firebaseService.generativeModel( + modelName: "gemini-2.0-flash-001", + tools: sample?.tools, + systemInstruction: sample?.systemInstruction + ) + + chat = model.startChat() + + initialPrompt = sample?.initialPrompt ?? "" + title = sample?.title ?? "" } func sendMessage(_ text: String, streaming: Bool = true) async { error = nil + if streaming { + await internalSendMessageStreaming(text) + } else { + await internalSendMessage(text) + } + } + + func startNewChat() { + stop() + error = nil + chat = model.startChat() + messages.removeAll() + initialPrompt = "" + } + + func stop() { + chatTask?.cancel() + error = nil + } + + private func internalSendMessageStreaming(_ text: String) async { chatTask?.cancel() chatTask = Task { @@ -75,185 +93,182 @@ class FunctionCallingViewModel: ObservableObject { } // first, add the user's message to the chat - let userMessage = ChatMessage(message: text, participant: .user) + let userMessage = ChatMessage(content: text, participant: .user) messages.append(userMessage) // add a pending message while we're waiting for a response from the backend - let systemMessage = ChatMessage.pending(participant: .system) + let systemMessage = ChatMessage.pending(participant: .other) messages.append(systemMessage) - print(messages) do { - repeat { - if streaming { - try await internalSendMessageStreaming(text) + let responseStream = try chat.sendMessageStream(text) + + for try await chunk in responseStream { + if !chunk.functionCalls.isEmpty { + try await handleFunctionCallsStreaming(chunk) } else { - try await internalSendMessage(text) + if let text = chunk.text { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + text + messages[messages.count - 1].pending = false + } } - } while !functionCalls.isEmpty + } + } catch { self.error = error print(error.localizedDescription) - messages.removeLast() + let errorMessage = ChatMessage(content: "An error occurred. 
Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage } } } - func startNewChat() { - stop() - error = nil - chat = model.startChat() - messages.removeAll() - } - - func stop() { + private func internalSendMessage(_ text: String) async { chatTask?.cancel() - error = nil - } - private func internalSendMessageStreaming(_ text: String) async throws { - let functionResponses = try await processFunctionCalls() - let responseStream: AsyncThrowingStream - if functionResponses.isEmpty { - responseStream = try chat.sendMessageStream(text) - } else { - for functionResponse in functionResponses { - messages.insert(functionResponse.chatMessage(), at: messages.count - 1) + chatTask = Task { + busy = true + defer { + busy = false } - responseStream = try chat.sendMessageStream([functionResponses.modelContent()]) - } - for try await chunk in responseStream { - processResponseContent(content: chunk) - } - } - private func internalSendMessage(_ text: String) async throws { - let functionResponses = try await processFunctionCalls() - let response: GenerateContentResponse - if functionResponses.isEmpty { - response = try await chat.sendMessage(text) - } else { - for functionResponse in functionResponses { - messages.insert(functionResponse.chatMessage(), at: messages.count - 1) - } - response = try await chat.sendMessage([functionResponses.modelContent()]) - } - processResponseContent(content: response) - } + // first, add the user's message to the chat + let userMessage = ChatMessage(content: text, participant: .user) + messages.append(userMessage) - func processResponseContent(content: GenerateContentResponse) { - guard let candidate = content.candidates.first else { - fatalError("No candidate.") - } + // add a pending message while we're waiting for a response from the backend + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) - for part in candidate.content.parts { - switch part { - case let textPart as TextPart: - // replace pending message with backend response - messages[messages.count - 1].message += textPart.text - messages[messages.count - 1].pending = false - case let functionCallPart as FunctionCallPart: - messages.insert(functionCallPart.chatMessage(), at: messages.count - 1) - functionCalls.append(functionCallPart) - default: - fatalError("Unsupported response part: \(part)") + do { + let response = try await chat.sendMessage(text) + + if !response.functionCalls.isEmpty { + try await handleFunctionCalls(response) + } else { + if let responseText = response.text { + // replace pending message with backend response + messages[messages.count - 1].content = responseText + messages[messages.count - 1].pending = false + } + } + } catch { + self.error = error + print(error.localizedDescription) + let errorMessage = ChatMessage(content: "An error occurred. 
Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage } } } - func processFunctionCalls() async throws -> [FunctionResponsePart] { + private func handleFunctionCallsStreaming(_ response: GenerateContentResponse) async throws { var functionResponses = [FunctionResponsePart]() - for functionCall in functionCalls { + + for functionCall in response.functionCalls { switch functionCall.name { - case "get_exchange_rate": - let exchangeRates = getExchangeRate(args: functionCall.args) - functionResponses.append(FunctionResponsePart( - name: "get_exchange_rate", - response: exchangeRates - )) + case "fetchWeather": + guard case let .string(city) = functionCall.args["city"], + case let .string(state) = functionCall.args["state"], + case let .string(date) = functionCall.args["date"] else { + throw NSError( + domain: "FunctionCallingError", + code: 0, + userInfo: [ + NSLocalizedDescriptionKey: "Malformed arguments for fetchWeather: \(functionCall.args)", + ] + ) + } + + functionResponses.append( + FunctionResponsePart( + name: functionCall.name, + response: WeatherService.fetchWeather(city: city, state: state, date: date) + ) + ) default: - fatalError("Unknown function named \"\(functionCall.name)\".") + print("Unknown function named \"\(functionCall.name)\".") } } - functionCalls = [] - return functionResponses - } - - // MARK: - Callable Functions - - func getExchangeRate(args: JSONObject) -> JSONObject { - // 1. Validate and extract the parameters provided by the model (from a `FunctionCall`) - guard case let .string(from) = args["currency_from"] else { - fatalError("Missing `currency_from` parameter.") - } - guard case let .string(to) = args["currency_to"] else { - fatalError("Missing `currency_to` parameter.") - } - - // 2. Get the exchange rate - let allRates: [String: [String: Double]] = [ - "AUD": ["CAD": 0.89265, "EUR": 0.6072, "GBP": 0.51714, "JPY": 97.75, "USD": 0.66379], - "CAD": ["AUD": 1.1203, "EUR": 0.68023, "GBP": 0.57933, "JPY": 109.51, "USD": 0.74362], - "EUR": ["AUD": 1.6469, "CAD": 1.4701, "GBP": 0.85168, "JPY": 160.99, "USD": 1.0932], - "GBP": ["AUD": 1.9337, "CAD": 1.7261, "EUR": 1.1741, "JPY": 189.03, "USD": 1.2836], - "JPY": ["AUD": 0.01023, "CAD": 0.00913, "EUR": 0.00621, "GBP": 0.00529, "USD": 0.00679], - "USD": ["AUD": 1.5065, "CAD": 1.3448, "EUR": 0.91475, "GBP": 0.77907, "JPY": 147.26], - ] - guard let fromRates = allRates[from] else { - return ["error": .string("No data for currency \(from).")] - } - guard let toRate = fromRates[to] else { - return ["error": .string("No data for currency \(to).")] + if !functionResponses.isEmpty { + let finalResponse = try await chat + .sendMessageStream([ModelContent(role: "function", parts: functionResponses)]) + + for try await chunk in finalResponse { + guard let candidate = chunk.candidates.first else { + throw NSError( + domain: "FunctionCallingError", + code: 1, + userInfo: [NSLocalizedDescriptionKey: "No candidate in response chunk"] + ) + } + + for part in candidate.content.parts { + if let textPart = part as? TextPart { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + textPart.text + messages[messages.count - 1].pending = false + } + } + } } - - // 3. 
Return the exchange rates as a JSON object (returned to the model in a `FunctionResponse`) - return ["rates": .number(toRate)] } -} -private extension FunctionCallPart { - func chatMessage() -> ChatMessage { - let encoder = JSONEncoder() - encoder.outputFormatting = .prettyPrinted + private func handleFunctionCalls(_ response: GenerateContentResponse) async throws { + var functionResponses = [FunctionResponsePart]() - let jsonData: Data - do { - jsonData = try encoder.encode(self) - } catch { - fatalError("JSON Encoding Failed: \(error.localizedDescription)") - } - guard let json = String(data: jsonData, encoding: .utf8) else { - fatalError("Failed to convert JSON data to a String.") + for functionCall in response.functionCalls { + switch functionCall.name { + case "fetchWeather": + guard case let .string(city) = functionCall.args["city"], + case let .string(state) = functionCall.args["state"], + case let .string(date) = functionCall.args["date"] else { + throw NSError( + domain: "FunctionCallingError", + code: 0, + userInfo: [ + NSLocalizedDescriptionKey: "Malformed arguments for fetchWeather: \(functionCall.args)", + ] + ) + } + + functionResponses.append( + FunctionResponsePart( + name: functionCall.name, + response: WeatherService.fetchWeather(city: city, state: state, date: date) + ) + ) + default: + print("Unknown function named \"\(functionCall.name)\".") + } } - let messageText = "Function call requested by model:\n```\n\(json)\n```" - return ChatMessage(message: messageText, participant: .system) - } -} + if !functionResponses.isEmpty { + let finalResponse = try await chat + .sendMessage([ModelContent(role: "function", parts: functionResponses)]) -private extension FunctionResponsePart { - func chatMessage() -> ChatMessage { - let encoder = JSONEncoder() - encoder.outputFormatting = .prettyPrinted + guard let candidate = finalResponse.candidates.first else { + throw NSError( + domain: "FunctionCallingError", + code: 1, + userInfo: [NSLocalizedDescriptionKey: "No candidate in response"] + ) + } - let jsonData: Data - do { - jsonData = try encoder.encode(self) - } catch { - fatalError("JSON Encoding Failed: \(error.localizedDescription)") - } - guard let json = String(data: jsonData, encoding: .utf8) else { - fatalError("Failed to convert JSON data to a String.") + for part in candidate.content.parts { + if let textPart = part as? TextPart { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + textPart.text + messages[messages.count - 1].pending = false + } + } } - let messageText = "Function response returned by app:\n```\n\(json)\n```" - - return ChatMessage(message: messageText, participant: .user) - } -} - -private extension [FunctionResponsePart] { - func modelContent() -> ModelContent { - return ModelContent(role: "function", parts: self) } } diff --git a/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift b/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift index 469cb7586..8abe52513 100644 --- a/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift +++ b/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift @@ -12,7 +12,6 @@ // See the License for the specific language governing permissions and // limitations under the License. 
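Taken together, the FunctionCallingViewModel hunks above replace the currency-conversion loop with a single fetchWeather round trip: declare the function, let the model request it, run it locally, and send the result back as a dedicated "function" turn. A minimal non-streaming sketch of that flow against the same FirebaseAI API, using the WeatherService stub added later in this patch; the model name and prompt are illustrative:

import FirebaseAI

func runWeatherTurn() async throws {
  // Declare the callable function so the model knows its name and parameters.
  let model = FirebaseAI.firebaseAI().generativeModel(
    modelName: "gemini-2.0-flash-001",
    tools: [.functionDeclarations([
      FunctionDeclaration(
        name: "fetchWeather",
        description: "Get the weather conditions for a specific US city on a specific date",
        parameters: [
          "city": .string(description: "The US city of the location"),
          "state": .string(description: "The US state of the location"),
          "date": .string(description: "The date, in the format: YYYY-MM-DD"),
        ]
      ),
    ])]
  )
  let chat = model.startChat()

  let first = try await chat.sendMessage("What was the weather in Boston, MA on 2024-10-17?")

  // Execute each requested call locally and collect the structured results.
  var parts = [FunctionResponsePart]()
  for call in first.functionCalls where call.name == "fetchWeather" {
    guard case let .string(city) = call.args["city"],
          case let .string(state) = call.args["state"],
          case let .string(date) = call.args["date"] else { continue }
    parts.append(FunctionResponsePart(
      name: call.name,
      response: WeatherService.fetchWeather(city: city, state: state, date: date)
    ))
  }

  // Results go back as a "function" role turn; the model then produces
  // the final natural-language answer.
  let final = try await chat.sendMessage([ModelContent(role: "function", parts: parts)])
  print(final.text ?? "")
}

The view model spreads the same steps across the streaming and non-streaming paths so it can keep the pending-message UI updated between the two turns.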
-import GenerativeAIUIComponents import MarkdownUI import PhotosUI import SwiftUI diff --git a/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift b/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift index 031660665..4d546dc94 100644 --- a/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift +++ b/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift @@ -13,12 +13,8 @@ // limitations under the License. import SwiftUI -import GenerativeAIUIComponents -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif +import FirebaseAI +import ConversationKit struct ImagenScreen: View { let firebaseService: FirebaseAI @@ -45,14 +41,21 @@ struct ImagenScreen: View { ZStack { ScrollView { VStack { - InputField("Enter a prompt to generate an image", text: $userPrompt) { - Image( - systemName: viewModel.inProgress ? "stop.circle.fill" : "paperplane.circle.fill" - ) - .font(.title) + MessageComposerView(message: $userPrompt) + .padding(.bottom, 10) + .focused($focusedField, equals: .message) + .disableAttachments() + .onSubmitAction { sendOrStop() } + + if let error = viewModel.error { + HStack { + Text("An error occurred.") + Button("More information", systemImage: "info.circle") { + viewModel.presentErrorDetails = true + } + .labelStyle(.iconOnly) + } } - .focused($focusedField, equals: .message) - .onSubmit { sendOrStop() } let spacing: CGFloat = 10 LazyVGrid(columns: [ @@ -77,7 +80,13 @@ struct ImagenScreen: View { .onTapGesture { focusedField = nil } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } .navigationTitle("Imagen example") + .navigationBarTitleDisplayMode(.inline) .onAppear { focusedField = .message if userPrompt.isEmpty && !viewModel.initialPrompt.isEmpty { diff --git a/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift b/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift index a111bca57..f2e3efa2e 100644 --- a/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift +++ b/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift @@ -20,7 +20,6 @@ import Foundation import OSLog import SwiftUI -import GenerativeAIUIComponents @MainActor class ImagenViewModel: ObservableObject { @@ -33,7 +32,13 @@ class ImagenViewModel: ObservableObject { var images = [UIImage]() @Published - var errorMessage: String? + var error: Error? + var hasError: Bool { + return error != nil + } + + @Published + var presentErrorDetails: Bool = false @Published var inProgress = false @@ -88,6 +93,7 @@ class ImagenViewModel: ObservableObject { } } catch { if !Task.isCancelled { + self.error = error logger.error("Error generating images: \(error)") } } diff --git a/firebaseai/FirebaseAIExample/Views/FilterChipView.swift b/firebaseai/FirebaseAIExample/Views/FilterChipView.swift index 8c6ad2bf1..b3701db1e 100644 --- a/firebaseai/FirebaseAIExample/Views/FilterChipView.swift +++ b/firebaseai/FirebaseAIExample/Views/FilterChipView.swift @@ -13,7 +13,6 @@ // limitations under the License. import SwiftUI -import GenerativeAIUIComponents struct FilterChipView: View { let useCase: UseCase diff --git a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift index 139a391b5..af4c4680b 100644 --- a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift +++ b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift @@ -13,7 +13,6 @@ // limitations under the License. 
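The ImagenViewModel and ImagenScreen hunks above also switch error handling from a plain errorMessage string to the same error-plus-sheet pattern the chat screens use: the view model publishes the Error itself plus a Bool that drives a details sheet. A minimal sketch of that pattern in isolation; ExampleViewModel and ExampleScreen are hypothetical names, while ErrorDetailsView is the sample app's own view:

import SwiftUI

@MainActor
final class ExampleViewModel: ObservableObject {
  // Publish the error itself so the sheet can show full details,
  // and a separate flag so the view decides when to present it.
  @Published var error: Error?
  @Published var presentErrorDetails = false

  func run(_ work: () async throws -> Void) async {
    do { try await work() } catch { self.error = error }
  }
}

struct ExampleScreen: View {
  @StateObject private var viewModel = ExampleViewModel()

  var body: some View {
    VStack {
      if viewModel.error != nil {
        Button("More information", systemImage: "info.circle") {
          viewModel.presentErrorDetails = true
        }
      }
    }
    // ErrorDetailsView comes from the sample app (ErrorDetailsView.swift).
    .sheet(isPresented: $viewModel.presentErrorDetails) {
      if let error = viewModel.error {
        ErrorDetailsView(error: error)
      }
    }
  }
}

Keeping the Error (rather than a pre-rendered message string) is what lets every screen reuse one ErrorDetailsView for the full diagnostics.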
import SwiftUI -import GenerativeAIUIComponents struct SampleCardView: View { let sample: Sample diff --git a/firebaseai/FunctionCallingExample/Services/WeatherService.swift b/firebaseai/FunctionCallingExample/Services/WeatherService.swift new file mode 100644 index 000000000..ccadcda1f --- /dev/null +++ b/firebaseai/FunctionCallingExample/Services/WeatherService.swift @@ -0,0 +1,27 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import FirebaseAI +import Foundation +import UIKit + +class WeatherService { + public static func fetchWeather(city: String, state: String, date: String) -> JSONObject { + return [ + "temperature": .number(38), + "chancePrecipitation": .string("56%"), + "cloudCover": .string("partlyCloudy"), + ] + } +} diff --git a/firebaseai/GenerativeAIUIComponents/Package.swift b/firebaseai/GenerativeAIUIComponents/Package.swift deleted file mode 100644 index b174a6284..000000000 --- a/firebaseai/GenerativeAIUIComponents/Package.swift +++ /dev/null @@ -1,41 +0,0 @@ -// swift-tools-version: 5.9 -// The swift-tools-version declares the minimum version of Swift required to build this package. - -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
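WeatherService above is deliberately a canned stub: it always returns the same JSONObject, which is enough to exercise the function-calling round trip. A hedged sketch of what a live implementation could look like; the endpoint URL and JSON payload shape are hypothetical, and only the JSONObject return shape matches the stub:

import FirebaseAI
import Foundation

extension WeatherService {
  static func fetchWeatherLive(city: String, state: String,
                               date: String) async throws -> JSONObject {
    // Hypothetical REST endpoint; substitute a real weather API.
    var components = URLComponents(string: "https://example.com/v1/weather")!
    components.queryItems = [
      URLQueryItem(name: "city", value: city),
      URLQueryItem(name: "state", value: state),
      URLQueryItem(name: "date", value: date),
    ]
    let (data, _) = try await URLSession.shared.data(from: components.url!)
    let payload = try JSONDecoder().decode(Payload.self, from: data)

    // Convert the decoded payload into the JSONObject the model expects.
    return [
      "temperature": .number(payload.temperature),
      "chancePrecipitation": .string(payload.chancePrecipitation),
      "cloudCover": .string(payload.cloudCover),
    ]
  }

  private struct Payload: Decodable {
    let temperature: Double
    let chancePrecipitation: String
    let cloudCover: String
  }
}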
-import PackageDescription - -let package = Package( - name: "GenerativeAIUIComponents", - platforms: [ - .iOS(.v16), - ], - products: [ - .library( - name: "GenerativeAIUIComponents", - targets: ["GenerativeAIUIComponents"] - ), - ], - dependencies: [ - .package(url: "https://github.com/firebase/firebase-ios-sdk.git", from: "12.0.0"), - ], - targets: [ - .target( - name: "GenerativeAIUIComponents", - dependencies: [ - .product(name: "FirebaseAI", package: "firebase-ios-sdk"), - ] - ), - ] -) diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift b/firebaseai/UIComponents/Models/Sample.swift similarity index 87% rename from firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift rename to firebaseai/UIComponents/Models/Sample.swift index 3d7637a77..65649235b 100644 --- a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/Sample.swift +++ b/firebaseai/UIComponents/Models/Sample.swift @@ -53,7 +53,7 @@ extension Sample { description: "The user wants the model to help a new traveler" + " with travel tips", useCases: [.text], - navRoute: "ConversationScreen", + navRoute: "ChatScreen", chatHistory: [ ModelContent( role: "user", @@ -79,7 +79,7 @@ extension Sample { title: "Chatbot recommendations for courses", description: "A chatbot suggests courses for a performing arts program.", useCases: [.text], - navRoute: "ConversationScreen", + navRoute: "ChatScreen", initialPrompt: "I am interested in Performing Arts. I have taken Theater 1A.", systemInstruction: ModelContent(parts: "You are a chatbot for the county's performing and fine arts" + " program. You help students decide what course they will" + @@ -90,7 +90,7 @@ extension Sample { title: "Blog post creator", description: "Create a blog post from an image file stored in Cloud Storage.", useCases: [.image], - navRoute: "PhotoReasoningScreen", + navRoute: "ChatScreen", chatHistory: [ ModelContent(role: "user", parts: "Can you help me create a blog post about this image?"), ModelContent( @@ -111,7 +111,7 @@ extension Sample { title: "Gemini 2.0 Flash - image generation", description: "Generate and/or edit images using Gemini 2.0 Flash", useCases: [.image], - navRoute: "PhotoReasoningScreen", + navRoute: "ChatScreen", chatHistory: [ ModelContent(role: "user", parts: "Can you edit this image to make it brighter?"), ModelContent( @@ -126,7 +126,7 @@ extension Sample { title: "Hashtags for a video", description: "Generate hashtags for a video ad stored in Cloud Storage.", useCases: [.video], - navRoute: "ConversationScreen", + navRoute: "ChatScreen", chatHistory: [ ModelContent(role: "user", parts: "Can you suggest hashtags for my product video?"), ModelContent( @@ -140,7 +140,7 @@ extension Sample { title: "Summarize video", description: "Summarize a video and extract important dialogue.", useCases: [.video], - navRoute: "ConversationScreen", + navRoute: "ChatScreen", chatHistory: [ ModelContent(role: "user", parts: "Can you summarize this video for me?"), ModelContent( @@ -155,7 +155,7 @@ extension Sample { title: "Audio Summarization", description: "Summarize an audio file", useCases: [.audio], - navRoute: "ConversationScreen", + navRoute: "ChatScreen", chatHistory: [ ModelContent(role: "user", parts: "Can you summarize this audio recording?"), ModelContent( @@ -169,7 +169,7 @@ extension Sample { title: "Translation from audio", description: "Translate an audio file stored in Cloud Storage", useCases: [.audio], - navRoute: 
"ConversationScreen", + navRoute: "ChatScreen", chatHistory: [ ModelContent(role: "user", parts: "Can you translate this audio from Spanish to English?"), ModelContent( @@ -185,7 +185,7 @@ extension Sample { description: "Compare the contents of 2 documents." + " Only supported by the Vertex AI Gemini API because the documents are stored in Cloud Storage", useCases: [.document], - navRoute: "ConversationScreen", + navRoute: "ChatScreen", chatHistory: [ ModelContent(role: "user", parts: "Can you compare these two documents for me?"), ModelContent( @@ -197,18 +197,31 @@ extension Sample { ), // Function Calling Sample( - title: "Currency conversion", - description: "Use function calling to convert currency", + title: "Weather Chat", + description: "Use function calling to get the weather conditions" + + " for a specific US city on a specific date.", useCases: [.functionCalling, .text], navRoute: "FunctionCallingScreen", - initialPrompt: "What is 100 Euros in USD?" + initialPrompt: "What was the weather in Boston, MA on October 17, 2024?", + tools: [.functionDeclarations([ + FunctionDeclaration( + name: "fetchWeather", + description: "Get the weather conditions for a specific US city on a specific date", + parameters: [ + "city": .string(description: "The US city of the location"), + "state": .string(description: "The US state of the location"), + "date": .string(description: "The date for which to get the weather." + + " Date must be in the format: YYYY-MM-DD"), + ] + ), + ])] ), // Grounding Sample( title: "Grounding with Google Search", description: "Use Grounding with Google Search to get responses based on up-to-date information from the web.", useCases: [.text], - navRoute: "ConversationScreen", + navRoute: "ChatScreen", initialPrompt: "What's the weather in Chicago this weekend?", tools: [.googleSearch()] ), diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift b/firebaseai/UIComponents/Models/UseCase.swift similarity index 100% rename from firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/Models/UseCase.swift rename to firebaseai/UIComponents/Models/UseCase.swift diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/InputField.swift b/firebaseai/UIComponents/Views/InputField.swift similarity index 100% rename from firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/InputField.swift rename to firebaseai/UIComponents/Views/InputField.swift diff --git a/firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/MultimodalInputField.swift b/firebaseai/UIComponents/Views/MultimodalInputField.swift similarity index 100% rename from firebaseai/GenerativeAIUIComponents/Sources/GenerativeAIUIComponents/MultimodalInputField.swift rename to firebaseai/UIComponents/Views/MultimodalInputField.swift From d63db728b052d670053ece9e40dce8319128e6ba Mon Sep 17 00:00:00 2001 From: Haibo Yang Date: Tue, 2 Sep 2025 13:44:54 -0700 Subject: [PATCH 03/25] [FirebaseAI] Add Multimodal Analysis demos (#1750) --- .../project.pbxproj | 223 +++++++++------ .../ChatExample/Models/ChatMessage.swift | 21 +- .../ChatExample/Screens/ChatScreen.swift | 14 +- .../ViewModels/ChatViewModel.swift | 34 ++- .../ChatExample/Views/MessageView.swift | 28 +- .../FirebaseAIExample/ContentView.swift | 34 +-- .../Screens/FunctionCallingScreen.swift | 14 +- .../ViewModels/FunctionCallingViewModel.swift | 36 ++- .../Screens/PhotoReasoningScreen.swift | 92 ------ .../ViewModels/PhotoReasoningViewModel.swift | 123 
-------- .../Screens/GenerateContentScreen.swift | 89 ------ .../ViewModels/GenerateContentViewModel.swift | 70 ----- .../Views/SampleCardView.swift | 2 + .../Screens/GroundingScreen.swift | 67 +++++ .../ViewModels/GroundingViewModel.swift | 177 ++++++++++++ .../Views}/GoogleSearchSuggestionView.swift | 0 .../Views}/GroundedResponseView.swift | 0 .../ImagenExample/ImagenScreen.swift | 12 +- .../ImagenExample/ImagenViewModel.swift | 14 +- .../Models/MultimodalAttachment.swift | 267 ++++++++++++++++++ .../Preview Assets.xcassets}/Contents.json | 0 .../Screens/MultimodalScreen.swift | 202 +++++++++++++ .../ViewModels/MultimodalViewModel.swift | 217 ++++++++++++++ .../Views/AttachmentPreviewCard.swift | 188 ++++++++++++ firebaseai/UIComponents/Models/Sample.swift | 121 ++++---- firebaseai/UIComponents/Models/UseCase.swift | 1 + .../UIComponents/Views/InputField.swift | 83 ------ .../Views/MultimodalInputField.swift | 183 ------------ scripts/test.sh | 9 + 29 files changed, 1450 insertions(+), 871 deletions(-) delete mode 100644 firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift delete mode 100644 firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/ViewModels/PhotoReasoningViewModel.swift delete mode 100644 firebaseai/FirebaseAIExample/GenerativeAITextExample/Screens/GenerateContentScreen.swift delete mode 100644 firebaseai/FirebaseAIExample/GenerativeAITextExample/ViewModels/GenerateContentViewModel.swift create mode 100644 firebaseai/GroundingExample/Screens/GroundingScreen.swift create mode 100644 firebaseai/GroundingExample/ViewModels/GroundingViewModel.swift rename firebaseai/{FirebaseAIExample/ChatExample/Views/Grounding => GroundingExample/Views}/GoogleSearchSuggestionView.swift (100%) rename firebaseai/{FirebaseAIExample/ChatExample/Views/Grounding => GroundingExample/Views}/GroundedResponseView.swift (100%) rename firebaseai/{FirebaseAIExample => }/ImagenExample/ImagenScreen.swift (91%) rename firebaseai/{FirebaseAIExample => }/ImagenExample/ImagenViewModel.swift (84%) create mode 100644 firebaseai/MultimodalExample/Models/MultimodalAttachment.swift rename {storage/StorageExample (iOS)/Assets.xcassets => firebaseai/MultimodalExample/Preview Content/Preview Assets.xcassets}/Contents.json (100%) create mode 100644 firebaseai/MultimodalExample/Screens/MultimodalScreen.swift create mode 100644 firebaseai/MultimodalExample/ViewModels/MultimodalViewModel.swift create mode 100644 firebaseai/MultimodalExample/Views/AttachmentPreviewCard.swift delete mode 100644 firebaseai/UIComponents/Views/InputField.swift delete mode 100644 firebaseai/UIComponents/Views/MultimodalInputField.swift diff --git a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj index 2217a26aa..1c1279840 100644 --- a/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj +++ b/firebaseai/FirebaseAIExample.xcodeproj/project.pbxproj @@ -7,25 +7,27 @@ objects = { /* Begin PBXBuildFile section */ + 7210F4A22E52317E002FE9F2 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7210F4972E52317E002FE9F2 /* Preview Assets.xcassets */; }; + 7210F4A32E52317E002FE9F2 /* MultimodalViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7210F49B2E52317E002FE9F2 /* MultimodalViewModel.swift */; }; + 7210F4A42E52317E002FE9F2 /* AttachmentPreviewCard.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7210F49D2E52317E002FE9F2 /* AttachmentPreviewCard.swift */; }; + 7210F4A52E52317E002FE9F2 /* 
MultimodalAttachment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7210F4952E52317E002FE9F2 /* MultimodalAttachment.swift */; }; + 7210F4A62E52317E002FE9F2 /* MultimodalScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7210F4992E52317E002FE9F2 /* MultimodalScreen.swift */; }; + 7210F4B12E525A64002FE9F2 /* ConversationKit in Frameworks */ = {isa = PBXBuildFile; productRef = 7210F4B02E525A64002FE9F2 /* ConversationKit */; }; + 7210F4BA2E526AA1002FE9F2 /* GroundingScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7210F4B92E526A9B002FE9F2 /* GroundingScreen.swift */; }; + 7210F4BC2E526AB2002FE9F2 /* GroundingViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7210F4BB2E526AAA002FE9F2 /* GroundingViewModel.swift */; }; + 7210F4C82E527A39002FE9F2 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 7210F4C72E527A39002FE9F2 /* GoogleService-Info.plist */; }; 726490D92E3F39E000A92700 /* Sample.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D22E3F39D200A92700 /* Sample.swift */; }; 726490DA2E3F39E000A92701 /* UseCase.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D32E3F39D200A92700 /* UseCase.swift */; }; - 726490DC2E3F39E000A92703 /* InputField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D72E3F39D900A92700 /* InputField.swift */; }; - 726490DD2E3F39E000A92704 /* MultimodalInputField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 726490D82E3F39DC00A92700 /* MultimodalInputField.swift */; }; 72DA044F2E385DF3004FED7D /* ChatMessage.swift in Sources */ = {isa = PBXBuildFile; fileRef = 72DA044E2E385DF3004FED7D /* ChatMessage.swift */; }; 72E040752E448731003D4135 /* WeatherService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 72E040742E44872C003D4135 /* WeatherService.swift */; }; - 869200B32B879C4F00482873 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = 869200B22B879C4F00482873 /* GoogleService-Info.plist */; }; 86C1F4832BC726150026816F /* FunctionCallingScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */; }; 86C1F4842BC726150026816F /* FunctionCallingViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */; }; 88263BF12B239C11008AB09B /* ErrorDetailsView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 889873842B208563005B4896 /* ErrorDetailsView.swift */; }; - 884298E12E4B8110005F535F /* ConversationKit in Frameworks */ = {isa = PBXBuildFile; productRef = 884298E02E4B8110005F535F /* ConversationKit */; }; 8848C8332B0D04BC007B434F /* FirebaseAIExampleApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8848C8322B0D04BC007B434F /* FirebaseAIExampleApp.swift */; }; 8848C8352B0D04BC007B434F /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8848C8342B0D04BC007B434F /* ContentView.swift */; }; 8848C8372B0D04BD007B434F /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8848C8362B0D04BD007B434F /* Assets.xcassets */; }; 8848C83A2B0D04BD007B434F /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8848C8392B0D04BD007B434F /* Preview Assets.xcassets */; }; - 885D0CA12E4CB7CD00A217A0 /* ConversationKit in Frameworks */ = {isa = PBXBuildFile; productRef = 885D0CA02E4CB7CD00A217A0 /* ConversationKit */; }; 886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */ = {isa = PBXBuildFile; productRef = 886F95D72B17BA420036F07A /* MarkdownUI */; }; - 886F95DB2B17BAEF0036F07A /* 
PhotoReasoningViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */; }; - 886F95DC2B17BAEF0036F07A /* PhotoReasoningScreen.swift in Sources */ = {isa = PBXBuildFile; fileRef = 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */; }; 886F95DD2B17D5010036F07A /* MessageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F5A2B11133E00C08E95 /* MessageView.swift */; }; 886F95DF2B17D5010036F07A /* BouncingDots.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F5C2B11135000C08E95 /* BouncingDots.swift */; }; 886F95E02B17D5010036F07A /* ChatViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 88E10F562B1112F600C08E95 /* ChatViewModel.swift */; }; @@ -40,26 +42,26 @@ /* End PBXBuildFile section */ /* Begin PBXFileReference section */ + 7210F4952E52317E002FE9F2 /* MultimodalAttachment.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultimodalAttachment.swift; sourceTree = ""; }; + 7210F4972E52317E002FE9F2 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + 7210F4992E52317E002FE9F2 /* MultimodalScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultimodalScreen.swift; sourceTree = ""; }; + 7210F49B2E52317E002FE9F2 /* MultimodalViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultimodalViewModel.swift; sourceTree = ""; }; + 7210F49D2E52317E002FE9F2 /* AttachmentPreviewCard.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AttachmentPreviewCard.swift; sourceTree = ""; }; + 7210F4B92E526A9B002FE9F2 /* GroundingScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GroundingScreen.swift; sourceTree = ""; }; + 7210F4BB2E526AAA002FE9F2 /* GroundingViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GroundingViewModel.swift; sourceTree = ""; }; + 7210F4C72E527A39002FE9F2 /* GoogleService-Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = ""; }; 726490D22E3F39D200A92700 /* Sample.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Sample.swift; sourceTree = ""; }; 726490D32E3F39D200A92700 /* UseCase.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UseCase.swift; sourceTree = ""; }; - 726490D72E3F39D900A92700 /* InputField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InputField.swift; sourceTree = ""; }; - 726490D82E3F39DC00A92700 /* MultimodalInputField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MultimodalInputField.swift; sourceTree = ""; }; 72DA044E2E385DF3004FED7D /* ChatMessage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatMessage.swift; sourceTree = ""; }; 72E040742E44872C003D4135 /* WeatherService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WeatherService.swift; sourceTree = ""; }; - 869200B22B879C4F00482873 /* GoogleService-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = ""; }; 86C1F47E2BC726150026816F /* FunctionCallingScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = 
sourcecode.swift; path = FunctionCallingScreen.swift; sourceTree = ""; }; 86C1F4802BC726150026816F /* FunctionCallingViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FunctionCallingViewModel.swift; sourceTree = ""; }; - 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoReasoningViewModel.swift; sourceTree = ""; }; - 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = PhotoReasoningScreen.swift; sourceTree = ""; }; 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FirebaseAIExample.app; sourceTree = BUILT_PRODUCTS_DIR; }; 8848C8322B0D04BC007B434F /* FirebaseAIExampleApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FirebaseAIExampleApp.swift; sourceTree = ""; }; 8848C8342B0D04BC007B434F /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; 8848C8362B0D04BD007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 8848C8392B0D04BD007B434F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; - 8848C85C2B0D056D007B434F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; - 8848C85F2B0D056D007B434F /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 889873842B208563005B4896 /* ErrorDetailsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ErrorDetailsView.swift; sourceTree = ""; }; - 88E10F482B110D5400C08E95 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 88E10F4B2B110D5400C08E95 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; 88E10F542B1112CA00C08E95 /* ChatScreen.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatScreen.swift; sourceTree = ""; }; 88E10F562B1112F600C08E95 /* ChatViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatViewModel.swift; sourceTree = ""; }; @@ -78,9 +80,8 @@ isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( - 884298E12E4B8110005F535F /* ConversationKit in Frameworks */, + 7210F4B12E525A64002FE9F2 /* ConversationKit in Frameworks */, DE26D95F2DBB3E9F007E6668 /* FirebaseAI in Frameworks */, - 885D0CA12E4CB7CD00A217A0 /* ConversationKit in Frameworks */, 886F95D82B17BA420036F07A /* MarkdownUI in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; @@ -88,6 +89,93 @@ /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ + 7210F4962E52317E002FE9F2 /* Models */ = { + isa = PBXGroup; + children = ( + 7210F4952E52317E002FE9F2 /* MultimodalAttachment.swift */, + ); + path = Models; + sourceTree = ""; + }; + 7210F4982E52317E002FE9F2 /* Preview Content */ = { + isa = PBXGroup; + children = ( + 7210F4972E52317E002FE9F2 
/* Preview Assets.xcassets */, + ); + path = "Preview Content"; + sourceTree = ""; + }; + 7210F49A2E52317E002FE9F2 /* Screens */ = { + isa = PBXGroup; + children = ( + 7210F4992E52317E002FE9F2 /* MultimodalScreen.swift */, + ); + path = Screens; + sourceTree = ""; + }; + 7210F49C2E52317E002FE9F2 /* ViewModels */ = { + isa = PBXGroup; + children = ( + 7210F49B2E52317E002FE9F2 /* MultimodalViewModel.swift */, + ); + path = ViewModels; + sourceTree = ""; + }; + 7210F49E2E52317E002FE9F2 /* Views */ = { + isa = PBXGroup; + children = ( + 7210F49D2E52317E002FE9F2 /* AttachmentPreviewCard.swift */, + ); + path = Views; + sourceTree = ""; + }; + 7210F4A02E52317E002FE9F2 /* MultimodalExample */ = { + isa = PBXGroup; + children = ( + 7210F4962E52317E002FE9F2 /* Models */, + 7210F4982E52317E002FE9F2 /* Preview Content */, + 7210F49A2E52317E002FE9F2 /* Screens */, + 7210F49C2E52317E002FE9F2 /* ViewModels */, + 7210F49E2E52317E002FE9F2 /* Views */, + ); + path = MultimodalExample; + sourceTree = ""; + }; + 7210F4B42E526A5B002FE9F2 /* GroundingExample */ = { + isa = PBXGroup; + children = ( + 7210F4B82E526A82002FE9F2 /* Screens */, + 7210F4B62E526A69002FE9F2 /* ViewModels */, + 7210F4B52E526A64002FE9F2 /* Views */, + ); + path = GroundingExample; + sourceTree = ""; + }; + 7210F4B52E526A64002FE9F2 /* Views */ = { + isa = PBXGroup; + children = ( + AEE793DC2E256D3900708F02 /* GoogleSearchSuggestionView.swift */, + AEE793DD2E256D3900708F02 /* GroundedResponseView.swift */, + ); + path = Views; + sourceTree = ""; + }; + 7210F4B62E526A69002FE9F2 /* ViewModels */ = { + isa = PBXGroup; + children = ( + 7210F4BB2E526AAA002FE9F2 /* GroundingViewModel.swift */, + ); + path = ViewModels; + sourceTree = ""; + }; + 7210F4B82E526A82002FE9F2 /* Screens */ = { + isa = PBXGroup; + children = ( + 7210F4B92E526A9B002FE9F2 /* GroundingScreen.swift */, + ); + path = Screens; + sourceTree = ""; + }; 726490D12E3F39C900A92700 /* UIComponents */ = { isa = PBXGroup; children = ( @@ -109,8 +197,6 @@ 726490D62E3F39D600A92700 /* Views */ = { isa = PBXGroup; children = ( - 726490D72E3F39D900A92700 /* InputField.swift */, - 726490D82E3F39DC00A92700 /* MultimodalInputField.swift */, ); path = Views; sourceTree = ""; @@ -157,22 +243,6 @@ path = FunctionCallingExample; sourceTree = ""; }; - 8802666E2B0FC39000CF7CB6 /* ViewModels */ = { - isa = PBXGroup; - children = ( - 8802666F2B0FC39000CF7CB6 /* PhotoReasoningViewModel.swift */, - ); - path = ViewModels; - sourceTree = ""; - }; - 880266742B0FC39000CF7CB6 /* Screens */ = { - isa = PBXGroup; - children = ( - 880266752B0FC39000CF7CB6 /* PhotoReasoningScreen.swift */, - ); - path = Screens; - sourceTree = ""; - }; 88209C222B0FBE1700F64795 /* Frameworks */ = { isa = PBXGroup; children = ( @@ -183,15 +253,16 @@ 8848C8262B0D04BC007B434F = { isa = PBXGroup; children = ( + 7210F4C72E527A39002FE9F2 /* GoogleService-Info.plist */, + 7210F4A02E52317E002FE9F2 /* MultimodalExample */, 726490D12E3F39C900A92700 /* UIComponents */, - DEFECAA82D7B4CCD00EF9621 /* ImagenScreen */, - 869200B22B879C4F00482873 /* GoogleService-Info.plist */, + DEFECAA82D7B4CCD00EF9621 /* ImagenExample */, 8848C8312B0D04BC007B434F /* FirebaseAIExample */, - 8848C8572B0D056C007B434F /* GenerativeAIMultimodalExample */, 88E10F432B110D5300C08E95 /* ChatExample */, 86C1F4822BC726150026816F /* FunctionCallingExample */, 8848C8302B0D04BC007B434F /* Products */, 88209C222B0FBE1700F64795 /* Frameworks */, + 7210F4B42E526A5B002FE9F2 /* GroundingExample */, ); sourceTree = ""; }; @@ -223,25 +294,6 @@ path = "Preview Content"; 
sourceTree = ""; }; - 8848C8572B0D056C007B434F /* GenerativeAIMultimodalExample */ = { - isa = PBXGroup; - children = ( - 8802666E2B0FC39000CF7CB6 /* ViewModels */, - 880266742B0FC39000CF7CB6 /* Screens */, - 8848C85C2B0D056D007B434F /* Assets.xcassets */, - 8848C85E2B0D056D007B434F /* Preview Content */, - ); - path = GenerativeAIMultimodalExample; - sourceTree = ""; - }; - 8848C85E2B0D056D007B434F /* Preview Content */ = { - isa = PBXGroup; - children = ( - 8848C85F2B0D056D007B434F /* Preview Assets.xcassets */, - ); - path = "Preview Content"; - sourceTree = ""; - }; 88E10F432B110D5300C08E95 /* ChatExample */ = { isa = PBXGroup; children = ( @@ -249,7 +301,6 @@ 88E10F502B11123600C08E95 /* ViewModels */, 88E10F512B11124100C08E95 /* Views */, 88E10F532B1112B900C08E95 /* Screens */, - 88E10F482B110D5400C08E95 /* Assets.xcassets */, 88E10F4A2B110D5400C08E95 /* Preview Content */, ); path = ChatExample; @@ -274,7 +325,6 @@ 88E10F512B11124100C08E95 /* Views */ = { isa = PBXGroup; children = ( - AEE793DE2E256D3900708F02 /* Grounding */, 88E10F5A2B11133E00C08E95 /* MessageView.swift */, 88E10F5C2B11135000C08E95 /* BouncingDots.swift */, 889873842B208563005B4896 /* ErrorDetailsView.swift */, @@ -299,22 +349,13 @@ path = Views; sourceTree = ""; }; - AEE793DE2E256D3900708F02 /* Grounding */ = { - isa = PBXGroup; - children = ( - AEE793DC2E256D3900708F02 /* GoogleSearchSuggestionView.swift */, - AEE793DD2E256D3900708F02 /* GroundedResponseView.swift */, - ); - path = Grounding; - sourceTree = ""; - }; - DEFECAA82D7B4CCD00EF9621 /* ImagenScreen */ = { + DEFECAA82D7B4CCD00EF9621 /* ImagenExample */ = { isa = PBXGroup; children = ( DEFECAA62D7B4CCD00EF9621 /* ImagenScreen.swift */, DEFECAA72D7B4CCD00EF9621 /* ImagenViewModel.swift */, ); - path = ImagenScreen; + path = ImagenExample; sourceTree = ""; }; /* End PBXGroup section */ @@ -336,8 +377,7 @@ packageProductDependencies = ( 886F95D72B17BA420036F07A /* MarkdownUI */, DE26D95E2DBB3E9F007E6668 /* FirebaseAI */, - 884298E02E4B8110005F535F /* ConversationKit */, - 885D0CA02E4CB7CD00A217A0 /* ConversationKit */, + 7210F4B02E525A64002FE9F2 /* ConversationKit */, ); productName = GenerativeAIExample; productReference = 8848C82F2B0D04BC007B434F /* FirebaseAIExample.app */; @@ -371,7 +411,7 @@ 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */, DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */, DEFECAAB2D7BB49700EF9621 /* XCRemoteSwiftPackageReference "firebase-ios-sdk" */, - 885D0C9F2E4CB7CD00A217A0 /* XCRemoteSwiftPackageReference "ConversationKit" */, + 7210F4AF2E525A64002FE9F2 /* XCRemoteSwiftPackageReference "ConversationKit" */, ); productRefGroup = 8848C8302B0D04BC007B434F /* Products */; projectDirPath = ""; @@ -388,8 +428,9 @@ buildActionMask = 2147483647; files = ( 8848C83A2B0D04BD007B434F /* Preview Assets.xcassets in Resources */, + 7210F4C82E527A39002FE9F2 /* GoogleService-Info.plist in Resources */, 8848C8372B0D04BD007B434F /* Assets.xcassets in Resources */, - 869200B32B879C4F00482873 /* GoogleService-Info.plist in Resources */, + 7210F4A22E52317E002FE9F2 /* Preview Assets.xcassets in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -408,10 +449,13 @@ 8848C8332B0D04BC007B434F /* FirebaseAIExampleApp.swift in Sources */, 886F95E02B17D5010036F07A /* ChatViewModel.swift in Sources */, 886F95DD2B17D5010036F07A /* MessageView.swift in Sources */, - 886F95DC2B17BAEF0036F07A /* PhotoReasoningScreen.swift in Sources */, + 7210F4BC2E526AB2002FE9F2 /* GroundingViewModel.swift in 
Sources */, DEFECAA92D7B4CCD00EF9621 /* ImagenViewModel.swift in Sources */, + 7210F4A32E52317E002FE9F2 /* MultimodalViewModel.swift in Sources */, + 7210F4A42E52317E002FE9F2 /* AttachmentPreviewCard.swift in Sources */, + 7210F4A52E52317E002FE9F2 /* MultimodalAttachment.swift in Sources */, + 7210F4A62E52317E002FE9F2 /* MultimodalScreen.swift in Sources */, DEFECAAA2D7B4CCD00EF9621 /* ImagenScreen.swift in Sources */, - 886F95DB2B17BAEF0036F07A /* PhotoReasoningViewModel.swift in Sources */, 72E040752E448731003D4135 /* WeatherService.swift in Sources */, 886F95E12B17D5010036F07A /* ChatScreen.swift in Sources */, 72DA044F2E385DF3004FED7D /* ChatMessage.swift in Sources */, @@ -419,10 +463,9 @@ A5E8E3CA2C3B4F388A7A4A1A /* SampleCardView.swift in Sources */, AEE793DF2E256D3900708F02 /* GoogleSearchSuggestionView.swift in Sources */, AEE793E02E256D3900708F02 /* GroundedResponseView.swift in Sources */, + 7210F4BA2E526AA1002FE9F2 /* GroundingScreen.swift in Sources */, 726490D92E3F39E000A92700 /* Sample.swift in Sources */, 726490DA2E3F39E000A92701 /* UseCase.swift in Sources */, - 726490DC2E3F39E000A92703 /* InputField.swift in Sources */, - 726490DD2E3F39E000A92704 /* MultimodalInputField.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -632,20 +675,20 @@ /* End XCConfigurationList section */ /* Begin XCRemoteSwiftPackageReference section */ - 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = { + 7210F4AF2E525A64002FE9F2 /* XCRemoteSwiftPackageReference "ConversationKit" */ = { isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/gonzalezreal/swift-markdown-ui"; + repositoryURL = "https://github.com/peterfriese/ConversationKit"; requirement = { - kind = revision; - revision = 55441810c0f678c78ed7e2ebd46dde89228e02fc; + branch = main; + kind = branch; }; }; - 885D0C9F2E4CB7CD00A217A0 /* XCRemoteSwiftPackageReference "ConversationKit" */ = { + 88209C212B0FBDF700F64795 /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = { isa = XCRemoteSwiftPackageReference; - repositoryURL = "https://github.com/peterfriese/ConversationKit"; + repositoryURL = "https://github.com/gonzalezreal/swift-markdown-ui"; requirement = { - kind = upToNextMajorVersion; - minimumVersion = 0.0.2; + kind = revision; + revision = 55441810c0f678c78ed7e2ebd46dde89228e02fc; }; }; DEA09AC32B1FCE22001962D9 /* XCRemoteSwiftPackageReference "NetworkImage" */ = { @@ -667,13 +710,9 @@ /* End XCRemoteSwiftPackageReference section */ /* Begin XCSwiftPackageProductDependency section */ - 884298E02E4B8110005F535F /* ConversationKit */ = { - isa = XCSwiftPackageProductDependency; - productName = ConversationKit; - }; - 885D0CA02E4CB7CD00A217A0 /* ConversationKit */ = { + 7210F4B02E525A64002FE9F2 /* ConversationKit */ = { isa = XCSwiftPackageProductDependency; - package = 885D0C9F2E4CB7CD00A217A0 /* XCRemoteSwiftPackageReference "ConversationKit" */; + package = 7210F4AF2E525A64002FE9F2 /* XCRemoteSwiftPackageReference "ConversationKit" */; productName = ConversationKit; }; 886F95D72B17BA420036F07A /* MarkdownUI */ = { diff --git a/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift b/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift index 829d1d7ab..c672d4d02 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift +++ b/firebaseai/FirebaseAIExample/ChatExample/Models/ChatMessage.swift @@ -19,27 +19,34 @@ #endif import Foundation import ConversationKit +import UIKit public struct ChatMessage: Message { 
public let id: UUID = .init() public var content: String? - public let imageURL: String? public let participant: Participant public let error: (any Error)? public var pending = false public var groundingMetadata: GroundingMetadata? + public var attachments: [MultimodalAttachment] = [] + public var image: UIImage? + // required by the Message protocol, but not used in this app + public var imageURL: String? public init(content: String? = nil, imageURL: String? = nil, participant: Participant, - error: (any Error)? = nil, pending: Bool = false) { + error: (any Error)? = nil, pending: Bool = false, + attachments: [MultimodalAttachment] = [], image: UIImage? = nil) { self.content = content self.imageURL = imageURL self.participant = participant self.error = error self.pending = pending + self.attachments = attachments + self.image = image } // Protocol-required initializer - public init(content: String?, imageURL: String?, participant: Participant) { + public init(content: String?, imageURL: String? = nil, participant: Participant) { self.content = content self.imageURL = imageURL self.participant = participant @@ -58,16 +65,18 @@ extension ChatMessage { public static func == (lhs: ChatMessage, rhs: ChatMessage) -> Bool { lhs.id == rhs.id && lhs.content == rhs.content && - lhs.imageURL == rhs.imageURL && - lhs.participant == rhs.participant + lhs.participant == rhs.participant && + lhs.image == rhs.image && + lhs.attachments == rhs.attachments // intentionally ignore `error` } public func hash(into hasher: inout Hasher) { hasher.combine(id) hasher.combine(content) - hasher.combine(imageURL) hasher.combine(participant) + hasher.combine(image) + hasher.combine(attachments) // intentionally ignore `error` } } diff --git a/firebaseai/FirebaseAIExample/ChatExample/Screens/ChatScreen.swift b/firebaseai/FirebaseAIExample/ChatExample/Screens/ChatScreen.swift index 880a97ce4..4f35ddb11 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/Screens/ChatScreen.swift +++ b/firebaseai/FirebaseAIExample/ChatExample/Screens/ChatScreen.swift @@ -17,13 +17,13 @@ import SwiftUI import ConversationKit struct ChatScreen: View { - let firebaseService: FirebaseAI + let backendType: BackendOption @StateObject var viewModel: ChatViewModel - init(firebaseService: FirebaseAI, sample: Sample? = nil) { - self.firebaseService = firebaseService + init(backendType: BackendOption, sample: Sample? = nil) { + self.backendType = backendType _viewModel = - StateObject(wrappedValue: ChatViewModel(firebaseService: firebaseService, + StateObject(wrappedValue: ChatViewModel(backendType: backendType, sample: sample)) } @@ -35,9 +35,7 @@ struct ChatScreen: View { } .disableAttachments() .onSendMessage { message in - Task { - await viewModel.sendMessage(message.content ?? "", streaming: true) - } + await viewModel.sendMessage(message.content ?? 
"", streaming: true) } .onError { error in viewModel.presentErrorDetails = true @@ -65,5 +63,5 @@ struct ChatScreen: View { } #Preview { - ChatScreen(firebaseService: FirebaseAI.firebaseAI()) + ChatScreen(backendType: .googleAI) } diff --git a/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ChatViewModel.swift b/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ChatViewModel.swift index c9aead280..1ed3c2367 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ChatViewModel.swift +++ b/firebaseai/FirebaseAIExample/ChatExample/ViewModels/ChatViewModel.swift @@ -40,24 +40,27 @@ class ChatViewModel: ObservableObject { private var model: GenerativeModel private var chat: Chat - private var stopGenerating = false private var chatTask: Task? private var sample: Sample? + private var backendType: BackendOption - init(firebaseService: FirebaseAI, sample: Sample? = nil) { + init(backendType: BackendOption, sample: Sample? = nil) { self.sample = sample + self.backendType = backendType + + let firebaseService = backendType == .googleAI + ? FirebaseAI.firebaseAI(backend: .googleAI()) + : FirebaseAI.firebaseAI(backend: .vertexAI()) - // create a generative model with sample data model = firebaseService.generativeModel( - modelName: "gemini-2.0-flash-001", - tools: sample?.tools, + modelName: sample?.modelName ?? "gemini-2.5-flash", + generationConfig: sample?.generationConfig, systemInstruction: sample?.systemInstruction ) if let chatHistory = sample?.chatHistory, !chatHistory.isEmpty { - // Initialize with sample chat history if it's available messages = ChatMessage.from(chatHistory) chat = model.startChat(history: chatHistory) } else { @@ -116,13 +119,14 @@ class ChatViewModel: ObservableObject { .content = (messages[messages.count - 1].content ?? 
"") + text } - if let candidate = chunk.candidates.first { - if let groundingMetadata = candidate.groundingMetadata { - self.messages[self.messages.count - 1].groundingMetadata = groundingMetadata + if let inlineDataPart = chunk.inlineDataParts.first { + if let uiImage = UIImage(data: inlineDataPart.data) { + messages[messages.count - 1].image = uiImage + } else { + print("Failed to convert inline data to UIImage") } } } - } catch { self.error = error print(error.localizedDescription) @@ -160,11 +164,13 @@ class ChatViewModel: ObservableObject { // replace pending message with backend response messages[messages.count - 1].content = responseText messages[messages.count - 1].pending = false + } - if let candidate = response?.candidates.first { - if let groundingMetadata = candidate.groundingMetadata { - self.messages[self.messages.count - 1].groundingMetadata = groundingMetadata - } + if let inlineDataPart = response?.inlineDataParts.first { + if let uiImage = UIImage(data: inlineDataPart.data) { + messages[messages.count - 1].image = uiImage + } else { + print("Failed to convert inline data to UIImage") } } } catch { diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift b/firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift index b0f10ce99..c3468d1a2 100644 --- a/firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift +++ b/firebaseai/FirebaseAIExample/ChatExample/Views/MessageView.swift @@ -57,16 +57,28 @@ struct MessageContentView: View { } .labelStyle(.iconOnly) } - } + } else { + VStack(alignment: .leading, spacing: 8) { + if message.participant == .user && !message.attachments.isEmpty { + AttachmentPreviewScrollView(attachments: message.attachments) + } - // Grounded Response - else if let groundingMetadata = message.groundingMetadata { - GroundedResponseView(message: message, groundingMetadata: groundingMetadata) - } + if let image = message.image { + Image(uiImage: image) + .resizable() + .aspectRatio(contentMode: .fit) + .frame(maxWidth: 300, maxHeight: 300) + .clipShape(RoundedRectangle(cornerRadius: 8)) + } - // Non-grounded response - else { - ResponseTextView(message: message) + // Grounded Response + if let groundingMetadata = message.groundingMetadata { + GroundedResponseView(message: message, groundingMetadata: groundingMetadata) + } else { + // Non-grounded response + ResponseTextView(message: message) + } + } } } } diff --git a/firebaseai/FirebaseAIExample/ContentView.swift b/firebaseai/FirebaseAIExample/ContentView.swift index 5af66fad7..d3bcaa5ab 100644 --- a/firebaseai/FirebaseAIExample/ContentView.swift +++ b/firebaseai/FirebaseAIExample/ContentView.swift @@ -18,25 +18,20 @@ import FirebaseAI enum BackendOption: String, CaseIterable, Identifiable { case googleAI = "Gemini Developer API" case vertexAI = "Vertex AI Gemini API" - var id: String { rawValue } - var backendValue: FirebaseAI { - switch self { - case .googleAI: - return FirebaseAI.firebaseAI(backend: .googleAI()) - case .vertexAI: - return FirebaseAI.firebaseAI(backend: .vertexAI()) - } - } + var id: String { rawValue } } struct ContentView: View { @State private var selectedBackend: BackendOption = .googleAI - @State private var firebaseService: FirebaseAI = FirebaseAI.firebaseAI(backend: .googleAI()) - @State private var selectedUseCase: UseCase = .text + @State private var selectedUseCase: UseCase = .all var filteredSamples: [Sample] { - Sample.samples.filter { $0.useCases.contains(selectedUseCase) } + if selectedUseCase == .all { + return Sample.samples + } else 
{ + return Sample.samples.filter { $0.useCases.contains(selectedUseCase) } + } } let columns = [ @@ -102,9 +97,6 @@ struct ContentView: View { } .background(Color(.systemGroupedBackground)) .navigationTitle("Firebase AI Logic") - .onChange(of: selectedBackend) { newBackend in - firebaseService = newBackend.backendValue - } } } @@ -112,13 +104,15 @@ struct ContentView: View { private func destinationView(for sample: Sample) -> some View { switch sample.navRoute { case "ChatScreen": - ChatScreen(firebaseService: firebaseService, sample: sample) + ChatScreen(backendType: selectedBackend, sample: sample) case "ImagenScreen": - ImagenScreen(firebaseService: firebaseService, sample: sample) - case "PhotoReasoningScreen": - PhotoReasoningScreen(firebaseService: firebaseService) + ImagenScreen(backendType: selectedBackend, sample: sample) + case "MultimodalScreen": + MultimodalScreen(backendType: selectedBackend, sample: sample) case "FunctionCallingScreen": - FunctionCallingScreen(firebaseService: firebaseService, sample: sample) + FunctionCallingScreen(backendType: selectedBackend, sample: sample) + case "GroundingScreen": + GroundingScreen(backendType: selectedBackend, sample: sample) default: EmptyView() } diff --git a/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift b/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift index 732900985..7e814be3e 100644 --- a/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift +++ b/firebaseai/FirebaseAIExample/FunctionCallingExample/Screens/FunctionCallingScreen.swift @@ -17,13 +17,13 @@ import SwiftUI import ConversationKit struct FunctionCallingScreen: View { - let firebaseService: FirebaseAI + let backendType: BackendOption @StateObject var viewModel: FunctionCallingViewModel - init(firebaseService: FirebaseAI, sample: Sample? = nil) { - self.firebaseService = firebaseService + init(backendType: BackendOption, sample: Sample? = nil) { + self.backendType = backendType _viewModel = - StateObject(wrappedValue: FunctionCallingViewModel(firebaseService: firebaseService, + StateObject(wrappedValue: FunctionCallingViewModel(backendType: backendType, sample: sample)) } @@ -35,9 +35,7 @@ struct FunctionCallingScreen: View { } .disableAttachments() .onSendMessage { message in - Task { - await viewModel.sendMessage(message.content ?? "", streaming: true) - } + await viewModel.sendMessage(message.content ?? "", streaming: true) } .onError { error in viewModel.presentErrorDetails = true @@ -65,5 +63,5 @@ struct FunctionCallingScreen: View { } #Preview { - FunctionCallingScreen(firebaseService: FirebaseAI.firebaseAI()) + FunctionCallingScreen(backendType: .googleAI) } diff --git a/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift b/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift index cc48d50f4..14f17d06b 100644 --- a/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift +++ b/firebaseai/FirebaseAIExample/FunctionCallingExample/ViewModels/FunctionCallingViewModel.swift @@ -44,13 +44,19 @@ class FunctionCallingViewModel: ObservableObject { private var chatTask: Task? private var sample: Sample? + private var backendType: BackendOption - init(firebaseService: FirebaseAI, sample: Sample? = nil) { + init(backendType: BackendOption, sample: Sample? 
= nil) {
     self.sample = sample
+    self.backendType = backendType
+
+    let firebaseService = backendType == .googleAI
+      ? FirebaseAI.firebaseAI(backend: .googleAI())
+      : FirebaseAI.firebaseAI(backend: .vertexAI())
 
     // create a generative model with sample data
     model = firebaseService.generativeModel(
-      modelName: "gemini-2.0-flash-001",
+      modelName: sample?.modelName ?? "gemini-2.0-flash-001",
       tools: sample?.tools,
       systemInstruction: sample?.systemInstruction
     )
@@ -103,18 +109,24 @@ class FunctionCallingViewModel: ObservableObject {
 
     do {
       let responseStream = try chat.sendMessageStream(text)
+      var functionCalls = [FunctionCallPart]()
+
       for try await chunk in responseStream {
         if !chunk.functionCalls.isEmpty {
-          try await handleFunctionCallsStreaming(chunk)
-        } else {
-          if let text = chunk.text {
-            messages[messages.count - 1]
-              .content = (messages[messages.count - 1].content ?? "") + text
-            messages[messages.count - 1].pending = false
-          }
+          functionCalls.append(contentsOf: chunk.functionCalls)
+        }
+        if let text = chunk.text {
+          messages[messages.count - 1]
+            .content = (messages[messages.count - 1].content ?? "") + text
+          messages[messages.count - 1].pending = false
         }
       }
+      // If the model requested function calls, handle them only after the first
+      // stream has been fully consumed: send the function responses as a new turn,
+      // which starts a new stream, rather than calling back into the API mid-loop.
+      if !functionCalls.isEmpty {
+        try await handleFunctionCallsStreaming(functionCalls)
+      }
     } catch {
       self.error = error
       print(error.localizedDescription)
@@ -168,10 +180,10 @@ class FunctionCallingViewModel: ObservableObject {
     }
   }
 
-  private func handleFunctionCallsStreaming(_ response: GenerateContentResponse) async throws {
+  private func handleFunctionCallsStreaming(_ functionCalls: [FunctionCallPart]) async throws {
     var functionResponses = [FunctionResponsePart]()
 
-    for functionCall in response.functionCalls {
+    for functionCall in functionCalls {
       switch functionCall.name {
       case "fetchWeather":
         guard case let .string(city) = functionCall.args["city"],
@@ -198,7 +210,7 @@ class FunctionCallingViewModel: ObservableObject {
     }
 
     if !functionResponses.isEmpty {
-      let finalResponse = try await chat
+      let finalResponse = try chat
        .sendMessageStream([ModelContent(role: "function", parts: functionResponses)])
 
       for try await chunk in finalResponse {
diff --git a/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift b/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift
deleted file mode 100644
index 8abe52513..000000000
--- a/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/Screens/PhotoReasoningScreen.swift
+++ /dev/null
@@ -1,92 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
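For reference, the FunctionCallingViewModel hunk above buffers the model's function calls while the first stream is drained, and only then sends the responses as a new turn. A minimal, self-contained sketch of the same pattern; `respond` and `lookUpWeather` are hypothetical names, while the chat API matches the one used in this patch:

```swift
import FirebaseAI

// Hypothetical local handler; a real app would call its weather service here.
func lookUpWeather(_ args: JSONObject) -> JSONObject {
  ["temperature": .number(38), "unit": .string("F")]
}

// Sketch: drain the first stream fully, buffering any function calls,
// then send the function responses as a new turn (a second stream).
func respond(to prompt: String, chat: Chat) async throws {
  var functionCalls = [FunctionCallPart]()

  for try await chunk in try chat.sendMessageStream(prompt) {
    // Buffer calls instead of answering them mid-stream.
    functionCalls.append(contentsOf: chunk.functionCalls)
    if let text = chunk.text { print(text, terminator: "") }
  }

  guard !functionCalls.isEmpty else { return }

  let responses = functionCalls.map { call in
    FunctionResponsePart(name: call.name, response: lookUpWeather(call.args))
  }

  // Only now is the second stream started, carrying the function responses.
  let followUp = try chat.sendMessageStream(
    [ModelContent(role: "function", parts: responses)]
  )
  for try await chunk in followUp {
    if let text = chunk.text { print(text, terminator: "") }
  }
}
```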
- -import MarkdownUI -import PhotosUI -import SwiftUI -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif - -struct PhotoReasoningScreen: View { - let firebaseService: FirebaseAI - @StateObject var viewModel: PhotoReasoningViewModel - - init(firebaseService: FirebaseAI) { - self.firebaseService = firebaseService - _viewModel = - StateObject(wrappedValue: PhotoReasoningViewModel(firebaseService: firebaseService)) - } - - enum FocusedField: Hashable { - case message - } - - @FocusState - var focusedField: FocusedField? - - var body: some View { - VStack { - MultimodalInputField(text: $viewModel.userInput, selection: $viewModel.selectedItems) - .focused($focusedField, equals: .message) - .onSubmit { - onSendTapped() - } - - ScrollViewReader { scrollViewProxy in - List { - if let outputText = viewModel.outputText { - HStack(alignment: .top) { - if viewModel.inProgress { - ProgressView() - } else { - Image(systemName: "cloud.circle.fill") - .font(.title2) - } - - Markdown("\(outputText)") - } - .listRowSeparator(.hidden) - } - } - .listStyle(.plain) - } - } - .onTapGesture { - focusedField = nil - } - .navigationTitle("Multimodal example") - .onAppear { - focusedField = .message - } - } - - // MARK: - Actions - - private func onSendTapped() { - focusedField = nil - - Task { - await viewModel.reason() - } - } -} - -#Preview { - NavigationStack { - PhotoReasoningScreen(firebaseService: FirebaseAI.firebaseAI()) - } -} diff --git a/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/ViewModels/PhotoReasoningViewModel.swift b/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/ViewModels/PhotoReasoningViewModel.swift deleted file mode 100644 index 11113fb81..000000000 --- a/firebaseai/FirebaseAIExample/GenerativeAIMultimodalExample/ViewModels/PhotoReasoningViewModel.swift +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import Foundation -import OSLog -import PhotosUI -import SwiftUI - -@MainActor -class PhotoReasoningViewModel: ObservableObject { - // Maximum value for the larger of the two image dimensions (height and width) in pixels. This is - // being used to reduce the image size in bytes. - private static let largestImageDimension = 768.0 - - private var logger = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "generative-ai") - - @Published - var userInput: String = "" - - @Published - var selectedItems = [PhotosPickerItem]() - - @Published - var outputText: String? = nil - - @Published - var errorMessage: String? - - @Published - var inProgress = false - - private var model: GenerativeModel? 
- - init(firebaseService: FirebaseAI) { - model = firebaseService.generativeModel(modelName: "gemini-2.0-flash-001") - } - - func reason() async { - defer { - inProgress = false - } - guard let model else { - return - } - - do { - inProgress = true - errorMessage = nil - outputText = "" - - let prompt = "Look at the image(s), and then answer the following question: \(userInput)" - - var images = [any PartsRepresentable]() - for item in selectedItems { - if let data = try? await item.loadTransferable(type: Data.self) { - guard let image = UIImage(data: data) else { - logger.error("Failed to parse data as an image, skipping.") - continue - } - if image.size.fits(largestDimension: PhotoReasoningViewModel.largestImageDimension) { - images.append(image) - } else { - guard let resizedImage = image - .preparingThumbnail(of: image.size - .aspectFit(largestDimension: PhotoReasoningViewModel.largestImageDimension)) else { - logger.error("Failed to resize image: \(image)") - continue - } - - images.append(resizedImage) - } - } - } - - let outputContentStream = try model.generateContentStream(prompt, images) - - // stream response - for try await outputContent in outputContentStream { - guard let line = outputContent.text else { - return - } - - outputText = (outputText ?? "") + line - } - } catch { - logger.error("\(error.localizedDescription)") - errorMessage = error.localizedDescription - } - } -} - -private extension CGSize { - func fits(largestDimension length: CGFloat) -> Bool { - return width <= length && height <= length - } - - func aspectFit(largestDimension length: CGFloat) -> CGSize { - let aspectRatio = width / height - if width > height { - let width = min(self.width, length) - return CGSize(width: width, height: round(width / aspectRatio)) - } else { - let height = min(self.height, length) - return CGSize(width: round(height * aspectRatio), height: height) - } - } -} diff --git a/firebaseai/FirebaseAIExample/GenerativeAITextExample/Screens/GenerateContentScreen.swift b/firebaseai/FirebaseAIExample/GenerativeAITextExample/Screens/GenerateContentScreen.swift deleted file mode 100644 index 5c5e34a0c..000000000 --- a/firebaseai/FirebaseAIExample/GenerativeAITextExample/Screens/GenerateContentScreen.swift +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import MarkdownUI -import SwiftUI -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import GenerativeAIUIComponents - -struct GenerateContentScreen: View { - let firebaseService: FirebaseAI - @StateObject var viewModel: GenerateContentViewModel - @State var userInput = "" - - init(firebaseService: FirebaseAI) { - self.firebaseService = firebaseService - _viewModel = - StateObject(wrappedValue: GenerateContentViewModel(firebaseService: firebaseService)) - } - - enum FocusedField: Hashable { - case message - } - - @FocusState - var focusedField: FocusedField? 
- - var body: some View { - VStack { - VStack(alignment: .leading) { - Text("Enter some text, then tap on _Go_ to run generateContent on it.") - .padding(.horizontal, 6) - InputField("Enter generate content input", text: $userInput) { - Text("Go") - } - .focused($focusedField, equals: .message) - .onSubmit { onGenerateContentTapped() } - } - .padding(.horizontal, 16) - - List { - HStack(alignment: .top) { - if viewModel.inProgress { - ProgressView() - } else { - Image(systemName: "cloud.circle.fill") - .font(.title2) - } - - Markdown("\(viewModel.outputText)") - } - .listRowSeparator(.hidden) - } - .listStyle(.plain) - } - .onTapGesture { - focusedField = nil - } - .navigationTitle("Text example") - } - - private func onGenerateContentTapped() { - focusedField = nil - - Task { - await viewModel.generateContent(inputText: userInput) - } - } -} - -#Preview { - NavigationStack { - GenerateContentScreen(firebaseService: FirebaseAI.firebaseAI()) - } -} diff --git a/firebaseai/FirebaseAIExample/GenerativeAITextExample/ViewModels/GenerateContentViewModel.swift b/firebaseai/FirebaseAIExample/GenerativeAITextExample/ViewModels/GenerateContentViewModel.swift deleted file mode 100644 index fe2344ba0..000000000 --- a/firebaseai/FirebaseAIExample/GenerativeAITextExample/ViewModels/GenerateContentViewModel.swift +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#if canImport(FirebaseAILogic) - import FirebaseAILogic -#else - import FirebaseAI -#endif -import Foundation -import OSLog - -@MainActor -class GenerateContentViewModel: ObservableObject { - private var logger = Logger(subsystem: Bundle.main.bundleIdentifier!, category: "generative-ai") - - @Published - var outputText = "" - - @Published - var errorMessage: String? - - @Published - var inProgress = false - - private var model: GenerativeModel? 
- - init(firebaseService: FirebaseAI) { - model = firebaseService.generativeModel(modelName: "gemini-2.0-flash-001") - } - - func generateContent(inputText: String) async { - defer { - inProgress = false - } - guard let model else { - return - } - - do { - inProgress = true - errorMessage = nil - outputText = "" - - let outputContentStream = try model.generateContentStream(inputText) - - // stream response - for try await outputContent in outputContentStream { - guard let line = outputContent.text else { - return - } - - outputText = outputText + line - } - } catch { - logger.error("\(error.localizedDescription)") - errorMessage = error.localizedDescription - } - } -} diff --git a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift index af4c4680b..58034475a 100644 --- a/firebaseai/FirebaseAIExample/Views/SampleCardView.swift +++ b/firebaseai/FirebaseAIExample/Views/SampleCardView.swift @@ -39,6 +39,7 @@ struct SampleCardView: View { private func systemName(for useCase: UseCase) -> String { switch useCase { + case .all: "square.grid.2x2.fill" case .text: "text.bubble.fill" case .image: "photo.fill" case .video: "video.fill" @@ -50,6 +51,7 @@ struct SampleCardView: View { private func color(for useCase: UseCase) -> Color { switch useCase { + case .all:.primary case .text:.blue case .image:.purple case .video:.red diff --git a/firebaseai/GroundingExample/Screens/GroundingScreen.swift b/firebaseai/GroundingExample/Screens/GroundingScreen.swift new file mode 100644 index 000000000..33c63a23b --- /dev/null +++ b/firebaseai/GroundingExample/Screens/GroundingScreen.swift @@ -0,0 +1,67 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import FirebaseAI +import SwiftUI +import ConversationKit + +struct GroundingScreen: View { + let backendType: BackendOption + @StateObject var viewModel: GroundingViewModel + + init(backendType: BackendOption, sample: Sample? = nil) { + self.backendType = backendType + _viewModel = + StateObject(wrappedValue: GroundingViewModel(backendType: backendType, + sample: sample)) + } + + var body: some View { + NavigationStack { + ConversationView(messages: $viewModel.messages, + userPrompt: viewModel.initialPrompt) { message in + MessageView(message: message) + } + .disableAttachments() + .onSendMessage { message in + await viewModel.sendMessage(message.content ?? 
"", streaming: true) + } + .onError { error in + viewModel.presentErrorDetails = true + } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } + .toolbar { + ToolbarItem(placement: .primaryAction) { + Button(action: newChat) { + Image(systemName: "square.and.pencil") + } + } + } + .navigationTitle(viewModel.title) + .navigationBarTitleDisplayMode(.inline) + } + } + + private func newChat() { + viewModel.startNewChat() + } +} + +#Preview { + GroundingScreen(backendType: .googleAI) +} diff --git a/firebaseai/GroundingExample/ViewModels/GroundingViewModel.swift b/firebaseai/GroundingExample/ViewModels/GroundingViewModel.swift new file mode 100644 index 000000000..2682f3ca9 --- /dev/null +++ b/firebaseai/GroundingExample/ViewModels/GroundingViewModel.swift @@ -0,0 +1,177 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import FirebaseAI +import Foundation +import UIKit + +@MainActor +class GroundingViewModel: ObservableObject { + /// This array holds both the user's and the system's chat messages + @Published var messages = [ChatMessage]() + + /// Indicates we're waiting for the model to finish + @Published var busy = false + + @Published var error: Error? + var hasError: Bool { + return error != nil + } + + @Published var presentErrorDetails: Bool = false + + @Published var initialPrompt: String = "" + @Published var title: String = "" + + private var model: GenerativeModel + private var chat: Chat + + private var chatTask: Task? + + private var sample: Sample? + + private var backendType: BackendOption + + init(backendType: BackendOption, sample: Sample? = nil) { + self.sample = sample + self.backendType = backendType + + let firebaseService = backendType == .googleAI + ? FirebaseAI.firebaseAI(backend: .googleAI()) + : FirebaseAI.firebaseAI(backend: .vertexAI()) + + model = firebaseService.generativeModel( + modelName: sample?.modelName ?? "gemini-2.5-flash", + tools: sample?.tools, + systemInstruction: sample?.systemInstruction + ) + + chat = model.startChat() + + initialPrompt = sample?.initialPrompt ?? "" + title = sample?.title ?? 
"" + } + + func sendMessage(_ text: String, streaming: Bool = true) async { + error = nil + if streaming { + await internalSendMessageStreaming(text) + } else { + await internalSendMessage(text) + } + } + + func startNewChat() { + stop() + error = nil + chat = model.startChat() + messages.removeAll() + initialPrompt = "" + } + + func stop() { + chatTask?.cancel() + error = nil + } + + private func internalSendMessageStreaming(_ text: String) async { + chatTask?.cancel() + + chatTask = Task { + busy = true + defer { + busy = false + } + + // first, add the user's message to the chat + let userMessage = ChatMessage(content: text, participant: .user) + messages.append(userMessage) + + // add a pending message while we're waiting for a response from the backend + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) + + do { + let responseStream = try chat.sendMessageStream(text) + for try await chunk in responseStream { + messages[messages.count - 1].pending = false + if let text = chunk.text { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + text + } + + if let candidate = chunk.candidates.first { + if let groundingMetadata = candidate.groundingMetadata { + self.messages[self.messages.count - 1].groundingMetadata = groundingMetadata + } + } + } + + } catch { + self.error = error + print(error.localizedDescription) + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage + } + } + } + + private func internalSendMessage(_ text: String) async { + chatTask?.cancel() + + chatTask = Task { + busy = true + defer { + busy = false + } + + // first, add the user's message to the chat + let userMessage = ChatMessage(content: text, participant: .user) + messages.append(userMessage) + + // add a pending message while we're waiting for a response from the backend + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) + + do { + var response: GenerateContentResponse? + response = try await chat.sendMessage(text) + + if let responseText = response?.text { + // replace pending message with backend response + messages[messages.count - 1].content = responseText + messages[messages.count - 1].pending = false + + if let candidate = response?.candidates.first { + if let groundingMetadata = candidate.groundingMetadata { + self.messages[self.messages.count - 1].groundingMetadata = groundingMetadata + } + } + } + + } catch { + self.error = error + print(error.localizedDescription) + let errorMessage = ChatMessage(content: "An error occurred. 
Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage + } + } + } +} diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/Grounding/GoogleSearchSuggestionView.swift b/firebaseai/GroundingExample/Views/GoogleSearchSuggestionView.swift similarity index 100% rename from firebaseai/FirebaseAIExample/ChatExample/Views/Grounding/GoogleSearchSuggestionView.swift rename to firebaseai/GroundingExample/Views/GoogleSearchSuggestionView.swift diff --git a/firebaseai/FirebaseAIExample/ChatExample/Views/Grounding/GroundedResponseView.swift b/firebaseai/GroundingExample/Views/GroundedResponseView.swift similarity index 100% rename from firebaseai/FirebaseAIExample/ChatExample/Views/Grounding/GroundedResponseView.swift rename to firebaseai/GroundingExample/Views/GroundedResponseView.swift diff --git a/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift b/firebaseai/ImagenExample/ImagenScreen.swift similarity index 91% rename from firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift rename to firebaseai/ImagenExample/ImagenScreen.swift index 4d546dc94..9c658bc52 100644 --- a/firebaseai/FirebaseAIExample/ImagenExample/ImagenScreen.swift +++ b/firebaseai/ImagenExample/ImagenScreen.swift @@ -17,16 +17,16 @@ import FirebaseAI import ConversationKit struct ImagenScreen: View { - let firebaseService: FirebaseAI + let backendType: BackendOption @StateObject var viewModel: ImagenViewModel @State private var userPrompt = "" - init(firebaseService: FirebaseAI, sample: Sample? = nil) { - self.firebaseService = firebaseService + init(backendType: BackendOption, sample: Sample? = nil) { + self.backendType = backendType _viewModel = - StateObject(wrappedValue: ImagenViewModel(firebaseService: firebaseService, + StateObject(wrappedValue: ImagenViewModel(backendType: backendType, sample: sample)) } @@ -47,7 +47,7 @@ struct ImagenScreen: View { .disableAttachments() .onSubmitAction { sendOrStop() } - if let error = viewModel.error { + if viewModel.error != nil { HStack { Text("An error occurred.") Button("More information", systemImage: "info.circle") { @@ -137,5 +137,5 @@ struct ProgressOverlay: View { } #Preview { - ImagenScreen(firebaseService: FirebaseAI.firebaseAI()) + ImagenScreen(backendType: .googleAI) } diff --git a/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift b/firebaseai/ImagenExample/ImagenViewModel.swift similarity index 84% rename from firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift rename to firebaseai/ImagenExample/ImagenViewModel.swift index f2e3efa2e..1fbab98df 100644 --- a/firebaseai/FirebaseAIExample/ImagenExample/ImagenViewModel.swift +++ b/firebaseai/ImagenExample/ImagenViewModel.swift @@ -44,13 +44,19 @@ class ImagenViewModel: ObservableObject { var inProgress = false private let model: ImagenModel + private var backendType: BackendOption private var generateImagesTask: Task? private var sample: Sample? - init(firebaseService: FirebaseAI, sample: Sample? = nil) { + init(backendType: BackendOption, sample: Sample? = nil) { self.sample = sample + self.backendType = backendType + + let firebaseService = backendType == .googleAI + ? FirebaseAI.firebaseAI(backend: .googleAI()) + : FirebaseAI.firebaseAI(backend: .vertexAI()) let modelName = "imagen-3.0-generate-002" let safetySettings = ImagenSafetySettings( @@ -79,16 +85,16 @@ class ImagenViewModel: ObservableObject { } do { - // 4. Call generateImages with the text prompt + // 1. 
Call generateImages with the text prompt let response = try await model.generateImages(prompt: prompt) - // 5. Print the reason images were filtered out, if any. + // 2. Print the reason images were filtered out, if any. if let filteredReason = response.filteredReason { print("Image(s) Blocked: \(filteredReason)") } if !Task.isCancelled { - // 6. Convert the image data to UIImage for display in the UI + // 3. Convert the image data to UIImage for display in the UI images = response.images.compactMap { UIImage(data: $0.data) } } } catch { diff --git a/firebaseai/MultimodalExample/Models/MultimodalAttachment.swift b/firebaseai/MultimodalExample/Models/MultimodalAttachment.swift new file mode 100644 index 000000000..f4f0bc86f --- /dev/null +++ b/firebaseai/MultimodalExample/Models/MultimodalAttachment.swift @@ -0,0 +1,267 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import Foundation +import SwiftUI +import PhotosUI +import FirebaseAI + +public enum MultimodalAttachmentError: LocalizedError { + case unsupportedFileType(extension: String) + case noDataAvailable + case loadingFailed(Error) + case mimeTypeMismatch(expected: String, provided: String, extension: String) + + public var errorDescription: String? { + switch self { + case let .unsupportedFileType(ext): + return "Unsupported file format: .\(ext). Please select a supported format file." + case .noDataAvailable: + return "File data is not available" + case let .loadingFailed(error): + return "File loading failed: \(error.localizedDescription)" + case let .mimeTypeMismatch(expected, provided, ext): + return "MIME type mismatch for .\(ext) file: expected '\(expected)', got '\(provided)'" + } + } +} + +// MultimodalAttachment is a struct used for transporting data between ViewModels and AttachmentPreviewCard +public struct MultimodalAttachment: Identifiable, Equatable, Hashable { + public let id = UUID() + public let mimeType: String + public let data: Data? + public let url: URL? + public var isCloudStorage: Bool = false + + public static func == (lhs: MultimodalAttachment, rhs: MultimodalAttachment) -> Bool { + return lhs.id == rhs.id + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(id) + } + + public init(mimeType: String, data: Data? = nil, url: URL? 
= nil) { + self.mimeType = mimeType + self.data = data + self.url = url + } + + public init(fileDataPart: FileDataPart) { + mimeType = fileDataPart.mimeType + data = nil + url = URL(string: fileDataPart.uri) + isCloudStorage = true + } +} + +// validate file type & mime type +extension MultimodalAttachment { + public static let supportedFileExtensions: Set = [ + // Documents / text + "pdf", "txt", "text", + // Images + "jpg", "jpeg", "png", "webp", + // Video + "flv", "mov", "qt", "mpeg", "mpg", "ps", "mp4", "webm", "wmv", "3gp", "3gpp", + // Audio + "aac", "flac", "mp3", "m4a", "mpga", "mp4a", "opus", "pcm", "raw", "wav", "weba", + ] + + public static func validateFileType(url: URL) throws { + let fileExtension = url.pathExtension.lowercased() + guard !fileExtension.isEmpty else { + throw MultimodalAttachmentError.unsupportedFileType(extension: "No extension") + } + + guard supportedFileExtensions.contains(fileExtension) else { + throw MultimodalAttachmentError.unsupportedFileType(extension: fileExtension) + } + } + + public static func validateMimeTypeMatch(url: URL, mimeType: String) throws { + let expectedMimeType = getMimeType(for: url) + + guard mimeType == expectedMimeType else { + throw MultimodalAttachmentError.mimeTypeMismatch( + expected: expectedMimeType, + provided: mimeType, + extension: url.pathExtension + ) + } + } + + public static func validatePhotoType(_ item: PhotosPickerItem) throws -> String { + guard let fileExtension = item.supportedContentTypes.first?.preferredFilenameExtension else { + throw MultimodalAttachmentError.unsupportedFileType(extension: "No extension") + } + + guard supportedFileExtensions.contains(fileExtension) else { + throw MultimodalAttachmentError.unsupportedFileType(extension: fileExtension) + } + + guard let fileMimeType = item.supportedContentTypes.first?.preferredMIMEType else { + throw MultimodalAttachmentError.unsupportedFileType(extension: "No MIME type") + } + + return fileMimeType + } +} + +// load data from picker item or url +extension MultimodalAttachment { + public static func fromPhotosPickerItem(_ item: PhotosPickerItem) async throws + -> MultimodalAttachment { + let fileMimeType = try validatePhotoType(item) + + do { + guard let data = try await item.loadTransferable(type: Data.self) else { + throw MultimodalAttachmentError.noDataAvailable + } + + return MultimodalAttachment( + mimeType: fileMimeType, + data: data + ) + } catch let error as MultimodalAttachmentError { + throw error + } catch { + throw MultimodalAttachmentError.loadingFailed(error) + } + } + + public static func fromFilePickerItem(from url: URL) async throws -> MultimodalAttachment { + try validateFileType(url: url) + + do { + let data = try await Task.detached(priority: .utility) { + try Data(contentsOf: url) + }.value + + let mimeType = Self.getMimeType(for: url) + + return MultimodalAttachment( + mimeType: mimeType, + data: data, + url: url + ) + } catch { + throw MultimodalAttachmentError.loadingFailed(error) + } + } + + public static func fromURL(_ url: URL, mimeType: String) async throws -> MultimodalAttachment { + try validateFileType(url: url) + try validateMimeTypeMatch(url: url, mimeType: mimeType) + + do { + let data = try await Task.detached(priority: .utility) { + try Data(contentsOf: url) + }.value + + return MultimodalAttachment( + mimeType: mimeType, + data: data, + url: url + ) + } catch { + throw MultimodalAttachmentError.loadingFailed(error) + } + } + + public func toInlineDataPart() async -> InlineDataPart? 
{ + if let data = data, !data.isEmpty { + return InlineDataPart(data: data, mimeType: mimeType) + } + + // If the data is not available, try to read it from the url. + guard let url = url else { return nil } + do { + let data = try await Task.detached(priority: .utility) { + try Data(contentsOf: url) + }.value + + guard !data.isEmpty else { return nil } + return InlineDataPart(data: data, mimeType: mimeType) + } catch { + return nil + } + } + + private static func getMimeType(for url: URL) -> String { + let fileExtension = url.pathExtension.lowercased() + + switch fileExtension { + // Documents / text + case "pdf": + return "application/pdf" + case "txt", "text": + return "text/plain" + + // Images + case "jpg", "jpeg": + return "image/jpeg" + case "png": + return "image/png" + case "webp": + return "image/webp" + + // Video + case "flv": + return "video/x-flv" + case "mov", "qt": + return "video/quicktime" + case "mpeg": + return "video/mpeg" + case "mpg": + return "video/mpg" + case "ps": + return "video/mpegps" + case "mp4": + return "video/mp4" + case "webm": + return "video/webm" + case "wmv": + return "video/wmv" + case "3gp", "3gpp": + return "video/3gpp" + + // Audio + case "aac": + return "audio/aac" + case "flac": + return "audio/flac" + case "mp3": + return "audio/mpeg" + case "m4a": + return "audio/m4a" + case "mpga": + return "audio/mpga" + case "mp4a": + return "audio/mp4" + case "opus": + return "audio/opus" + case "pcm", "raw": + return "audio/pcm" + case "wav": + return "audio/wav" + case "weba": + return "audio/webm" + + default: + return "application/octet-stream" + } + } +} diff --git a/storage/StorageExample (iOS)/Assets.xcassets/Contents.json b/firebaseai/MultimodalExample/Preview Content/Preview Assets.xcassets/Contents.json similarity index 100% rename from storage/StorageExample (iOS)/Assets.xcassets/Contents.json rename to firebaseai/MultimodalExample/Preview Content/Preview Assets.xcassets/Contents.json diff --git a/firebaseai/MultimodalExample/Screens/MultimodalScreen.swift b/firebaseai/MultimodalExample/Screens/MultimodalScreen.swift new file mode 100644 index 000000000..1f593f20c --- /dev/null +++ b/firebaseai/MultimodalExample/Screens/MultimodalScreen.swift @@ -0,0 +1,202 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import FirebaseAI +import SwiftUI +import PhotosUI +import ConversationKit + +struct MultimodalScreen: View { + let backendType: BackendOption + @StateObject var viewModel: MultimodalViewModel + + @State private var showingPhotoPicker = false + @State private var showingFilePicker = false + @State private var showingLinkDialog = false + @State private var linkText = "" + @State private var linkMimeType = "" + @State private var selectedPhotoItems = [PhotosPickerItem]() + + init(backendType: BackendOption, sample: Sample? 
= nil) { + self.backendType = backendType + _viewModel = + StateObject(wrappedValue: MultimodalViewModel(backendType: backendType, + sample: sample)) + } + + private var attachmentPreviewScrollView: some View { + AttachmentPreviewScrollView( + attachments: viewModel.attachments, + onAttachmentRemove: viewModel.removeAttachment + ) + } + + var body: some View { + NavigationStack { + ConversationView(messages: $viewModel.messages, + userPrompt: viewModel.initialPrompt) { message in + MessageView(message: message) + } + .attachmentActions { + Button(action: showLinkDialog) { + Label("Link", systemImage: "link") + } + Button(action: showFilePicker) { + Label("File", systemImage: "doc.text") + } + Button(action: showPhotoPicker) { + Label("Photo", systemImage: "photo.on.rectangle.angled") + } + } + .attachmentPreview { attachmentPreviewScrollView } + .onSendMessage { message in + await viewModel.sendMessage(message.content ?? "", streaming: true) + } + .onError { error in + viewModel.presentErrorDetails = true + } + .sheet(isPresented: $viewModel.presentErrorDetails) { + if let error = viewModel.error { + ErrorDetailsView(error: error) + } + } + .photosPicker( + isPresented: $showingPhotoPicker, + selection: $selectedPhotoItems, + maxSelectionCount: 5, + matching: .any(of: [.images, .videos]) + ) + .fileImporter( + isPresented: $showingFilePicker, + allowedContentTypes: [.pdf, .audio], + allowsMultipleSelection: true + ) { result in + handleFileImport(result) + } + .alert("Add Web URL", isPresented: $showingLinkDialog) { + TextField("Enter URL", text: $linkText) + TextField("Enter mimeType", text: $linkMimeType) + Button("Add") { + handleLinkAttachment() + } + Button("Cancel", role: .cancel) { + linkText = "" + linkMimeType = "" + } + } + } + .onChange(of: selectedPhotoItems) { _, newItems in + handlePhotoSelection(newItems) + } + .toolbar { + ToolbarItem(placement: .primaryAction) { + Button(action: newChat) { + Image(systemName: "square.and.pencil") + } + } + } + .navigationTitle(viewModel.title) + .navigationBarTitleDisplayMode(.inline) + } + + private func newChat() { + viewModel.startNewChat() + } + + private func showPhotoPicker() { + showingPhotoPicker = true + } + + private func showFilePicker() { + showingFilePicker = true + } + + private func showLinkDialog() { + showingLinkDialog = true + } + + private func handlePhotoSelection(_ items: [PhotosPickerItem]) { + Task { + for item in items { + do { + let attachment = try await MultimodalAttachment.fromPhotosPickerItem(item) + await MainActor.run { + viewModel.addAttachment(attachment) + } + } catch { + await MainActor.run { + viewModel.error = error + viewModel.presentErrorDetails = true + } + } + } + await MainActor.run { + selectedPhotoItems = [] + } + } + } + + private func handleFileImport(_ result: Result<[URL], Error>) { + switch result { + case let .success(urls): + Task { + for url in urls { + do { + let attachment = try await MultimodalAttachment.fromFilePickerItem(from: url) + await MainActor.run { + viewModel.addAttachment(attachment) + } + } catch { + await MainActor.run { + viewModel.error = error + viewModel.presentErrorDetails = true + } + } + } + } + case let .failure(error): + viewModel.error = error + viewModel.presentErrorDetails = true + } + } + + private func handleLinkAttachment() { + guard !linkText.isEmpty, let url = URL(string: linkText) else { + return + } + + let trimmedMime = linkMimeType.lowercased().trimmingCharacters(in: .whitespacesAndNewlines) + Task { + do { + let attachment = try await 
MultimodalAttachment.fromURL(url, mimeType: trimmedMime) + await MainActor.run { + viewModel.addAttachment(attachment) + } + } catch { + await MainActor.run { + viewModel.error = error + viewModel.presentErrorDetails = true + } + } + await MainActor.run { + linkText = "" + linkMimeType = "" + } + } + } +} + +#Preview { + MultimodalScreen(backendType: .googleAI) +} diff --git a/firebaseai/MultimodalExample/ViewModels/MultimodalViewModel.swift b/firebaseai/MultimodalExample/ViewModels/MultimodalViewModel.swift new file mode 100644 index 000000000..496cb21cb --- /dev/null +++ b/firebaseai/MultimodalExample/ViewModels/MultimodalViewModel.swift @@ -0,0 +1,217 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import FirebaseAI +import Foundation +import OSLog +import PhotosUI +import SwiftUI +import AVFoundation + +@MainActor +class MultimodalViewModel: ObservableObject { + @Published var messages = [ChatMessage]() + @Published var initialPrompt: String = "" + @Published var title: String = "" + @Published var error: Error? + @Published var inProgress = false + + @Published var presentErrorDetails: Bool = false + + @Published var attachments = [MultimodalAttachment]() + + private var model: GenerativeModel + private var chat: Chat + private var chatTask: Task? + private let logger = Logger(subsystem: "com.example.firebaseai", category: "MultimodalViewModel") + + private var sample: Sample? + private var backendType: BackendOption + private var fileDataParts: [FileDataPart]? + + init(backendType: BackendOption, sample: Sample? = nil) { + self.sample = sample + self.backendType = backendType + + let firebaseService = backendType == .googleAI + ? FirebaseAI.firebaseAI(backend: .googleAI()) + : FirebaseAI.firebaseAI(backend: .vertexAI()) + + model = firebaseService.generativeModel( + modelName: sample?.modelName ?? "gemini-2.5-flash", + systemInstruction: sample?.systemInstruction + ) + + if let chatHistory = sample?.chatHistory, !chatHistory.isEmpty { + messages = ChatMessage.from(chatHistory) + chat = model.startChat(history: chatHistory) + } else { + chat = model.startChat() + } + + initialPrompt = sample?.initialPrompt ?? "" + title = sample?.title ?? 
"" + + fileDataParts = sample?.fileDataParts + if let fileDataParts = fileDataParts, !fileDataParts.isEmpty { + for fileDataPart in fileDataParts { + attachments.append(MultimodalAttachment(fileDataPart: fileDataPart)) + } + } + } + + func sendMessage(_ text: String, streaming: Bool = true) async { + error = nil + if streaming { + await internalSendMessageStreaming(text) + } else { + await internalSendMessage(text) + } + } + + func startNewChat() { + stop() + error = nil + chat = model.startChat() + messages.removeAll() + attachments.removeAll() + initialPrompt = "" + } + + func stop() { + chatTask?.cancel() + error = nil + } + + private func internalSendMessageStreaming(_ text: String) async { + chatTask?.cancel() + + chatTask = Task { + inProgress = true + defer { + inProgress = false + } + + let userMessage = ChatMessage(content: text, participant: .user, attachments: attachments) + messages.append(userMessage) + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) + + do { + var parts: [any PartsRepresentable] = [text] + + if backendType == .vertexAI, let fileDataParts = fileDataParts { + // This is a patch for Cloud Storage support. Only available when using Vertex AI Gemini API. + // For non-text inputs (e.g., media files), you can attach files from Cloud Storage to the request. + // if you do not want to use Cloud Storage, you can remove this `if` statement. + // Reference: https://firebase.google.com/docs/ai-logic/solutions/cloud-storage + for fileDataPart in fileDataParts { + parts.append(fileDataPart) + } + } else { + for attachment in attachments { + if let inlineDataPart = await attachment.toInlineDataPart() { + parts.append(inlineDataPart) + } + } + } + + attachments.removeAll() + + let responseStream = try chat.sendMessageStream(parts) + for try await chunk in responseStream { + messages[messages.count - 1].pending = false + if let text = chunk.text { + messages[messages.count - 1] + .content = (messages[messages.count - 1].content ?? "") + text + } + } + } catch { + self.error = error + logger.error("\(error.localizedDescription)") + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage + } + } + } + + private func internalSendMessage(_ text: String) async { + chatTask?.cancel() + + chatTask = Task { + inProgress = true + defer { + inProgress = false + } + let userMessage = ChatMessage(content: text, participant: .user, attachments: attachments) + messages.append(userMessage) + + let systemMessage = ChatMessage.pending(participant: .other) + messages.append(systemMessage) + + do { + var parts: [any PartsRepresentable] = [text] + + if backendType == .vertexAI, let fileDataParts = fileDataParts { + // This is a patch for Cloud Storage support. Only available when using Vertex AI Gemini API. + // For non-text inputs (e.g., media files), you can attach files from Cloud Storage to the request. + // if you do not want to use Cloud Storage, you can remove this `if` statement. 
+ // Reference: https://firebase.google.com/docs/ai-logic/solutions/cloud-storage + for fileDataPart in fileDataParts { + parts.append(fileDataPart) + } + } else { + for attachment in attachments { + if let inlineDataPart = await attachment.toInlineDataPart() { + parts.append(inlineDataPart) + } + } + } + + attachments.removeAll() + + let response = try await chat.sendMessage(parts) + + if let responseText = response.text { + messages[messages.count - 1].content = responseText + messages[messages.count - 1].pending = false + } + } catch { + self.error = error + logger.error("\(error.localizedDescription)") + let errorMessage = ChatMessage(content: "An error occurred. Please try again.", + participant: .other, + error: error, + pending: false) + messages[messages.count - 1] = errorMessage + } + } + } + + func addAttachment(_ attachment: MultimodalAttachment) { + attachments.append(attachment) + } + + func removeAttachment(_ attachment: MultimodalAttachment) { + if attachment.isCloudStorage { + // Remove corresponding fileDataPart when attachment is deleted. + fileDataParts?.removeAll { $0.uri == attachment.url?.absoluteString } + } + + attachments.removeAll { $0.id == attachment.id } + } +} diff --git a/firebaseai/MultimodalExample/Views/AttachmentPreviewCard.swift b/firebaseai/MultimodalExample/Views/AttachmentPreviewCard.swift new file mode 100644 index 000000000..2bb37e25a --- /dev/null +++ b/firebaseai/MultimodalExample/Views/AttachmentPreviewCard.swift @@ -0,0 +1,188 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import SwiftUI + +private enum AttachmentType: String { + case image, video, audio, pdf, other + + init(mimeType: String) { + let mt = mimeType.lowercased() + if mt.hasPrefix("image/") { self = .image } + else if mt.hasPrefix("video/") { self = .video } + else if mt.hasPrefix("audio/") { self = .audio } + else if mt == "application/pdf" { self = .pdf } + else { self = .other } + } + + var systemImageName: String { + switch self { + case .image: return "photo" + case .video: return "video" + case .audio: return "waveform" + case .pdf: return "doc.text" + case .other: return "questionmark" + } + } + + var typeTagColor: Color { + switch self { + case .image: return .green + case .video: return .purple + case .audio: return .orange + case .pdf: return .red + case .other: return .blue + } + } + + var displayFileType: String { + switch self { + case .image: return "IMAGE" + case .video: return "VIDEO" + case .audio: return "AUDIO" + case .pdf: return "PDF" + case .other: return "UNKNOWN" + } + } +} + +struct AttachmentPreviewCard: View { + let attachment: MultimodalAttachment + let onRemove: (() -> Void)? 
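Both send paths in MultimodalViewModel above assemble their request parts the same way. A condensed sketch of that step, assuming the `MultimodalAttachment` and `BackendOption` types introduced in this patch (`makeParts` is a hypothetical helper, not part of the patch):

```swift
import FirebaseAI

// Sketch: text first, then either Cloud Storage references (Vertex AI only)
// or locally loaded attachments converted to inline data.
func makeParts(text: String,
               attachments: [MultimodalAttachment],
               fileDataParts: [FileDataPart]?,
               backendType: BackendOption) async -> [any PartsRepresentable] {
  var parts: [any PartsRepresentable] = [text]

  if backendType == .vertexAI, let fileDataParts = fileDataParts {
    // Cloud Storage URIs are passed through as-is; the service fetches them.
    for fileDataPart in fileDataParts {
      parts.append(fileDataPart)
    }
  } else {
    // Everything else is sent inline as raw bytes plus a MIME type.
    for attachment in attachments {
      if let inlineDataPart = await attachment.toInlineDataPart() {
        parts.append(inlineDataPart)
      }
    }
  }
  return parts
}
```

The resulting array can be handed straight to `chat.sendMessageStream(parts)`, as both internal send methods do.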
+ + private var attachmentType: AttachmentType { + AttachmentType(mimeType: attachment.mimeType) + } + + var body: some View { + HStack(spacing: 12) { + Image(systemName: attachmentType.systemImageName) + .font(.system(size: 20)) + .foregroundColor(.blue) + .frame(width: 40, height: 40) + .background(Color.blue.opacity(0.1)) + .clipShape(RoundedRectangle(cornerRadius: 6)) + + VStack(alignment: .leading, spacing: 4) { + Text(displayName) + .font(.system(size: 14, weight: .medium)) + .lineLimit(1) + .truncationMode(.middle) + .foregroundColor(.primary) + + HStack(spacing: 8) { + Text(attachmentType.displayFileType) + .font(.system(size: 10, weight: .semibold)) + .padding(.horizontal, 6) + .padding(.vertical, 2) + .background(attachmentType.typeTagColor) + .foregroundColor(.white) + .clipShape(Capsule()) + + Spacer() + } + } + + if let onRemove = onRemove { + Button(action: onRemove) { + Image(systemName: "xmark.circle.fill") + .font(.system(size: 16)) + .foregroundColor(.gray) + } + .buttonStyle(PlainButtonStyle()) + } + } + .padding(12) + .background(Color(.systemGray6)) + .clipShape(RoundedRectangle(cornerRadius: 12)) + .overlay( + RoundedRectangle(cornerRadius: 12) + .stroke(Color(.separator), lineWidth: 0.5) + ) + } + + private var displayName: String { + let fileName = attachment.url?.lastPathComponent ?? "Default" + let maxLength = 30 + if fileName.count <= maxLength { + return fileName + } + + let prefixName = fileName.prefix(15) + let suffixName = fileName.suffix(10) + return "\(prefixName)...\(suffixName)" + } +} + +struct AttachmentPreviewScrollView: View { + let attachments: [MultimodalAttachment] + var onAttachmentRemove: ((MultimodalAttachment) -> Void)? = nil + + var body: some View { + if !attachments.isEmpty { + ScrollView(.horizontal, showsIndicators: false) { + LazyHStack(spacing: 8) { + ForEach(attachments) { attachment in + AttachmentPreviewCard( + attachment: attachment, + onRemove: onAttachmentRemove == nil ? nil : { onAttachmentRemove?(attachment) } + ) + .frame(width: 180) + } + } + .padding(.horizontal, 16) + } + .frame(height: 80) + } else { + EmptyView() + } + } +} + +#Preview { + VStack(spacing: 20) { + AttachmentPreviewCard( + attachment: MultimodalAttachment( + mimeType: "image/jpeg", + data: Data() + ), + onRemove: { print("Image removed") } + ) + + AttachmentPreviewCard( + attachment: MultimodalAttachment( + mimeType: "application/pdf", + data: Data() + ), + onRemove: { print("PDF removed") } + ) + + AttachmentPreviewCard( + attachment: MultimodalAttachment( + mimeType: "video/mp4", + data: Data() + ), + onRemove: { print("Video removed") } + ) + + AttachmentPreviewCard( + attachment: MultimodalAttachment( + mimeType: "audio/mpeg", + data: Data() + ), + onRemove: { print("Audio removed") } + ) + } + .padding() +} diff --git a/firebaseai/UIComponents/Models/Sample.swift b/firebaseai/UIComponents/Models/Sample.swift index 65649235b..e81b2189a 100644 --- a/firebaseai/UIComponents/Models/Sample.swift +++ b/firebaseai/UIComponents/Models/Sample.swift @@ -21,27 +21,36 @@ public struct Sample: Identifiable { public let description: String public let useCases: [UseCase] public let navRoute: String + public let modelName: String public let chatHistory: [ModelContent]? public let initialPrompt: String? public let systemInstruction: ModelContent? public let tools: [Tool]? + public let generationConfig: GenerationConfig? + public let fileDataParts: [FileDataPart]? 
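The new `modelName` and `generationConfig` fields on `Sample` only pay off if view models forward them when building the model. A sketch of that wiring (`makeModel` is a hypothetical helper; forwarding `generationConfig` is an assumption on top of what this patch shows for `tools` and `systemInstruction`, though the `generativeModel` factory does take a `generationConfig:` parameter):

```swift
import FirebaseAI

// Sketch: derive the whole model configuration from a Sample.
// The modelName/tools/systemInstruction wiring mirrors the view models
// elsewhere in this patch; generationConfig is assumed to pass through.
func makeModel(for sample: Sample?, using service: FirebaseAI) -> GenerativeModel {
  service.generativeModel(
    modelName: sample?.modelName ?? "gemini-2.5-flash",
    generationConfig: sample?.generationConfig, // e.g. responseModalities: [.text, .image]
    tools: sample?.tools,
    systemInstruction: sample?.systemInstruction
  )
}
```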
public init(title: String, description: String, useCases: [UseCase], navRoute: String, + modelName: String = "gemini-2.5-flash", chatHistory: [ModelContent]? = nil, initialPrompt: String? = nil, systemInstruction: ModelContent? = nil, - tools: [Tool]? = nil) { + tools: [Tool]? = nil, + generationConfig: GenerationConfig? = nil, + fileDataParts: [FileDataPart]? = nil) { self.title = title self.description = description self.useCases = useCases self.navRoute = navRoute + self.modelName = modelName self.chatHistory = chatHistory self.initialPrompt = initialPrompt self.systemInstruction = systemInstruction self.tools = tools + self.generationConfig = generationConfig + self.fileDataParts = fileDataParts } } @@ -90,110 +99,116 @@ extension Sample { title: "Blog post creator", description: "Create a blog post from an image file stored in Cloud Storage.", useCases: [.image], - navRoute: "ChatScreen", - chatHistory: [ - ModelContent(role: "user", parts: "Can you help me create a blog post about this image?"), - ModelContent( - role: "model", - parts: "I'd be happy to help you create a blog post! Please share the image you'd like me to analyze and write about." + navRoute: "MultimodalScreen", + initialPrompt: "Write a short, engaging blog post based on this picture." + + " It should include a description of the meal in the" + + " photo and talk about my journey meal prepping.", + fileDataParts: [ + FileDataPart( + uri: "https://storage.googleapis.com/cloud-samples-data/generative-ai/image/meal-prep.jpeg", + mimeType: "image/jpeg" ), - ], - initialPrompt: "Please analyze this image and create an engaging blog post" + ] ), Sample( - title: "Imagen 3 - image generation", + title: "Imagen - image generation", description: "Generate images using Imagen 3", useCases: [.image], navRoute: "ImagenScreen", initialPrompt: "A photo of a modern building with water in the background" ), Sample( - title: "Gemini 2.0 Flash - image generation", + title: "Gemini Flash - image generation", description: "Generate and/or edit images using Gemini 2.0 Flash", useCases: [.image], navRoute: "ChatScreen", - chatHistory: [ - ModelContent(role: "user", parts: "Can you edit this image to make it brighter?"), - ModelContent( - role: "model", - parts: "I can help you edit images using Gemini 2.0 Flash. Please share the image you'd like me to modify." - ), - ], - initialPrompt: "" + modelName: "gemini-2.0-flash-preview-image-generation", + initialPrompt: "Hi, can you create a 3d rendered image of a pig " + + "with wings and a top hat flying over a happy " + + "futuristic scifi city with lots of greenery?", + generationConfig: GenerationConfig(responseModalities: [.text, .image]), ), // Video Sample( title: "Hashtags for a video", description: "Generate hashtags for a video ad stored in Cloud Storage.", useCases: [.video], - navRoute: "ChatScreen", - chatHistory: [ - ModelContent(role: "user", parts: "Can you suggest hashtags for my product video?"), - ModelContent( - role: "model", - parts: "I'd be happy to help you generate relevant hashtags! Please share your video or describe what it's about so I can suggest appropriate hashtags." + navRoute: "MultimodalScreen", + initialPrompt: "Generate 5-10 hashtags that relate to the video content." + + " Try to use more popular and engaging terms," + + " e.g. #Viral. 
Do not add content not related to" +
+        " the video.\n Start the output with 'Tags:'",
+      fileDataParts: [
+        FileDataPart(
+          uri: "https://storage.googleapis.com/cloud-samples-data/generative-ai/video/google_home_celebrity_ad.mp4",
+          mimeType: "video/mp4"
         ),
-      ],
-      initialPrompt: ""
+      ]
     ),
     Sample(
       title: "Summarize video",
       description: "Summarize a video and extract important dialogue.",
       useCases: [.video],
-      navRoute: "ChatScreen",
+      navRoute: "MultimodalScreen",
       chatHistory: [
-        ModelContent(role: "user", parts: "Can you summarize this video for me?"),
+        ModelContent(role: "user", parts: "Can you help me with the description of a video file?"),
         ModelContent(
           role: "model",
-          parts: "I can help you summarize videos and extract key dialogue. Please share the video you'd like me to analyze."
+          parts: "Sure! Click on the attach button below and choose a video file for me to describe."
         ),
       ],
-      initialPrompt: ""
+      initialPrompt: "I have attached the video file. Provide a description of" +
+        " the video. The description should also contain" +
+        " anything important which people say in the video."
     ),
     // Audio
     Sample(
       title: "Audio Summarization",
       description: "Summarize an audio file",
       useCases: [.audio],
-      navRoute: "ChatScreen",
+      navRoute: "MultimodalScreen",
       chatHistory: [
-        ModelContent(role: "user", parts: "Can you summarize this audio recording?"),
+        ModelContent(role: "user", parts: "Can you help me summarize an audio file?"),
         ModelContent(
           role: "model",
-          parts: "I can help you summarize audio files. Please share the audio recording you'd like me to analyze."
+          parts: "Of course! Click on the attach button below and choose an audio file for me to summarize."
         ),
       ],
-      initialPrompt: ""
+      initialPrompt: "I have attached the audio file. Please analyze it and summarize the contents" +
+        " of the audio as bullet points."
     ),
     Sample(
       title: "Translation from audio",
       description: "Translate an audio file stored in Cloud Storage",
       useCases: [.audio],
-      navRoute: "ChatScreen",
-      chatHistory: [
-        ModelContent(role: "user", parts: "Can you translate this audio from Spanish to English?"),
-        ModelContent(
-          role: "model",
-          parts: "I can help you translate audio files. Please share the audio file you'd like me to translate."
+      navRoute: "MultimodalScreen",
+      initialPrompt: "Please translate the audio into Mandarin.",
+      fileDataParts: [
+        FileDataPart(
+          uri: "https://storage.googleapis.com/cloud-samples-data/generative-ai/audio/How_to_create_a_My_Map_in_Google_Maps.mp3",
+          mimeType: "audio/mp3"
         ),
-      ],
-      initialPrompt: ""
+      ]
     ),
     // Document
     Sample(
       title: "Document comparison",
       description: "Compare the contents of 2 documents." +
-        " Only supported by the Vertex AI Gemini API because the documents are stored in Cloud Storage",
+        " Supported by the Vertex AI Gemini API because the documents are stored in Cloud Storage",
       useCases: [.document],
-      navRoute: "ChatScreen",
-      chatHistory: [
-        ModelContent(role: "user", parts: "Can you compare these two documents for me?"),
-        ModelContent(
-          role: "model",
-          parts: "I can help you compare documents using the Vertex AI Gemini API. Please share the two documents you'd like me to compare."
+      navRoute: "MultimodalScreen",
+      initialPrompt: "The first document is from 2013, and the second document is" +
+        " from 2023. 
How did the standard deduction evolve?", + fileDataParts: [ + FileDataPart( + uri: "https://storage.googleapis.com/cloud-samples-data/generative-ai/pdf/form_1040_2013.pdf", + mimeType: "application/pdf" ), - ], - initialPrompt: "" + FileDataPart( + uri: "https://storage.googleapis.com/cloud-samples-data/generative-ai/pdf/form_1040_2023.pdf", + mimeType: "application/pdf" + ), + ] ), // Function Calling Sample( @@ -221,7 +236,7 @@ extension Sample { title: "Grounding with Google Search", description: "Use Grounding with Google Search to get responses based on up-to-date information from the web.", useCases: [.text], - navRoute: "ChatScreen", + navRoute: "GroundingScreen", initialPrompt: "What's the weather in Chicago this weekend?", tools: [.googleSearch()] ), diff --git a/firebaseai/UIComponents/Models/UseCase.swift b/firebaseai/UIComponents/Models/UseCase.swift index 5448dc01b..ee4e80f8a 100644 --- a/firebaseai/UIComponents/Models/UseCase.swift +++ b/firebaseai/UIComponents/Models/UseCase.swift @@ -15,6 +15,7 @@ import Foundation public enum UseCase: String, CaseIterable, Identifiable { + case all = "All" case text = "Text" case image = "Image" case video = "Video" diff --git a/firebaseai/UIComponents/Views/InputField.swift b/firebaseai/UIComponents/Views/InputField.swift deleted file mode 100644 index 67941c370..000000000 --- a/firebaseai/UIComponents/Views/InputField.swift +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import SwiftUI - -public struct InputField