From 0dfecfabac21cd000b7c97689d15c3f2e91f8e76 Mon Sep 17 00:00:00 2001
From: marginal23326 <58261815+marginal23326@users.noreply.github.com>
Date: Fri, 31 Jan 2025 15:33:40 +0600
Subject: [PATCH] chore: rename 'gemini' to 'google' for consistency

---
 README.md                 | 2 +-
 src/utils/utils.py        | 8 ++++----
 tests/test_browser_use.py | 2 +-
 tests/test_llm_api.py     | 8 ++++----
 4 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/README.md b/README.md
index f0b54cb0..e48c691f 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ We would like to officially thank [WarmShao](https://github.com/warmshao) for hi
 
 **WebUI:** is built on Gradio and supports most of `browser-use` functionalities. This UI is designed to be user-friendly and enables easy interaction with the browser agent.
 
-**Expanded LLM Support:** We've integrated support for various Large Language Models (LLMs), including: Gemini, OpenAI, Azure OpenAI, Anthropic, DeepSeek, Ollama etc. And we plan to add support for even more models in the future.
+**Expanded LLM Support:** We've integrated support for various Large Language Models (LLMs), including: Google, OpenAI, Azure OpenAI, Anthropic, DeepSeek, Ollama etc. And we plan to add support for even more models in the future.
 
 **Custom Browser Support:** You can use your own browser with our tool, eliminating the need to re-login to sites or deal with other authentication challenges. This feature also supports high-definition screen recording.
 
diff --git a/src/utils/utils.py b/src/utils/utils.py
index 09dedf8c..e32c1146 100644
--- a/src/utils/utils.py
+++ b/src/utils/utils.py
@@ -19,7 +19,7 @@
     "azure_openai": "Azure OpenAI",
     "anthropic": "Anthropic",
     "deepseek": "DeepSeek",
-    "gemini": "Gemini"
+    "google": "Google"
 }
 
 def get_llm_model(provider: str, **kwargs):
@@ -30,7 +30,7 @@ def get_llm_model(provider: str, **kwargs):
     :return:
     """
     if provider not in ["ollama"]:
-        env_var = "GOOGLE_API_KEY" if provider == "gemini" else f"{provider.upper()}_API_KEY"
+        env_var = f"{provider.upper()}_API_KEY"
         api_key = kwargs.get("api_key", "") or os.getenv(env_var, "")
         if not api_key:
             handle_api_key_error(provider, env_var)
@@ -96,7 +96,7 @@ def get_llm_model(provider: str, **kwargs):
             base_url=base_url,
             api_key=api_key,
         )
-    elif provider == "gemini":
+    elif provider == "google":
         return ChatGoogleGenerativeAI(
             model=kwargs.get("model_name", "gemini-2.0-flash-exp"),
             temperature=kwargs.get("temperature", 0.0),
@@ -143,7 +143,7 @@ def get_llm_model(provider: str, **kwargs):
     "anthropic": ["claude-3-5-sonnet-20240620", "claude-3-opus-20240229"],
     "openai": ["gpt-4o", "gpt-4", "gpt-3.5-turbo", "o3-mini"],
     "deepseek": ["deepseek-chat", "deepseek-reasoner"],
-    "gemini": ["gemini-2.0-flash-exp", "gemini-2.0-flash-thinking-exp", "gemini-1.5-flash-latest", "gemini-1.5-flash-8b-latest", "gemini-2.0-flash-thinking-exp-01-21"],
+    "google": ["gemini-2.0-flash-exp", "gemini-2.0-flash-thinking-exp", "gemini-1.5-flash-latest", "gemini-1.5-flash-8b-latest", "gemini-2.0-flash-thinking-exp-01-21"],
     "ollama": ["qwen2.5:7b", "llama2:7b", "deepseek-r1:14b", "deepseek-r1:32b"],
     "azure_openai": ["gpt-4o", "gpt-4", "gpt-3.5-turbo"],
     "mistral": ["pixtral-large-latest", "mistral-large-latest", "mistral-small-latest", "ministral-8b-latest"]
diff --git a/tests/test_browser_use.py b/tests/test_browser_use.py
index c467c35f..f377fe31 100644
--- a/tests/test_browser_use.py
+++ b/tests/test_browser_use.py
@@ -127,7 +127,7 @@ async def test_browser_use_custom():
     )
 
     # llm = utils.get_llm_model(
-    #     provider="gemini",
+    #     provider="google",
     #     model_name="gemini-2.0-flash-exp",
     #     temperature=1.0,
     #     api_key=os.getenv("GOOGLE_API_KEY", "")
diff --git a/tests/test_llm_api.py b/tests/test_llm_api.py
index cf6bad6c..9c9d24fe 100644
--- a/tests/test_llm_api.py
+++ b/tests/test_llm_api.py
@@ -37,7 +37,7 @@ def get_env_value(key, provider):
     env_mappings = {
         "openai": {"api_key": "OPENAI_API_KEY", "base_url": "OPENAI_ENDPOINT"},
         "azure_openai": {"api_key": "AZURE_OPENAI_API_KEY", "base_url": "AZURE_OPENAI_ENDPOINT"},
-        "gemini": {"api_key": "GOOGLE_API_KEY"},
+        "google": {"api_key": "GOOGLE_API_KEY"},
         "deepseek": {"api_key": "DEEPSEEK_API_KEY", "base_url": "DEEPSEEK_ENDPOINT"},
         "mistral": {"api_key": "MISTRAL_API_KEY", "base_url": "MISTRAL_ENDPOINT"},
     }
@@ -92,9 +92,9 @@ def test_openai_model():
     config = LLMConfig(provider="openai", model_name="gpt-4o")
     test_llm(config, "Describe this image", "assets/examples/test.png")
 
-def test_gemini_model():
+def test_google_model():
     # Enable your API key first if you haven't: https://ai.google.dev/palm_docs/oauth_quickstart
-    config = LLMConfig(provider="gemini", model_name="gemini-2.0-flash-exp")
+    config = LLMConfig(provider="google", model_name="gemini-2.0-flash-exp")
     test_llm(config, "Describe this image", "assets/examples/test.png")
 
 def test_azure_openai_model():
@@ -123,7 +123,7 @@ def test_mistral_model():
 
 if __name__ == "__main__":
     # test_openai_model()
-    # test_gemini_model()
+    # test_google_model()
     # test_azure_openai_model()
     #test_deepseek_model()
     # test_ollama_model()
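
Note (not part of the patch): a minimal usage sketch of the renamed provider key, assuming the same "utils.get_llm_model" call pattern shown in the commented-out test above and the "GOOGLE_API_KEY" environment variable. After this change the key is resolved generically via f"{provider.upper()}_API_KEY", with no special case for Gemini; only the provider key changes, model identifiers such as "gemini-2.0-flash-exp" keep their Gemini names.

    import os
    from src.utils import utils  # assumed import path, matching the tests

    # Provider is now "google" (was "gemini"); the env var GOOGLE_API_KEY
    # is derived from the provider name by get_llm_model.
    llm = utils.get_llm_model(
        provider="google",
        model_name="gemini-2.0-flash-exp",
        temperature=0.0,
        api_key=os.getenv("GOOGLE_API_KEY", ""),
    )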