From 7ab7f783b160757acccfd788bcf51bac3fa905d1 Mon Sep 17 00:00:00 2001
From: Carson
Date: Tue, 12 Aug 2025 15:43:03 -0500
Subject: [PATCH 1/2] feat(ChatOpenRouter): Add OpenRouter support

---
 CHANGELOG.md                      |   3 +-
 chatlas/__init__.py               |   2 +
 chatlas/_provider_openrouter.py   | 158 ++++++++++++++++++++++++++++++
 docs/_quarto.yml                  |   1 +
 docs/get-started/models.qmd       |   3 +-
 tests/test_provider_openrouter.py |  68 +++++++++++++
 6 files changed, 233 insertions(+), 2 deletions(-)
 create mode 100644 chatlas/_provider_openrouter.py
 create mode 100644 tests/test_provider_openrouter.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6859b09c..4a686392 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,7 +11,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### New features
 
-* Added `ChatDeepSeek()` for chatting via [DeepSeek](https://www.deepseek.com/).
+* Added `ChatDeepSeek()` for chatting via [DeepSeek](https://www.deepseek.com/). (#147)
+* Added `ChatOpenRouter()` for chatting via [OpenRouter](https://openrouter.ai/). (#148)
 * Added `ChatHuggingFace()` for chatting via [Hugging Face](https://huggingface.co/). (#144)
 * Added `ChatPortkey()` for chatting via [Portkey AI](https://portkey.ai/). (#143)
 
diff --git a/chatlas/__init__.py b/chatlas/__init__.py
index 9266cfa5..b3b47f3e 100644
--- a/chatlas/__init__.py
+++ b/chatlas/__init__.py
@@ -15,6 +15,7 @@
 from ._provider_huggingface import ChatHuggingFace
 from ._provider_ollama import ChatOllama
 from ._provider_openai import ChatAzureOpenAI, ChatOpenAI
+from ._provider_openrouter import ChatOpenRouter
 from ._provider_perplexity import ChatPerplexity
 from ._provider_portkey import ChatPortkey
 from ._provider_snowflake import ChatSnowflake
@@ -39,6 +40,7 @@
     "ChatHuggingFace",
     "ChatOllama",
     "ChatOpenAI",
+    "ChatOpenRouter",
     "ChatAzureOpenAI",
     "ChatPerplexity",
     "ChatPortkey",
diff --git a/chatlas/_provider_openrouter.py b/chatlas/_provider_openrouter.py
new file mode 100644
index 00000000..f2d0f26c
--- /dev/null
+++ b/chatlas/_provider_openrouter.py
@@ -0,0 +1,158 @@
+from __future__ import annotations
+
+import os
+from typing import TYPE_CHECKING, Optional
+
+from ._chat import Chat
+from ._logging import log_model_default
+from ._provider_openai import OpenAIProvider
+from ._utils import MISSING, MISSING_TYPE, is_testing
+
+if TYPE_CHECKING:
+    from ._provider_openai import ChatCompletion
+    from .types.openai import ChatClientArgs, SubmitInputArgs
+
+
+def ChatOpenRouter(
+    *,
+    system_prompt: Optional[str] = None,
+    model: Optional[str] = None,
+    api_key: Optional[str] = None,
+    base_url: str = "https://openrouter.ai/api/v1",
+    seed: Optional[int] | MISSING_TYPE = MISSING,
+    kwargs: Optional["ChatClientArgs"] = None,
+) -> Chat["SubmitInputArgs", ChatCompletion]:
+    """
+    Chat with one of the many models hosted on OpenRouter.
+
+    OpenRouter provides access to a wide variety of language models from different
+    providers through a unified API. Feature support depends on the underlying model you use.
+
+    Prerequisites
+    -------------
+
+    ::: {.callout-note}
+    ## API key
+
+    Sign up at <https://openrouter.ai> to get an API key.
+    :::
+
+    Examples
+    --------
+
+    ```python
+    import os
+    from chatlas import ChatOpenRouter
+
+    chat = ChatOpenRouter(api_key=os.getenv("OPENROUTER_API_KEY"))
+    chat.chat("What is the capital of France?")
+    ```
+
+    ```python
+    # Use a specific model
+    chat = ChatOpenRouter(
+        model="openai/gpt-4o",
+        api_key=os.getenv("OPENROUTER_API_KEY")
+    )
+    chat.chat("Tell me about quantum computing")
+    ```
+
+    Parameters
+    ----------
+    system_prompt
+        A system prompt to set the behavior of the assistant.
+    model
+        The model to use for the chat. The default, None, will pick a reasonable
+        default, and warn you about it. We strongly recommend explicitly choosing
+        a model for all but the most casual use. See
+        <https://openrouter.ai/models> for available models.
+    api_key
+        The API key to use for authentication. You generally should not supply
+        this directly, but instead set the `OPENROUTER_API_KEY` environment variable.
+    base_url
+        The base URL to the endpoint; the default uses OpenRouter's API.
+    seed
+        Optional integer seed that the model uses to try and make output more
+        reproducible.
+    kwargs
+        Additional arguments to pass to the `openai.OpenAI()` client constructor.
+
+    Returns
+    -------
+    Chat
+        A chat object that retains the state of the conversation.
+
+    Note
+    ----
+    This function is a lightweight wrapper around [](`~chatlas.ChatOpenAI`) with
+    the defaults tweaked for OpenRouter.
+
+    Note
+    ----
+    Pasting an API key into a chat constructor (e.g., `ChatOpenRouter(api_key="...")`)
+    is the simplest way to get started, and is fine for interactive use, but is
+    problematic for code that may be shared with others.
+
+    Instead, consider using environment variables or a configuration file to manage
+    your credentials. One popular way to manage credentials is to use a `.env` file
+    to store your credentials, and then use the `python-dotenv` package to load them
+    into your environment.
+
+    ```shell
+    pip install python-dotenv
+    ```
+
+    ```shell
+    # .env
+    OPENROUTER_API_KEY=...
+    ```
+
+    ```python
+    from chatlas import ChatOpenRouter
+    from dotenv import load_dotenv
+
+    load_dotenv()
+    chat = ChatOpenRouter()
+    chat.console()
+    ```
+
+    Another, more general, solution is to load your environment variables into the shell
+    before starting Python (maybe in a `.bashrc`, `.zshrc`, etc. file):
+
+    ```shell
+    export OPENROUTER_API_KEY=...
+    ```
+    """
+    if model is None:
+        model = log_model_default("gpt-4o")
+
+    if api_key is None:
+        api_key = os.getenv("OPENROUTER_API_KEY")
+
+    if isinstance(seed, MISSING_TYPE):
+        seed = 1014 if is_testing() else None
+
+    kwargs2 = add_default_headers(kwargs or {})
+
+    return Chat(
+        provider=OpenAIProvider(
+            api_key=api_key,
+            model=model,
+            base_url=base_url,
+            seed=seed,
+            name="OpenRouter",
+            kwargs=kwargs2,
+        ),
+        system_prompt=system_prompt,
+    )
+
+
+def add_default_headers(kwargs: "ChatClientArgs") -> "ChatClientArgs":
+    """Add OpenRouter-specific headers to the client kwargs."""
+    headers = kwargs.get("default_headers", None)
+    default_headers = {
+        "HTTP-Referer": "https://posit-dev.github.io/chatlas",
+        "X-Title": "chatlas",
+        **(headers or {}),
+    }
+    return {"default_headers": default_headers, **kwargs}
\ No newline at end of file
diff --git a/docs/_quarto.yml b/docs/_quarto.yml
index 6a0656ae..2394b462 100644
--- a/docs/_quarto.yml
+++ b/docs/_quarto.yml
@@ -124,6 +124,7 @@ quartodoc:
         - ChatHuggingFace
         - ChatOllama
         - ChatOpenAI
+        - ChatOpenRouter
         - ChatPerplexity
         - ChatPortkey
         - ChatSnowflake
diff --git a/docs/get-started/models.qmd b/docs/get-started/models.qmd
index f0df7652..9a4a7442 100644
--- a/docs/get-started/models.qmd
+++ b/docs/get-started/models.qmd
@@ -22,7 +22,8 @@ To see the pre-requisites for a given provider, visit the relevant usage page in
 | Google (Gemini) | [`ChatGoogle()`](../reference/ChatGoogle.qmd) | |
 | Groq | [`ChatGroq()`](../reference/ChatGroq.qmd) | |
 | Hugging Face | [`ChatHuggingFace()`](../reference/ChatHuggingFace.qmd) | |
-| Ollama local models | [`ChatOllama()`](../reference/ChatOllama.qmd) | |
+| Ollama (local models) | [`ChatOllama()`](../reference/ChatOllama.qmd) | |
+| OpenRouter | [`ChatOpenRouter()`](../reference/ChatOpenRouter.qmd) | |
 | OpenAI | [`ChatOpenAI()`](../reference/ChatOpenAI.qmd) | |
 | perplexity.ai | [`ChatPerplexity()`](../reference/ChatPerplexity.qmd) | |
 | AWS Bedrock | [`ChatBedrockAnthropic()`](../reference/ChatBedrockAnthropic.qmd) | ✅ |
diff --git a/tests/test_provider_openrouter.py b/tests/test_provider_openrouter.py
new file mode 100644
index 00000000..9b37ee39
--- /dev/null
+++ b/tests/test_provider_openrouter.py
@@ -0,0 +1,68 @@
+import os
+
+import pytest
+from chatlas import ChatOpenRouter
+
+from .conftest import (
+    assert_data_extraction,
+    assert_images_inline,
+    assert_images_remote,
+    assert_tools_simple,
+)
+
+api_key = os.getenv("OPENROUTER_API_KEY")
+if api_key is None:
+    pytest.skip(
+        "OPENROUTER_API_KEY is not set; skipping tests", allow_module_level=True
+    )
+
+
+def test_openrouter_simple_request():
+    chat = ChatOpenRouter(
+        model="openai/gpt-4o-mini-2024-07-18",
+        system_prompt="Be as terse as possible; no punctuation",
+    )
+    chat.chat("What is 1 + 1?")
+    turn = chat.get_last_turn()
+    assert turn is not None
+    assert turn.tokens is not None
+    assert len(turn.tokens) == 3
+    assert turn.tokens[0] >= 1
+    assert turn.finish_reason == "stop"
+
+
+@pytest.mark.asyncio
+async def test_openrouter_simple_streaming_request():
+    chat = ChatOpenRouter(
+        model="openai/gpt-4o-mini-2024-07-18",
+        system_prompt="Be as terse as possible; no punctuation",
+    )
+    res = []
+    async for x in await chat.stream_async("What is 1 + 1?"):
+        res.append(x)
+    assert "2" in "".join(res)
+    turn = chat.get_last_turn()
+    assert turn is not None
+    assert turn.finish_reason == "stop"
+
+
+def test_openrouter_tool_variations():
+    def chat_fun(**kwargs):
+        return ChatOpenRouter(model="openai/gpt-4o-mini-2024-07-18", **kwargs)
+
+    assert_tools_simple(chat_fun)
+
+
+def test_data_extraction():
+    def chat_fun(**kwargs):
+        return ChatOpenRouter(model="openai/gpt-4o-mini-2024-07-18", **kwargs)
+
+    assert_data_extraction(chat_fun)
+
+
+def test_openrouter_images():
+    def chat_fun(**kwargs):
+        return ChatOpenRouter(model="openai/gpt-4o-mini-2024-07-18", **kwargs)
+
+    assert_images_inline(chat_fun)
+    assert_images_remote(chat_fun)

From 887d616881eedc0ce9aa8d880c895654787136fb Mon Sep 17 00:00:00 2001
From: Carson
Date: Tue, 12 Aug 2025 15:51:29 -0500
Subject: [PATCH 2/2] cleanup

---
 chatlas/_provider_openrouter.py | 15 +++------------
 1 file changed, 3 insertions(+), 12 deletions(-)

diff --git a/chatlas/_provider_openrouter.py b/chatlas/_provider_openrouter.py
index f2d0f26c..451638cd 100644
--- a/chatlas/_provider_openrouter.py
+++ b/chatlas/_provider_openrouter.py
@@ -48,15 +48,6 @@ def ChatOpenRouter(
     chat.chat("What is the capital of France?")
     ```
 
-    ```python
-    # Use a specific model
-    chat = ChatOpenRouter(
-        model="openai/gpt-4o",
-        api_key=os.getenv("OPENROUTER_API_KEY")
-    )
-    chat.chat("Tell me about quantum computing")
-    ```
-
     Parameters
     ----------
     system_prompt
@@ -124,7 +115,7 @@ def ChatOpenRouter(
     ```
     """
     if model is None:
-        model = log_model_default("gpt-4o")
+        model = log_model_default("gpt-4.1")
 
     if api_key is None:
         api_key = os.getenv("OPENROUTER_API_KEY")
@@ -148,11 +139,11 @@ def ChatOpenRouter(
 
 
 def add_default_headers(kwargs: "ChatClientArgs") -> "ChatClientArgs":
-    """Add OpenRouter-specific headers to the client kwargs."""
     headers = kwargs.get("default_headers", None)
+    # https://openrouter.ai/docs/api-keys
     default_headers = {
         "HTTP-Referer": "https://posit-dev.github.io/chatlas",
         "X-Title": "chatlas",
         **(headers or {}),
     }
-    return {"default_headers": default_headers, **kwargs}
\ No newline at end of file
+    return {"default_headers": default_headers, **kwargs}
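
A note on `add_default_headers()` as it stands after these two patches: the trailing `**kwargs` in the returned dict re-inserts any caller-supplied `default_headers` over the merged dictionary, so the `HTTP-Referer`/`X-Title` defaults are only attached when the caller passes no `default_headers` of their own. The snippet below is a minimal, self-contained sketch (type annotations dropped, and the `X-Custom` header is purely hypothetical) that copies the helper as defined above to illustrate this precedence; if a true merge is intended, returning `{**kwargs, "default_headers": default_headers}` would keep the defaults underneath caller-supplied headers.

```python
# Minimal sketch of the add_default_headers() helper defined in this patch,
# copied without type annotations. "X-Custom" is a hypothetical header used
# only to illustrate the precedence behavior.

def add_default_headers(kwargs):
    headers = kwargs.get("default_headers", None)
    default_headers = {
        "HTTP-Referer": "https://posit-dev.github.io/chatlas",
        "X-Title": "chatlas",
        **(headers or {}),
    }
    return {"default_headers": default_headers, **kwargs}


# No caller-supplied headers: the OpenRouter defaults are attached alongside
# any other client arguments.
print(add_default_headers({"timeout": 30}))
# {'default_headers': {'HTTP-Referer': 'https://posit-dev.github.io/chatlas',
#  'X-Title': 'chatlas'}, 'timeout': 30}

# Caller-supplied default_headers: the trailing **kwargs wins, so the merged
# dict (defaults + caller headers) is replaced by the caller's dict as-is.
print(add_default_headers({"default_headers": {"X-Custom": "1"}}))
# {'default_headers': {'X-Custom': '1'}}
```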