From cfa4d2f809fa7e45bfc6f474d38bd617d8ad528b Mon Sep 17 00:00:00 2001 From: James Williams Date: Thu, 18 Dec 2025 12:43:52 -0300 Subject: [PATCH 01/10] Integrate todo list with session management. This adds the current todo list content into the session save/load payload. The flag --preserve-todo-list no longer has any effect and is marked as deprecated. --- aider/args.py | 5 ++++- aider/coders/base_coder.py | 27 ++++++++++++++---------- aider/sessions.py | 25 ++++++++++++++++++++++ aider/website/docs/sessions.md | 16 +++++++++++--- tests/basic/test_sessions.py | 38 ++++++++++++++++++++++++++++------ 5 files changed, 90 insertions(+), 21 deletions(-) diff --git a/aider/args.py b/aider/args.py index e501d6d5506..43240ee55bd 100644 --- a/aider/args.py +++ b/aider/args.py @@ -309,7 +309,10 @@ def get_parser(default_config_files, git_root): group.add_argument( "--preserve-todo-list", action="store_true", - help="Preserve the existing .aider.todo.txt file on startup (default: False)", + help=( + "Deprecated: no longer needed because the todo list is saved and restored with" + " sessions. This flag has no effect and will be removed." + ), default=False, ) group.add_argument( diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py index b97d0300f8b..f4a311878be 100755 --- a/aider/coders/base_coder.py +++ b/aider/coders/base_coder.py @@ -314,6 +314,12 @@ def __init__( self.auto_accept_architect = auto_accept_architect self.preserve_todo_list = preserve_todo_list + if self.preserve_todo_list: + self.io.tool_warning( + "--preserve-todo-list is deprecated; todo lists are now saved and restored with" + " sessions. The flag will be removed in a future release." 
+ ) + self.ignore_mentions = ignore_mentions if not self.ignore_mentions: self.ignore_mentions = set() @@ -524,17 +530,16 @@ def __init__( self.auto_test = auto_test self.test_cmd = test_cmd - # Clean up todo list file on startup unless preserve_todo_list is True - if not getattr(self, "preserve_todo_list", False): - todo_file_path = ".aider.todo.txt" - abs_path = self.abs_root_path(todo_file_path) - if os.path.isfile(abs_path): - try: - os.remove(abs_path) - if self.verbose: - self.io.tool_output(f"Removed existing todo list file: {todo_file_path}") - except Exception as e: - self.io.tool_warning(f"Could not remove todo list file {todo_file_path}: {e}") + # Clean up todo list file on startup; sessions will restore it when needed + todo_file_path = ".aider.todo.txt" + abs_path = self.abs_root_path(todo_file_path) + if os.path.isfile(abs_path): + try: + os.remove(abs_path) + if self.verbose: + self.io.tool_output(f"Removed existing todo list file: {todo_file_path}") + except Exception as e: + self.io.tool_warning(f"Could not remove todo list file {todo_file_path}: {e}") # validate the functions jsonschema if self.functions: diff --git a/aider/sessions.py b/aider/sessions.py index f4a29771981..bbc0b2c807e 100644 --- a/aider/sessions.py +++ b/aider/sessions.py @@ -126,6 +126,17 @@ def _build_session_data(self, session_name) -> Dict: for abs_fname in self.coder.abs_read_only_stubs_fnames ] + # Capture todo list content so it can be restored with the session + todo_content = None + try: + todo_path = self.coder.abs_root_path(".aider.todo.txt") + if os.path.isfile(todo_path): + todo_content = self.io.read_text(todo_path) + if todo_content is None: + todo_content = "" + except Exception as e: + self.io.tool_warning(f"Could not read todo list file: {e}") + return { "version": 1, "session_name": session_name, @@ -148,6 +159,7 @@ def _build_session_data(self, session_name) -> Dict: "auto_lint": self.coder.auto_lint, "auto_test": self.coder.auto_test, }, + "todo_list": 
todo_content, } def _find_session_file(self, session_identifier: str) -> Optional[Path]: @@ -232,6 +244,19 @@ def _apply_session_data(self, session_data: Dict, session_file: Path) -> bool: if "auto_test" in settings: self.coder.auto_test = settings["auto_test"] + # Restore todo list content if present in the session + if "todo_list" in session_data: + todo_path = self.coder.abs_root_path(".aider.todo.txt") + todo_content = session_data.get("todo_list") + try: + if todo_content is None: + if os.path.exists(todo_path): + os.remove(todo_path) + else: + self.io.write_text(todo_path, todo_content) + except Exception as e: + self.io.tool_warning(f"Could not restore todo list: {e}") + self.io.tool_output( f"Session loaded: {session_data.get('session_name', session_file.stem)}" ) diff --git a/aider/website/docs/sessions.md b/aider/website/docs/sessions.md index 8ca925fba2b..1afdb45ec61 100644 --- a/aider/website/docs/sessions.md +++ b/aider/website/docs/sessions.md @@ -42,6 +42,7 @@ When `--auto-save` is enabled, aider will automatically save your session as 'au - All files in the chat (editable, read-only, and read-only stubs) - Current model and edit format settings - Auto-commit, auto-lint, and auto-test settings +- Todo list content from `.aider.todo.txt` - Session metadata (timestamp, version) ### `/load-session ` @@ -56,6 +57,7 @@ Load a previously saved session by name or file path. - Restores chat history and file configurations - Recreates the exact session state - Preserves all settings and model configurations +- Restores the todo list content saved in the session ### `/list-sessions` List all available saved sessions in `.aider/sessions/`. 
@@ -78,10 +80,13 @@ Sessions are stored as JSON files in the `.aider/sessions/` directory within you ```json { - "version": "1.0", + "version": 1, "timestamp": 1700000000, "session_name": "my-session", "model": "gpt-4", + "weak_model": "gpt-4o-mini", + "editor_model": "gpt-4o", + "editor_edit_format": "diff", "edit_format": "diff", "chat_history": { "done_messages": [...], @@ -93,11 +98,11 @@ Sessions are stored as JSON files in the `.aider/sessions/` directory within you "read_only_stubs": [] }, "settings": { - "root": "/path/to/project", "auto_commits": true, "auto_lint": false, "auto_test": false - } + }, + "todo_list": "- plan feature A\n- write tests\n" } ``` @@ -143,6 +148,7 @@ Sessions are stored as JSON files in the `.aider/sessions/` directory within you - Session files include all file paths, so they work best when project structure is stable - External files (outside the project root) are stored with absolute paths - Missing files are skipped with warnings during loading +- The todo list file (`.aider.todo.txt`) is cleared on startup; it is restored when you load a session or when you update it during a run ### Version Control - Consider adding `.aider/sessions/` to your `.gitignore` if sessions contain sensitive information @@ -160,6 +166,7 @@ If files are reported as missing during loading: - The files may have been moved or deleted - Session files store relative paths, so directory structure changes can affect this - External files must exist at their original locations +- The todo list (`.aider.todo.txt`) is cleared on startup unless restored from a loaded session ### Corrupted Sessions If a session fails to load: @@ -167,6 +174,9 @@ If a session fails to load: - Verify the session version is compatible - Try creating a new session and compare file structures +### Deprecated Options +- `--preserve-todo-list` is deprecated. The todo list is cleared on startup and restored only when you load a session that contains it. 
+ ## Related Commands - `/reset` - Clear chat history and drop files (useful before loading a session) diff --git a/tests/basic/test_sessions.py b/tests/basic/test_sessions.py index 88367e49b9a..2b24607e831 100644 --- a/tests/basic/test_sessions.py +++ b/tests/basic/test_sessions.py @@ -57,6 +57,10 @@ async def test_cmd_save_session_basic(self): {"role": "user", "content": "Can you help me?"}, ] + # Add a todo list + todo_content = "Task 1\nTask 2" + Path(".aider.todo.txt").write_text(todo_content, encoding="utf-8") + # Save session session_name = "test_session" commands.cmd_save_session(session_name) @@ -69,7 +73,7 @@ async def test_cmd_save_session_basic(self): with open(session_file, "r", encoding="utf-8") as f: session_data = json.load(f) - self.assertEqual(session_data["version"], "1.0") + self.assertEqual(session_data["version"], 1) self.assertEqual(session_data["session_name"], session_name) self.assertEqual(session_data["model"], self.GPT35.name) self.assertEqual(session_data["edit_format"], coder.edit_format) @@ -87,11 +91,13 @@ async def test_cmd_save_session_basic(self): # Verify settings settings = session_data["settings"] - self.assertEqual(settings["root"], coder.root) self.assertEqual(settings["auto_commits"], coder.auto_commits) self.assertEqual(settings["auto_lint"], coder.auto_lint) self.assertEqual(settings["auto_test"], coder.auto_test) + # Verify todo list persisted + self.assertEqual(session_data["todo_list"], todo_content) + async def test_cmd_load_session_basic(self): """Test basic session load functionality""" with GitTemporaryDirectory() as repo_dir: @@ -113,7 +119,7 @@ async def test_cmd_load_session_basic(self): # Create a session file manually session_data = { - "version": "1.0", + "version": 1, "timestamp": time.time(), "session_name": "test_session", "model": self.GPT35.name, @@ -133,11 +139,11 @@ async def test_cmd_load_session_basic(self): "read_only_stubs": [], }, "settings": { - "root": str(repo_dir), "auto_commits": True, 
"auto_lint": False, "auto_test": False, }, + "todo_list": "Restored tasks\n- item", } # Save session file @@ -166,6 +172,11 @@ async def test_cmd_load_session_basic(self): self.assertEqual(coder.auto_lint, False) self.assertEqual(coder.auto_test, False) + # Verify todo list restored + todo_file = Path(".aider.todo.txt") + self.assertTrue(todo_file.exists()) + self.assertEqual(todo_file.read_text(encoding="utf-8"), session_data["todo_list"]) + async def test_cmd_list_sessions_basic(self): """Test basic session list functionality""" with GitTemporaryDirectory(): @@ -176,7 +187,7 @@ async def test_cmd_list_sessions_basic(self): # Create multiple session files sessions_data = [ { - "version": "1.0", + "version": 1, "timestamp": time.time() - 3600, # 1 hour ago "session_name": "session1", "model": "gpt-3.5-turbo", @@ -191,7 +202,7 @@ async def test_cmd_list_sessions_basic(self): }, }, { - "version": "1.0", + "version": 1, "timestamp": time.time(), # current time "session_name": "session2", "model": "gpt-4", @@ -232,3 +243,18 @@ async def test_cmd_list_sessions_basic(self): self.assertIn("session2", output_text) self.assertIn("gpt-3.5-turbo", output_text) self.assertIn("gpt-4", output_text) + + async def test_preserve_todo_list_deprecated(self): + """Ensure preserve-todo-list flag is deprecated and todo is cleared on startup""" + with GitTemporaryDirectory(): + todo_path = Path(".aider.todo.txt") + todo_path.write_text("keep me", encoding="utf-8") + + io = InputOutput(pretty=False, fancy_input=False, yes=True) + with mock.patch.object(io, "tool_warning") as mock_tool_warning: + await Coder.create(self.GPT35, None, io, preserve_todo_list=True) + + self.assertFalse(todo_path.exists()) + self.assertTrue( + any("deprecated" in call[0][0] for call in mock_tool_warning.call_args_list) + ) From 89482a95cf5238c7b88e8580ecc244421241db55 Mon Sep 17 00:00:00 2001 From: James Williams Date: Thu, 18 Dec 2025 17:53:31 -0300 Subject: [PATCH 02/10] Remove references to 
--preserve-todo-list elsewhere in code. --- aider/website/docs/config/agent-mode.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/aider/website/docs/config/agent-mode.md b/aider/website/docs/config/agent-mode.md index c0953b755e0..2300406a053 100644 --- a/aider/website/docs/config/agent-mode.md +++ b/aider/website/docs/config/agent-mode.md @@ -204,11 +204,9 @@ When `include_context_blocks` is specified, only the listed blocks will be inclu #### Other Aider-CE CLI/Config Options for Agent Mode -- `preserve-todo-list` - Preserve todo list across sessions - `use-enhanced-map` - Use enhanced repo map that takes into account import relationships between files ```yaml -preserve-todo-list: true use-enhanced-map: true ``` @@ -240,7 +238,6 @@ agent-config: skills_excludelist: ["legacy-tools"] # Optional: Blacklist of skills to exclude # Other Agent Mode options -preserve-todo-list: true # Preserve todo list across sessions use-enhanced-map: true # Use enhanced repo map with import relationships ``` @@ -260,4 +257,4 @@ For complete documentation on creating and using skills, including skill directo - **Scalable exploration**: Can handle large codebases through strategic context management - **Recovery mechanisms**: Built-in undo and safety features -Agent Mode represents a significant evolution in aider's capabilities, enabling more sophisticated and autonomous codebase manipulation while maintaining safety and control through the tool-based architecture. \ No newline at end of file +Agent Mode represents a significant evolution in aider's capabilities, enabling more sophisticated and autonomous codebase manipulation while maintaining safety and control through the tool-based architecture. From 9d36dc2de277113785b8fc581811bbb9f25e13e8 Mon Sep 17 00:00:00 2001 From: James Williams Date: Thu, 18 Dec 2025 18:49:40 -0300 Subject: [PATCH 03/10] Fix insert-block tool call. 
--- aider/tools/insert_block.py | 9 ++- tests/tools/test_insert_block.py | 117 +++++++++++++++++++++++++++++++ 2 files changed, 125 insertions(+), 1 deletion(-) create mode 100644 tests/tools/test_insert_block.py diff --git a/aider/tools/insert_block.py b/aider/tools/insert_block.py index 6fc72835eab..c4f282fce79 100644 --- a/aider/tools/insert_block.py +++ b/aider/tools/insert_block.py @@ -75,7 +75,14 @@ def execute( tool_name = "InsertBlock" try: # 1. Validate parameters - if sum(x is not None for x in [after_pattern, before_pattern, position]) != 1: + def _is_provided(value): + if value is None: + return False + if isinstance(value, str) and value == "": + return False + return True + + if sum(_is_provided(x) for x in [after_pattern, before_pattern, position]) != 1: raise ToolError( "Must specify exactly one of: after_pattern, before_pattern, or position" ) diff --git a/tests/tools/test_insert_block.py b/tests/tools/test_insert_block.py new file mode 100644 index 00000000000..33955bb6509 --- /dev/null +++ b/tests/tools/test_insert_block.py @@ -0,0 +1,117 @@ +from pathlib import Path +from types import SimpleNamespace +from unittest.mock import Mock + +import pytest + +from aider.tools import insert_block + + +class DummyIO: + def __init__(self): + self.tool_error = Mock() + self.tool_warning = Mock() + self.tool_output = Mock() + + def read_text(self, path): + return Path(path).read_text() + + def write_text(self, path, content): + Path(path).write_text(content) + + +class DummyChangeTracker: + def __init__(self): + self.calls = [] + + def track_change( + self, file_path, change_type, original_content, new_content, metadata, change_id=None + ): + self.calls.append( + { + "file_path": file_path, + "change_type": change_type, + "original_content": original_content, + "new_content": new_content, + "metadata": metadata, + "change_id": change_id, + } + ) + return f"change-{len(self.calls)}" + + +class DummyCoder: + def __init__(self, root): + self.root = str(root) + 
self.repo = SimpleNamespace(root=str(root)) + self.io = DummyIO() + self.change_tracker = DummyChangeTracker() + self.aider_edited_files = set() + self.files_edited_by_tools = set() + self.abs_read_only_fnames = set() + self.abs_fnames = set() + + def abs_root_path(self, file_path): + path = Path(file_path) + if path.is_absolute(): + return str(path) + return str((Path(self.root) / path).resolve()) + + def get_rel_fname(self, abs_path): + return str(Path(abs_path).resolve().relative_to(self.root)) + + +@pytest.fixture +def coder_with_file(tmp_path): + file_path = tmp_path / "example.txt" + file_path.write_text("first line\nsecond line\n") + coder = DummyCoder(tmp_path) + coder.abs_fnames.add(str(file_path.resolve())) + return coder, file_path + + +def test_position_top_succeeds_with_no_patterns(coder_with_file): + coder, file_path = coder_with_file + + result = insert_block.Tool.execute( + coder, + file_path="example.txt", + content="inserted line", + position="top", + ) + + assert result.startswith("Successfully executed InsertBlock.") + assert file_path.read_text().splitlines()[0] == "inserted line" + coder.io.tool_error.assert_not_called() + + +def test_position_top_ignores_blank_patterns(coder_with_file): + coder, file_path = coder_with_file + + result = insert_block.Tool.execute( + coder, + file_path="example.txt", + content="inserted line", + position="top", + after_pattern="", + ) + + assert result.startswith("Successfully executed InsertBlock.") + assert file_path.read_text().splitlines()[0] == "inserted line" + coder.io.tool_error.assert_not_called() + + +def test_mutually_exclusive_parameters_raise(coder_with_file): + coder, file_path = coder_with_file + + result = insert_block.Tool.execute( + coder, + file_path="example.txt", + content="new line", + position="top", + after_pattern="first line", + ) + + assert result.startswith("Error: Must specify exactly one of") + assert file_path.read_text().startswith("first line") + 
coder.io.tool_error.assert_called() From 6eab7a670ec54f7a4d9221d2726406e9de01aa6f Mon Sep 17 00:00:00 2001 From: James Williams Date: Thu, 18 Dec 2025 19:35:17 -0300 Subject: [PATCH 04/10] Fix mutual exclusivity in show-numbered-context. --- aider/tools/show_numbered_context.py | 17 ++++- tests/tools/test_show_numbered_context.py | 90 +++++++++++++++++++++++ 2 files changed, 106 insertions(+), 1 deletion(-) create mode 100644 tests/tools/test_show_numbered_context.py diff --git a/aider/tools/show_numbered_context.py b/aider/tools/show_numbered_context.py index c5376853851..337f2195e20 100644 --- a/aider/tools/show_numbered_context.py +++ b/aider/tools/show_numbered_context.py @@ -34,9 +34,24 @@ def execute(cls, coder, file_path, pattern=None, line_number=None, context_lines tool_name = "ShowNumberedContext" try: # 1. Validate arguments - if not (pattern is None) ^ (line_number is None): + def _is_provided(value): + if value is None: + return False + if isinstance(value, str) and value == "": + return False + if isinstance(value, (int, float)) and value == 0: + return False + return True + + provided_counts = [_is_provided(x) for x in [pattern, line_number]] + if sum(provided_counts) != 1: raise ToolError("Provide exactly one of 'pattern' or 'line_number'.") + if not provided_counts[0]: + pattern = None + if not provided_counts[1]: + line_number = None + # 2. 
Resolve path abs_path, rel_path = resolve_paths(coder, file_path) if not os.path.exists(abs_path): diff --git a/tests/tools/test_show_numbered_context.py b/tests/tools/test_show_numbered_context.py new file mode 100644 index 00000000000..e8ef01af70e --- /dev/null +++ b/tests/tools/test_show_numbered_context.py @@ -0,0 +1,90 @@ +from pathlib import Path +from types import SimpleNamespace +from unittest.mock import Mock + +import pytest + +from aider.tools import show_numbered_context + + +class DummyIO: + def __init__(self): + self.tool_error = Mock() + self.tool_warning = Mock() + self.tool_output = Mock() + + def read_text(self, path): + return Path(path).read_text() + + def write_text(self, path, content): + Path(path).write_text(content) + + +class DummyCoder: + def __init__(self, root): + self.root = str(root) + self.repo = SimpleNamespace(root=str(root)) + self.io = DummyIO() + + def abs_root_path(self, file_path): + path = Path(file_path) + if path.is_absolute(): + return str(path) + return str((Path(self.root) / path).resolve()) + + def get_rel_fname(self, abs_path): + return str(Path(abs_path).resolve().relative_to(self.root)) + + +@pytest.fixture +def coder_with_file(tmp_path): + file_path = tmp_path / "example.txt" + file_path.write_text("alpha\nbeta\ngamma\n") + coder = DummyCoder(tmp_path) + return coder, file_path + + +def test_pattern_with_zero_line_number_is_allowed(coder_with_file): + coder, file_path = coder_with_file + + result = show_numbered_context.Tool.execute( + coder, + file_path="example.txt", + pattern="beta", + line_number=0, + context_lines=0, + ) + + assert "beta" in result + assert "line 2" in result or "2 | beta" in result + coder.io.tool_error.assert_not_called() + + +def test_empty_pattern_uses_line_number(coder_with_file): + coder, file_path = coder_with_file + + result = show_numbered_context.Tool.execute( + coder, + file_path="example.txt", + pattern="", + line_number=2, + context_lines=0, + ) + + assert "2 | beta" in result + 
coder.io.tool_error.assert_not_called() + + +def test_conflicting_pattern_and_line_number_raise(coder_with_file): + coder, file_path = coder_with_file + + result = show_numbered_context.Tool.execute( + coder, + file_path="example.txt", + pattern="beta", + line_number=2, + context_lines=0, + ) + + assert result.startswith("Error: Provide exactly one of") + coder.io.tool_error.assert_called() From 577145f161987b3fff6860ece3bb6e46ebc936b4 Mon Sep 17 00:00:00 2001 From: James Williams Date: Thu, 18 Dec 2025 19:42:58 -0300 Subject: [PATCH 05/10] Harden show-numbered-context. --- aider/tools/utils/helpers.py | 12 ++++++++++-- tests/tools/test_show_numbered_context.py | 16 ++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/aider/tools/utils/helpers.py b/aider/tools/utils/helpers.py index 63e068129a3..ff34179ae9c 100644 --- a/aider/tools/utils/helpers.py +++ b/aider/tools/utils/helpers.py @@ -105,10 +105,18 @@ def determine_line_range( Determines the end line index based on end_pattern or line_count. Raises ToolError if end_pattern is not found or line_count is invalid. 
""" + + def _is_provided(value): + if value is None: + return False + if isinstance(value, str) and value == "": + return False + return True + # Parameter validation: Ensure only one targeting method is used targeting_methods = [ - target_symbol is not None, - start_pattern_line_index is not None, + _is_provided(target_symbol), + _is_provided(start_pattern_line_index), # Note: line_count and end_pattern depend on start_pattern_line_index ] if sum(targeting_methods) > 1: diff --git a/tests/tools/test_show_numbered_context.py b/tests/tools/test_show_numbered_context.py index e8ef01af70e..a33019b5ec1 100644 --- a/tests/tools/test_show_numbered_context.py +++ b/tests/tools/test_show_numbered_context.py @@ -88,3 +88,19 @@ def test_conflicting_pattern_and_line_number_raise(coder_with_file): assert result.startswith("Error: Provide exactly one of") coder.io.tool_error.assert_called() + + +def test_target_symbol_empty_string_treated_as_missing(): + from aider.tools.utils import helpers + from aider.tools.utils.helpers import ToolError + + with pytest.raises(ToolError, match="Must specify either target_symbol or start_pattern"): + helpers.determine_line_range( + coder=SimpleNamespace(repo_map=None), # repo_map not used in this path + file_path="dummy", + lines=["a", "b"], + target_symbol="", + start_pattern_line_index=None, + end_pattern=None, + line_count=1, + ) From a34a86fab4e3811ef5d24293de60e83ce2aa2cbf Mon Sep 17 00:00:00 2001 From: James Williams Date: Thu, 18 Dec 2025 19:46:56 -0300 Subject: [PATCH 06/10] Refactor is_provided into helpers.py. 
--- aider/tools/insert_block.py | 10 ++-------- aider/tools/show_numbered_context.py | 26 ++++++++++++-------------- aider/tools/utils/helpers.py | 26 +++++++++++++++++--------- 3 files changed, 31 insertions(+), 31 deletions(-) diff --git a/aider/tools/insert_block.py b/aider/tools/insert_block.py index c4f282fce79..e59fa8f2edb 100644 --- a/aider/tools/insert_block.py +++ b/aider/tools/insert_block.py @@ -9,6 +9,7 @@ format_tool_result, generate_unified_diff_snippet, handle_tool_error, + is_provided, select_occurrence_index, validate_file_for_edit, ) @@ -75,14 +76,7 @@ def execute( tool_name = "InsertBlock" try: # 1. Validate parameters - def _is_provided(value): - if value is None: - return False - if isinstance(value, str) and value == "": - return False - return True - - if sum(_is_provided(x) for x in [after_pattern, before_pattern, position]) != 1: + if sum(is_provided(x) for x in [after_pattern, before_pattern, position]) != 1: raise ToolError( "Must specify exactly one of: after_pattern, before_pattern, or position" ) diff --git a/aider/tools/show_numbered_context.py b/aider/tools/show_numbered_context.py index 337f2195e20..45aff33b446 100644 --- a/aider/tools/show_numbered_context.py +++ b/aider/tools/show_numbered_context.py @@ -1,7 +1,12 @@ import os from aider.tools.utils.base_tool import BaseTool -from aider.tools.utils.helpers import ToolError, handle_tool_error, resolve_paths +from aider.tools.utils.helpers import ( + ToolError, + handle_tool_error, + is_provided, + resolve_paths, +) class Tool(BaseTool): @@ -34,22 +39,15 @@ def execute(cls, coder, file_path, pattern=None, line_number=None, context_lines tool_name = "ShowNumberedContext" try: # 1. 
Validate arguments - def _is_provided(value): - if value is None: - return False - if isinstance(value, str) and value == "": - return False - if isinstance(value, (int, float)) and value == 0: - return False - return True - - provided_counts = [_is_provided(x) for x in [pattern, line_number]] - if sum(provided_counts) != 1: + pattern_provided = is_provided(pattern) + line_number_provided = is_provided(line_number, treat_zero_as_missing=True) + + if sum([pattern_provided, line_number_provided]) != 1: raise ToolError("Provide exactly one of 'pattern' or 'line_number'.") - if not provided_counts[0]: + if not pattern_provided: pattern = None - if not provided_counts[1]: + if not line_number_provided: line_number = None # 2. Resolve path diff --git a/aider/tools/utils/helpers.py b/aider/tools/utils/helpers.py index ff34179ae9c..a0fbb871118 100644 --- a/aider/tools/utils/helpers.py +++ b/aider/tools/utils/helpers.py @@ -10,6 +10,21 @@ class ToolError(Exception): pass +def is_provided(value, *, treat_zero_as_missing=False): + """ + Normalizes parameter presence checks across tools. + + Returns True when the value should be considered user-provided. + """ + if value is None: + return False + if isinstance(value, str) and value == "": + return False + if treat_zero_as_missing and isinstance(value, (int, float)) and value == 0: + return False + return True + + def resolve_paths(coder, file_path): """Resolves absolute and relative paths for a given file path.""" try: @@ -106,17 +121,10 @@ def determine_line_range( Raises ToolError if end_pattern is not found or line_count is invalid. 
""" - def _is_provided(value): - if value is None: - return False - if isinstance(value, str) and value == "": - return False - return True - # Parameter validation: Ensure only one targeting method is used targeting_methods = [ - _is_provided(target_symbol), - _is_provided(start_pattern_line_index), + is_provided(target_symbol), + is_provided(start_pattern_line_index), # Note: line_count and end_pattern depend on start_pattern_line_index ] if sum(targeting_methods) > 1: From ec1a1abb68e2b2b9bc210a8677340b14acd569fe Mon Sep 17 00:00:00 2001 From: James Williams Date: Thu, 18 Dec 2025 20:24:22 -0300 Subject: [PATCH 07/10] Allow for empty position argument. --- aider/tools/insert_block.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aider/tools/insert_block.py b/aider/tools/insert_block.py index e59fa8f2edb..96f13262814 100644 --- a/aider/tools/insert_block.py +++ b/aider/tools/insert_block.py @@ -33,7 +33,7 @@ class Tool(BaseTool): "occurrence": {"type": "integer", "default": 1}, "change_id": {"type": "string"}, "dry_run": {"type": "boolean", "default": False}, - "position": {"type": "string", "enum": ["top", "bottom"]}, + "position": {"type": "string", "enum": ["top", "bottom", ""]}, "auto_indent": {"type": "boolean", "default": True}, "use_regex": {"type": "boolean", "default": False}, }, @@ -69,7 +69,7 @@ def execute( occurrence: Which occurrence of the pattern to use (1-based, or -1 for last) change_id: Optional ID for tracking changes dry_run: If True, only simulate the change - position: Special position like "start_of_file" or "end_of_file" + position: Special position like "top" or "bottom" (mutually exclusive with before_pattern and after_pattern) auto_indent: If True, automatically adjust indentation of inserted content use_regex: If True, treat patterns as regular expressions """ From d07b079300270ccfefd75ab1f2b4203ae810323d Mon Sep 17 00:00:00 2001 From: Travis Bender Date: Thu, 18 Dec 2025 20:36:52 -0700 Subject: [PATCH 08/10] 
fix: add handling for models strings with colons by building an index for suffixes --- aider/main.py | 53 ++++++++++++---------- tests/basic/test_main.py | 97 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 126 insertions(+), 24 deletions(-) diff --git a/aider/main.py b/aider/main.py index 723cdc8c668..3df3ec7bdd9 100644 --- a/aider/main.py +++ b/aider/main.py @@ -962,33 +962,38 @@ def get_io(pretty): model_overrides[model_name] = {} model_overrides[model_name].update(tags) - # Parse model names with suffixes and apply overrides - def parse_model_with_suffix(model_name, overrides): - """Parse model name with optional :suffix and apply overrides.""" + # Build an index from full "base:suffix" names to (base_model, override_dict) + # so we don't have to parse/split user-provided model names at runtime. + override_index = {} + for base_model, suffixes in model_overrides.items(): + if not isinstance(suffixes, dict): + continue + for suffix, cfg in suffixes.items(): + if not isinstance(cfg, dict): + continue + full_name = f"{base_model}:{suffix}" + # Later entries override earlier ones + override_index[full_name] = (base_model, cfg) + + def apply_model_overrides(model_name): + """Return (effective_model_name, override_kwargs) for a given model_name. + + If model_name exactly matches a configured "base:suffix" override, we + switch to the base model and apply that override dict. Otherwise we + leave the name unchanged and return empty overrides. 
+ """ if not model_name: return model_name, {} + entry = override_index.get(model_name) + if not entry: + return model_name, {} + base_model, cfg = entry + return base_model, cfg.copy() - # Split on last colon to get model name and suffix - if ":" in model_name: - base_model, suffix = model_name.rsplit(":", 1) - else: - base_model, suffix = model_name, None - - # Apply overrides if suffix exists - override_kwargs = {} - if suffix and base_model in overrides and suffix in overrides[base_model]: - override_kwargs = overrides[base_model][suffix].copy() - - return base_model, override_kwargs - - # Parse main model - main_model_name, main_model_overrides = parse_model_with_suffix(args.model, model_overrides) - weak_model_name, weak_model_overrides = parse_model_with_suffix( - args.weak_model, model_overrides - ) - editor_model_name, editor_model_overrides = parse_model_with_suffix( - args.editor_model, model_overrides - ) + # Apply overrides (if any) to the selected models + main_model_name, main_model_overrides = apply_model_overrides(args.model) + weak_model_name, weak_model_overrides = apply_model_overrides(args.weak_model) + editor_model_name, editor_model_overrides = apply_model_overrides(args.editor_model) # Create weak model if specified with overrides weak_model_obj = None diff --git a/tests/basic/test_main.py b/tests/basic/test_main.py index 5cd128aba8a..20e30845f29 100644 --- a/tests/basic/test_main.py +++ b/tests/basic/test_main.py @@ -1151,6 +1151,103 @@ async def test_model_precedence(self): del os.environ["ANTHROPIC_API_KEY"] del os.environ["OPENAI_API_KEY"] + async def test_model_overrides_suffix_applied(self): + with GitTemporaryDirectory() as git_dir: + git_dir = Path(git_dir) + overrides_file = git_dir / ".aider.model.overrides.yml" + overrides_file.write_text("gpt-4o:\n fast:\n temperature: 0.1\n") + + with ( + patch("aider.models.Model") as MockModel, + patch("aider.coders.Coder.create") as MockCoder, + ): + mock_coder_instance = MagicMock() + 
MockCoder.return_value = mock_coder_instance + + mock_instance = MockModel.return_value + mock_instance.info = {} + mock_instance.name = "gpt-4o" + mock_instance.validate_environment.return_value = { + "missing_keys": [], + "keys_in_environment": [], + } + mock_instance.accepts_settings = [] + mock_instance.weak_model_name = None + mock_instance.get_weak_model.return_value = None + + await main( + ["--model", "gpt-4o:fast", "--exit", "--yes", "--no-git"], + input=DummyInput(), + output=DummyOutput(), + force_git_root=git_dir, + ) + + # Find the call that constructed the main model with overrides + matched_call_found = False + for call_args in MockModel.call_args_list: + args, kwargs = call_args + if ( + args + and args[0] == "gpt-4o" + and kwargs.get("override_kwargs") == {"temperature": 0.1} + ): + matched_call_found = True + break + + self.assertTrue( + matched_call_found, + ( + "Expected a Model call with base name 'gpt-4o' and override_kwargs" + " {'temperature': 0.1}" + ), + ) + + async def test_model_overrides_no_match_preserves_model_name(self): + with GitTemporaryDirectory() as git_dir: + git_dir = Path(git_dir) + + with ( + patch("aider.models.Model") as MockModel, + patch("aider.coders.Coder.create") as MockCoder, + ): + mock_coder_instance = MagicMock() + MockCoder.return_value = mock_coder_instance + + mock_instance = MockModel.return_value + mock_instance.info = {} + mock_instance.name = "test-model" + mock_instance.validate_environment.return_value = { + "missing_keys": [], + "keys_in_environment": [], + } + mock_instance.accepts_settings = [] + mock_instance.weak_model_name = None + mock_instance.get_weak_model.return_value = None + + model_name = "hf:moonshotai/Kimi-K2-Thinking" + + await main( + ["--model", model_name, "--exit", "--yes", "--no-git"], + input=DummyInput(), + output=DummyOutput(), + force_git_root=git_dir, + ) + + matched_call_found = False + for call_args in MockModel.call_args_list: + args, kwargs = call_args + if args and args[0] 
== model_name and kwargs.get("override_kwargs") == {}: + matched_call_found = True + break + + self.assertTrue( + matched_call_found, + ( + "Expected a Model call with the full model name preserved and empty" + " override_kwargs" + ), + ) + async def test_chat_language_spanish(self): with GitTemporaryDirectory(): coder = await main( From 813d05115f0444dcb95fe71bfd70fc6efa7009cc Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Thu, 18 Dec 2025 23:43:27 -0500 Subject: [PATCH 09/10] Filter model-metadata to only models supporting chat for distribution size --- .pre-commit-config.yaml | 9 + aider/resources/model-metadata.json | 56176 ++++++++++++-------------- scripts/filter-chat-mode.js | 105 + 3 files changed, 25834 insertions(+), 30456 deletions(-) create mode 100644 scripts/filter-chat-mode.js diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ca81b8a7339..07d1c7ed07a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,3 +21,12 @@ repos: args: ["--skip", "aider/website/docs/languages.md"] additional_dependencies: - tomli + - repo: local + hooks: + - id: filter-model-metadata + name: Filter model metadata to chat mode only + entry: node scripts/filter-chat-mode.js + args: ["aider/resources/model-metadata.json"] + language: system + files: ^aider/resources/model-metadata\.json$ + pass_filenames: false diff --git a/aider/resources/model-metadata.json b/aider/resources/model-metadata.json index 2ab5a702a19..bcce123910b 100644 --- a/aider/resources/model-metadata.json +++ b/aider/resources/model-metadata.json @@ -1,30487 +1,25751 @@ { - "sample_spec": { - "code_interpreter_cost_per_session": 0.0, - "computer_use_input_cost_per_1k_tokens": 0.0, - "computer_use_output_cost_per_1k_tokens": 0.0, - "deprecation_date": "date when the model becomes deprecated in the format YYYY-MM-DD", - "file_search_cost_per_1k_calls": 0.0, - "file_search_cost_per_gb_per_day": 0.0, - "input_cost_per_audio_token": 0.0, - "input_cost_per_token": 
0.0, - "litellm_provider": "one of https://docs.litellm.ai/docs/providers", - "max_input_tokens": "max input tokens, if the provider specifies it. if not default to max_tokens", - "max_output_tokens": "max output tokens, if the provider specifies it. if not default to max_tokens", - "max_tokens": "LEGACY parameter. set to max_output_tokens if provider specifies it. IF not set to max_input_tokens, if provider specifies it.", - "mode": "one of: chat, embedding, completion, image_generation, audio_transcription, audio_speech, image_generation, moderation, rerank, search", - "output_cost_per_reasoning_token": 0.0, - "output_cost_per_token": 0.0, - "search_context_cost_per_query": { - "search_context_size_high": 0.0, - "search_context_size_low": 0.0, - "search_context_size_medium": 0.0 - }, - "supported_regions": [ - "global", - "us-west-2", - "eu-west-1", - "ap-southeast-1", - "ap-northeast-1" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_vision": true, - "supports_web_search": true, - "vector_store_cost_per_gb_per_day": 0.0 - }, - "1024-x-1024/50-steps/bedrock/amazon.nova-canvas-v1:0": { - "litellm_provider": "bedrock", - "max_input_tokens": 2600, - "mode": "image_generation", - "output_cost_per_image": 0.06 - }, - "1024-x-1024/50-steps/stability.stable-diffusion-xl-v1": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "image_generation", - "output_cost_per_image": 0.04 - }, - "1024-x-1024/dall-e-2": { - "input_cost_per_pixel": 1.9e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0 - }, - "1024-x-1024/max-steps/stability.stable-diffusion-xl-v1": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - 
"mode": "image_generation", - "output_cost_per_image": 0.08 - }, - "256-x-256/dall-e-2": { - "input_cost_per_pixel": 2.4414e-07, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0 - }, - "512-x-512/50-steps/stability.stable-diffusion-xl-v0": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "image_generation", - "output_cost_per_image": 0.018 - }, - "512-x-512/dall-e-2": { - "input_cost_per_pixel": 6.86e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0 - }, - "512-x-512/max-steps/stability.stable-diffusion-xl-v0": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "image_generation", - "output_cost_per_image": 0.036 - }, - "ai21.j2-mid-v1": { - "input_cost_per_token": 1.25e-05, - "litellm_provider": "bedrock", - "max_input_tokens": 8191, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 1.25e-05 - }, - "ai21.j2-ultra-v1": { - "input_cost_per_token": 1.88e-05, - "litellm_provider": "bedrock", - "max_input_tokens": 8191, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 1.88e-05 - }, - "ai21.jamba-1-5-large-v1:0": { - "input_cost_per_token": 2e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 8e-06 - }, - "ai21.jamba-1-5-mini-v1:0": { - "input_cost_per_token": 2e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 4e-07 - }, - "ai21.jamba-instruct-v1:0": { - "input_cost_per_token": 5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 70000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7e-07, - "supports_system_messages": 
true - }, - "aiml/dall-e-2": { - "litellm_provider": "aiml", - "metadata": { - "notes": "DALL-E 2 via AI/ML API - Reliable text-to-image generation" - }, - "mode": "image_generation", - "output_cost_per_image": 0.021, - "source": "https://docs.aimlapi.com/", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "aiml/dall-e-3": { - "litellm_provider": "aiml", - "metadata": { - "notes": "DALL-E 3 via AI/ML API - High-quality text-to-image generation" - }, - "mode": "image_generation", - "output_cost_per_image": 0.042, - "source": "https://docs.aimlapi.com/", - "supported_endpoints": [ - "/v1/images/generations" + "ai21.j2-mid-v1": { + "input_cost_per_token": 0.0000125, + "litellm_provider": "bedrock", + "max_input_tokens": 8191, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.0000125 + }, + "ai21.j2-ultra-v1": { + "input_cost_per_token": 0.0000188, + "litellm_provider": "bedrock", + "max_input_tokens": 8191, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.0000188 + }, + "ai21.jamba-1-5-large-v1:0": { + "input_cost_per_token": 0.000002, + "litellm_provider": "bedrock", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000008 + }, + "ai21.jamba-1-5-mini-v1:0": { + "input_cost_per_token": 2e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 4e-7 + }, + "ai21.jamba-instruct-v1:0": { + "input_cost_per_token": 5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 70000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 7e-7, + "supports_system_messages": true + }, + "us.writer.palmyra-x4-v1:0": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + 
"max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_pdf_input": true + }, + "us.writer.palmyra-x5-v1:0": { + "input_cost_per_token": 6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_function_calling": true, + "supports_pdf_input": true + }, + "writer.palmyra-x4-v1:0": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_pdf_input": true + }, + "writer.palmyra-x5-v1:0": { + "input_cost_per_token": 6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_function_calling": true, + "supports_pdf_input": true + }, + "amazon.nova-lite-v1:0": { + "input_cost_per_token": 6e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 2.4e-7, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "amazon.nova-2-lite-v1:0": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + 
"supports_video_input": true, + "supports_vision": true + }, + "apac.amazon.nova-2-lite-v1:0": { + "cache_read_input_token_cost": 8.25e-8, + "input_cost_per_token": 3.3e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.00000275, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_video_input": true, + "supports_vision": true + }, + "eu.amazon.nova-2-lite-v1:0": { + "cache_read_input_token_cost": 8.25e-8, + "input_cost_per_token": 3.3e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.00000275, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_video_input": true, + "supports_vision": true + }, + "us.amazon.nova-2-lite-v1:0": { + "cache_read_input_token_cost": 8.25e-8, + "input_cost_per_token": 3.3e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.00000275, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_video_input": true, + "supports_vision": true + }, + "amazon.nova-micro-v1:0": { + "input_cost_per_token": 3.5e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 1.4e-7, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + 
"amazon.nova-pro-v1:0": { + "input_cost_per_token": 8e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 0.0000032, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "twelvelabs.pegasus-1-2-v1:0": { + "input_cost_per_video_per_second": 0.00049, + "output_cost_per_token": 0.0000075, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_video_input": true + }, + "us.twelvelabs.pegasus-1-2-v1:0": { + "input_cost_per_video_per_second": 0.00049, + "output_cost_per_token": 0.0000075, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_video_input": true + }, + "eu.twelvelabs.pegasus-1-2-v1:0": { + "input_cost_per_video_per_second": 0.00049, + "output_cost_per_token": 0.0000075, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_video_input": true + }, + "amazon.titan-text-express-v1": { + "input_cost_per_token": 0.0000013, + "litellm_provider": "bedrock", + "max_input_tokens": 42000, + "max_output_tokens": 8000, + "max_tokens": 8000, + "mode": "chat", + "output_cost_per_token": 0.0000017 + }, + "amazon.titan-text-lite-v1": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 42000, + "max_output_tokens": 4000, + "max_tokens": 4000, + "mode": "chat", + "output_cost_per_token": 4e-7 + }, + "amazon.titan-text-premier-v1:0": { + "input_cost_per_token": 5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 42000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.0000015 + }, + "anthropic.claude-3-5-haiku-20241022-v1:0": { + "cache_creation_input_token_cost": 0.000001, + "cache_read_input_token_cost": 8e-8, + "input_cost_per_token": 8e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + 
"max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000004, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "anthropic.claude-haiku-4-5-20251001-v1:0": { + "cache_creation_input_token_cost": 0.00000125, + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 0.000001, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "anthropic.claude-haiku-4-5@20251001": { + "cache_creation_input_token_cost": 0.00000125, + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 0.000001, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "anthropic.claude-3-5-sonnet-20240620-v1:0": { + "input_cost_per_token": 0.000003, 
+ "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "anthropic.claude-3-5-sonnet-20241022-v2:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "anthropic.claude-3-7-sonnet-20240620-v1:0": { + "cache_creation_input_token_cost": 0.0000045, + "cache_read_input_token_cost": 3.6e-7, + "input_cost_per_token": 0.0000036, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000018, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "anthropic.claude-3-7-sonnet-20250219-v1:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + 
"supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "anthropic.claude-3-haiku-20240307-v1:0": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000125, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "anthropic.claude-3-opus-20240229-v1:0": { + "input_cost_per_token": 0.000015, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "anthropic.claude-3-sonnet-20240229-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "anthropic.claude-instant-v1": { + "input_cost_per_token": 8e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.0000024, + "supports_tool_choice": true + }, + "anthropic.claude-opus-4-1-20250805-v1:0": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 
200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "anthropic.claude-opus-4-20250514-v1:0": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "anthropic.claude-opus-4-5-20251101-v1:0": { + "cache_creation_input_token_cost": 0.00000625, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000005, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000025, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + 
"supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "anthropic.claude-sonnet-4-20250514-v1:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "anthropic.claude-sonnet-4-5-20250929-v1:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": 
{ + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "anthropic.claude-v1": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024 + }, + "anthropic.claude-v2:1": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_tool_choice": true + }, + "anyscale/HuggingFaceH4/zephyr-7b-beta": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "anyscale", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 1.5e-7 + }, + "anyscale/codellama/CodeLlama-34b-Instruct-hf": { + "input_cost_per_token": 0.000001, + "litellm_provider": "anyscale", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000001 + }, + "anyscale/codellama/CodeLlama-70b-Instruct-hf": { + "input_cost_per_token": 0.000001, + "litellm_provider": "anyscale", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000001, + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/codellama-CodeLlama-70b-Instruct-hf" + }, + "anyscale/google/gemma-7b-it": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "anyscale", + "max_input_tokens": 8192, + 
"max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/google-gemma-7b-it" + }, + "anyscale/meta-llama/Llama-2-13b-chat-hf": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "anyscale", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2.5e-7 + }, + "anyscale/meta-llama/Llama-2-70b-chat-hf": { + "input_cost_per_token": 0.000001, + "litellm_provider": "anyscale", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000001 + }, + "anyscale/meta-llama/Llama-2-7b-chat-hf": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "anyscale", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 1.5e-7 + }, + "anyscale/meta-llama/Meta-Llama-3-70B-Instruct": { + "input_cost_per_token": 0.000001, + "litellm_provider": "anyscale", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000001, + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-70B-Instruct" + }, + "anyscale/meta-llama/Meta-Llama-3-8B-Instruct": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "anyscale", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-8B-Instruct" + }, + "anyscale/mistralai/Mistral-7B-Instruct-v0.1": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "anyscale", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 
1.5e-7, + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mistral-7B-Instruct-v0.1", + "supports_function_calling": true + }, + "anyscale/mistralai/Mixtral-8x22B-Instruct-v0.1": { + "input_cost_per_token": 9e-7, + "litellm_provider": "anyscale", + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 9e-7, + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x22B-Instruct-v0.1", + "supports_function_calling": true + }, + "anyscale/mistralai/Mixtral-8x7B-Instruct-v0.1": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "anyscale", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x7B-Instruct-v0.1", + "supports_function_calling": true + }, + "apac.amazon.nova-lite-v1:0": { + "input_cost_per_token": 6.3e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 2.52e-7, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "apac.amazon.nova-micro-v1:0": { + "input_cost_per_token": 3.7e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 1.48e-7, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "apac.amazon.nova-pro-v1:0": { + "input_cost_per_token": 8.4e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + 
"mode": "chat", + "output_cost_per_token": 0.00000336, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "apac.anthropic.claude-3-5-sonnet-20240620-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "apac.anthropic.claude-3-5-sonnet-20241022-v2:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "apac.anthropic.claude-3-haiku-20240307-v1:0": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000125, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "apac.anthropic.claude-haiku-4-5-20251001-v1:0": { + "cache_creation_input_token_cost": 0.000001375, + "cache_read_input_token_cost": 1.1e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + 
"mode": "chat", + "output_cost_per_token": 0.0000055, + "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "apac.anthropic.claude-3-sonnet-20240229-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "apac.anthropic.claude-sonnet-4-20250514-v1:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 
159 + }, + "au.anthropic.claude-sonnet-4-5-20250929-v1:0": { + "cache_creation_input_token_cost": 0.000004125, + "cache_read_input_token_cost": 3.3e-7, + "input_cost_per_token": 0.0000033, + "input_cost_per_token_above_200k_tokens": 0.0000066, + "output_cost_per_token_above_200k_tokens": 0.00002475, + "cache_creation_input_token_cost_above_200k_tokens": 0.00000825, + "cache_read_input_token_cost_above_200k_tokens": 6.6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.0000165, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "azure/command-r-plus": { + "input_cost_per_token": 0.000003, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true + }, + "azure_ai/claude-haiku-4-5": { + "input_cost_per_token": 0.000001, + "litellm_provider": "azure_ai", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure_ai/claude-opus-4-1": { + "input_cost_per_token": 0.000015, + 
"litellm_provider": "azure_ai", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure_ai/claude-sonnet-4-5": { + "input_cost_per_token": 0.000003, + "litellm_provider": "azure_ai", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/computer-use-preview": { + "input_cost_per_token": 0.000003, + "litellm_provider": "azure", + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_token": 0.000012, + "supported_endpoints": [ + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": false, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/container": { + "code_interpreter_cost_per_session": 0.03, + "litellm_provider": "azure", + "mode": "chat" + }, + "azure/eu/gpt-4o-2024-08-06": { + "deprecation_date": "2026-02-27", + "cache_read_input_token_cost": 0.000001375, + "input_cost_per_token": 0.00000275, + "litellm_provider": "azure", + 
"max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/eu/gpt-4o-2024-11-20": { + "deprecation_date": "2026-03-01", + "cache_creation_input_token_cost": 0.00000138, + "input_cost_per_token": 0.00000275, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/eu/gpt-4o-mini-2024-07-18": { + "cache_read_input_token_cost": 8.3e-8, + "input_cost_per_token": 1.65e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 6.6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/eu/gpt-4o-mini-realtime-preview-2024-12-17": { + "cache_creation_input_audio_token_cost": 3.3e-7, + "cache_read_input_token_cost": 3.3e-7, + "input_cost_per_audio_token": 0.000011, + "input_cost_per_token": 6.6e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.000022, + "output_cost_per_token": 0.00000264, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + 
"supports_tool_choice": true + }, + "azure/eu/gpt-4o-realtime-preview-2024-10-01": { + "cache_creation_input_audio_token_cost": 0.000022, + "cache_read_input_token_cost": 0.00000275, + "input_cost_per_audio_token": 0.00011, + "input_cost_per_token": 0.0000055, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00022, + "output_cost_per_token": 0.000022, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "azure/eu/gpt-4o-realtime-preview-2024-12-17": { + "cache_read_input_audio_token_cost": 0.0000025, + "cache_read_input_token_cost": 0.00000275, + "input_cost_per_audio_token": 0.000044, + "input_cost_per_token": 0.0000055, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.000022, + "supported_modalities": [ + "text", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "azure/eu/gpt-5-2025-08-07": { + "cache_read_input_token_cost": 1.375e-7, + "input_cost_per_token": 0.000001375, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + 
"supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/eu/gpt-5-mini-2025-08-07": { + "cache_read_input_token_cost": 2.75e-8, + "input_cost_per_token": 2.75e-7, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.0000022, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/eu/gpt-5.1": { + "cache_read_input_token_cost": 1.4e-7, + "input_cost_per_token": 0.00000138, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/eu/gpt-5.1-chat": { + 
"cache_read_input_token_cost": 1.4e-7, + "input_cost_per_token": 0.00000138, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/eu/gpt-5-nano-2025-08-07": { + "cache_read_input_token_cost": 5.5e-9, + "input_cost_per_token": 5.5e-8, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 4.4e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/eu/o1-2024-12-17": { + "cache_read_input_token_cost": 0.00000825, + "input_cost_per_token": 0.0000165, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.000066, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + 
"supports_prompt_caching": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/eu/o1-mini-2024-09-12": { + "cache_read_input_token_cost": 6.05e-7, + "input_cost_per_token": 0.00000121, + "input_cost_per_token_batches": 6.05e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.00000484, + "output_cost_per_token_batches": 0.00000242, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_vision": false + }, + "azure/eu/o1-preview-2024-09-12": { + "cache_read_input_token_cost": 0.00000825, + "input_cost_per_token": 0.0000165, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000066, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_vision": false + }, + "azure/eu/o3-mini-2025-01-31": { + "cache_read_input_token_cost": 6.05e-7, + "input_cost_per_token": 0.00000121, + "input_cost_per_token_batches": 6.05e-7, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.00000484, + "output_cost_per_token_batches": 0.00000242, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "azure/global-standard/gpt-4o-2024-08-06": { + "cache_read_input_token_cost": 0.00000125, + "deprecation_date": "2026-02-27", + "input_cost_per_token": 0.0000025, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + 
"supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/global-standard/gpt-4o-2024-11-20": { + "cache_read_input_token_cost": 0.00000125, + "deprecation_date": "2026-03-01", + "input_cost_per_token": 0.0000025, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/global-standard/gpt-4o-mini": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/global/gpt-4o-2024-08-06": { + "deprecation_date": "2026-02-27", + "cache_read_input_token_cost": 0.00000125, + "input_cost_per_token": 0.0000025, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/global/gpt-4o-2024-11-20": { + "deprecation_date": "2026-03-01", + "cache_read_input_token_cost": 0.00000125, + "input_cost_per_token": 0.0000025, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + 
"supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/global/gpt-5.1": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/global/gpt-5.1-chat": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-3.5-turbo": { + "input_cost_per_token": 5e-7, + "litellm_provider": "azure", + "max_input_tokens": 4097, + "max_output_tokens": 4096, + 
"max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-3.5-turbo-0125": { + "deprecation_date": "2025-03-31", + "input_cost_per_token": 5e-7, + "litellm_provider": "azure", + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-35-turbo": { + "input_cost_per_token": 5e-7, + "litellm_provider": "azure", + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-35-turbo-0125": { + "deprecation_date": "2025-05-31", + "input_cost_per_token": 5e-7, + "litellm_provider": "azure", + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-35-turbo-0301": { + "deprecation_date": "2025-02-13", + "input_cost_per_token": 2e-7, + "litellm_provider": "azure", + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "max_tokens": 4097, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-35-turbo-0613": { + "deprecation_date": "2025-02-13", + "input_cost_per_token": 0.0000015, + "litellm_provider": "azure", + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "max_tokens": 4097, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + 
"azure/gpt-35-turbo-1106": { + "deprecation_date": "2025-03-31", + "input_cost_per_token": 0.000001, + "litellm_provider": "azure", + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-35-turbo-16k": { + "input_cost_per_token": 0.000003, + "litellm_provider": "azure", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000004, + "supports_tool_choice": true + }, + "azure/gpt-35-turbo-16k-0613": { + "input_cost_per_token": 0.000003, + "litellm_provider": "azure", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000004, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-4": { + "input_cost_per_token": 0.00003, + "litellm_provider": "azure", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-4-0125-preview": { + "input_cost_per_token": 0.00001, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-4-0613": { + "input_cost_per_token": 0.00003, + "litellm_provider": "azure", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-4-1106-preview": { + "input_cost_per_token": 0.00001, + "litellm_provider": 
"azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-4-32k": { + "input_cost_per_token": 0.00006, + "litellm_provider": "azure", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00012, + "supports_tool_choice": true + }, + "azure/gpt-4-32k-0613": { + "input_cost_per_token": 0.00006, + "litellm_provider": "azure", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00012, + "supports_tool_choice": true + }, + "azure/gpt-4-turbo": { + "input_cost_per_token": 0.00001, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-4-turbo-2024-04-09": { + "input_cost_per_token": 0.00001, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-4-turbo-vision-preview": { + "input_cost_per_token": 0.00001, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-4.1": { + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000002, + "input_cost_per_token_batches": 0.000001, + "litellm_provider": "azure", + 
"max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000008, + "output_cost_per_token_batches": 0.000004, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": false + }, + "azure/gpt-4.1-2025-04-14": { + "deprecation_date": "2026-11-04", + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000002, + "input_cost_per_token_batches": 0.000001, + "litellm_provider": "azure", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000008, + "output_cost_per_token_batches": 0.000004, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": false + }, + "azure/gpt-4.1-mini": { + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 4e-7, + "input_cost_per_token_batches": 2e-7, + "litellm_provider": "azure", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000016, + "output_cost_per_token_batches": 8e-7, + "supported_endpoints": [ + 
"/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": false + }, + "azure/gpt-4.1-mini-2025-04-14": { + "deprecation_date": "2026-11-04", + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 4e-7, + "input_cost_per_token_batches": 2e-7, + "litellm_provider": "azure", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000016, + "output_cost_per_token_batches": 8e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": false + }, + "azure/gpt-4.1-nano": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_token": 1e-7, + "input_cost_per_token_batches": 5e-8, + "litellm_provider": "azure", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 4e-7, + "output_cost_per_token_batches": 2e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + 
"supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-4.1-nano-2025-04-14": { + "deprecation_date": "2026-11-04", + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_token": 1e-7, + "input_cost_per_token_batches": 5e-8, + "litellm_provider": "azure", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 4e-7, + "output_cost_per_token_batches": 2e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-4.5-preview": { + "cache_read_input_token_cost": 0.0000375, + "input_cost_per_token": 0.000075, + "input_cost_per_token_batches": 0.0000375, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00015, + "output_cost_per_token_batches": 0.000075, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-4o": { + "cache_read_input_token_cost": 0.00000125, + "input_cost_per_token": 0.0000025, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + 
"output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-4o-2024-05-13": { + "input_cost_per_token": 0.000005, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-4o-2024-08-06": { + "deprecation_date": "2026-02-27", + "cache_read_input_token_cost": 0.00000125, + "input_cost_per_token": 0.0000025, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-4o-2024-11-20": { + "deprecation_date": "2026-03-01", + "cache_read_input_token_cost": 0.00000125, + "input_cost_per_token": 0.00000275, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-audio-2025-08-28": { + "input_cost_per_audio_token": 0.00004, + "input_cost_per_token": 0.0000025, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + 
"output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": false, + "supports_reasoning": false, + "supports_response_schema": false, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "azure/gpt-audio-mini-2025-10-06": { + "input_cost_per_audio_token": 0.00001, + "input_cost_per_token": 6e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_audio_token": 0.00002, + "output_cost_per_token": 0.0000024, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": false, + "supports_reasoning": false, + "supports_response_schema": false, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "azure/gpt-4o-audio-preview-2024-12-17": { + "input_cost_per_audio_token": 0.00004, + "input_cost_per_token": 0.0000025, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + 
"supports_parallel_function_calling": true, + "supports_prompt_caching": false, + "supports_reasoning": false, + "supports_response_schema": false, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "azure/gpt-4o-mini": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_token": 1.65e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 6.6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-4o-mini-2024-07-18": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_token": 1.65e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 6.6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-4o-mini-audio-preview-2024-12-17": { + "input_cost_per_audio_token": 0.00004, + "input_cost_per_token": 0.0000025, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": false, + "supports_reasoning": false, + "supports_response_schema": false, + "supports_system_messages": true, + 
"supports_tool_choice": true, + "supports_vision": false + }, + "azure/gpt-4o-mini-realtime-preview-2024-12-17": { + "cache_creation_input_audio_token_cost": 3e-7, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_audio_token": 0.00001, + "input_cost_per_token": 6e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00002, + "output_cost_per_token": 0.0000024, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "azure/gpt-realtime-2025-08-28": { + "cache_creation_input_audio_token_cost": 0.000004, + "cache_read_input_token_cost": 0.000004, + "input_cost_per_audio_token": 0.000032, + "input_cost_per_image": 0.000005, + "input_cost_per_token": 0.000004, + "litellm_provider": "azure", + "max_input_tokens": 32000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.000064, + "output_cost_per_token": 0.000016, + "supported_endpoints": [ + "/v1/realtime" + ], + "supported_modalities": [ + "text", + "image", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "azure/gpt-realtime-mini-2025-10-06": { + "cache_creation_input_audio_token_cost": 3e-7, + "cache_read_input_token_cost": 6e-8, + "input_cost_per_audio_token": 0.00001, + "input_cost_per_image": 8e-7, + "input_cost_per_token": 6e-7, + "litellm_provider": "azure", + "max_input_tokens": 32000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00002, + "output_cost_per_token": 0.0000024, 
+ "supported_endpoints": [ + "/v1/realtime" + ], + "supported_modalities": [ + "text", + "image", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "azure/gpt-4o-realtime-preview-2024-10-01": { + "cache_creation_input_audio_token_cost": 0.00002, + "cache_read_input_token_cost": 0.0000025, + "input_cost_per_audio_token": 0.0001, + "input_cost_per_token": 0.000005, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.0002, + "output_cost_per_token": 0.00002, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "azure/gpt-4o-realtime-preview-2024-12-17": { + "cache_read_input_token_cost": 0.0000025, + "input_cost_per_audio_token": 0.00004, + "input_cost_per_token": 0.000005, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.00002, + "supported_modalities": [ + "text", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "azure/gpt-5.1-2025-11-13": { + "cache_read_input_token_cost": 1.25e-7, + "cache_read_input_token_cost_priority": 2.5e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_priority": 0.0000025, + "litellm_provider": "azure", + 
"max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_priority": 0.00002, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "azure/gpt-5.1-chat-2025-11-13": { + "cache_read_input_token_cost": 1.25e-7, + "cache_read_input_token_cost_priority": 2.5e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_priority": 0.0000025, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_priority": 0.00002, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": false, + "supports_native_streaming": true, + "supports_parallel_function_calling": false, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "azure/gpt-5": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + 
"supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-5-2025-08-07": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-5-chat": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "source": "https://azure.microsoft.com/en-us/blog/gpt-5-in-azure-ai-foundry-the-future-of-ai-apps-and-agents-starts-here/", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + 
"supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "azure/gpt-5-chat-latest": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "azure/gpt-5-mini": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_token": 2.5e-7, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-5-mini-2025-08-07": { + 
"cache_read_input_token_cost": 2.5e-8, + "input_cost_per_token": 2.5e-7, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-5-nano": { + "cache_read_input_token_cost": 5e-9, + "input_cost_per_token": 5e-8, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-5-nano-2025-08-07": { + "cache_read_input_token_cost": 5e-9, + "input_cost_per_token": 5e-8, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + 
"image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-5.1": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-5.1-chat": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": 
true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-5.2": { + "cache_read_input_token_cost": 1.75e-7, + "input_cost_per_token": 0.00000175, + "litellm_provider": "azure", + "max_input_tokens": 400000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000014, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/gpt-5.2-2025-12-11": { + "cache_read_input_token_cost": 1.75e-7, + "cache_read_input_token_cost_priority": 3.5e-7, + "input_cost_per_token": 0.00000175, + "input_cost_per_token_priority": 0.0000035, + "litellm_provider": "azure", + "max_input_tokens": 400000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000014, + "output_cost_per_token_priority": 0.000028, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "azure/gpt-5.2-chat-2025-12-11": { + "cache_read_input_token_cost": 1.75e-7, + 
"cache_read_input_token_cost_priority": 3.5e-7, + "input_cost_per_token": 0.00000175, + "input_cost_per_token_priority": 0.0000035, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000014, + "output_cost_per_token_priority": 0.000028, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/mistral-large-2402": { + "input_cost_per_token": 0.000008, + "litellm_provider": "azure", + "max_input_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_function_calling": true + }, + "azure/mistral-large-latest": { + "input_cost_per_token": 0.000008, + "litellm_provider": "azure", + "max_input_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_function_calling": true + }, + "azure/o1": { + "cache_read_input_token_cost": 0.0000075, + "input_cost_per_token": 0.000015, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/o1-2024-12-17": { + "cache_read_input_token_cost": 0.0000075, + "input_cost_per_token": 0.000015, + "litellm_provider": "azure", + 
"max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/o1-mini": { + "cache_read_input_token_cost": 6.05e-7, + "input_cost_per_token": 0.00000121, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.00000484, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_vision": false + }, + "azure/o1-mini-2024-09-12": { + "cache_read_input_token_cost": 5.5e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_vision": false + }, + "azure/o1-preview": { + "cache_read_input_token_cost": 0.0000075, + "input_cost_per_token": 0.000015, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_vision": false + }, + "azure/o1-preview-2024-09-12": { + "cache_read_input_token_cost": 0.0000075, + "input_cost_per_token": 0.000015, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00006, + 
"supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_vision": false + }, + "azure/o3": { + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000002, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/o3-2025-04-16": { + "deprecation_date": "2026-04-16", + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000002, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/o3-mini": { + "cache_read_input_token_cost": 5.5e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supports_prompt_caching": true, + "supports_reasoning": true, + 
"supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "azure/o3-mini-2025-01-31": { + "cache_read_input_token_cost": 5.5e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "azure/o4-mini": { + "cache_read_input_token_cost": 2.75e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/o4-mini-2025-04-16": { + "cache_read_input_token_cost": 2.75e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/gpt-4.1-2025-04-14": { + "deprecation_date": "2026-11-04", + "cache_read_input_token_cost": 5.5e-7, + "input_cost_per_token": 0.0000022, + "input_cost_per_token_batches": 0.0000011, + "litellm_provider": "azure", + "max_input_tokens": 
1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000088, + "output_cost_per_token_batches": 0.0000044, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": false + }, + "azure/us/gpt-4.1-mini-2025-04-14": { + "deprecation_date": "2026-11-04", + "cache_read_input_token_cost": 1.1e-7, + "input_cost_per_token": 4.4e-7, + "input_cost_per_token_batches": 2.2e-7, + "litellm_provider": "azure", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00000176, + "output_cost_per_token_batches": 8.8e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": false + }, + "azure/us/gpt-4.1-nano-2025-04-14": { + "deprecation_date": "2026-11-04", + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_token": 1.1e-7, + "input_cost_per_token_batches": 6e-8, + "litellm_provider": "azure", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 4.4e-7, + "output_cost_per_token_batches": 
2.2e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/gpt-4o-2024-08-06": { + "deprecation_date": "2026-02-27", + "cache_read_input_token_cost": 0.000001375, + "input_cost_per_token": 0.00000275, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/gpt-4o-2024-11-20": { + "deprecation_date": "2026-03-01", + "cache_creation_input_token_cost": 0.00000138, + "input_cost_per_token": 0.00000275, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/gpt-4o-mini-2024-07-18": { + "cache_read_input_token_cost": 8.3e-8, + "input_cost_per_token": 1.65e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 6.6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + 
"supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/gpt-4o-mini-realtime-preview-2024-12-17": { + "cache_creation_input_audio_token_cost": 3.3e-7, + "cache_read_input_token_cost": 3.3e-7, + "input_cost_per_audio_token": 0.000011, + "input_cost_per_token": 6.6e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.000022, + "output_cost_per_token": 0.00000264, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "azure/us/gpt-4o-realtime-preview-2024-10-01": { + "cache_creation_input_audio_token_cost": 0.000022, + "cache_read_input_token_cost": 0.00000275, + "input_cost_per_audio_token": 0.00011, + "input_cost_per_token": 0.0000055, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00022, + "output_cost_per_token": 0.000022, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "azure/us/gpt-4o-realtime-preview-2024-12-17": { + "cache_read_input_audio_token_cost": 0.0000025, + "cache_read_input_token_cost": 0.00000275, + "input_cost_per_audio_token": 0.000044, + "input_cost_per_token": 0.0000055, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.000022, + "supported_modalities": [ + "text", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_input": true, + "supports_audio_output": true, + 
"supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "azure/us/gpt-5-2025-08-07": { + "cache_read_input_token_cost": 1.375e-7, + "input_cost_per_token": 0.000001375, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/gpt-5-mini-2025-08-07": { + "cache_read_input_token_cost": 2.75e-8, + "input_cost_per_token": 2.75e-7, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.0000022, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/gpt-5-nano-2025-08-07": { + "cache_read_input_token_cost": 5.5e-9, + "input_cost_per_token": 5.5e-8, + "litellm_provider": "azure", + "max_input_tokens": 272000, + 
"max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 4.4e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/gpt-5.1": { + "cache_read_input_token_cost": 1.4e-7, + "input_cost_per_token": 0.00000138, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/gpt-5.1-chat": { + "cache_read_input_token_cost": 1.4e-7, + "input_cost_per_token": 0.00000138, + "litellm_provider": "azure", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000011, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + 
"supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/o1-2024-12-17": { + "cache_read_input_token_cost": 0.00000825, + "input_cost_per_token": 0.0000165, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.000066, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/o1-mini-2024-09-12": { + "cache_read_input_token_cost": 6.05e-7, + "input_cost_per_token": 0.00000121, + "input_cost_per_token_batches": 6.05e-7, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.00000484, + "output_cost_per_token_batches": 0.00000242, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_vision": false + }, + "azure/us/o1-preview-2024-09-12": { + "cache_read_input_token_cost": 0.00000825, + "input_cost_per_token": 0.0000165, + "litellm_provider": "azure", + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000066, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_vision": false + }, + "azure/us/o3-2025-04-16": { + "deprecation_date": "2026-04-16", + "cache_read_input_token_cost": 5.5e-7, + "input_cost_per_token": 0.0000022, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, 
+ "mode": "chat", + "output_cost_per_token": 0.0000088, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure/us/o3-mini-2025-01-31": { + "cache_read_input_token_cost": 6.05e-7, + "input_cost_per_token": 0.00000121, + "input_cost_per_token_batches": 6.05e-7, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.00000484, + "output_cost_per_token_batches": 0.00000242, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "azure/us/o4-mini-2025-04-16": { + "cache_read_input_token_cost": 3.1e-7, + "input_cost_per_token": 0.00000121, + "litellm_provider": "azure", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.00000484, + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure_ai/Llama-3.2-11B-Vision-Instruct": { + "input_cost_per_token": 3.7e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 3.7e-7, + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-11b-vision-instruct-offer?tab=Overview", + "supports_function_calling": true, + "supports_tool_choice": true, + 
"supports_vision": true + }, + "azure_ai/Llama-3.2-90B-Vision-Instruct": { + "input_cost_per_token": 0.00000204, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 0.00000204, + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-90b-vision-instruct-offer?tab=Overview", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure_ai/Llama-3.3-70B-Instruct": { + "input_cost_per_token": 7.1e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 7.1e-7, + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.llama-3-3-70b-instruct-offer?tab=Overview", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure_ai/Llama-4-Maverick-17B-128E-Instruct-FP8": { + "input_cost_per_token": 0.00000141, + "litellm_provider": "azure_ai", + "max_input_tokens": 1000000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 3.5e-7, + "source": "https://azure.microsoft.com/en-us/blog/introducing-the-llama-4-herd-in-azure-ai-foundry-and-azure-databricks/", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure_ai/Llama-4-Scout-17B-16E-Instruct": { + "input_cost_per_token": 2e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 10000000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 7.8e-7, + "source": "https://azure.microsoft.com/en-us/blog/introducing-the-llama-4-herd-in-azure-ai-foundry-and-azure-databricks/", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure_ai/Meta-Llama-3-70B-Instruct": { + "input_cost_per_token": 
0.0000011, + "litellm_provider": "azure_ai", + "max_input_tokens": 8192, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 3.7e-7, + "supports_tool_choice": true + }, + "azure_ai/Meta-Llama-3.1-405B-Instruct": { + "input_cost_per_token": 0.00000533, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 0.000016, + "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-405b-instruct-offer?tab=PlansAndPrice", + "supports_tool_choice": true + }, + "azure_ai/Meta-Llama-3.1-70B-Instruct": { + "input_cost_per_token": 0.00000268, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 0.00000354, + "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-70b-instruct-offer?tab=PlansAndPrice", + "supports_tool_choice": true + }, + "azure_ai/Meta-Llama-3.1-8B-Instruct": { + "input_cost_per_token": 3e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 6.1e-7, + "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-8b-instruct-offer?tab=PlansAndPrice", + "supports_tool_choice": true + }, + "azure_ai/Phi-3-medium-128k-instruct": { + "input_cost_per_token": 1.7e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6.8e-7, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true, + "supports_vision": false + }, + "azure_ai/Phi-3-medium-4k-instruct": { + "input_cost_per_token": 1.7e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 
4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6.8e-7, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true, + "supports_vision": false + }, + "azure_ai/Phi-3-mini-128k-instruct": { + "input_cost_per_token": 1.3e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 5.2e-7, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true, + "supports_vision": false + }, + "azure_ai/Phi-3-mini-4k-instruct": { + "input_cost_per_token": 1.3e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 5.2e-7, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true, + "supports_vision": false + }, + "azure_ai/Phi-3-small-128k-instruct": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6e-7, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true, + "supports_vision": false + }, + "azure_ai/Phi-3-small-8k-instruct": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6e-7, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true, + "supports_vision": false + }, + "azure_ai/Phi-3.5-MoE-instruct": { + "input_cost_per_token": 1.6e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6.4e-7, + 
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true, + "supports_vision": false + }, + "azure_ai/Phi-3.5-mini-instruct": { + "input_cost_per_token": 1.3e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 5.2e-7, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true, + "supports_vision": false + }, + "azure_ai/Phi-3.5-vision-instruct": { + "input_cost_per_token": 1.3e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 5.2e-7, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true, + "supports_vision": true + }, + "azure_ai/Phi-4": { + "input_cost_per_token": 1.25e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 5e-7, + "source": "https://techcommunity.microsoft.com/blog/machinelearningblog/affordable-innovation-unveiling-the-pricing-of-phi-3-slms-on-models-as-a-service/4156495", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "azure_ai/Phi-4-mini-instruct": { + "input_cost_per_token": 7.5e-8, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 3e-7, + "source": "https://techcommunity.microsoft.com/blog/Azure-AI-Services-blog/announcing-new-phi-pricing-empowering-your-business-with-small-language-models/4395112", + "supports_function_calling": true + }, + "azure_ai/Phi-4-multimodal-instruct": { + "input_cost_per_audio_token": 0.000004, + "input_cost_per_token": 8e-8, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, 
+ "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 3.2e-7, + "source": "https://techcommunity.microsoft.com/blog/Azure-AI-Services-blog/announcing-new-phi-pricing-empowering-your-business-with-small-language-models/4395112", + "supports_audio_input": true, + "supports_function_calling": true, + "supports_vision": true + }, + "azure_ai/Phi-4-mini-reasoning": { + "input_cost_per_token": 8e-8, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 3.2e-7, + "source": "https://azure.microsoft.com/en-us/pricing/details/ai-foundry-models/microsoft/", + "supports_function_calling": true + }, + "azure_ai/Phi-4-reasoning": { + "input_cost_per_token": 1.25e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 5e-7, + "source": "https://azure.microsoft.com/en-us/pricing/details/ai-foundry-models/microsoft/", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_reasoning": true + }, + "azure_ai/MAI-DS-R1": { + "input_cost_per_token": 0.00000135, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000054, + "source": "https://azure.microsoft.com/en-us/pricing/details/ai-foundry-models/microsoft/", + "supports_reasoning": true, + "supports_tool_choice": true + }, + "azure_ai/deepseek-v3.2": { + "input_cost_per_token": 5.8e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000168, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + 
"azure_ai/deepseek-v3.2-speciale": { + "input_cost_per_token": 5.8e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000168, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "azure_ai/deepseek-r1": { + "input_cost_per_token": 0.00000135, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000054, + "source": "https://techcommunity.microsoft.com/blog/machinelearningblog/deepseek-r1-improved-performance-higher-limits-and-transparent-pricing/4386367", + "supports_reasoning": true, + "supports_tool_choice": true + }, + "azure_ai/deepseek-v3": { + "input_cost_per_token": 0.00000114, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000456, + "source": "https://techcommunity.microsoft.com/blog/machinelearningblog/announcing-deepseek-v3-on-azure-ai-foundry-and-github/4390438", + "supports_tool_choice": true + }, + "azure_ai/deepseek-v3-0324": { + "input_cost_per_token": 0.00000114, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000456, + "source": "https://techcommunity.microsoft.com/blog/machinelearningblog/announcing-deepseek-v3-on-azure-ai-foundry-and-github/4390438", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure_ai/global/grok-3": { + "input_cost_per_token": 0.000003, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000015, + "source": 
"https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/", + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "azure_ai/global/grok-3-mini": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.00000127, + "source": "https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "azure_ai/grok-3": { + "input_cost_per_token": 0.0000033, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000165, + "source": "https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/", + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "azure_ai/grok-3-mini": { + "input_cost_per_token": 2.75e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.00000138, + "source": "https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "azure_ai/grok-4": { + "input_cost_per_token": 0.0000055, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 
0.0000275, + "source": "https://azure.microsoft.com/en-us/blog/grok-4-is-now-available-in-azure-ai-foundry-unlock-frontier-intelligence-and-business-ready-capabilities/", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "azure_ai/grok-4-fast-non-reasoning": { + "input_cost_per_token": 4.3e-7, + "output_cost_per_token": 0.00000173, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "azure_ai/grok-4-fast-reasoning": { + "input_cost_per_token": 4.3e-7, + "output_cost_per_token": 0.00000173, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "source": "https://techcommunity.microsoft.com/blog/azure-ai-foundry-blog/announcing-the-grok-4-fast-models-from-xai-now-available-in-azure-ai-foundry/4456701", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "azure_ai/grok-code-fast-1": { + "input_cost_per_token": 0.0000035, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000175, + "source": "https://azure.microsoft.com/en-us/blog/grok-4-is-now-available-in-azure-ai-foundry-unlock-frontier-intelligence-and-business-ready-capabilities/", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "azure_ai/jais-30b-chat": { + "input_cost_per_token": 0.0032, + "litellm_provider": "azure_ai", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + 
"output_cost_per_token": 0.00971, + "source": "https://azure.microsoft.com/en-us/products/ai-services/ai-foundry/models/jais-30b-chat" + }, + "azure_ai/jamba-instruct": { + "input_cost_per_token": 5e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 70000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 7e-7, + "supports_tool_choice": true + }, + "azure_ai/ministral-3b": { + "input_cost_per_token": 4e-8, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 4e-8, + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.ministral-3b-2410-offer?tab=Overview", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure_ai/mistral-large": { + "input_cost_per_token": 0.000004, + "litellm_provider": "azure_ai", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000012, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure_ai/mistral-large-2407": { + "input_cost_per_token": 0.000002, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000006, + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure_ai/mistral-large-latest": { + "input_cost_per_token": 0.000002, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000006, + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview", + "supports_function_calling": true, + "supports_tool_choice": 
true + }, + "azure_ai/mistral-large-3": { + "input_cost_per_token": 5e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 256000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://azure.microsoft.com/en-us/blog/introducing-mistral-large-3-in-microsoft-foundry-open-capable-and-ready-for-production-workloads/", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "azure_ai/mistral-medium-2505": { + "input_cost_per_token": 4e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure_ai/mistral-nemo": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "azure_ai", + "max_input_tokens": 131072, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-nemo-12b-2407?tab=PlansAndPrice", + "supports_function_calling": true + }, + "azure_ai/mistral-small": { + "input_cost_per_token": 0.000001, + "litellm_provider": "azure_ai", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000003, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure_ai/mistral-small-2503": { + "input_cost_per_token": 0.000001, + "litellm_provider": "azure_ai", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000003, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "bedrock/*/1-month-commitment/cohere.command-light-text-v14": { + "input_cost_per_second": 0.001902, + 
"litellm_provider": "bedrock", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_second": 0.001902, + "supports_tool_choice": true + }, + "bedrock/*/1-month-commitment/cohere.command-text-v14": { + "input_cost_per_second": 0.011, + "litellm_provider": "bedrock", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_second": 0.011, + "supports_tool_choice": true + }, + "bedrock/*/6-month-commitment/cohere.command-light-text-v14": { + "input_cost_per_second": 0.0011416, + "litellm_provider": "bedrock", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_second": 0.0011416, + "supports_tool_choice": true + }, + "bedrock/*/6-month-commitment/cohere.command-text-v14": { + "input_cost_per_second": 0.0066027, + "litellm_provider": "bedrock", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_second": 0.0066027, + "supports_tool_choice": true + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-instant-v1": { + "input_cost_per_second": 0.01475, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.01475, + "supports_tool_choice": true + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v1": { + "input_cost_per_second": 0.0455, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.0455 + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2:1": { + "input_cost_per_second": 0.0455, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.0455, + 
"supports_tool_choice": true + }, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-instant-v1": { + "input_cost_per_second": 0.008194, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.008194, + "supports_tool_choice": true + }, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v1": { + "input_cost_per_second": 0.02527, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.02527 + }, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2:1": { + "input_cost_per_second": 0.02527, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.02527, + "supports_tool_choice": true + }, + "bedrock/ap-northeast-1/anthropic.claude-instant-v1": { + "input_cost_per_token": 0.00000223, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.00000755, + "supports_tool_choice": true + }, + "bedrock/ap-northeast-1/anthropic.claude-v1": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_tool_choice": true + }, + "bedrock/ap-northeast-1/anthropic.claude-v2:1": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_tool_choice": true + }, + "bedrock/ap-south-1/meta.llama3-70b-instruct-v1:0": { + "input_cost_per_token": 0.00000318, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + 
"max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000042 + }, + "bedrock/ap-south-1/meta.llama3-8b-instruct-v1:0": { + "input_cost_per_token": 3.6e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 7.2e-7 + }, + "bedrock/ca-central-1/meta.llama3-70b-instruct-v1:0": { + "input_cost_per_token": 0.00000305, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000403 + }, + "bedrock/ca-central-1/meta.llama3-8b-instruct-v1:0": { + "input_cost_per_token": 3.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6.9e-7 + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-instant-v1": { + "input_cost_per_second": 0.01635, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.01635, + "supports_tool_choice": true + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v1": { + "input_cost_per_second": 0.0415, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.0415 + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2:1": { + "input_cost_per_second": 0.0415, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.0415, + "supports_tool_choice": true + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-instant-v1": { + "input_cost_per_second": 0.009083, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + 
"max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.009083, + "supports_tool_choice": true + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v1": { + "input_cost_per_second": 0.02305, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.02305 + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2:1": { + "input_cost_per_second": 0.02305, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.02305, + "supports_tool_choice": true + }, + "bedrock/eu-central-1/anthropic.claude-instant-v1": { + "input_cost_per_token": 0.00000248, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.00000838, + "supports_tool_choice": true + }, + "bedrock/eu-central-1/anthropic.claude-v1": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024 + }, + "bedrock/eu-central-1/anthropic.claude-v2:1": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_tool_choice": true + }, + "bedrock/eu-west-1/meta.llama3-70b-instruct-v1:0": { + "input_cost_per_token": 0.00000286, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000378 + }, + "bedrock/eu-west-1/meta.llama3-8b-instruct-v1:0": { + "input_cost_per_token": 3.2e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + 
"max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6.5e-7 + }, + "bedrock/eu-west-2/meta.llama3-70b-instruct-v1:0": { + "input_cost_per_token": 0.00000345, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000455 + }, + "bedrock/eu-west-2/meta.llama3-8b-instruct-v1:0": { + "input_cost_per_token": 3.9e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 7.8e-7 + }, + "bedrock/eu-west-3/mistral.mistral-7b-instruct-v0:2": { + "input_cost_per_token": 2e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 2.6e-7, + "supports_tool_choice": true + }, + "bedrock/eu-west-3/mistral.mistral-large-2402-v1:0": { + "input_cost_per_token": 0.0000104, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.0000312, + "supports_function_calling": true + }, + "bedrock/eu-west-3/mistral.mixtral-8x7b-instruct-v0:1": { + "input_cost_per_token": 5.9e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 9.1e-7, + "supports_tool_choice": true + }, + "bedrock/invoke/anthropic.claude-3-5-sonnet-20240620-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "metadata": { + "notes": "Anthropic via Invoke route does not currently support pdf input." 
+ }, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "bedrock/sa-east-1/meta.llama3-70b-instruct-v1:0": { + "input_cost_per_token": 0.00000445, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000588 + }, + "bedrock/sa-east-1/meta.llama3-8b-instruct-v1:0": { + "input_cost_per_token": 5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000101 + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-instant-v1": { + "input_cost_per_second": 0.011, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.011, + "supports_tool_choice": true + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-v1": { + "input_cost_per_second": 0.0175, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.0175 + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-v2:1": { + "input_cost_per_second": 0.0175, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.0175, + "supports_tool_choice": true + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-instant-v1": { + "input_cost_per_second": 0.00611, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.00611, + "supports_tool_choice": true + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-v1": { + 
"input_cost_per_second": 0.00972, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.00972 + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-v2:1": { + "input_cost_per_second": 0.00972, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.00972, + "supports_tool_choice": true + }, + "bedrock/us-east-1/anthropic.claude-instant-v1": { + "input_cost_per_token": 8e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.0000024, + "supports_tool_choice": true + }, + "bedrock/us-east-1/anthropic.claude-v1": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_tool_choice": true + }, + "bedrock/us-east-1/anthropic.claude-v2:1": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_tool_choice": true + }, + "bedrock/us-east-1/meta.llama3-70b-instruct-v1:0": { + "input_cost_per_token": 0.00000265, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000035 + }, + "bedrock/us-east-1/meta.llama3-8b-instruct-v1:0": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6e-7 + }, + "bedrock/us-east-1/mistral.mistral-7b-instruct-v0:2": { + "input_cost_per_token": 1.5e-7, + 
"litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_tool_choice": true + }, + "bedrock/us-east-1/mistral.mistral-large-2402-v1:0": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_function_calling": true + }, + "bedrock/us-east-1/mistral.mixtral-8x7b-instruct-v0:1": { + "input_cost_per_token": 4.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 7e-7, + "supports_tool_choice": true + }, + "bedrock/us-gov-east-1/amazon.nova-pro-v1:0": { + "input_cost_per_token": 9.6e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 0.00000384, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "bedrock/us-gov-east-1/amazon.titan-text-express-v1": { + "input_cost_per_token": 0.0000013, + "litellm_provider": "bedrock", + "max_input_tokens": 42000, + "max_output_tokens": 8000, + "max_tokens": 8000, + "mode": "chat", + "output_cost_per_token": 0.0000017 + }, + "bedrock/us-gov-east-1/amazon.titan-text-lite-v1": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 42000, + "max_output_tokens": 4000, + "max_tokens": 4000, + "mode": "chat", + "output_cost_per_token": 4e-7 + }, + "bedrock/us-gov-east-1/amazon.titan-text-premier-v1:0": { + "input_cost_per_token": 5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 42000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 
0.0000015 + }, + "bedrock/us-gov-east-1/anthropic.claude-3-5-sonnet-20240620-v1:0": { + "input_cost_per_token": 0.0000036, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000018, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "bedrock/us-gov-east-1/anthropic.claude-3-haiku-20240307-v1:0": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "bedrock/us-gov-east-1/claude-sonnet-4-5-20250929-v1:0": { + "input_cost_per_token": 0.0000033, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000165, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "bedrock/us-gov-east-1/meta.llama3-70b-instruct-v1:0": { + "input_cost_per_token": 0.00000265, + "litellm_provider": "bedrock", + "max_input_tokens": 8000, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 0.0000035, + "supports_pdf_input": true + }, + "bedrock/us-gov-east-1/meta.llama3-8b-instruct-v1:0": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 8000, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + 
"output_cost_per_token": 0.00000265, + "supports_pdf_input": true + }, + "bedrock/us-gov-west-1/amazon.nova-pro-v1:0": { + "input_cost_per_token": 9.6e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 0.00000384, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "bedrock/us-gov-west-1/amazon.titan-text-express-v1": { + "input_cost_per_token": 0.0000013, + "litellm_provider": "bedrock", + "max_input_tokens": 42000, + "max_output_tokens": 8000, + "max_tokens": 8000, + "mode": "chat", + "output_cost_per_token": 0.0000017 + }, + "bedrock/us-gov-west-1/amazon.titan-text-lite-v1": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 42000, + "max_output_tokens": 4000, + "max_tokens": 4000, + "mode": "chat", + "output_cost_per_token": 4e-7 + }, + "bedrock/us-gov-west-1/amazon.titan-text-premier-v1:0": { + "input_cost_per_token": 5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 42000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.0000015 + }, + "bedrock/us-gov-west-1/anthropic.claude-3-7-sonnet-20250219-v1:0": { + "cache_creation_input_token_cost": 0.0000045, + "cache_read_input_token_cost": 3.6e-7, + "input_cost_per_token": 0.0000036, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000018, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + 
"bedrock/us-gov-west-1/anthropic.claude-3-5-sonnet-20240620-v1:0": { + "input_cost_per_token": 0.0000036, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000018, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "bedrock/us-gov-west-1/anthropic.claude-3-haiku-20240307-v1:0": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "bedrock/us-gov-west-1/claude-sonnet-4-5-20250929-v1:0": { + "input_cost_per_token": 0.0000033, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000165, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "bedrock/us-gov-west-1/meta.llama3-70b-instruct-v1:0": { + "input_cost_per_token": 0.00000265, + "litellm_provider": "bedrock", + "max_input_tokens": 8000, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 0.0000035, + "supports_pdf_input": true + }, + "bedrock/us-gov-west-1/meta.llama3-8b-instruct-v1:0": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 8000, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 
0.00000265, + "supports_pdf_input": true + }, + "bedrock/us-west-1/meta.llama3-70b-instruct-v1:0": { + "input_cost_per_token": 0.00000265, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000035 + }, + "bedrock/us-west-1/meta.llama3-8b-instruct-v1:0": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6e-7 + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-instant-v1": { + "input_cost_per_second": 0.011, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.011, + "supports_tool_choice": true + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-v1": { + "input_cost_per_second": 0.0175, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.0175 + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-v2:1": { + "input_cost_per_second": 0.0175, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.0175, + "supports_tool_choice": true + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-instant-v1": { + "input_cost_per_second": 0.00611, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.00611, + "supports_tool_choice": true + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-v1": { + "input_cost_per_second": 0.00972, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + 
"output_cost_per_second": 0.00972 + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-v2:1": { + "input_cost_per_second": 0.00972, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_second": 0.00972, + "supports_tool_choice": true + }, + "bedrock/us-west-2/anthropic.claude-instant-v1": { + "input_cost_per_token": 8e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.0000024, + "supports_tool_choice": true + }, + "bedrock/us-west-2/anthropic.claude-v1": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_tool_choice": true + }, + "bedrock/us-west-2/anthropic.claude-v2:1": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_tool_choice": true + }, + "bedrock/us-west-2/mistral.mistral-7b-instruct-v0:2": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_tool_choice": true + }, + "bedrock/us-west-2/mistral.mistral-large-2402-v1:0": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_function_calling": true + }, + "bedrock/us-west-2/mistral.mixtral-8x7b-instruct-v0:1": { + "input_cost_per_token": 4.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + 
"max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 7e-7, + "supports_tool_choice": true + }, + "bedrock/us.anthropic.claude-3-5-haiku-20241022-v1:0": { + "cache_creation_input_token_cost": 0.000001, + "cache_read_input_token_cost": 8e-8, + "input_cost_per_token": 8e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000004, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "cerebras/llama-3.3-70b": { + "input_cost_per_token": 8.5e-7, + "litellm_provider": "cerebras", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "cerebras/llama3.1-70b": { + "input_cost_per_token": 6e-7, + "litellm_provider": "cerebras", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "cerebras/llama3.1-8b": { + "input_cost_per_token": 1e-7, + "litellm_provider": "cerebras", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "cerebras/gpt-oss-120b": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "cerebras", + "max_input_tokens": 131072, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 6.9e-7, + "source": "https://www.cerebras.ai/blog/openai-gpt-oss-120b-runs-fastest-on-cerebras", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + 
"supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "cerebras/qwen-3-32b": { + "input_cost_per_token": 4e-7, + "litellm_provider": "cerebras", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 8e-7, + "source": "https://inference-docs.cerebras.ai/support/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "cerebras/zai-glm-4.6": { + "input_cost_per_token": 0.00000225, + "litellm_provider": "cerebras", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00000275, + "source": "https://www.cerebras.ai/pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "chat-bison": { + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-chat-models", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "chat-bison-32k": { + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-chat-models", + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "chat-bison-32k@002": { + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-chat-models", + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + 
"output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "chat-bison@001": { + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-chat-models", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "chat-bison@002": { + "deprecation_date": "2025-04-09", + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-chat-models", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "chatdolphin": { + "input_cost_per_token": 5e-7, + "litellm_provider": "nlp_cloud", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 5e-7 + }, + "chatgpt-4o-latest": { + "input_cost_per_token": 0.000005, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "claude-3-5-haiku-20241022": { + "cache_creation_input_token_cost": 0.000001, + "cache_creation_input_token_cost_above_1hr": 0.000006, + 
"cache_read_input_token_cost": 8e-8, + "deprecation_date": "2025-10-01", + "input_cost_per_token": 8e-7, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000004, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tool_use_system_prompt_tokens": 264 + }, + "claude-3-5-haiku-latest": { + "cache_creation_input_token_cost": 0.00000125, + "cache_creation_input_token_cost_above_1hr": 0.000006, + "cache_read_input_token_cost": 1e-7, + "deprecation_date": "2025-10-01", + "input_cost_per_token": 0.000001, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000005, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tool_use_system_prompt_tokens": 264 + }, + "claude-haiku-4-5-20251001": { + "cache_creation_input_token_cost": 0.00000125, + "cache_creation_input_token_cost_above_1hr": 0.000002, + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 0.000001, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 
0.000005, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_computer_use": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "claude-haiku-4-5": { + "cache_creation_input_token_cost": 0.00000125, + "cache_creation_input_token_cost_above_1hr": 0.000002, + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 0.000001, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_computer_use": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "claude-3-5-sonnet-20240620": { + "cache_creation_input_token_cost": 0.00000375, + "cache_creation_input_token_cost_above_1hr": 0.000006, + "cache_read_input_token_cost": 3e-7, + "deprecation_date": "2025-06-01", + "input_cost_per_token": 0.000003, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-3-5-sonnet-20241022": { + "cache_creation_input_token_cost": 0.00000375, + "cache_creation_input_token_cost_above_1hr": 0.000006, + "cache_read_input_token_cost": 3e-7, + "deprecation_date": "2025-10-01", + "input_cost_per_token": 0.000003, + "litellm_provider": "anthropic", + 
"max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-3-5-sonnet-latest": { + "cache_creation_input_token_cost": 0.00000375, + "cache_creation_input_token_cost_above_1hr": 0.000006, + "cache_read_input_token_cost": 3e-7, + "deprecation_date": "2025-06-01", + "input_cost_per_token": 0.000003, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-3-7-sonnet-20250219": { + "cache_creation_input_token_cost": 0.00000375, + "cache_creation_input_token_cost_above_1hr": 0.000006, + "cache_read_input_token_cost": 3e-7, + "deprecation_date": "2026-02-19", + "input_cost_per_token": 0.000003, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": 
{ + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-3-7-sonnet-latest": { + "cache_creation_input_token_cost": 0.00000375, + "cache_creation_input_token_cost_above_1hr": 0.000006, + "cache_read_input_token_cost": 3e-7, + "deprecation_date": "2025-06-01", + "input_cost_per_token": 0.000003, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-3-haiku-20240307": { + "cache_creation_input_token_cost": 3e-7, + "cache_creation_input_token_cost_above_1hr": 0.000006, + "cache_read_input_token_cost": 3e-8, + "input_cost_per_token": 2.5e-7, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000125, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + 
"tool_use_system_prompt_tokens": 264 + }, + "claude-3-opus-20240229": { + "cache_creation_input_token_cost": 0.00001875, + "cache_creation_input_token_cost_above_1hr": 0.000006, + "cache_read_input_token_cost": 0.0000015, + "deprecation_date": "2026-05-01", + "input_cost_per_token": 0.000015, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 395 + }, + "claude-3-opus-latest": { + "cache_creation_input_token_cost": 0.00001875, + "cache_creation_input_token_cost_above_1hr": 0.000006, + "cache_read_input_token_cost": 0.0000015, + "deprecation_date": "2025-03-01", + "input_cost_per_token": 0.000015, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 395 + }, + "claude-4-opus-20250514": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": 
true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-4-sonnet-20250514": { + "cache_creation_input_token_cost": 0.00000375, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost": 3e-7, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "litellm_provider": "anthropic", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 1000000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-sonnet-4-5": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + 
"supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "claude-sonnet-4-5-20250929": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tool_use_system_prompt_tokens": 346 + }, + "claude-sonnet-4-5-20250929-v1:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + 
"supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-opus-4-1": { + "cache_creation_input_token_cost": 0.00001875, + "cache_creation_input_token_cost_above_1hr": 0.00003, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-opus-4-1-20250805": { + "cache_creation_input_token_cost": 0.00001875, + "cache_creation_input_token_cost_above_1hr": 0.00003, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "deprecation_date": "2026-08-05", + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + 
"supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-opus-4-20250514": { + "cache_creation_input_token_cost": 0.00001875, + "cache_creation_input_token_cost_above_1hr": 0.00003, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "deprecation_date": "2026-05-14", + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-opus-4-5-20251101": { + "cache_creation_input_token_cost": 0.00000625, + "cache_creation_input_token_cost_above_1hr": 0.00001, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000005, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000025, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-opus-4-5": { + "cache_creation_input_token_cost": 
0.00000625, + "cache_creation_input_token_cost_above_1hr": 0.00001, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000005, + "litellm_provider": "anthropic", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000025, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "claude-sonnet-4-20250514": { + "deprecation_date": "2026-05-14", + "cache_creation_input_token_cost": 0.00000375, + "cache_creation_input_token_cost_above_1hr": 0.000006, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "anthropic", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + 
"cloudflare/@cf/meta/llama-2-7b-chat-fp16": { + "input_cost_per_token": 0.000001923, + "litellm_provider": "cloudflare", + "max_input_tokens": 3072, + "max_output_tokens": 3072, + "max_tokens": 3072, + "mode": "chat", + "output_cost_per_token": 0.000001923 + }, + "cloudflare/@cf/meta/llama-2-7b-chat-int8": { + "input_cost_per_token": 0.000001923, + "litellm_provider": "cloudflare", + "max_input_tokens": 2048, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 0.000001923 + }, + "cloudflare/@cf/mistral/mistral-7b-instruct-v0.1": { + "input_cost_per_token": 0.000001923, + "litellm_provider": "cloudflare", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000001923 + }, + "cloudflare/@hf/thebloke/codellama-7b-instruct-awq": { + "input_cost_per_token": 0.000001923, + "litellm_provider": "cloudflare", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000001923 + }, + "code-bison": { + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-code-text-models", + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "codechat-bison": { + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-code-chat-models", + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "codechat-bison-32k": { + 
"input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-code-chat-models", + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "codechat-bison-32k@002": { + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-code-chat-models", + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "codechat-bison@001": { + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-code-chat-models", + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "codechat-bison@002": { + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-code-chat-models", + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "codechat-bison@latest": { + "input_cost_per_character": 2.5e-7, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "vertex_ai-code-chat-models", + "max_input_tokens": 6144, + 
"max_output_tokens": 1024, + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_character": 5e-7, + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "codestral/codestral-2405": { + "input_cost_per_token": 0, + "litellm_provider": "codestral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://docs.mistral.ai/capabilities/code_generation/", + "supports_assistant_prefill": true, + "supports_tool_choice": true + }, + "codestral/codestral-latest": { + "input_cost_per_token": 0, + "litellm_provider": "codestral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://docs.mistral.ai/capabilities/code_generation/", + "supports_assistant_prefill": true, + "supports_tool_choice": true + }, + "cohere.command-light-text-v14": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_tool_choice": true + }, + "cohere.command-r-plus-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_tool_choice": true + }, + "cohere.command-r-v1:0": { + "input_cost_per_token": 5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_tool_choice": true + }, + "cohere.command-text-v14": { + "input_cost_per_token": 0.0000015, + "litellm_provider": "bedrock", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 
4096, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_tool_choice": true + }, + "command-a-03-2025": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "cohere_chat", + "max_input_tokens": 256000, + "max_output_tokens": 8000, + "max_tokens": 8000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "command-light": { + "input_cost_per_token": 3e-7, + "litellm_provider": "cohere_chat", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_tool_choice": true + }, + "command-r": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "cohere_chat", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "command-r-08-2024": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "cohere_chat", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "command-r-plus": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "cohere_chat", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "command-r-plus-08-2024": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "cohere_chat", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "command-r7b-12-2024": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "cohere_chat", + "max_input_tokens": 128000, + 
"max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 3.75e-8, + "source": "https://docs.cohere.com/v2/docs/command-r7b", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "computer-use-preview": { + "input_cost_per_token": 0.000003, + "litellm_provider": "azure", + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_token": 0.000012, + "supported_endpoints": [ + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": false, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "deepseek-chat": { + "cache_read_input_token_cost": 3e-8, + "input_cost_per_token": 3e-7, + "litellm_provider": "deepseek", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 5e-7, + "source": "https://api-docs.deepseek.com/quick_start/pricing", + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "deepseek-reasoner": { + "cache_read_input_token_cost": 3e-8, + "input_cost_per_token": 3e-7, + "litellm_provider": "deepseek", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 5e-7, + "source": "https://api-docs.deepseek.com/quick_start/pricing", + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supports_function_calling": false, + 
"supports_native_streaming": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": false + }, + "dashscope/qwen-coder": { + "input_cost_per_token": 3e-7, + "litellm_provider": "dashscope", + "max_input_tokens": 1000000, + "max_output_tokens": 16384, + "max_tokens": 1000000, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen-flash": { + "litellm_provider": "dashscope", + "max_input_tokens": 997952, + "max_output_tokens": 32768, + "max_tokens": 1000000, + "mode": "chat", + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "tiered_pricing": [ + { + "input_cost_per_token": 5e-8, + "output_cost_per_token": 4e-7, + "range": [ + 0, + 256000 ] - }, - "aiml/flux-pro": { - "litellm_provider": "aiml", - "metadata": { - "notes": "Flux Dev - Development version optimized for experimentation" - }, - "mode": "image_generation", - "output_cost_per_image": 0.053, - "source": "https://docs.aimlapi.com/", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "aiml/flux-pro/v1.1": { - "litellm_provider": "aiml", - "mode": "image_generation", - "output_cost_per_image": 0.042, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "aiml/flux-pro/v1.1-ultra": { - "litellm_provider": "aiml", - "mode": "image_generation", - "output_cost_per_image": 0.063, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "aiml/flux-realism": { - "litellm_provider": "aiml", - "metadata": { - "notes": "Flux Pro - Professional-grade image generation model" - }, - "mode": "image_generation", - 
"output_cost_per_image": 0.037, - "source": "https://docs.aimlapi.com/", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "aiml/flux/dev": { - "litellm_provider": "aiml", - "metadata": { - "notes": "Flux Dev - Development version optimized for experimentation" - }, - "mode": "image_generation", - "output_cost_per_image": 0.026, - "source": "https://docs.aimlapi.com/", - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "input_cost_per_token": 2.5e-7, + "output_cost_per_token": 0.000002, + "range": [ + 256000, + 1000000 ] - }, - "aiml/flux/kontext-max/text-to-image": { - "litellm_provider": "aiml", - "metadata": { - "notes": "Flux Pro v1.1 - Enhanced version with improved capabilities and 6x faster inference speed" - }, - "mode": "image_generation", - "output_cost_per_image": 0.084, - "source": "https://docs.aimlapi.com/", - "supported_endpoints": [ - "/v1/images/generations" + } + ] + }, + "dashscope/qwen-flash-2025-07-28": { + "litellm_provider": "dashscope", + "max_input_tokens": 997952, + "max_output_tokens": 32768, + "max_tokens": 1000000, + "mode": "chat", + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "tiered_pricing": [ + { + "input_cost_per_token": 5e-8, + "output_cost_per_token": 4e-7, + "range": [ + 0, + 256000 ] - }, - "aiml/flux/kontext-pro/text-to-image": { - "litellm_provider": "aiml", - "metadata": { - "notes": "Flux Pro v1.1 - Enhanced version with improved capabilities and 6x faster inference speed" - }, - "mode": "image_generation", - "output_cost_per_image": 0.042, - "source": "https://docs.aimlapi.com/", - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "input_cost_per_token": 2.5e-7, + "output_cost_per_token": 0.000002, + "range": [ + 256000, + 1000000 ] - }, - "aiml/flux/schnell": { - "litellm_provider": "aiml", - "metadata": { - "notes": "Flux Schnell - Fast generation model 
optimized for speed" - }, - "mode": "image_generation", - "output_cost_per_image": 0.003, - "source": "https://docs.aimlapi.com/", - "supported_endpoints": [ - "/v1/images/generations" + } + ] + }, + "dashscope/qwen-max": { + "input_cost_per_token": 0.0000016, + "litellm_provider": "dashscope", + "max_input_tokens": 30720, + "max_output_tokens": 8192, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000064, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen-plus": { + "input_cost_per_token": 4e-7, + "litellm_provider": "dashscope", + "max_input_tokens": 129024, + "max_output_tokens": 16384, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen-plus-2025-01-25": { + "input_cost_per_token": 4e-7, + "litellm_provider": "dashscope", + "max_input_tokens": 129024, + "max_output_tokens": 8192, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen-plus-2025-04-28": { + "input_cost_per_token": 4e-7, + "litellm_provider": "dashscope", + "max_input_tokens": 129024, + "max_output_tokens": 16384, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_reasoning_token": 0.000004, + "output_cost_per_token": 0.0000012, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen-plus-2025-07-14": { + "input_cost_per_token": 4e-7, + "litellm_provider": 
"dashscope", + "max_input_tokens": 129024, + "max_output_tokens": 16384, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_reasoning_token": 0.000004, + "output_cost_per_token": 0.0000012, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen-plus-2025-07-28": { + "litellm_provider": "dashscope", + "max_input_tokens": 997952, + "max_output_tokens": 32768, + "max_tokens": 1000000, + "mode": "chat", + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "tiered_pricing": [ + { + "input_cost_per_token": 4e-7, + "output_cost_per_reasoning_token": 0.000004, + "output_cost_per_token": 0.0000012, + "range": [ + 0, + 256000 ] - }, - "amazon.nova-canvas-v1:0": { - "litellm_provider": "bedrock", - "max_input_tokens": 2600, - "mode": "image_generation", - "output_cost_per_image": 0.06 - }, - "us.writer.palmyra-x4-v1:0": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_pdf_input": true - }, - "us.writer.palmyra-x5-v1:0": { - "input_cost_per_token": 6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_function_calling": true, - "supports_pdf_input": true - }, - "writer.palmyra-x4-v1:0": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_pdf_input": true - }, - 
"writer.palmyra-x5-v1:0": { - "input_cost_per_token": 6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_function_calling": true, - "supports_pdf_input": true - }, - "amazon.nova-lite-v1:0": { - "input_cost_per_token": 6e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 2.4e-07, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "amazon.nova-2-lite-v1:0": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_video_input": true, - "supports_vision": true - }, - "apac.amazon.nova-2-lite-v1:0": { - "cache_read_input_token_cost": 8.25e-08, - "input_cost_per_token": 3.3e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 2.75e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_video_input": true, - "supports_vision": true - }, - "eu.amazon.nova-2-lite-v1:0": { - "cache_read_input_token_cost": 8.25e-08, - "input_cost_per_token": 3.3e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, 
- "mode": "chat", - "output_cost_per_token": 2.75e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_video_input": true, - "supports_vision": true - }, - "us.amazon.nova-2-lite-v1:0": { - "cache_read_input_token_cost": 8.25e-08, - "input_cost_per_token": 3.3e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 2.75e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_video_input": true, - "supports_vision": true - }, - - "amazon.nova-micro-v1:0": { - "input_cost_per_token": 3.5e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 1.4e-07, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "amazon.nova-pro-v1:0": { - "input_cost_per_token": 8e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 3.2e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "amazon.rerank-v1:0": { - "input_cost_per_query": 0.001, - "input_cost_per_token": 0.0, - "litellm_provider": "bedrock", - "max_document_chunks_per_query": 100, - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_query_tokens": 32000, - "max_tokens": 32000, - "max_tokens_per_document_chunk": 512, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "amazon.titan-embed-image-v1": { - 
"input_cost_per_image": 6e-05, - "input_cost_per_token": 8e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128, - "max_tokens": 128, - "metadata": { - "notes": "'supports_image_input' is a deprecated field. Use 'supports_embedding_image_input' instead." - }, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1024, - "source": "https://us-east-1.console.aws.amazon.com/bedrock/home?region=us-east-1#/providers?model=amazon.titan-image-generator-v1", - "supports_embedding_image_input": true, - "supports_image_input": true - }, - "amazon.titan-embed-text-v1": { - "input_cost_per_token": 1e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1536 - }, - "amazon.titan-embed-text-v2:0": { - "input_cost_per_token": 2e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1024 - }, - "amazon.titan-image-generator-v1": { - "input_cost_per_image": 0.0, - "output_cost_per_image": 0.008, - "output_cost_per_image_premium_image": 0.01, - "output_cost_per_image_above_512_and_512_pixels": 0.01, - "output_cost_per_image_above_512_and_512_pixels_and_premium_image": 0.012, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "amazon.titan-image-generator-v2": { - "input_cost_per_image": 0.0, - "output_cost_per_image": 0.008, - "output_cost_per_image_premium_image": 0.01, - "output_cost_per_image_above_1024_and_1024_pixels": 0.01, - "output_cost_per_image_above_1024_and_1024_pixels_and_premium_image": 0.012, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "amazon.titan-image-generator-v2:0": { - "input_cost_per_image": 0.0, - "output_cost_per_image": 0.008, - "output_cost_per_image_premium_image": 0.01, - "output_cost_per_image_above_1024_and_1024_pixels": 0.01, - 
"output_cost_per_image_above_1024_and_1024_pixels_and_premium_image": 0.012, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "twelvelabs.marengo-embed-2-7-v1:0": { - "input_cost_per_token": 7e-05, - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1024, - "supports_embedding_image_input": true, - "supports_image_input": true - }, - "us.twelvelabs.marengo-embed-2-7-v1:0": { - "input_cost_per_token": 7e-05, - "input_cost_per_video_per_second": 0.0007, - "input_cost_per_audio_per_second": 0.00014, - "input_cost_per_image": 0.0001, - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1024, - "supports_embedding_image_input": true, - "supports_image_input": true - }, - "eu.twelvelabs.marengo-embed-2-7-v1:0": { - "input_cost_per_token": 7e-05, - "input_cost_per_video_per_second": 0.0007, - "input_cost_per_audio_per_second": 0.00014, - "input_cost_per_image": 0.0001, - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1024, - "supports_embedding_image_input": true, - "supports_image_input": true - }, - "twelvelabs.pegasus-1-2-v1:0": { - "input_cost_per_video_per_second": 0.00049, - "output_cost_per_token": 7.5e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_video_input": true - }, - "us.twelvelabs.pegasus-1-2-v1:0": { - "input_cost_per_video_per_second": 0.00049, - "output_cost_per_token": 7.5e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_video_input": true - }, - "eu.twelvelabs.pegasus-1-2-v1:0": { - "input_cost_per_video_per_second": 0.00049, - "output_cost_per_token": 7.5e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_video_input": true - }, - "amazon.titan-text-express-v1": 
{ - "input_cost_per_token": 1.3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 42000, - "max_output_tokens": 8000, - "max_tokens": 8000, - "mode": "chat", - "output_cost_per_token": 1.7e-06 - }, - "amazon.titan-text-lite-v1": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 42000, - "max_output_tokens": 4000, - "max_tokens": 4000, - "mode": "chat", - "output_cost_per_token": 4e-07 - }, - "amazon.titan-text-premier-v1:0": { - "input_cost_per_token": 5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 42000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 1.5e-06 - }, - "anthropic.claude-3-5-haiku-20241022-v1:0": { - "cache_creation_input_token_cost": 1e-06, - "cache_read_input_token_cost": 8e-08, - "input_cost_per_token": 8e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "anthropic.claude-haiku-4-5-20251001-v1:0": { - "cache_creation_input_token_cost": 1.25e-06, - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 1e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - 
"tool_use_system_prompt_tokens": 346 - }, - "anthropic.claude-haiku-4-5@20251001": { - "cache_creation_input_token_cost": 1.25e-06, - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 1e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "anthropic.claude-3-5-sonnet-20240620-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "anthropic.claude-3-5-sonnet-20241022-v2:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "anthropic.claude-3-7-sonnet-20240620-v1:0": { - "cache_creation_input_token_cost": 4.5e-06, - "cache_read_input_token_cost": 3.6e-07, - 
"input_cost_per_token": 3.6e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.8e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "anthropic.claude-3-7-sonnet-20250219-v1:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "anthropic.claude-3-haiku-20240307-v1:0": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "anthropic.claude-3-opus-20240229-v1:0": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "anthropic.claude-3-sonnet-20240229-v1:0": { - 
"input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "anthropic.claude-instant-v1": { - "input_cost_per_token": 8e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-06, - "supports_tool_choice": true - }, - "anthropic.claude-opus-4-1-20250805-v1:0": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "anthropic.claude-opus-4-20250514-v1:0": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - 
"supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "anthropic.claude-opus-4-5-20251101-v1:0": { - "cache_creation_input_token_cost": 6.25e-06, - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 5e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 2.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "anthropic.claude-sonnet-4-20250514-v1:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - 
"supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "anthropic.claude-sonnet-4-5-20250929-v1:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "anthropic.claude-v1": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05 - }, - "anthropic.claude-v2:1": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_tool_choice": true - }, - "anyscale/HuggingFaceH4/zephyr-7b-beta": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "max_input_tokens": 16384, - 
"max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.5e-07 - }, - "anyscale/codellama/CodeLlama-34b-Instruct-hf": { - "input_cost_per_token": 1e-06, - "litellm_provider": "anyscale", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1e-06 - }, - "anyscale/codellama/CodeLlama-70b-Instruct-hf": { - "input_cost_per_token": 1e-06, - "litellm_provider": "anyscale", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1e-06, - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/codellama-CodeLlama-70b-Instruct-hf" - }, - "anyscale/google/gemma-7b-it": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/google-gemma-7b-it" - }, - "anyscale/meta-llama/Llama-2-13b-chat-hf": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "anyscale", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.5e-07 - }, - "anyscale/meta-llama/Llama-2-70b-chat-hf": { - "input_cost_per_token": 1e-06, - "litellm_provider": "anyscale", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1e-06 - }, - "anyscale/meta-llama/Llama-2-7b-chat-hf": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-07 - }, - "anyscale/meta-llama/Meta-Llama-3-70B-Instruct": { - "input_cost_per_token": 1e-06, - "litellm_provider": "anyscale", - "max_input_tokens": 8192, 
- "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1e-06, - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-70B-Instruct" - }, - "anyscale/meta-llama/Meta-Llama-3-8B-Instruct": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-8B-Instruct" - }, - "anyscale/mistralai/Mistral-7B-Instruct-v0.1": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mistral-7B-Instruct-v0.1", - "supports_function_calling": true - }, - "anyscale/mistralai/Mixtral-8x22B-Instruct-v0.1": { - "input_cost_per_token": 9e-07, - "litellm_provider": "anyscale", - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 9e-07, - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x22B-Instruct-v0.1", - "supports_function_calling": true - }, - "anyscale/mistralai/Mixtral-8x7B-Instruct-v0.1": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x7B-Instruct-v0.1", - "supports_function_calling": true - }, - "apac.amazon.nova-lite-v1:0": { - "input_cost_per_token": 6.3e-08, - "litellm_provider": 
"bedrock_converse", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 2.52e-07, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "apac.amazon.nova-micro-v1:0": { - "input_cost_per_token": 3.7e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 1.48e-07, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "apac.amazon.nova-pro-v1:0": { - "input_cost_per_token": 8.4e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 3.36e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "apac.anthropic.claude-3-5-sonnet-20240620-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "apac.anthropic.claude-3-5-sonnet-20241022-v2:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - 
"supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "apac.anthropic.claude-3-haiku-20240307-v1:0": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "apac.anthropic.claude-haiku-4-5-20251001-v1:0": { - "cache_creation_input_token_cost": 1.375e-06, - "cache_read_input_token_cost": 1.1e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5.5e-06, - "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "apac.anthropic.claude-3-sonnet-20240229-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "apac.anthropic.claude-sonnet-4-20250514-v1:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - 
"input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "assemblyai/best": { - "input_cost_per_second": 3.333e-05, - "litellm_provider": "assemblyai", - "mode": "audio_transcription", - "output_cost_per_second": 0.0 - }, - "assemblyai/nano": { - "input_cost_per_second": 0.00010278, - "litellm_provider": "assemblyai", - "mode": "audio_transcription", - "output_cost_per_second": 0.0 - }, - "au.anthropic.claude-sonnet-4-5-20250929-v1:0": { - "cache_creation_input_token_cost": 4.125e-06, - "cache_read_input_token_cost": 3.3e-07, - "input_cost_per_token": 3.3e-06, - "input_cost_per_token_above_200k_tokens": 6.6e-06, - "output_cost_per_token_above_200k_tokens": 2.475e-05, - "cache_creation_input_token_cost_above_200k_tokens": 8.25e-06, - "cache_read_input_token_cost_above_200k_tokens": 6.6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.65e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - 
"supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "azure/ada": { - "input_cost_per_token": 1e-07, - "litellm_provider": "azure", - "max_input_tokens": 8191, - "max_tokens": 8191, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "azure/codex-mini": { - "cache_read_input_token_cost": 3.75e-07, - "input_cost_per_token": 1.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 6e-06, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/command-r-plus": { - "input_cost_per_token": 3e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true - }, - "azure_ai/claude-haiku-4-5": { - "input_cost_per_token": 1e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - 
"supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure_ai/claude-opus-4-1": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "azure_ai", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure_ai/claude-sonnet-4-5": { - "input_cost_per_token": 3e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/computer-use-preview": { - "input_cost_per_token": 3e-06, - "litellm_provider": "azure", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": false, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/container": { - "code_interpreter_cost_per_session": 0.03, - "litellm_provider": "azure", - "mode": "chat" - }, - "azure/eu/gpt-4o-2024-08-06": { 
- "deprecation_date": "2026-02-27", - "cache_read_input_token_cost": 1.375e-06, - "input_cost_per_token": 2.75e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/gpt-4o-2024-11-20": { - "deprecation_date": "2026-03-01", - "cache_creation_input_token_cost": 1.38e-06, - "input_cost_per_token": 2.75e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/gpt-4o-mini-2024-07-18": { - "cache_read_input_token_cost": 8.3e-08, - "input_cost_per_token": 1.65e-07, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 6.6e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/gpt-4o-mini-realtime-preview-2024-12-17": { - "cache_creation_input_audio_token_cost": 3.3e-07, - "cache_read_input_token_cost": 3.3e-07, - "input_cost_per_audio_token": 1.1e-05, - "input_cost_per_token": 6.6e-07, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 2.2e-05, - "output_cost_per_token": 2.64e-06, - "supports_audio_input": true, - "supports_audio_output": 
true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/eu/gpt-4o-realtime-preview-2024-10-01": { - "cache_creation_input_audio_token_cost": 2.2e-05, - "cache_read_input_token_cost": 2.75e-06, - "input_cost_per_audio_token": 0.00011, - "input_cost_per_token": 5.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 0.00022, - "output_cost_per_token": 2.2e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/eu/gpt-4o-realtime-preview-2024-12-17": { - "cache_read_input_audio_token_cost": 2.5e-06, - "cache_read_input_token_cost": 2.75e-06, - "input_cost_per_audio_token": 4.4e-05, - "input_cost_per_token": 5.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 2.2e-05, - "supported_modalities": [ - "text", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/eu/gpt-5-2025-08-07": { - "cache_read_input_token_cost": 1.375e-07, - "input_cost_per_token": 1.375e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - 
"supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/gpt-5-mini-2025-08-07": { - "cache_read_input_token_cost": 2.75e-08, - "input_cost_per_token": 2.75e-07, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2.2e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/gpt-5.1": { - "cache_read_input_token_cost": 1.4e-07, - "input_cost_per_token": 1.38e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - 
"supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/gpt-5.1-chat": { - "cache_read_input_token_cost": 1.4e-07, - "input_cost_per_token": 1.38e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/gpt-5.1-codex": { - "cache_read_input_token_cost": 1.4e-07, - "input_cost_per_token": 1.38e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1.1e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/gpt-5.1-codex-mini": { - "cache_read_input_token_cost": 2.8e-08, - "input_cost_per_token": 2.75e-07, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 2.2e-06, - "supported_endpoints": [ - 
"/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/gpt-5-nano-2025-08-07": { - "cache_read_input_token_cost": 5.5e-09, - "input_cost_per_token": 5.5e-08, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 4.4e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/o1-2024-12-17": { - "cache_read_input_token_cost": 8.25e-06, - "input_cost_per_token": 1.65e-05, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 6.6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/eu/o1-mini-2024-09-12": { - "cache_read_input_token_cost": 6.05e-07, - "input_cost_per_token": 1.21e-06, - "input_cost_per_token_batches": 6.05e-07, - "litellm_provider": "azure", - "max_input_tokens": 128000, - 
"max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 4.84e-06, - "output_cost_per_token_batches": 2.42e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_vision": false - }, - "azure/eu/o1-preview-2024-09-12": { - "cache_read_input_token_cost": 8.25e-06, - "input_cost_per_token": 1.65e-05, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6.6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_vision": false - }, - "azure/eu/o3-mini-2025-01-31": { - "cache_read_input_token_cost": 6.05e-07, - "input_cost_per_token": 1.21e-06, - "input_cost_per_token_batches": 6.05e-07, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.84e-06, - "output_cost_per_token_batches": 2.42e-06, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "azure/global-standard/gpt-4o-2024-08-06": { - "cache_read_input_token_cost": 1.25e-06, - "deprecation_date": "2026-02-27", - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/global-standard/gpt-4o-2024-11-20": { - "cache_read_input_token_cost": 1.25e-06, - "deprecation_date": "2026-03-01", - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 
128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/global-standard/gpt-4o-mini": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/global/gpt-4o-2024-08-06": { - "deprecation_date": "2026-02-27", - "cache_read_input_token_cost": 1.25e-06, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/global/gpt-4o-2024-11-20": { - "deprecation_date": "2026-03-01", - "cache_read_input_token_cost": 1.25e-06, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/global/gpt-5.1": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - 
"max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/global/gpt-5.1-chat": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/global/gpt-5.1-codex": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - 
"supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/global/gpt-5.1-codex-mini": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 2.5e-07, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 2e-06, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-3.5-turbo": { - "input_cost_per_token": 5e-07, - "litellm_provider": "azure", - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-3.5-turbo-0125": { - "deprecation_date": "2025-03-31", - "input_cost_per_token": 5e-07, - "litellm_provider": "azure", - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-3.5-turbo-instruct-0914": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "azure_text", - "max_input_tokens": 4097, - "max_tokens": 4097, - "mode": "completion", - "output_cost_per_token": 2e-06 - }, - "azure/gpt-35-turbo": { 
- "input_cost_per_token": 5e-07, - "litellm_provider": "azure", - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-35-turbo-0125": { - "deprecation_date": "2025-05-31", - "input_cost_per_token": 5e-07, - "litellm_provider": "azure", - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-35-turbo-0301": { - "deprecation_date": "2025-02-13", - "input_cost_per_token": 2e-07, - "litellm_provider": "azure", - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "max_tokens": 4097, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-35-turbo-0613": { - "deprecation_date": "2025-02-13", - "input_cost_per_token": 1.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "max_tokens": 4097, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-35-turbo-1106": { - "deprecation_date": "2025-03-31", - "input_cost_per_token": 1e-06, - "litellm_provider": "azure", - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-35-turbo-16k": { - "input_cost_per_token": 3e-06, - "litellm_provider": "azure", - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - 
"output_cost_per_token": 4e-06, - "supports_tool_choice": true - }, - "azure/gpt-35-turbo-16k-0613": { - "input_cost_per_token": 3e-06, - "litellm_provider": "azure", - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 4e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-35-turbo-instruct": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "azure_text", - "max_input_tokens": 4097, - "max_tokens": 4097, - "mode": "completion", - "output_cost_per_token": 2e-06 - }, - "azure/gpt-35-turbo-instruct-0914": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "azure_text", - "max_input_tokens": 4097, - "max_tokens": 4097, - "mode": "completion", - "output_cost_per_token": 2e-06 - }, - "azure/gpt-4": { - "input_cost_per_token": 3e-05, - "litellm_provider": "azure", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-4-0125-preview": { - "input_cost_per_token": 1e-05, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-4-0613": { - "input_cost_per_token": 3e-05, - "litellm_provider": "azure", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-4-1106-preview": { - "input_cost_per_token": 1e-05, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_function_calling": 
true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-4-32k": { - "input_cost_per_token": 6e-05, - "litellm_provider": "azure", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.00012, - "supports_tool_choice": true - }, - "azure/gpt-4-32k-0613": { - "input_cost_per_token": 6e-05, - "litellm_provider": "azure", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.00012, - "supports_tool_choice": true - }, - "azure/gpt-4-turbo": { - "input_cost_per_token": 1e-05, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "azure/gpt-4-turbo-2024-04-09": { - "input_cost_per_token": 1e-05, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-4-turbo-vision-preview": { - "input_cost_per_token": 1e-05, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-4.1": { - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "azure", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 8e-06, - "output_cost_per_token_batches": 4e-06, - "supported_endpoints": [ - 
"/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": false - }, - "azure/gpt-4.1-2025-04-14": { - "deprecation_date": "2026-11-04", - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "azure", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 8e-06, - "output_cost_per_token_batches": 4e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": false - }, - "azure/gpt-4.1-mini": { - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 4e-07, - "input_cost_per_token_batches": 2e-07, - "litellm_provider": "azure", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.6e-06, - "output_cost_per_token_batches": 8e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - 
"supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": false - }, - "azure/gpt-4.1-mini-2025-04-14": { - "deprecation_date": "2026-11-04", - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 4e-07, - "input_cost_per_token_batches": 2e-07, - "litellm_provider": "azure", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.6e-06, - "output_cost_per_token_batches": 8e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": false - }, - "azure/gpt-4.1-nano": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 1e-07, - "input_cost_per_token_batches": 5e-08, - "litellm_provider": "azure", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4e-07, - "output_cost_per_token_batches": 2e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, 
- "supports_vision": true - }, - "azure/gpt-4.1-nano-2025-04-14": { - "deprecation_date": "2026-11-04", - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 1e-07, - "input_cost_per_token_batches": 5e-08, - "litellm_provider": "azure", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4e-07, - "output_cost_per_token_batches": 2e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-4.5-preview": { - "cache_read_input_token_cost": 3.75e-05, - "input_cost_per_token": 7.5e-05, - "input_cost_per_token_batches": 3.75e-05, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 0.00015, - "output_cost_per_token_batches": 7.5e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-4o": { - "cache_read_input_token_cost": 1.25e-06, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": 
true - }, - "azure/gpt-4o-2024-05-13": { - "input_cost_per_token": 5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-4o-2024-08-06": { - "deprecation_date": "2026-02-27", - "cache_read_input_token_cost": 1.25e-06, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-4o-2024-11-20": { - "deprecation_date": "2026-03-01", - "cache_read_input_token_cost": 1.25e-06, - "input_cost_per_token": 2.75e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-audio-2025-08-28": { - "input_cost_per_audio_token": 4e-05, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_function_calling": 
true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": false, - "supports_reasoning": false, - "supports_response_schema": false, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "azure/gpt-audio-mini-2025-10-06": { - "input_cost_per_audio_token": 1e-05, - "input_cost_per_token": 6e-07, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_audio_token": 2e-05, - "output_cost_per_token": 2.4e-06, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": false, - "supports_reasoning": false, - "supports_response_schema": false, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "azure/gpt-4o-audio-preview-2024-12-17": { - "input_cost_per_audio_token": 4e-05, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": false, - "supports_reasoning": false, - "supports_response_schema": false, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "azure/gpt-4o-mini": { - "cache_read_input_token_cost": 7.5e-08, - 
"input_cost_per_token": 1.65e-07, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 6.6e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-4o-mini-2024-07-18": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_token": 1.65e-07, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 6.6e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-4o-mini-audio-preview-2024-12-17": { - "input_cost_per_audio_token": 4e-05, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": false, - "supports_reasoning": false, - "supports_response_schema": false, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "azure/gpt-4o-mini-realtime-preview-2024-12-17": { - "cache_creation_input_audio_token_cost": 3e-07, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_audio_token": 1e-05, - "input_cost_per_token": 6e-07, - "litellm_provider": "azure", - 
"max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 2e-05, - "output_cost_per_token": 2.4e-06, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/gpt-realtime-2025-08-28": { - "cache_creation_input_audio_token_cost": 4e-06, - "cache_read_input_token_cost": 4e-06, - "input_cost_per_audio_token": 3.2e-05, - "input_cost_per_image": 5e-06, - "input_cost_per_token": 4e-06, - "litellm_provider": "azure", - "max_input_tokens": 32000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 6.4e-05, - "output_cost_per_token": 1.6e-05, - "supported_endpoints": [ - "/v1/realtime" - ], - "supported_modalities": [ - "text", - "image", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/gpt-realtime-mini-2025-10-06": { - "cache_creation_input_audio_token_cost": 3e-07, - "cache_read_input_token_cost": 6e-08, - "input_cost_per_audio_token": 1e-05, - "input_cost_per_image": 8e-07, - "input_cost_per_token": 6e-07, - "litellm_provider": "azure", - "max_input_tokens": 32000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 2e-05, - "output_cost_per_token": 2.4e-06, - "supported_endpoints": [ - "/v1/realtime" - ], - "supported_modalities": [ - "text", - "image", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - 
"supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/gpt-4o-mini-transcribe": { - "input_cost_per_audio_token": 3e-06, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 16000, - "max_output_tokens": 2000, - "mode": "audio_transcription", - "output_cost_per_token": 5e-06, - "supported_endpoints": [ - "/v1/audio/transcriptions" + }, + { + "input_cost_per_token": 0.0000012, + "output_cost_per_reasoning_token": 0.000012, + "output_cost_per_token": 0.0000036, + "range": [ + 256000, + 1000000 ] - }, - "azure/gpt-4o-mini-tts": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "mode": "audio_speech", - "output_cost_per_audio_token": 1.2e-05, - "output_cost_per_second": 0.00025, - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/audio/speech" - ], - "supported_modalities": [ - "text", - "audio" - ], - "supported_output_modalities": [ - "audio" + } + ] + }, + "dashscope/qwen-plus-2025-09-11": { + "litellm_provider": "dashscope", + "max_input_tokens": 997952, + "max_output_tokens": 32768, + "max_tokens": 1000000, + "mode": "chat", + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "tiered_pricing": [ + { + "input_cost_per_token": 4e-7, + "output_cost_per_reasoning_token": 0.000004, + "output_cost_per_token": 0.0000012, + "range": [ + 0, + 256000 ] - }, - "azure/gpt-4o-realtime-preview-2024-10-01": { - "cache_creation_input_audio_token_cost": 2e-05, - "cache_read_input_token_cost": 2.5e-06, - "input_cost_per_audio_token": 0.0001, - "input_cost_per_token": 5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 0.0002, - "output_cost_per_token": 2e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": 
true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/gpt-4o-realtime-preview-2024-12-17": { - "cache_read_input_token_cost": 2.5e-06, - "input_cost_per_audio_token": 4e-05, - "input_cost_per_token": 5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 2e-05, - "supported_modalities": [ - "text", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/gpt-4o-transcribe": { - "input_cost_per_audio_token": 6e-06, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 16000, - "max_output_tokens": 2000, - "mode": "audio_transcription", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/audio/transcriptions" + }, + { + "input_cost_per_token": 0.0000012, + "output_cost_per_reasoning_token": 0.000012, + "output_cost_per_token": 0.0000036, + "range": [ + 256000, + 1000000 ] - }, - "azure/gpt-4o-transcribe-diarize": { - "input_cost_per_audio_token": 6e-06, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 16000, - "max_output_tokens": 2000, - "mode": "audio_transcription", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/audio/transcriptions" + } + ] + }, + "dashscope/qwen-plus-latest": { + "litellm_provider": "dashscope", + "max_input_tokens": 997952, + "max_output_tokens": 32768, + "max_tokens": 1000000, + "mode": "chat", + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + 
"tiered_pricing": [ + { + "input_cost_per_token": 4e-7, + "output_cost_per_reasoning_token": 0.000004, + "output_cost_per_token": 0.0000012, + "range": [ + 0, + 256000 ] - }, - "azure/gpt-5.1-2025-11-13": { - "cache_read_input_token_cost": 1.25e-07, - "cache_read_input_token_cost_priority": 2.5e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_priority": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_priority": 2e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "azure/gpt-5.1-chat-2025-11-13": { - "cache_read_input_token_cost": 1.25e-07, - "cache_read_input_token_cost_priority": 2.5e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_priority": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_priority": 2e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": false, - "supports_native_streaming": true, - "supports_parallel_function_calling": false, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - 
"supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "azure/gpt-5.1-codex-2025-11-13": { - "cache_read_input_token_cost": 1.25e-07, - "cache_read_input_token_cost_priority": 2.5e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_priority": 2.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1e-05, - "output_cost_per_token_priority": 2e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5.1-codex-mini-2025-11-13": { - "cache_read_input_token_cost": 2.5e-08, - "cache_read_input_token_cost_priority": 4.5e-08, - "input_cost_per_token": 2.5e-07, - "input_cost_per_token_priority": 4.5e-07, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 2e-06, - "output_cost_per_token_priority": 3.6e-06, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - 
}, - "azure/gpt-5": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5-2025-08-07": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5-chat": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "source": 
"https://azure.microsoft.com/en-us/blog/gpt-5-in-azure-ai-foundry-the-future-of-ai-apps-and-agents-starts-here/", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "azure/gpt-5-chat-latest": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "azure/gpt-5-codex": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - 
"supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5-mini": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 2.5e-07, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5-mini-2025-08-07": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 2.5e-07, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5-nano": { - "cache_read_input_token_cost": 5e-09, - "input_cost_per_token": 
5e-08, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5-nano-2025-08-07": { - "cache_read_input_token_cost": 5e-09, - "input_cost_per_token": 5e-08, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5-pro": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 400000, - "mode": "responses", - "output_cost_per_token": 0.00012, - "source": "https://learn.microsoft.com/en-us/azure/ai-foundry/foundry-models/concepts/models-sold-directly-by-azure?pivots=azure-openai&tabs=global-standard-aoai%2Cstandard-chat-completions%2Cglobal-standard#gpt-5", - 
"supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5.1": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5.1-chat": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - 
"supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5.1-codex": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5.1-codex-max": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "azure", - "max_input_tokens": 400000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-5.1-codex-mini": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 2.5e-07, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 2e-06, - 
"supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/gpt-image-1": { - "input_cost_per_pixel": 4.0054321e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "input_cost_per_token": 0.0000012, + "output_cost_per_reasoning_token": 0.000012, + "output_cost_per_token": 0.0000036, + "range": [ + 256000, + 1000000 ] - }, - "azure/hd/1024-x-1024/dall-e-3": { - "input_cost_per_pixel": 7.629e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_token": 0.0 - }, - "azure/hd/1024-x-1792/dall-e-3": { - "input_cost_per_pixel": 6.539e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_token": 0.0 - }, - "azure/hd/1792-x-1024/dall-e-3": { - "input_cost_per_pixel": 6.539e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_token": 0.0 - }, - "azure/high/1024-x-1024/gpt-image-1": { - "input_cost_per_pixel": 1.59263611e-07, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + } + ] + }, + "dashscope/qwen-turbo": { + "input_cost_per_token": 5e-8, + "litellm_provider": "dashscope", + "max_input_tokens": 129024, + "max_output_tokens": 16384, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_reasoning_token": 5e-7, + "output_cost_per_token": 2e-7, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + 
"supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen-turbo-2024-11-01": { + "input_cost_per_token": 5e-8, + "litellm_provider": "dashscope", + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_tokens": 1000000, + "mode": "chat", + "output_cost_per_token": 2e-7, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen-turbo-2025-04-28": { + "input_cost_per_token": 5e-8, + "litellm_provider": "dashscope", + "max_input_tokens": 1000000, + "max_output_tokens": 16384, + "max_tokens": 1000000, + "mode": "chat", + "output_cost_per_reasoning_token": 5e-7, + "output_cost_per_token": 2e-7, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen-turbo-latest": { + "input_cost_per_token": 5e-8, + "litellm_provider": "dashscope", + "max_input_tokens": 1000000, + "max_output_tokens": 16384, + "max_tokens": 1000000, + "mode": "chat", + "output_cost_per_reasoning_token": 5e-7, + "output_cost_per_token": 2e-7, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen3-30b-a3b": { + "litellm_provider": "dashscope", + "max_input_tokens": 129024, + "max_output_tokens": 16384, + "max_tokens": 131072, + "mode": "chat", + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "dashscope/qwen3-coder-flash": { + "litellm_provider": "dashscope", + "max_input_tokens": 997952, + "max_output_tokens": 65536, + "max_tokens": 1000000, + "mode": "chat", + "source": 
"https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "tiered_pricing": [ + { + "cache_read_input_token_cost": 8e-8, + "input_cost_per_token": 3e-7, + "output_cost_per_token": 0.0000015, + "range": [ + 0, + 32000 ] - }, - "azure/high/1024-x-1536/gpt-image-1": { - "input_cost_per_pixel": 1.58945719e-07, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "cache_read_input_token_cost": 1.2e-7, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 0.0000025, + "range": [ + 32000, + 128000 ] - }, - "azure/high/1536-x-1024/gpt-image-1": { - "input_cost_per_pixel": 1.58945719e-07, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "cache_read_input_token_cost": 2e-7, + "input_cost_per_token": 8e-7, + "output_cost_per_token": 0.000004, + "range": [ + 128000, + 256000 ] - }, - "azure/low/1024-x-1024/gpt-image-1": { - "input_cost_per_pixel": 1.0490417e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "cache_read_input_token_cost": 4e-7, + "input_cost_per_token": 0.0000016, + "output_cost_per_token": 0.0000096, + "range": [ + 256000, + 1000000 ] - }, - "azure/low/1024-x-1536/gpt-image-1": { - "input_cost_per_pixel": 1.0172526e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + } + ] + }, + "dashscope/qwen3-coder-flash-2025-07-28": { + "litellm_provider": "dashscope", + "max_input_tokens": 997952, + "max_output_tokens": 65536, + "max_tokens": 1000000, + "mode": "chat", + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + 
"supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "tiered_pricing": [ + { + "input_cost_per_token": 3e-7, + "output_cost_per_token": 0.0000015, + "range": [ + 0, + 32000 ] - }, - "azure/low/1536-x-1024/gpt-image-1": { - "input_cost_per_pixel": 1.0172526e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "input_cost_per_token": 5e-7, + "output_cost_per_token": 0.0000025, + "range": [ + 32000, + 128000 ] - }, - "azure/medium/1024-x-1024/gpt-image-1": { - "input_cost_per_pixel": 4.0054321e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "input_cost_per_token": 8e-7, + "output_cost_per_token": 0.000004, + "range": [ + 128000, + 256000 ] - }, - "azure/medium/1024-x-1536/gpt-image-1": { - "input_cost_per_pixel": 4.0054321e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "input_cost_per_token": 0.0000016, + "output_cost_per_token": 0.0000096, + "range": [ + 256000, + 1000000 ] - }, - "azure/medium/1536-x-1024/gpt-image-1": { - "input_cost_per_pixel": 4.0054321e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + } + ] + }, + "dashscope/qwen3-coder-plus": { + "litellm_provider": "dashscope", + "max_input_tokens": 997952, + "max_output_tokens": 65536, + "max_tokens": 1000000, + "mode": "chat", + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "tiered_pricing": [ + { + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 0.000001, + "output_cost_per_token": 
0.000005, + "range": [ + 0, + 32000 ] - }, - "azure/gpt-image-1-mini": { - "input_cost_per_pixel": 8.0566406e-09, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "cache_read_input_token_cost": 1.8e-7, + "input_cost_per_token": 0.0000018, + "output_cost_per_token": 0.000009, + "range": [ + 32000, + 128000 ] - }, - "azure/low/1024-x-1024/gpt-image-1-mini": { - "input_cost_per_pixel": 2.0751953125e-09, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "range": [ + 128000, + 256000 ] - }, - "azure/low/1024-x-1536/gpt-image-1-mini": { - "input_cost_per_pixel": 2.0751953125e-09, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "cache_read_input_token_cost": 6e-7, + "input_cost_per_token": 0.000006, + "output_cost_per_token": 0.00006, + "range": [ + 256000, + 1000000 ] - }, - "azure/low/1536-x-1024/gpt-image-1-mini": { - "input_cost_per_pixel": 2.0345052083e-09, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + } + ] + }, + "dashscope/qwen3-coder-plus-2025-07-22": { + "litellm_provider": "dashscope", + "max_input_tokens": 997952, + "max_output_tokens": 65536, + "max_tokens": 1000000, + "mode": "chat", + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "tiered_pricing": [ + { + "input_cost_per_token": 0.000001, + "output_cost_per_token": 0.000005, + "range": [ + 0, + 32000 ] - }, - "azure/medium/1024-x-1024/gpt-image-1-mini": 
{ - "input_cost_per_pixel": 8.056640625e-09, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "input_cost_per_token": 0.0000018, + "output_cost_per_token": 0.000009, + "range": [ + 32000, + 128000 ] - }, - "azure/medium/1024-x-1536/gpt-image-1-mini": { - "input_cost_per_pixel": 8.056640625e-09, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "range": [ + 128000, + 256000 ] - }, - "azure/medium/1536-x-1024/gpt-image-1-mini": { - "input_cost_per_pixel": 7.9752604167e-09, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "input_cost_per_token": 0.000006, + "output_cost_per_token": 0.00006, + "range": [ + 256000, + 1000000 ] - }, - "azure/high/1024-x-1024/gpt-image-1-mini": { - "input_cost_per_pixel": 3.173828125e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + } + ] + }, + "dashscope/qwen3-max-preview": { + "litellm_provider": "dashscope", + "max_input_tokens": 258048, + "max_output_tokens": 65536, + "max_tokens": 262144, + "mode": "chat", + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "tiered_pricing": [ + { + "input_cost_per_token": 0.0000012, + "output_cost_per_token": 0.000006, + "range": [ + 0, + 32000 ] - }, - "azure/high/1024-x-1536/gpt-image-1-mini": { - "input_cost_per_pixel": 3.173828125e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + 
}, + { + "input_cost_per_token": 0.0000024, + "output_cost_per_token": 0.000012, + "range": [ + 32000, + 128000 ] - }, - "azure/high/1536-x-1024/gpt-image-1-mini": { - "input_cost_per_pixel": 3.1575520833e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" + }, + { + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "range": [ + 128000, + 252000 ] - }, - "azure/mistral-large-2402": { - "input_cost_per_token": 8e-06, - "litellm_provider": "azure", - "max_input_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_function_calling": true - }, - "azure/mistral-large-latest": { - "input_cost_per_token": 8e-06, - "litellm_provider": "azure", - "max_input_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_function_calling": true - }, - "azure/o1": { - "cache_read_input_token_cost": 7.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/o1-2024-12-17": { - "cache_read_input_token_cost": 7.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/o1-mini": { - "cache_read_input_token_cost": 6.05e-07, - 
"input_cost_per_token": 1.21e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 4.84e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_vision": false - }, - "azure/o1-mini-2024-09-12": { - "cache_read_input_token_cost": 5.5e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_vision": false - }, - "azure/o1-preview": { - "cache_read_input_token_cost": 7.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_vision": false - }, - "azure/o1-preview-2024-09-12": { - "cache_read_input_token_cost": 7.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_vision": false - }, - "azure/o3": { - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - 
"output_cost_per_token": 8e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/o3-2025-04-16": { - "deprecation_date": "2026-04-16", - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/o3-deep-research": { - "cache_read_input_token_cost": 2.5e-06, - "input_cost_per_token": 1e-05, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 4e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - 
"supports_web_search": true - }, - "azure/o3-mini": { - "cache_read_input_token_cost": 5.5e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "azure/o3-mini-2025-01-31": { - "cache_read_input_token_cost": 5.5e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "azure/o3-pro": { - "input_cost_per_token": 2e-05, - "input_cost_per_token_batches": 1e-05, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 8e-05, - "output_cost_per_token_batches": 4e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": false, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/o3-pro-2025-06-10": { - "input_cost_per_token": 2e-05, - "input_cost_per_token_batches": 1e-05, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 8e-05, - "output_cost_per_token_batches": 4e-05, - "supported_endpoints": [ - "/v1/chat/completions", - 
"/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": false, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/o4-mini": { - "cache_read_input_token_cost": 2.75e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/o4-mini-2025-04-16": { - "cache_read_input_token_cost": 2.75e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/standard/1024-x-1024/dall-e-2": { - "input_cost_per_pixel": 0.0, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_token": 0.0 - }, - "azure/standard/1024-x-1024/dall-e-3": { - "input_cost_per_pixel": 3.81469e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_token": 0.0 - }, - 
"azure/standard/1024-x-1792/dall-e-3": { - "input_cost_per_pixel": 4.359e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_token": 0.0 - }, - "azure/standard/1792-x-1024/dall-e-3": { - "input_cost_per_pixel": 4.359e-08, - "litellm_provider": "azure", - "mode": "image_generation", - "output_cost_per_token": 0.0 - }, - "azure/text-embedding-3-large": { - "input_cost_per_token": 1.3e-07, - "litellm_provider": "azure", - "max_input_tokens": 8191, - "max_tokens": 8191, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "azure/text-embedding-3-small": { - "deprecation_date": "2026-04-30", - "input_cost_per_token": 2e-08, - "litellm_provider": "azure", - "max_input_tokens": 8191, - "max_tokens": 8191, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "azure/text-embedding-ada-002": { - "input_cost_per_token": 1e-07, - "litellm_provider": "azure", - "max_input_tokens": 8191, - "max_tokens": 8191, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "azure/speech/azure-tts": { - "input_cost_per_character": 15e-06, - "litellm_provider": "azure", - "mode": "audio_speech", - "source": "https://azure.microsoft.com/en-us/pricing/calculator/" - }, - "azure/speech/azure-tts-hd": { - "input_cost_per_character": 30e-06, - "litellm_provider": "azure", - "mode": "audio_speech", - "source": "https://azure.microsoft.com/en-us/pricing/calculator/" - }, - "azure/tts-1": { - "input_cost_per_character": 1.5e-05, - "litellm_provider": "azure", - "mode": "audio_speech" - }, - "azure/tts-1-hd": { - "input_cost_per_character": 3e-05, - "litellm_provider": "azure", - "mode": "audio_speech" - }, - "azure/us/gpt-4.1-2025-04-14": { - "deprecation_date": "2026-11-04", - "cache_read_input_token_cost": 5.5e-07, - "input_cost_per_token": 2.2e-06, - "input_cost_per_token_batches": 1.1e-06, - "litellm_provider": "azure", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - 
"output_cost_per_token": 8.8e-06, - "output_cost_per_token_batches": 4.4e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": false - }, - "azure/us/gpt-4.1-mini-2025-04-14": { - "deprecation_date": "2026-11-04", - "cache_read_input_token_cost": 1.1e-07, - "input_cost_per_token": 4.4e-07, - "input_cost_per_token_batches": 2.2e-07, - "litellm_provider": "azure", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.76e-06, - "output_cost_per_token_batches": 8.8e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": false - }, - "azure/us/gpt-4.1-nano-2025-04-14": { - "deprecation_date": "2026-11-04", - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 1.1e-07, - "input_cost_per_token_batches": 6e-08, - "litellm_provider": "azure", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4.4e-07, - "output_cost_per_token_batches": 2.2e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - 
"/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/gpt-4o-2024-08-06": { - "deprecation_date": "2026-02-27", - "cache_read_input_token_cost": 1.375e-06, - "input_cost_per_token": 2.75e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/gpt-4o-2024-11-20": { - "deprecation_date": "2026-03-01", - "cache_creation_input_token_cost": 1.38e-06, - "input_cost_per_token": 2.75e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/gpt-4o-mini-2024-07-18": { - "cache_read_input_token_cost": 8.3e-08, - "input_cost_per_token": 1.65e-07, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 6.6e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - 
"azure/us/gpt-4o-mini-realtime-preview-2024-12-17": { - "cache_creation_input_audio_token_cost": 3.3e-07, - "cache_read_input_token_cost": 3.3e-07, - "input_cost_per_audio_token": 1.1e-05, - "input_cost_per_token": 6.6e-07, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 2.2e-05, - "output_cost_per_token": 2.64e-06, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/us/gpt-4o-realtime-preview-2024-10-01": { - "cache_creation_input_audio_token_cost": 2.2e-05, - "cache_read_input_token_cost": 2.75e-06, - "input_cost_per_audio_token": 0.00011, - "input_cost_per_token": 5.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 0.00022, - "output_cost_per_token": 2.2e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/us/gpt-4o-realtime-preview-2024-12-17": { - "cache_read_input_audio_token_cost": 2.5e-06, - "cache_read_input_token_cost": 2.75e-06, - "input_cost_per_audio_token": 4.4e-05, - "input_cost_per_token": 5.5e-06, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 2.2e-05, - "supported_modalities": [ - "text", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - 
"supports_system_messages": true, - "supports_tool_choice": true - }, - "azure/us/gpt-5-2025-08-07": { - "cache_read_input_token_cost": 1.375e-07, - "input_cost_per_token": 1.375e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/gpt-5-mini-2025-08-07": { - "cache_read_input_token_cost": 2.75e-08, - "input_cost_per_token": 2.75e-07, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2.2e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/gpt-5-nano-2025-08-07": { - "cache_read_input_token_cost": 5.5e-09, - "input_cost_per_token": 5.5e-08, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - 
"output_cost_per_token": 4.4e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/gpt-5.1": { - "cache_read_input_token_cost": 1.4e-07, - "input_cost_per_token": 1.38e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/gpt-5.1-chat": { - "cache_read_input_token_cost": 1.4e-07, - "input_cost_per_token": 1.38e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": 
true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/gpt-5.1-codex": { - "cache_read_input_token_cost": 1.4e-07, - "input_cost_per_token": 1.38e-06, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1.1e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/gpt-5.1-codex-mini": { - "cache_read_input_token_cost": 2.8e-08, - "input_cost_per_token": 2.75e-07, - "litellm_provider": "azure", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 2.2e-06, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/o1-2024-12-17": { - "cache_read_input_token_cost": 8.25e-06, - "input_cost_per_token": 1.65e-05, - "litellm_provider": "azure", - "max_input_tokens": 200000, - 
"max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 6.6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/us/o1-mini-2024-09-12": { - "cache_read_input_token_cost": 6.05e-07, - "input_cost_per_token": 1.21e-06, - "input_cost_per_token_batches": 6.05e-07, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 4.84e-06, - "output_cost_per_token_batches": 2.42e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_vision": false - }, - "azure/us/o1-preview-2024-09-12": { - "cache_read_input_token_cost": 8.25e-06, - "input_cost_per_token": 1.65e-05, - "litellm_provider": "azure", - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6.6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_vision": false - }, - "azure/us/o3-2025-04-16": { - "deprecation_date": "2026-04-16", - "cache_read_input_token_cost": 5.5e-07, - "input_cost_per_token": 2.2e-06, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 8.8e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, 
- "azure/us/o3-mini-2025-01-31": { - "cache_read_input_token_cost": 6.05e-07, - "input_cost_per_token": 1.21e-06, - "input_cost_per_token_batches": 6.05e-07, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.84e-06, - "output_cost_per_token_batches": 2.42e-06, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "azure/us/o4-mini-2025-04-16": { - "cache_read_input_token_cost": 3.1e-07, - "input_cost_per_token": 1.21e-06, - "litellm_provider": "azure", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.84e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure/whisper-1": { - "input_cost_per_second": 0.0001, - "litellm_provider": "azure", - "mode": "audio_transcription", - "output_cost_per_second": 0.0001 - }, - "azure_ai/Cohere-embed-v3-english": { - "input_cost_per_token": 1e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 512, - "max_tokens": 512, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1024, - "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/cohere.cohere-embed-v3-english-offer?tab=PlansAndPrice", - "supports_embedding_image_input": true - }, - "azure_ai/Cohere-embed-v3-multilingual": { - "input_cost_per_token": 1e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 512, - "max_tokens": 512, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1024, - "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/cohere.cohere-embed-v3-english-offer?tab=PlansAndPrice", - 
"supports_embedding_image_input": true - }, - "azure_ai/FLUX-1.1-pro": { - "litellm_provider": "azure_ai", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "source": "https://techcommunity.microsoft.com/blog/azure-ai-foundry-blog/black-forest-labs-flux-1-kontext-pro-and-flux1-1-pro-now-available-in-azure-ai-f/4434659", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "azure_ai/FLUX.1-Kontext-pro": { - "litellm_provider": "azure_ai", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "source": "https://azuremarketplace.microsoft.com/pt-br/marketplace/apps/cohere.cohere-embed-4-offer?tab=PlansAndPrice", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "azure_ai/Llama-3.2-11B-Vision-Instruct": { - "input_cost_per_token": 3.7e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 3.7e-07, - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-11b-vision-instruct-offer?tab=Overview", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure_ai/Llama-3.2-90B-Vision-Instruct": { - "input_cost_per_token": 2.04e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 2.04e-06, - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-90b-vision-instruct-offer?tab=Overview", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure_ai/Llama-3.3-70B-Instruct": { - "input_cost_per_token": 7.1e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 7.1e-07, - "source": 
"https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.llama-3-3-70b-instruct-offer?tab=Overview", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure_ai/Llama-4-Maverick-17B-128E-Instruct-FP8": { - "input_cost_per_token": 1.41e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 1000000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 3.5e-07, - "source": "https://azure.microsoft.com/en-us/blog/introducing-the-llama-4-herd-in-azure-ai-foundry-and-azure-databricks/", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure_ai/Llama-4-Scout-17B-16E-Instruct": { - "input_cost_per_token": 2e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 10000000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 7.8e-07, - "source": "https://azure.microsoft.com/en-us/blog/introducing-the-llama-4-herd-in-azure-ai-foundry-and-azure-databricks/", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure_ai/Meta-Llama-3-70B-Instruct": { - "input_cost_per_token": 1.1e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 8192, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 3.7e-07, - "supports_tool_choice": true - }, - "azure_ai/Meta-Llama-3.1-405B-Instruct": { - "input_cost_per_token": 5.33e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 1.6e-05, - "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-405b-instruct-offer?tab=PlansAndPrice", - "supports_tool_choice": true - }, - "azure_ai/Meta-Llama-3.1-70B-Instruct": { - "input_cost_per_token": 2.68e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 
128000, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 3.54e-06, - "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-70b-instruct-offer?tab=PlansAndPrice", - "supports_tool_choice": true - }, - "azure_ai/Meta-Llama-3.1-8B-Instruct": { - "input_cost_per_token": 3e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 6.1e-07, - "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-8b-instruct-offer?tab=PlansAndPrice", - "supports_tool_choice": true - }, - "azure_ai/Phi-3-medium-128k-instruct": { - "input_cost_per_token": 1.7e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6.8e-07, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", - "supports_tool_choice": true, - "supports_vision": false - }, - "azure_ai/Phi-3-medium-4k-instruct": { - "input_cost_per_token": 1.7e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6.8e-07, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", - "supports_tool_choice": true, - "supports_vision": false - }, - "azure_ai/Phi-3-mini-128k-instruct": { - "input_cost_per_token": 1.3e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 5.2e-07, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", - "supports_tool_choice": true, - "supports_vision": false - }, - "azure_ai/Phi-3-mini-4k-instruct": { - "input_cost_per_token": 1.3e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 4096, - 
"max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 5.2e-07, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", - "supports_tool_choice": true, - "supports_vision": false - }, - "azure_ai/Phi-3-small-128k-instruct": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-07, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", - "supports_tool_choice": true, - "supports_vision": false - }, - "azure_ai/Phi-3-small-8k-instruct": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-07, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", - "supports_tool_choice": true, - "supports_vision": false - }, - "azure_ai/Phi-3.5-MoE-instruct": { - "input_cost_per_token": 1.6e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6.4e-07, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", - "supports_tool_choice": true, - "supports_vision": false - }, - "azure_ai/Phi-3.5-mini-instruct": { - "input_cost_per_token": 1.3e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 5.2e-07, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", - "supports_tool_choice": true, - "supports_vision": false - }, - "azure_ai/Phi-3.5-vision-instruct": { - "input_cost_per_token": 1.3e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 5.2e-07, - 
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", - "supports_tool_choice": true, - "supports_vision": true - }, - "azure_ai/Phi-4": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 5e-07, - "source": "https://techcommunity.microsoft.com/blog/machinelearningblog/affordable-innovation-unveiling-the-pricing-of-phi-3-slms-on-models-as-a-service/4156495", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "azure_ai/Phi-4-mini-instruct": { - "input_cost_per_token": 7.5e-08, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-07, - "source": "https://techcommunity.microsoft.com/blog/Azure-AI-Services-blog/announcing-new-phi-pricing-empowering-your-business-with-small-language-models/4395112", - "supports_function_calling": true - }, - "azure_ai/Phi-4-multimodal-instruct": { - "input_cost_per_audio_token": 4e-06, - "input_cost_per_token": 8e-08, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3.2e-07, - "source": "https://techcommunity.microsoft.com/blog/Azure-AI-Services-blog/announcing-new-phi-pricing-empowering-your-business-with-small-language-models/4395112", - "supports_audio_input": true, - "supports_function_calling": true, - "supports_vision": true - }, - "azure_ai/Phi-4-mini-reasoning": { - "input_cost_per_token": 8e-08, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3.2e-07, - "source": "https://azure.microsoft.com/en-us/pricing/details/ai-foundry-models/microsoft/", - "supports_function_calling": true - }, - 
"azure_ai/Phi-4-reasoning": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 5e-07, - "source": "https://azure.microsoft.com/en-us/pricing/details/ai-foundry-models/microsoft/", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_reasoning": true - }, - "azure_ai/mistral-document-ai-2505": { - "litellm_provider": "azure_ai", - "ocr_cost_per_page": 3e-3, - "mode": "ocr", - "supported_endpoints": [ - "/v1/ocr" - ], - "source": "https://devblogs.microsoft.com/foundry/whats-new-in-azure-ai-foundry-august-2025/#mistral-document-ai-(ocr)-%E2%80%94-serverless-in-foundry" - }, - "azure_ai/doc-intelligence/prebuilt-read": { - "litellm_provider": "azure_ai", - "ocr_cost_per_page": 1.5e-3, - "mode": "ocr", - "supported_endpoints": [ - "/v1/ocr" - ], - "source": "https://azure.microsoft.com/en-us/pricing/details/ai-document-intelligence/" - }, - "azure_ai/doc-intelligence/prebuilt-layout": { - "litellm_provider": "azure_ai", - "ocr_cost_per_page": 1e-2, - "mode": "ocr", - "supported_endpoints": [ - "/v1/ocr" - ], - "source": "https://azure.microsoft.com/en-us/pricing/details/ai-document-intelligence/" - }, - "azure_ai/doc-intelligence/prebuilt-document": { - "litellm_provider": "azure_ai", - "ocr_cost_per_page": 1e-2, - "mode": "ocr", - "supported_endpoints": [ - "/v1/ocr" - ], - "source": "https://azure.microsoft.com/en-us/pricing/details/ai-document-intelligence/" - }, - "azure_ai/MAI-DS-R1": { - "input_cost_per_token": 1.35e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 5.4e-06, - "source": "https://azure.microsoft.com/en-us/pricing/details/ai-foundry-models/microsoft/", - "supports_reasoning": true, - "supports_tool_choice": true - }, - "azure_ai/cohere-rerank-v3-english": { - 
"input_cost_per_query": 0.002, - "input_cost_per_token": 0.0, - "litellm_provider": "azure_ai", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "max_tokens": 4096, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "azure_ai/cohere-rerank-v3-multilingual": { - "input_cost_per_query": 0.002, - "input_cost_per_token": 0.0, - "litellm_provider": "azure_ai", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "max_tokens": 4096, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "azure_ai/cohere-rerank-v3.5": { - "input_cost_per_query": 0.002, - "input_cost_per_token": 0.0, - "litellm_provider": "azure_ai", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "max_tokens": 4096, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "azure_ai/deepseek-r1": { - "input_cost_per_token": 1.35e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 5.4e-06, - "source": "https://techcommunity.microsoft.com/blog/machinelearningblog/deepseek-r1-improved-performance-higher-limits-and-transparent-pricing/4386367", - "supports_reasoning": true, - "supports_tool_choice": true - }, - "azure_ai/deepseek-v3": { - "input_cost_per_token": 1.14e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4.56e-06, - "source": "https://techcommunity.microsoft.com/blog/machinelearningblog/announcing-deepseek-v3-on-azure-ai-foundry-and-github/4390438", - "supports_tool_choice": true - }, - "azure_ai/deepseek-v3-0324": { - "input_cost_per_token": 1.14e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4.56e-06, - "source": 
"https://techcommunity.microsoft.com/blog/machinelearningblog/announcing-deepseek-v3-on-azure-ai-foundry-and-github/4390438", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure_ai/embed-v-4-0": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_tokens": 128000, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 3072, - "source": "https://azuremarketplace.microsoft.com/pt-br/marketplace/apps/cohere.cohere-embed-4-offer?tab=PlansAndPrice", - "supported_endpoints": [ - "/v1/embeddings" - ], - "supported_modalities": [ - "text", - "image" - ], - "supports_embedding_image_input": true - }, - "azure_ai/global/grok-3": { - "input_cost_per_token": 3e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "source": "https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/", - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "azure_ai/global/grok-3-mini": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.27e-06, - "source": "https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "azure_ai/grok-3": { - "input_cost_per_token": 3.3e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.65e-05, - "source": 
"https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/", - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "azure_ai/grok-3-mini": { - "input_cost_per_token": 2.75e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.38e-06, - "source": "https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "azure_ai/grok-4": { - "input_cost_per_token": 5.5e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.75e-05, - "source": "https://azure.microsoft.com/en-us/blog/grok-4-is-now-available-in-azure-ai-foundry-unlock-frontier-intelligence-and-business-ready-capabilities/", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "azure_ai/grok-4-fast-non-reasoning": { - "input_cost_per_token": 0.43e-06, - "output_cost_per_token": 1.73e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "azure_ai/grok-4-fast-reasoning": { - "input_cost_per_token": 0.43e-06, - "output_cost_per_token": 1.73e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "source": 
"https://techcommunity.microsoft.com/blog/azure-ai-foundry-blog/announcing-the-grok-4-fast-models-from-xai-now-available-in-azure-ai-foundry/4456701", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "azure_ai/grok-code-fast-1": { - "input_cost_per_token": 3.5e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.75e-05, - "source": "https://azure.microsoft.com/en-us/blog/grok-4-is-now-available-in-azure-ai-foundry-unlock-frontier-intelligence-and-business-ready-capabilities/", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "azure_ai/jais-30b-chat": { - "input_cost_per_token": 0.0032, - "litellm_provider": "azure_ai", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.00971, - "source": "https://azure.microsoft.com/en-us/products/ai-services/ai-foundry/models/jais-30b-chat" - }, - "azure_ai/jamba-instruct": { - "input_cost_per_token": 5e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 70000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7e-07, - "supports_tool_choice": true - }, - "azure_ai/ministral-3b": { - "input_cost_per_token": 4e-08, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 4e-08, - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.ministral-3b-2410-offer?tab=Overview", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure_ai/mistral-large": { - "input_cost_per_token": 4e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 32000, - 
"max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure_ai/mistral-large-2407": { - "input_cost_per_token": 2e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-06, - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure_ai/mistral-large-latest": { - "input_cost_per_token": 2e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-06, - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure_ai/mistral-large-3": { - "input_cost_per_token": 5e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 256000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://azure.microsoft.com/en-us/blog/introducing-mistral-large-3-in-microsoft-foundry-open-capable-and-ready-for-production-workloads/", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "azure_ai/mistral-medium-2505": { - "input_cost_per_token": 4e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 131072, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure_ai/mistral-nemo": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "azure_ai", - "max_input_tokens": 
131072, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-nemo-12b-2407?tab=PlansAndPrice", - "supports_function_calling": true - }, - "azure_ai/mistral-small": { - "input_cost_per_token": 1e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 3e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "azure_ai/mistral-small-2503": { - "input_cost_per_token": 1e-06, - "litellm_provider": "azure_ai", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3e-06, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "babbage-002": { - "input_cost_per_token": 4e-07, - "litellm_provider": "text-completion-openai", - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "max_tokens": 16384, - "mode": "completion", - "output_cost_per_token": 4e-07 - }, - "bedrock/*/1-month-commitment/cohere.command-light-text-v14": { - "input_cost_per_second": 0.001902, - "litellm_provider": "bedrock", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_second": 0.001902, - "supports_tool_choice": true - }, - "bedrock/*/1-month-commitment/cohere.command-text-v14": { - "input_cost_per_second": 0.011, - "litellm_provider": "bedrock", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_second": 0.011, - "supports_tool_choice": true - }, - "bedrock/*/6-month-commitment/cohere.command-light-text-v14": { - "input_cost_per_second": 0.0011416, - "litellm_provider": "bedrock", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - 
"output_cost_per_second": 0.0011416, - "supports_tool_choice": true - }, - "bedrock/*/6-month-commitment/cohere.command-text-v14": { - "input_cost_per_second": 0.0066027, - "litellm_provider": "bedrock", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_second": 0.0066027, - "supports_tool_choice": true - }, - "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-instant-v1": { - "input_cost_per_second": 0.01475, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.01475, - "supports_tool_choice": true - }, - "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v1": { - "input_cost_per_second": 0.0455, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.0455 - }, - "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2:1": { - "input_cost_per_second": 0.0455, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.0455, - "supports_tool_choice": true - }, - "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-instant-v1": { - "input_cost_per_second": 0.008194, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.008194, - "supports_tool_choice": true - }, - "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v1": { - "input_cost_per_second": 0.02527, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.02527 - }, - "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2:1": { - "input_cost_per_second": 0.02527, - 
"litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.02527, - "supports_tool_choice": true - }, - "bedrock/ap-northeast-1/anthropic.claude-instant-v1": { - "input_cost_per_token": 2.23e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 7.55e-06, - "supports_tool_choice": true - }, - "bedrock/ap-northeast-1/anthropic.claude-v1": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_tool_choice": true - }, - "bedrock/ap-northeast-1/anthropic.claude-v2:1": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_tool_choice": true - }, - "bedrock/ap-south-1/meta.llama3-70b-instruct-v1:0": { - "input_cost_per_token": 3.18e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4.2e-06 - }, - "bedrock/ap-south-1/meta.llama3-8b-instruct-v1:0": { - "input_cost_per_token": 3.6e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 7.2e-07 - }, - "bedrock/ca-central-1/meta.llama3-70b-instruct-v1:0": { - "input_cost_per_token": 3.05e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4.03e-06 - }, - "bedrock/ca-central-1/meta.llama3-8b-instruct-v1:0": { - "input_cost_per_token": 3.5e-07, - "litellm_provider": "bedrock", - 
"max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6.9e-07 - }, - "bedrock/eu-central-1/1-month-commitment/anthropic.claude-instant-v1": { - "input_cost_per_second": 0.01635, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.01635, - "supports_tool_choice": true - }, - "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v1": { - "input_cost_per_second": 0.0415, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.0415 - }, - "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2:1": { - "input_cost_per_second": 0.0415, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.0415, - "supports_tool_choice": true - }, - "bedrock/eu-central-1/6-month-commitment/anthropic.claude-instant-v1": { - "input_cost_per_second": 0.009083, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.009083, - "supports_tool_choice": true - }, - "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v1": { - "input_cost_per_second": 0.02305, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.02305 - }, - "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2:1": { - "input_cost_per_second": 0.02305, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.02305, - "supports_tool_choice": true - }, - "bedrock/eu-central-1/anthropic.claude-instant-v1": { - 
"input_cost_per_token": 2.48e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 8.38e-06, - "supports_tool_choice": true - }, - "bedrock/eu-central-1/anthropic.claude-v1": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05 - }, - "bedrock/eu-central-1/anthropic.claude-v2:1": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_tool_choice": true - }, - "bedrock/eu-west-1/meta.llama3-70b-instruct-v1:0": { - "input_cost_per_token": 2.86e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 3.78e-06 - }, - "bedrock/eu-west-1/meta.llama3-8b-instruct-v1:0": { - "input_cost_per_token": 3.2e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6.5e-07 - }, - "bedrock/eu-west-2/meta.llama3-70b-instruct-v1:0": { - "input_cost_per_token": 3.45e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4.55e-06 - }, - "bedrock/eu-west-2/meta.llama3-8b-instruct-v1:0": { - "input_cost_per_token": 3.9e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 7.8e-07 - }, - "bedrock/eu-west-3/mistral.mistral-7b-instruct-v0:2": { - "input_cost_per_token": 2e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 
8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.6e-07, - "supports_tool_choice": true - }, - "bedrock/eu-west-3/mistral.mistral-large-2402-v1:0": { - "input_cost_per_token": 1.04e-05, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 3.12e-05, - "supports_function_calling": true - }, - "bedrock/eu-west-3/mistral.mixtral-8x7b-instruct-v0:1": { - "input_cost_per_token": 5.9e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 9.1e-07, - "supports_tool_choice": true - }, - "bedrock/invoke/anthropic.claude-3-5-sonnet-20240620-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "metadata": { - "notes": "Anthropic via Invoke route does not currently support pdf input." 
- }, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "bedrock/sa-east-1/meta.llama3-70b-instruct-v1:0": { - "input_cost_per_token": 4.45e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 5.88e-06 - }, - "bedrock/sa-east-1/meta.llama3-8b-instruct-v1:0": { - "input_cost_per_token": 5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.01e-06 - }, - "bedrock/us-east-1/1-month-commitment/anthropic.claude-instant-v1": { - "input_cost_per_second": 0.011, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.011, - "supports_tool_choice": true - }, - "bedrock/us-east-1/1-month-commitment/anthropic.claude-v1": { - "input_cost_per_second": 0.0175, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.0175 - }, - "bedrock/us-east-1/1-month-commitment/anthropic.claude-v2:1": { - "input_cost_per_second": 0.0175, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.0175, - "supports_tool_choice": true - }, - "bedrock/us-east-1/6-month-commitment/anthropic.claude-instant-v1": { - "input_cost_per_second": 0.00611, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.00611, - "supports_tool_choice": true - }, - "bedrock/us-east-1/6-month-commitment/anthropic.claude-v1": { - 
"input_cost_per_second": 0.00972, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.00972 - }, - "bedrock/us-east-1/6-month-commitment/anthropic.claude-v2:1": { - "input_cost_per_second": 0.00972, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.00972, - "supports_tool_choice": true - }, - "bedrock/us-east-1/anthropic.claude-instant-v1": { - "input_cost_per_token": 8e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-06, - "supports_tool_choice": true - }, - "bedrock/us-east-1/anthropic.claude-v1": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_tool_choice": true - }, - "bedrock/us-east-1/anthropic.claude-v2:1": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_tool_choice": true - }, - "bedrock/us-east-1/meta.llama3-70b-instruct-v1:0": { - "input_cost_per_token": 2.65e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 3.5e-06 - }, - "bedrock/us-east-1/meta.llama3-8b-instruct-v1:0": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-07 - }, - "bedrock/us-east-1/mistral.mistral-7b-instruct-v0:2": { - "input_cost_per_token": 1.5e-07, - 
"litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_tool_choice": true - }, - "bedrock/us-east-1/mistral.mistral-large-2402-v1:0": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_function_calling": true - }, - "bedrock/us-east-1/mistral.mixtral-8x7b-instruct-v0:1": { - "input_cost_per_token": 4.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 7e-07, - "supports_tool_choice": true - }, - "bedrock/us-gov-east-1/amazon.nova-pro-v1:0": { - "input_cost_per_token": 9.6e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 3.84e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "bedrock/us-gov-east-1/amazon.titan-embed-text-v1": { - "input_cost_per_token": 1e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1536 - }, - "bedrock/us-gov-east-1/amazon.titan-embed-text-v2:0": { - "input_cost_per_token": 2e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1024 - }, - "bedrock/us-gov-east-1/amazon.titan-text-express-v1": { - "input_cost_per_token": 1.3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 42000, - "max_output_tokens": 8000, - "max_tokens": 8000, - "mode": "chat", - "output_cost_per_token": 
1.7e-06 - }, - "bedrock/us-gov-east-1/amazon.titan-text-lite-v1": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 42000, - "max_output_tokens": 4000, - "max_tokens": 4000, - "mode": "chat", - "output_cost_per_token": 4e-07 - }, - "bedrock/us-gov-east-1/amazon.titan-text-premier-v1:0": { - "input_cost_per_token": 5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 42000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 1.5e-06 - }, - "bedrock/us-gov-east-1/anthropic.claude-3-5-sonnet-20240620-v1:0": { - "input_cost_per_token": 3.6e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.8e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "bedrock/us-gov-east-1/anthropic.claude-3-haiku-20240307-v1:0": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "bedrock/us-gov-east-1/claude-sonnet-4-5-20250929-v1:0": { - "input_cost_per_token": 3.3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.65e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - 
"bedrock/us-gov-east-1/meta.llama3-70b-instruct-v1:0": { - "input_cost_per_token": 2.65e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 8000, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 3.5e-06, - "supports_pdf_input": true - }, - "bedrock/us-gov-east-1/meta.llama3-8b-instruct-v1:0": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8000, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 2.65e-06, - "supports_pdf_input": true - }, - "bedrock/us-gov-west-1/amazon.nova-pro-v1:0": { - "input_cost_per_token": 9.6e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 3.84e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "bedrock/us-gov-west-1/amazon.titan-embed-text-v1": { - "input_cost_per_token": 1e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1536 - }, - "bedrock/us-gov-west-1/amazon.titan-embed-text-v2:0": { - "input_cost_per_token": 2e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1024 - }, - "bedrock/us-gov-west-1/amazon.titan-text-express-v1": { - "input_cost_per_token": 1.3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 42000, - "max_output_tokens": 8000, - "max_tokens": 8000, - "mode": "chat", - "output_cost_per_token": 1.7e-06 - }, - "bedrock/us-gov-west-1/amazon.titan-text-lite-v1": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 42000, - "max_output_tokens": 4000, - "max_tokens": 4000, 
- "mode": "chat", - "output_cost_per_token": 4e-07 - }, - "bedrock/us-gov-west-1/amazon.titan-text-premier-v1:0": { - "input_cost_per_token": 5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 42000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 1.5e-06 - }, - "bedrock/us-gov-west-1/anthropic.claude-3-7-sonnet-20250219-v1:0": { - "cache_creation_input_token_cost": 4.5e-06, - "cache_read_input_token_cost": 3.6e-07, - "input_cost_per_token": 3.6e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.8e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "bedrock/us-gov-west-1/anthropic.claude-3-5-sonnet-20240620-v1:0": { - "input_cost_per_token": 3.6e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.8e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "bedrock/us-gov-west-1/anthropic.claude-3-haiku-20240307-v1:0": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "bedrock/us-gov-west-1/claude-sonnet-4-5-20250929-v1:0": { - "input_cost_per_token": 3.3e-06, - "litellm_provider": "bedrock", - 
"max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.65e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "bedrock/us-gov-west-1/meta.llama3-70b-instruct-v1:0": { - "input_cost_per_token": 2.65e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 8000, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 3.5e-06, - "supports_pdf_input": true - }, - "bedrock/us-gov-west-1/meta.llama3-8b-instruct-v1:0": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8000, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 2.65e-06, - "supports_pdf_input": true - }, - "bedrock/us-west-1/meta.llama3-70b-instruct-v1:0": { - "input_cost_per_token": 2.65e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 3.5e-06 - }, - "bedrock/us-west-1/meta.llama3-8b-instruct-v1:0": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-07 - }, - "bedrock/us-west-2/1-month-commitment/anthropic.claude-instant-v1": { - "input_cost_per_second": 0.011, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.011, - "supports_tool_choice": true - }, - "bedrock/us-west-2/1-month-commitment/anthropic.claude-v1": { - "input_cost_per_second": 0.0175, - "litellm_provider": "bedrock", - 
"max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.0175 - }, - "bedrock/us-west-2/1-month-commitment/anthropic.claude-v2:1": { - "input_cost_per_second": 0.0175, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.0175, - "supports_tool_choice": true - }, - "bedrock/us-west-2/6-month-commitment/anthropic.claude-instant-v1": { - "input_cost_per_second": 0.00611, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.00611, - "supports_tool_choice": true - }, - "bedrock/us-west-2/6-month-commitment/anthropic.claude-v1": { - "input_cost_per_second": 0.00972, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.00972 - }, - "bedrock/us-west-2/6-month-commitment/anthropic.claude-v2:1": { - "input_cost_per_second": 0.00972, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_second": 0.00972, - "supports_tool_choice": true - }, - "bedrock/us-west-2/anthropic.claude-instant-v1": { - "input_cost_per_token": 8e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-06, - "supports_tool_choice": true - }, - "bedrock/us-west-2/anthropic.claude-v1": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_tool_choice": true - }, - "bedrock/us-west-2/anthropic.claude-v2:1": { - "input_cost_per_token": 8e-06, - 
"litellm_provider": "bedrock", - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_tool_choice": true - }, - "bedrock/us-west-2/mistral.mistral-7b-instruct-v0:2": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_tool_choice": true - }, - "bedrock/us-west-2/mistral.mistral-large-2402-v1:0": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_function_calling": true - }, - "bedrock/us-west-2/mistral.mixtral-8x7b-instruct-v0:1": { - "input_cost_per_token": 4.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 7e-07, - "supports_tool_choice": true - }, - "bedrock/us.anthropic.claude-3-5-haiku-20241022-v1:0": { - "cache_creation_input_token_cost": 1e-06, - "cache_read_input_token_cost": 8e-08, - "input_cost_per_token": 8e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "cerebras/llama-3.3-70b": { - "input_cost_per_token": 8.5e-07, - "litellm_provider": "cerebras", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "cerebras/llama3.1-70b": { - 
"input_cost_per_token": 6e-07, - "litellm_provider": "cerebras", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "cerebras/llama3.1-8b": { - "input_cost_per_token": 1e-07, - "litellm_provider": "cerebras", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "cerebras/gpt-oss-120b": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "cerebras", - "max_input_tokens": 131072, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6.9e-07, - "source": "https://www.cerebras.ai/blog/openai-gpt-oss-120b-runs-fastest-on-cerebras", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "cerebras/qwen-3-32b": { - "input_cost_per_token": 4e-07, - "litellm_provider": "cerebras", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 8e-07, - "source": "https://inference-docs.cerebras.ai/support/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "cerebras/zai-glm-4.6": { - "input_cost_per_token": 2.25e-06, - "litellm_provider": "cerebras", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2.75e-06, - "source": "https://www.cerebras.ai/pricing", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "chat-bison": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-chat-models", - 
"max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "chat-bison-32k": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-chat-models", - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "chat-bison-32k@002": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-chat-models", - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "chat-bison@001": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-chat-models", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "chat-bison@002": { - "deprecation_date": "2025-04-09", - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-chat-models", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_character": 5e-07, - 
"output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "chatdolphin": { - "input_cost_per_token": 5e-07, - "litellm_provider": "nlp_cloud", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 5e-07 - }, - "chatgpt-4o-latest": { - "input_cost_per_token": 5e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "claude-3-5-haiku-20241022": { - "cache_creation_input_token_cost": 1e-06, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 8e-08, - "deprecation_date": "2025-10-01", - "input_cost_per_token": 8e-07, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4e-06, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tool_use_system_prompt_tokens": 264 - }, - "claude-3-5-haiku-latest": { - "cache_creation_input_token_cost": 1.25e-06, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 1e-07, - "deprecation_date": "2025-10-01", - "input_cost_per_token": 1e-06, - "litellm_provider": 
"anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 5e-06, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tool_use_system_prompt_tokens": 264 - }, - "claude-haiku-4-5-20251001": { - "cache_creation_input_token_cost": 1.25e-06, - "cache_creation_input_token_cost_above_1hr": 2e-06, - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 1e-06, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_computer_use": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "claude-haiku-4-5": { - "cache_creation_input_token_cost": 1.25e-06, - "cache_creation_input_token_cost_above_1hr": 2e-06, - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 1e-06, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_computer_use": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "claude-3-5-sonnet-20240620": { - 
"cache_creation_input_token_cost": 3.75e-06, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 3e-07, - "deprecation_date": "2025-06-01", - "input_cost_per_token": 3e-06, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-3-5-sonnet-20241022": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 3e-07, - "deprecation_date": "2025-10-01", - "input_cost_per_token": 3e-06, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-3-5-sonnet-latest": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 3e-07, - "deprecation_date": "2025-06-01", - "input_cost_per_token": 3e-06, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - 
"search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-3-7-sonnet-20250219": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 3e-07, - "deprecation_date": "2026-02-19", - "input_cost_per_token": 3e-06, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-3-7-sonnet-latest": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 3e-07, - "deprecation_date": "2025-06-01", - "input_cost_per_token": 3e-06, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - 
"supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-3-haiku-20240307": { - "cache_creation_input_token_cost": 3e-07, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 3e-08, - "input_cost_per_token": 2.5e-07, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 264 - }, - "claude-3-opus-20240229": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 1.5e-06, - "deprecation_date": "2026-05-01", - "input_cost_per_token": 1.5e-05, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 395 - }, - "claude-3-opus-latest": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 1.5e-06, - "deprecation_date": "2025-03-01", - "input_cost_per_token": 1.5e-05, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": 
"chat", - "output_cost_per_token": 7.5e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 395 - }, - "claude-4-opus-20250514": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-4-sonnet-20250514": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost": 3e-07, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "litellm_provider": "anthropic", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 1000000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - 
"supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-sonnet-4-5": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "claude-sonnet-4-5-20250929": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - 
"supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tool_use_system_prompt_tokens": 346 - }, - "claude-sonnet-4-5-20250929-v1:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-opus-4-1": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_creation_input_token_cost_above_1hr": 3e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - 
"supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-opus-4-1-20250805": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_creation_input_token_cost_above_1hr": 3e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "deprecation_date": "2026-08-05", - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-opus-4-20250514": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_creation_input_token_cost_above_1hr": 3e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "deprecation_date": "2026-05-14", - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-opus-4-5-20251101": { - "cache_creation_input_token_cost": 6.25e-06, - 
"cache_creation_input_token_cost_above_1hr": 1e-05, - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 5e-06, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 2.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-opus-4-5": { - "cache_creation_input_token_cost": 6.25e-06, - "cache_creation_input_token_cost_above_1hr": 1e-05, - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 5e-06, - "litellm_provider": "anthropic", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 2.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "claude-sonnet-4-20250514": { - "deprecation_date": "2026-05-14", - "cache_creation_input_token_cost": 3.75e-06, - "cache_creation_input_token_cost_above_1hr": 6e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - 
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "anthropic", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "cloudflare/@cf/meta/llama-2-7b-chat-fp16": { - "input_cost_per_token": 1.923e-06, - "litellm_provider": "cloudflare", - "max_input_tokens": 3072, - "max_output_tokens": 3072, - "max_tokens": 3072, - "mode": "chat", - "output_cost_per_token": 1.923e-06 - }, - "cloudflare/@cf/meta/llama-2-7b-chat-int8": { - "input_cost_per_token": 1.923e-06, - "litellm_provider": "cloudflare", - "max_input_tokens": 2048, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 1.923e-06 - }, - "cloudflare/@cf/mistral/mistral-7b-instruct-v0.1": { - "input_cost_per_token": 1.923e-06, - "litellm_provider": "cloudflare", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.923e-06 - }, - "cloudflare/@hf/thebloke/codellama-7b-instruct-awq": { - "input_cost_per_token": 1.923e-06, - "litellm_provider": "cloudflare", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.923e-06 - }, - "code-bison": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - 
"max_input_tokens": 6144, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "code-bison-32k@002": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-bison32k": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-bison@001": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-bison@002": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": 
"https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-gecko": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "max_input_tokens": 2048, - "max_output_tokens": 64, - "max_tokens": 64, - "mode": "completion", - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-gecko-latest": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "max_input_tokens": 2048, - "max_output_tokens": 64, - "max_tokens": 64, - "mode": "completion", - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-gecko@001": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "max_input_tokens": 2048, - "max_output_tokens": 64, - "max_tokens": 64, - "mode": "completion", - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-gecko@002": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "max_input_tokens": 2048, - "max_output_tokens": 64, - "max_tokens": 64, - "mode": "completion", - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "codechat-bison": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "codechat-bison-32k": { - 
"input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "codechat-bison-32k@002": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "codechat-bison@001": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "codechat-bison@002": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "codechat-bison@latest": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "max_input_tokens": 
6144, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "codestral/codestral-2405": { - "input_cost_per_token": 0.0, - "litellm_provider": "codestral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://docs.mistral.ai/capabilities/code_generation/", - "supports_assistant_prefill": true, - "supports_tool_choice": true - }, - "codestral/codestral-latest": { - "input_cost_per_token": 0.0, - "litellm_provider": "codestral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://docs.mistral.ai/capabilities/code_generation/", - "supports_assistant_prefill": true, - "supports_tool_choice": true - }, - "codex-mini-latest": { - "cache_read_input_token_cost": 3.75e-07, - "input_cost_per_token": 1.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 6e-06, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "cohere.command-light-text-v14": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 
6e-07, - "supports_tool_choice": true - }, - "cohere.command-r-plus-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_tool_choice": true - }, - "cohere.command-r-v1:0": { - "input_cost_per_token": 5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_tool_choice": true - }, - "cohere.command-text-v14": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_tool_choice": true - }, - "cohere.embed-english-v3": { - "input_cost_per_token": 1e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 512, - "max_tokens": 512, - "mode": "embedding", - "output_cost_per_token": 0.0, - "supports_embedding_image_input": true - }, - "cohere.embed-multilingual-v3": { - "input_cost_per_token": 1e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 512, - "max_tokens": 512, - "mode": "embedding", - "output_cost_per_token": 0.0, - "supports_embedding_image_input": true - }, - "cohere.embed-v4:0": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_tokens": 128000, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1536, - "supports_embedding_image_input": true - }, - "cohere/embed-v4.0": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "cohere", - "max_input_tokens": 128000, - "max_tokens": 128000, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1536, - "supports_embedding_image_input": true - }, - "cohere.rerank-v3-5:0": { - "input_cost_per_query": 0.002, - "input_cost_per_token": 0.0, - 
"litellm_provider": "bedrock", - "max_document_chunks_per_query": 100, - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_query_tokens": 32000, - "max_tokens": 32000, - "max_tokens_per_document_chunk": 512, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "command": { - "input_cost_per_token": 1e-06, - "litellm_provider": "cohere", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "completion", - "output_cost_per_token": 2e-06 - }, - "command-a-03-2025": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "cohere_chat", - "max_input_tokens": 256000, - "max_output_tokens": 8000, - "max_tokens": 8000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "command-light": { - "input_cost_per_token": 3e-07, - "litellm_provider": "cohere_chat", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_tool_choice": true - }, - "command-nightly": { - "input_cost_per_token": 1e-06, - "litellm_provider": "cohere", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "completion", - "output_cost_per_token": 2e-06 - }, - "command-r": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "cohere_chat", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "command-r-08-2024": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "cohere_chat", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "command-r-plus": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "cohere_chat", - 
"max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "command-r-plus-08-2024": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "cohere_chat", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "command-r7b-12-2024": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "cohere_chat", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3.75e-08, - "source": "https://docs.cohere.com/v2/docs/command-r7b", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "computer-use-preview": { - "input_cost_per_token": 3e-06, - "litellm_provider": "azure", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": false, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "deepseek-chat": { - "cache_read_input_token_cost": 3e-08, - "input_cost_per_token": 3e-07, - "litellm_provider": "deepseek", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 5e-07, - "source": "https://api-docs.deepseek.com/quick_start/pricing", - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supports_function_calling": true, - "supports_native_streaming": true, 
- "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "deepseek-reasoner": { - "cache_read_input_token_cost": 3e-08, - "input_cost_per_token": 3e-07, - "litellm_provider": "deepseek", - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 5e-07, - "source": "https://api-docs.deepseek.com/quick_start/pricing", - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supports_function_calling": false, - "supports_native_streaming": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": false - }, - "dashscope/qwen-coder": { - "input_cost_per_token": 3e-07, - "litellm_provider": "dashscope", - "max_input_tokens": 1000000, - "max_output_tokens": 16384, - "max_tokens": 1000000, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen-flash": { - "litellm_provider": "dashscope", - "max_input_tokens": 997952, - "max_output_tokens": 32768, - "max_tokens": 1000000, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "tiered_pricing": [ - { - "input_cost_per_token": 5e-08, - "output_cost_per_token": 4e-07, - "range": [ - 0, - 256000.0 - ] - }, - { - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 2e-06, - "range": [ - 256000.0, - 1000000.0 - ] - } - ] - }, - "dashscope/qwen-flash-2025-07-28": { - "litellm_provider": "dashscope", - "max_input_tokens": 997952, - 
"max_output_tokens": 32768, - "max_tokens": 1000000, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "tiered_pricing": [ - { - "input_cost_per_token": 5e-08, - "output_cost_per_token": 4e-07, - "range": [ - 0, - 256000.0 - ] - }, - { - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 2e-06, - "range": [ - 256000.0, - 1000000.0 - ] - } - ] - }, - "dashscope/qwen-max": { - "input_cost_per_token": 1.6e-06, - "litellm_provider": "dashscope", - "max_input_tokens": 30720, - "max_output_tokens": 8192, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6.4e-06, - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen-plus": { - "input_cost_per_token": 4e-07, - "litellm_provider": "dashscope", - "max_input_tokens": 129024, - "max_output_tokens": 16384, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen-plus-2025-01-25": { - "input_cost_per_token": 4e-07, - "litellm_provider": "dashscope", - "max_input_tokens": 129024, - "max_output_tokens": 8192, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen-plus-2025-04-28": { - "input_cost_per_token": 4e-07, - "litellm_provider": "dashscope", - "max_input_tokens": 129024, - "max_output_tokens": 16384, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_reasoning_token": 4e-06, - 
"output_cost_per_token": 1.2e-06, - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen-plus-2025-07-14": { - "input_cost_per_token": 4e-07, - "litellm_provider": "dashscope", - "max_input_tokens": 129024, - "max_output_tokens": 16384, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_reasoning_token": 4e-06, - "output_cost_per_token": 1.2e-06, - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen-plus-2025-07-28": { - "litellm_provider": "dashscope", - "max_input_tokens": 997952, - "max_output_tokens": 32768, - "max_tokens": 1000000, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "tiered_pricing": [ - { - "input_cost_per_token": 4e-07, - "output_cost_per_reasoning_token": 4e-06, - "output_cost_per_token": 1.2e-06, - "range": [ - 0, - 256000.0 - ] - }, - { - "input_cost_per_token": 1.2e-06, - "output_cost_per_reasoning_token": 1.2e-05, - "output_cost_per_token": 3.6e-06, - "range": [ - 256000.0, - 1000000.0 - ] - } - ] - }, - "dashscope/qwen-plus-2025-09-11": { - "litellm_provider": "dashscope", - "max_input_tokens": 997952, - "max_output_tokens": 32768, - "max_tokens": 1000000, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "tiered_pricing": [ - { - "input_cost_per_token": 4e-07, - "output_cost_per_reasoning_token": 4e-06, - "output_cost_per_token": 1.2e-06, - "range": [ - 0, - 256000.0 - ] - }, - { - "input_cost_per_token": 1.2e-06, - "output_cost_per_reasoning_token": 1.2e-05, - 
"output_cost_per_token": 3.6e-06, - "range": [ - 256000.0, - 1000000.0 - ] - } - ] - }, - "dashscope/qwen-plus-latest": { - "litellm_provider": "dashscope", - "max_input_tokens": 997952, - "max_output_tokens": 32768, - "max_tokens": 1000000, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "tiered_pricing": [ - { - "input_cost_per_token": 4e-07, - "output_cost_per_reasoning_token": 4e-06, - "output_cost_per_token": 1.2e-06, - "range": [ - 0, - 256000.0 - ] - }, - { - "input_cost_per_token": 1.2e-06, - "output_cost_per_reasoning_token": 1.2e-05, - "output_cost_per_token": 3.6e-06, - "range": [ - 256000.0, - 1000000.0 - ] - } - ] - }, - "dashscope/qwen-turbo": { - "input_cost_per_token": 5e-08, - "litellm_provider": "dashscope", - "max_input_tokens": 129024, - "max_output_tokens": 16384, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_reasoning_token": 5e-07, - "output_cost_per_token": 2e-07, - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen-turbo-2024-11-01": { - "input_cost_per_token": 5e-08, - "litellm_provider": "dashscope", - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_tokens": 1000000, - "mode": "chat", - "output_cost_per_token": 2e-07, - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen-turbo-2025-04-28": { - "input_cost_per_token": 5e-08, - "litellm_provider": "dashscope", - "max_input_tokens": 1000000, - "max_output_tokens": 16384, - "max_tokens": 1000000, - "mode": "chat", - "output_cost_per_reasoning_token": 5e-07, - "output_cost_per_token": 2e-07, - "source": 
"https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen-turbo-latest": { - "input_cost_per_token": 5e-08, - "litellm_provider": "dashscope", - "max_input_tokens": 1000000, - "max_output_tokens": 16384, - "max_tokens": 1000000, - "mode": "chat", - "output_cost_per_reasoning_token": 5e-07, - "output_cost_per_token": 2e-07, - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen3-30b-a3b": { - "litellm_provider": "dashscope", - "max_input_tokens": 129024, - "max_output_tokens": 16384, - "max_tokens": 131072, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dashscope/qwen3-coder-flash": { - "litellm_provider": "dashscope", - "max_input_tokens": 997952, - "max_output_tokens": 65536, - "max_tokens": 1000000, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "tiered_pricing": [ - { - "cache_read_input_token_cost": 8e-08, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 1.5e-06, - "range": [ - 0, - 32000.0 - ] - }, - { - "cache_read_input_token_cost": 1.2e-07, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 2.5e-06, - "range": [ - 32000.0, - 128000.0 - ] - }, - { - "cache_read_input_token_cost": 2e-07, - "input_cost_per_token": 8e-07, - "output_cost_per_token": 4e-06, - "range": [ - 128000.0, - 256000.0 - ] - }, - { - "cache_read_input_token_cost": 4e-07, - "input_cost_per_token": 1.6e-06, - "output_cost_per_token": 9.6e-06, - "range": [ - 256000.0, - 1000000.0 - ] - } - ] - }, - 
"dashscope/qwen3-coder-flash-2025-07-28": { - "litellm_provider": "dashscope", - "max_input_tokens": 997952, - "max_output_tokens": 65536, - "max_tokens": 1000000, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "tiered_pricing": [ - { - "input_cost_per_token": 3e-07, - "output_cost_per_token": 1.5e-06, - "range": [ - 0, - 32000.0 - ] - }, - { - "input_cost_per_token": 5e-07, - "output_cost_per_token": 2.5e-06, - "range": [ - 32000.0, - 128000.0 - ] - }, - { - "input_cost_per_token": 8e-07, - "output_cost_per_token": 4e-06, - "range": [ - 128000.0, - 256000.0 - ] - }, - { - "input_cost_per_token": 1.6e-06, - "output_cost_per_token": 9.6e-06, - "range": [ - 256000.0, - 1000000.0 - ] - } - ] - }, - "dashscope/qwen3-coder-plus": { - "litellm_provider": "dashscope", - "max_input_tokens": 997952, - "max_output_tokens": 65536, - "max_tokens": 1000000, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "tiered_pricing": [ - { - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 5e-06, - "range": [ - 0, - 32000.0 - ] - }, - { - "cache_read_input_token_cost": 1.8e-07, - "input_cost_per_token": 1.8e-06, - "output_cost_per_token": 9e-06, - "range": [ - 32000.0, - 128000.0 - ] - }, - { - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "range": [ - 128000.0, - 256000.0 - ] - }, - { - "cache_read_input_token_cost": 6e-07, - "input_cost_per_token": 6e-06, - "output_cost_per_token": 6e-05, - "range": [ - 256000.0, - 1000000.0 - ] - } - ] - }, - "dashscope/qwen3-coder-plus-2025-07-22": { - "litellm_provider": "dashscope", - "max_input_tokens": 997952, - "max_output_tokens": 65536, - "max_tokens": 
1000000, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "tiered_pricing": [ - { - "input_cost_per_token": 1e-06, - "output_cost_per_token": 5e-06, - "range": [ - 0, - 32000.0 - ] - }, - { - "input_cost_per_token": 1.8e-06, - "output_cost_per_token": 9e-06, - "range": [ - 32000.0, - 128000.0 - ] - }, - { - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "range": [ - 128000.0, - 256000.0 - ] - }, - { - "input_cost_per_token": 6e-06, - "output_cost_per_token": 6e-05, - "range": [ - 256000.0, - 1000000.0 - ] - } - ] - }, - "dashscope/qwen3-max-preview": { - "litellm_provider": "dashscope", - "max_input_tokens": 258048, - "max_output_tokens": 65536, - "max_tokens": 262144, - "mode": "chat", - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "tiered_pricing": [ - { - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 6e-06, - "range": [ - 0, - 32000.0 - ] - }, - { - "input_cost_per_token": 2.4e-06, - "output_cost_per_token": 1.2e-05, - "range": [ - 32000.0, - 128000.0 - ] - }, - { - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "range": [ - 128000.0, - 252000.0 - ] - } - ] - }, - "dashscope/qwq-plus": { - "input_cost_per_token": 8e-07, - "litellm_provider": "dashscope", - "max_input_tokens": 98304, - "max_output_tokens": 8192, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.4e-06, - "source": "https://www.alibabacloud.com/help/en/model-studio/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "databricks/databricks-bge-large-en": { - "input_cost_per_token": 1.0003e-07, - "input_dbu_cost_per_token": 1.429e-06, - "litellm_provider": "databricks", - "max_input_tokens": 512, - 
"max_tokens": 512, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_dbu_cost_per_token": 0.0, - "output_vector_size": 1024, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving" - }, - "databricks/databricks-claude-3-7-sonnet": { - "input_cost_per_token": 2.9999900000000002e-06, - "input_dbu_cost_per_token": 4.2857e-05, - "litellm_provider": "databricks", - "max_input_tokens": 200000, - "max_output_tokens": 128000, - "max_tokens": 200000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 1.5000020000000002e-05, - "output_dbu_cost_per_token": 0.000214286, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "databricks/databricks-claude-haiku-4-5": { - "input_cost_per_token": 1.00002e-06, - "input_dbu_cost_per_token": 1.4286e-05, - "litellm_provider": "databricks", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 200000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- }, - "mode": "chat", - "output_cost_per_token": 5.00003e-06, - "output_dbu_cost_per_token": 7.1429e-05, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "databricks/databricks-claude-opus-4": { - "input_cost_per_token": 1.5000020000000002e-05, - "input_dbu_cost_per_token": 0.000214286, - "litellm_provider": "databricks", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 200000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 7.500003000000001e-05, - "output_dbu_cost_per_token": 0.001071429, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "databricks/databricks-claude-opus-4-1": { - "input_cost_per_token": 1.5000020000000002e-05, - "input_dbu_cost_per_token": 0.000214286, - "litellm_provider": "databricks", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 200000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- }, - "mode": "chat", - "output_cost_per_token": 7.500003000000001e-05, - "output_dbu_cost_per_token": 0.001071429, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "databricks/databricks-claude-opus-4-5": { - "input_cost_per_token": 5.00003e-06, - "input_dbu_cost_per_token": 7.1429e-05, - "litellm_provider": "databricks", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 200000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 2.5000010000000002e-05, - "output_dbu_cost_per_token": 0.000357143, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "databricks/databricks-claude-sonnet-4": { - "input_cost_per_token": 2.9999900000000002e-06, - "input_dbu_cost_per_token": 4.2857e-05, - "litellm_provider": "databricks", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 200000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- }, - "mode": "chat", - "output_cost_per_token": 1.5000020000000002e-05, - "output_dbu_cost_per_token": 0.000214286, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "databricks/databricks-claude-sonnet-4-1": { - "input_cost_per_token": 2.9999900000000002e-06, - "input_dbu_cost_per_token": 4.2857e-05, - "litellm_provider": "databricks", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 200000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 1.5000020000000002e-05, - "output_dbu_cost_per_token": 0.000214286, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "databricks/databricks-claude-sonnet-4-5": { - "input_cost_per_token": 2.9999900000000002e-06, - "input_dbu_cost_per_token": 4.2857e-05, - "litellm_provider": "databricks", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 200000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- }, - "mode": "chat", - "output_cost_per_token": 1.5000020000000002e-05, - "output_dbu_cost_per_token": 0.000214286, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "databricks/databricks-gemini-2-5-flash": { - "input_cost_per_token": 3.0001999999999996e-07, - "input_dbu_cost_per_token": 4.285999999999999e-06, - "litellm_provider": "databricks", - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_tokens": 1048576, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 2.49998e-06, - "output_dbu_cost_per_token": 3.5714e-05, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "databricks/databricks-gemini-2-5-pro": { - "input_cost_per_token": 1.24999e-06, - "input_dbu_cost_per_token": 1.7857e-05, - "litellm_provider": "databricks", - "max_input_tokens": 1048576, - "max_output_tokens": 65536, - "max_tokens": 1048576, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- }, - "mode": "chat", - "output_cost_per_token": 9.999990000000002e-06, - "output_dbu_cost_per_token": 0.000142857, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "databricks/databricks-gemma-3-12b": { - "input_cost_per_token": 1.5000999999999998e-07, - "input_dbu_cost_per_token": 2.1429999999999996e-06, - "litellm_provider": "databricks", - "max_input_tokens": 128000, - "max_output_tokens": 32000, - "max_tokens": 128000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 5.0001e-07, - "output_dbu_cost_per_token": 7.143e-06, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving" - }, - "databricks/databricks-gpt-5": { - "input_cost_per_token": 1.24999e-06, - "input_dbu_cost_per_token": 1.7857e-05, - "litellm_provider": "databricks", - "max_input_tokens": 400000, - "max_output_tokens": 128000, - "max_tokens": 400000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 9.999990000000002e-06, - "output_dbu_cost_per_token": 0.000142857, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving" - }, - "databricks/databricks-gpt-5-1": { - "input_cost_per_token": 1.24999e-06, - "input_dbu_cost_per_token": 1.7857e-05, - "litellm_provider": "databricks", - "max_input_tokens": 400000, - "max_output_tokens": 128000, - "max_tokens": 400000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- }, - "mode": "chat", - "output_cost_per_token": 9.999990000000002e-06, - "output_dbu_cost_per_token": 0.000142857, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving" - }, - "databricks/databricks-gpt-5-mini": { - "input_cost_per_token": 2.4997000000000006e-07, - "input_dbu_cost_per_token": 3.571e-06, - "litellm_provider": "databricks", - "max_input_tokens": 400000, - "max_output_tokens": 128000, - "max_tokens": 400000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 1.9999700000000004e-06, - "output_dbu_cost_per_token": 2.8571e-05, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving" - }, - "databricks/databricks-gpt-5-nano": { - "input_cost_per_token": 4.998e-08, - "input_dbu_cost_per_token": 7.14e-07, - "litellm_provider": "databricks", - "max_input_tokens": 400000, - "max_output_tokens": 128000, - "max_tokens": 400000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 3.9998000000000007e-07, - "output_dbu_cost_per_token": 5.714000000000001e-06, - "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving" - }, - "databricks/databricks-gpt-oss-120b": { - "input_cost_per_token": 1.5000999999999998e-07, - "input_dbu_cost_per_token": 2.1429999999999996e-06, - "litellm_provider": "databricks", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- }, - "mode": "chat", - "output_cost_per_token": 5.9997e-07, - "output_dbu_cost_per_token": 8.571e-06, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving" - }, - "databricks/databricks-gpt-oss-20b": { - "input_cost_per_token": 7e-08, - "input_dbu_cost_per_token": 1e-06, - "litellm_provider": "databricks", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 3.0001999999999996e-07, - "output_dbu_cost_per_token": 4.285999999999999e-06, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving" - }, - "databricks/databricks-gte-large-en": { - "input_cost_per_token": 1.2999000000000001e-07, - "input_dbu_cost_per_token": 1.857e-06, - "litellm_provider": "databricks", - "max_input_tokens": 8192, - "max_tokens": 8192, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_dbu_cost_per_token": 0.0, - "output_vector_size": 1024, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving" - }, - "databricks/databricks-llama-2-70b-chat": { - "input_cost_per_token": 5.0001e-07, - "input_dbu_cost_per_token": 7.143e-06, - "litellm_provider": "databricks", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- }, - "mode": "chat", - "output_cost_per_token": 1.5000300000000002e-06, - "output_dbu_cost_per_token": 2.1429e-05, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "supports_tool_choice": true - }, - "databricks/databricks-llama-4-maverick": { - "input_cost_per_token": 5.0001e-07, - "input_dbu_cost_per_token": 7.143e-06, - "litellm_provider": "databricks", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "metadata": { - "notes": "Databricks documentation now provides both DBU costs (_dbu_cost_per_token) and dollar costs(_cost_per_token)." - }, - "mode": "chat", - "output_cost_per_token": 1.5000300000000002e-06, - "output_dbu_cost_per_token": 2.1429e-05, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "supports_tool_choice": true - }, - "databricks/databricks-meta-llama-3-1-405b-instruct": { - "input_cost_per_token": 5.00003e-06, - "input_dbu_cost_per_token": 7.1429e-05, - "litellm_provider": "databricks", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 1.5000020000000002e-05, - "output_dbu_cost_per_token": 0.000214286, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "supports_tool_choice": true - }, - "databricks/databricks-meta-llama-3-1-8b-instruct": { - "input_cost_per_token": 1.5000999999999998e-07, - "input_dbu_cost_per_token": 2.1429999999999996e-06, - "litellm_provider": "databricks", - "max_input_tokens": 200000, - "max_output_tokens": 128000, - "max_tokens": 200000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070. 
Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 4.5003000000000007e-07, - "output_dbu_cost_per_token": 6.429000000000001e-06, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving" - }, - "databricks/databricks-meta-llama-3-3-70b-instruct": { - "input_cost_per_token": 5.0001e-07, - "input_dbu_cost_per_token": 7.143e-06, - "litellm_provider": "databricks", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 1.5000300000000002e-06, - "output_dbu_cost_per_token": 2.1429e-05, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "supports_tool_choice": true - }, - "databricks/databricks-meta-llama-3-70b-instruct": { - "input_cost_per_token": 1.00002e-06, - "input_dbu_cost_per_token": 1.4286e-05, - "litellm_provider": "databricks", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- }, - "mode": "chat", - "output_cost_per_token": 2.9999900000000002e-06, - "output_dbu_cost_per_token": 4.2857e-05, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "supports_tool_choice": true - }, - "databricks/databricks-mixtral-8x7b-instruct": { - "input_cost_per_token": 5.0001e-07, - "input_dbu_cost_per_token": 7.143e-06, - "litellm_provider": "databricks", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 1.00002e-06, - "output_dbu_cost_per_token": 1.4286e-05, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "supports_tool_choice": true - }, - "databricks/databricks-mpt-30b-instruct": { - "input_cost_per_token": 1.00002e-06, - "input_dbu_cost_per_token": 1.4286e-05, - "litellm_provider": "databricks", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 1.00002e-06, - "output_dbu_cost_per_token": 1.4286e-05, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "supports_tool_choice": true - }, - "databricks/databricks-mpt-7b-instruct": { - "input_cost_per_token": 5.0001e-07, - "input_dbu_cost_per_token": 7.143e-06, - "litellm_provider": "databricks", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. 
Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - }, - "mode": "chat", - "output_cost_per_token": 0.0, - "output_dbu_cost_per_token": 0.0, - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "supports_tool_choice": true - }, - "dataforseo/search": { - "input_cost_per_query": 0.003, - "litellm_provider": "dataforseo", - "mode": "search" - }, - "davinci-002": { - "input_cost_per_token": 2e-06, - "litellm_provider": "text-completion-openai", - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "max_tokens": 16384, - "mode": "completion", - "output_cost_per_token": 2e-06 - }, - "deepgram/base": { - "input_cost_per_second": 0.00020833, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0125/60 seconds = $0.00020833 per second", - "original_pricing_per_minute": 0.0125 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/base-conversationalai": { - "input_cost_per_second": 0.00020833, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0125/60 seconds = $0.00020833 per second", - "original_pricing_per_minute": 0.0125 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/base-finance": { - "input_cost_per_second": 0.00020833, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0125/60 seconds = $0.00020833 per second", - "original_pricing_per_minute": 0.0125 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/base-general": { - "input_cost_per_second": 0.00020833, - "litellm_provider": "deepgram", - "metadata": { - "calculation": 
"$0.0125/60 seconds = $0.00020833 per second", - "original_pricing_per_minute": 0.0125 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/base-meeting": { - "input_cost_per_second": 0.00020833, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0125/60 seconds = $0.00020833 per second", - "original_pricing_per_minute": 0.0125 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/base-phonecall": { - "input_cost_per_second": 0.00020833, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0125/60 seconds = $0.00020833 per second", - "original_pricing_per_minute": 0.0125 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/base-video": { - "input_cost_per_second": 0.00020833, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0125/60 seconds = $0.00020833 per second", - "original_pricing_per_minute": 0.0125 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/base-voicemail": { - "input_cost_per_second": 0.00020833, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0125/60 seconds = $0.00020833 per second", - "original_pricing_per_minute": 0.0125 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/enhanced": { - "input_cost_per_second": 0.00024167, - "litellm_provider": "deepgram", - "metadata": { - 
"calculation": "$0.0145/60 seconds = $0.00024167 per second", - "original_pricing_per_minute": 0.0145 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/enhanced-finance": { - "input_cost_per_second": 0.00024167, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0145/60 seconds = $0.00024167 per second", - "original_pricing_per_minute": 0.0145 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/enhanced-general": { - "input_cost_per_second": 0.00024167, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0145/60 seconds = $0.00024167 per second", - "original_pricing_per_minute": 0.0145 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/enhanced-meeting": { - "input_cost_per_second": 0.00024167, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0145/60 seconds = $0.00024167 per second", - "original_pricing_per_minute": 0.0145 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/enhanced-phonecall": { - "input_cost_per_second": 0.00024167, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0145/60 seconds = $0.00024167 per second", - "original_pricing_per_minute": 0.0145 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": 
"deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2-atc": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2-automotive": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2-conversationalai": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2-drivethru": { - "input_cost_per_second": 7.167e-05, - 
"litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2-finance": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2-general": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2-meeting": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2-phonecall": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2-video": { - 
"input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-2-voicemail": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-3": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-3-general": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-3-medical": { - "input_cost_per_second": 8.667e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0052/60 seconds = $0.00008667 per second (multilingual)", - "original_pricing_per_minute": 0.0052 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - 
"deepgram/nova-general": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/nova-phonecall": { - "input_cost_per_second": 7.167e-05, - "litellm_provider": "deepgram", - "metadata": { - "calculation": "$0.0043/60 seconds = $0.00007167 per second", - "original_pricing_per_minute": 0.0043 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/whisper": { - "input_cost_per_second": 0.0001, - "litellm_provider": "deepgram", - "metadata": { - "notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models" - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/whisper-base": { - "input_cost_per_second": 0.0001, - "litellm_provider": "deepgram", - "metadata": { - "notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models" - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/whisper-large": { - "input_cost_per_second": 0.0001, - "litellm_provider": "deepgram", - "metadata": { - "notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models" - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - 
"deepgram/whisper-medium": { - "input_cost_per_second": 0.0001, - "litellm_provider": "deepgram", - "metadata": { - "notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models" - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/whisper-small": { - "input_cost_per_second": 0.0001, - "litellm_provider": "deepgram", - "metadata": { - "notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models" - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepgram/whisper-tiny": { - "input_cost_per_second": 0.0001, - "litellm_provider": "deepgram", - "metadata": { - "notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models" - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://deepgram.com/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "deepinfra/Gryphe/MythoMax-L2-13b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 8e-08, - "output_cost_per_token": 9e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/NousResearch/Hermes-3-Llama-3.1-405B": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 1e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/NousResearch/Hermes-3-Llama-3.1-70B": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 3e-07, - "litellm_provider": "deepinfra", 
- "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/Qwen/QwQ-32B": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen2.5-72B-Instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1.2e-07, - "output_cost_per_token": 3.9e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen2.5-7B-Instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 4e-08, - "output_cost_per_token": 1e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/Qwen/Qwen2.5-VL-32B-Instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true, - "supports_vision": true - }, - "deepinfra/Qwen/Qwen3-14B": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 6e-08, - "output_cost_per_token": 2.4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen3-235B-A22B": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 1.8e-07, - "output_cost_per_token": 5.4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen3-235B-A22B-Instruct-2507": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 9e-08, - "output_cost_per_token": 6e-07, - "litellm_provider": "deepinfra", 
- "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen3-235B-A22B-Thinking-2507": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 2.9e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen3-30B-A3B": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 8e-08, - "output_cost_per_token": 2.9e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen3-32B": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen3-Coder-480B-A35B-Instruct": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 4e-07, - "output_cost_per_token": 1.6e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen3-Coder-480B-A35B-Instruct-Turbo": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 2.9e-07, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen3-Next-80B-A3B-Instruct": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 1.4e-07, - "output_cost_per_token": 1.4e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Qwen/Qwen3-Next-80B-A3B-Thinking": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 1.4e-07, - "output_cost_per_token": 1.4e-06, - 
"litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/Sao10K/L3-8B-Lunaris-v1-Turbo": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 4e-08, - "output_cost_per_token": 5e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/Sao10K/L3.1-70B-Euryale-v2.2": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 6.5e-07, - "output_cost_per_token": 7.5e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/Sao10K/L3.3-70B-Euryale-v2.3": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 6.5e-07, - "output_cost_per_token": 7.5e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/allenai/olmOCR-7B-0725-FP8": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2.7e-07, - "output_cost_per_token": 1.5e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/anthropic/claude-3-7-sonnet-latest": { - "max_tokens": 200000, - "max_input_tokens": 200000, - "max_output_tokens": 200000, - "input_cost_per_token": 3.3e-06, - "output_cost_per_token": 1.65e-05, - "cache_read_input_token_cost": 3.3e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/anthropic/claude-4-opus": { - "max_tokens": 200000, - "max_input_tokens": 200000, - "max_output_tokens": 200000, - "input_cost_per_token": 1.65e-05, - "output_cost_per_token": 8.25e-05, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/anthropic/claude-4-sonnet": { - "max_tokens": 200000, - "max_input_tokens": 200000, - "max_output_tokens": 200000, - 
"input_cost_per_token": 3.3e-06, - "output_cost_per_token": 1.65e-05, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/deepseek-ai/DeepSeek-R1": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 7e-07, - "output_cost_per_token": 2.4e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/deepseek-ai/DeepSeek-R1-0528": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 2.15e-06, - "cache_read_input_token_cost": 4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/deepseek-ai/DeepSeek-R1-0528-Turbo": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/deepseek-ai/DeepSeek-R1-Distill-Llama-70B": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2.7e-07, - "output_cost_per_token": 2.7e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/deepseek-ai/DeepSeek-R1-Turbo": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/deepseek-ai/DeepSeek-V3": { - 
"max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 3.8e-07, - "output_cost_per_token": 8.9e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/deepseek-ai/DeepSeek-V3-0324": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 8.8e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/deepseek-ai/DeepSeek-V3.1": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 2.7e-07, - "output_cost_per_token": 1e-06, - "cache_read_input_token_cost": 2.16e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true, - "supports_reasoning": true - }, - "deepinfra/deepseek-ai/DeepSeek-V3.1-Terminus": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 2.7e-07, - "output_cost_per_token": 1e-06, - "cache_read_input_token_cost": 2.16e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/google/gemini-2.0-flash-001": { - "max_tokens": 1000000, - "max_input_tokens": 1000000, - "max_output_tokens": 1000000, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/google/gemini-2.5-flash": { - "max_tokens": 1000000, - "max_input_tokens": 1000000, - "max_output_tokens": 1000000, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 2.5e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/google/gemini-2.5-pro": { - "max_tokens": 1000000, - "max_input_tokens": 1000000, - "max_output_tokens": 1000000, - "input_cost_per_token": 1.25e-06, - "output_cost_per_token": 
1e-05, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/google/gemma-3-12b-it": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 5e-08, - "output_cost_per_token": 1e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/google/gemma-3-27b-it": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-08, - "output_cost_per_token": 1.6e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/google/gemma-3-4b-it": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 4e-08, - "output_cost_per_token": 8e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/meta-llama/Llama-3.2-11B-Vision-Instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 4.9e-08, - "output_cost_per_token": 4.9e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/meta-llama/Llama-3.2-3B-Instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-08, - "output_cost_per_token": 2e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/meta-llama/Llama-3.3-70B-Instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2.3e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/meta-llama/Llama-3.3-70B-Instruct-Turbo": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 
1.3e-07, - "output_cost_per_token": 3.9e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": { - "max_tokens": 1048576, - "max_input_tokens": 1048576, - "max_output_tokens": 1048576, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/meta-llama/Llama-4-Scout-17B-16E-Instruct": { - "max_tokens": 327680, - "max_input_tokens": 327680, - "max_output_tokens": 327680, - "input_cost_per_token": 8e-08, - "output_cost_per_token": 3e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/meta-llama/Llama-Guard-3-8B": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 5.5e-08, - "output_cost_per_token": 5.5e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/meta-llama/Llama-Guard-4-12B": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 1.8e-07, - "output_cost_per_token": 1.8e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/meta-llama/Meta-Llama-3-8B-Instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-08, - "output_cost_per_token": 6e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/meta-llama/Meta-Llama-3.1-70B-Instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 4e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": { - "max_tokens": 
131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/meta-llama/Meta-Llama-3.1-8B-Instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 3e-08, - "output_cost_per_token": 5e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-08, - "output_cost_per_token": 3e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/microsoft/WizardLM-2-8x22B": { - "max_tokens": 65536, - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "input_cost_per_token": 4.8e-07, - "output_cost_per_token": 4.8e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": false - }, - "deepinfra/microsoft/phi-4": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 7e-08, - "output_cost_per_token": 1.4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/mistralai/Mistral-Nemo-Instruct-2407": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-08, - "output_cost_per_token": 4e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/mistralai/Mistral-Small-24B-Instruct-2501": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 5e-08, - "output_cost_per_token": 8e-08, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - 
"deepinfra/mistralai/Mistral-Small-3.2-24B-Instruct-2506": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 7.5e-08, - "output_cost_per_token": 2e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/mistralai/Mixtral-8x7B-Instruct-v0.1": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 4e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/moonshotai/Kimi-K2-Instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 2e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/moonshotai/Kimi-K2-Instruct-0905": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 2e-06, - "cache_read_input_token_cost": 4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/nvidia/Llama-3.1-Nemotron-70B-Instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/nvidia/Llama-3.3-Nemotron-Super-49B-v1.5": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/nvidia/NVIDIA-Nemotron-Nano-9B-v2": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 4e-08, - 
"output_cost_per_token": 1.6e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/openai/gpt-oss-120b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 5e-08, - "output_cost_per_token": 4.5e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/openai/gpt-oss-20b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 4e-08, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepinfra/zai-org/GLM-4.5": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 4e-07, - "output_cost_per_token": 1.6e-06, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_tool_choice": true - }, - "deepseek/deepseek-chat": { - "cache_read_input_token_cost": 3e-08, - "input_cost_per_token": 3e-07, - "litellm_provider": "deepseek", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 5e-07, - "source": "https://api-docs.deepseek.com/quick_start/pricing", - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "deepseek/deepseek-coder": { - "input_cost_per_token": 1.4e-07, - "input_cost_per_token_cache_hit": 1.4e-08, - "litellm_provider": "deepseek", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.8e-07, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": 
true, - "supports_tool_choice": true - }, - "deepseek/deepseek-r1": { - "input_cost_per_token": 5.5e-07, - "input_cost_per_token_cache_hit": 1.4e-07, - "litellm_provider": "deepseek", - "max_input_tokens": 65536, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.19e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "deepseek/deepseek-reasoner": { - "input_cost_per_token": 5.5e-07, - "input_cost_per_token_cache_hit": 1.4e-07, - "litellm_provider": "deepseek", - "max_input_tokens": 65536, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.19e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "deepseek/deepseek-v3": { - "cache_creation_input_token_cost": 0.0, - "cache_read_input_token_cost": 7e-08, - "input_cost_per_token": 2.7e-07, - "input_cost_per_token_cache_hit": 7e-08, - "litellm_provider": "deepseek", - "max_input_tokens": 65536, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.1e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_tool_choice": true - }, - "deepseek/deepseek-v3.2": { - "input_cost_per_token": 2.8e-07, - "input_cost_per_token_cache_hit": 2.8e-08, - "litellm_provider": "deepseek", - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "deepseek.v3-v1:0": { - "input_cost_per_token": 5.8e-07, - "litellm_provider": 
"bedrock_converse", - "max_input_tokens": 163840, - "max_output_tokens": 81920, - "max_tokens": 163840, - "mode": "chat", - "output_cost_per_token": 1.68e-06, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "dolphin": { - "input_cost_per_token": 5e-07, - "litellm_provider": "nlp_cloud", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "completion", - "output_cost_per_token": 5e-07 - }, - "doubao-embedding": { - "input_cost_per_token": 0.0, - "litellm_provider": "volcengine", - "max_input_tokens": 4096, - "max_tokens": 4096, - "metadata": { - "notes": "Volcengine Doubao embedding model - standard version with 2560 dimensions" - }, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 2560 - }, - "doubao-embedding-large": { - "input_cost_per_token": 0.0, - "litellm_provider": "volcengine", - "max_input_tokens": 4096, - "max_tokens": 4096, - "metadata": { - "notes": "Volcengine Doubao embedding model - large version with 2048 dimensions" - }, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 2048 - }, - "doubao-embedding-large-text-240915": { - "input_cost_per_token": 0.0, - "litellm_provider": "volcengine", - "max_input_tokens": 4096, - "max_tokens": 4096, - "metadata": { - "notes": "Volcengine Doubao embedding model - text-240915 version with 4096 dimensions" - }, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 4096 - }, - "doubao-embedding-large-text-250515": { - "input_cost_per_token": 0.0, - "litellm_provider": "volcengine", - "max_input_tokens": 4096, - "max_tokens": 4096, - "metadata": { - "notes": "Volcengine Doubao embedding model - text-250515 version with 2048 dimensions" - }, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 2048 - }, - "doubao-embedding-text-240715": { - "input_cost_per_token": 0.0, - "litellm_provider": "volcengine", - 
"max_input_tokens": 4096, - "max_tokens": 4096, - "metadata": { - "notes": "Volcengine Doubao embedding model - text-240715 version with 2560 dimensions" - }, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 2560 - }, - "exa_ai/search": { - "litellm_provider": "exa_ai", - "mode": "search", - "tiered_pricing": [ - { - "input_cost_per_query": 5e-03, - "max_results_range": [ - 0, - 25 - ] - }, - { - "input_cost_per_query": 25e-03, - "max_results_range": [ - 26, - 100 - ] - } - ] - }, - "firecrawl/search": { - "litellm_provider": "firecrawl", - "mode": "search", - "tiered_pricing": [ - { - "input_cost_per_query": 1.66e-03, - "max_results_range": [ - 1, - 10 - ] - }, - { - "input_cost_per_query": 3.32e-03, - "max_results_range": [ - 11, - 20 - ] - }, - { - "input_cost_per_query": 4.98e-03, - "max_results_range": [ - 21, - 30 - ] - }, - { - "input_cost_per_query": 6.64e-03, - "max_results_range": [ - 31, - 40 - ] - }, - { - "input_cost_per_query": 8.3e-03, - "max_results_range": [ - 41, - 50 - ] - }, - { - "input_cost_per_query": 9.96e-03, - "max_results_range": [ - 51, - 60 - ] - }, - { - "input_cost_per_query": 11.62e-03, - "max_results_range": [ - 61, - 70 - ] - }, - { - "input_cost_per_query": 13.28e-03, - "max_results_range": [ - 71, - 80 - ] - }, - { - "input_cost_per_query": 14.94e-03, - "max_results_range": [ - 81, - 90 - ] - }, - { - "input_cost_per_query": 16.6e-03, - "max_results_range": [ - 91, - 100 - ] - } - ], - "metadata": { - "notes": "Firecrawl search pricing: $83 for 100,000 credits, 2 credits per 10 results. Cost = ceiling(limit/10) * 2 * $0.00083" - } - }, - "perplexity/search": { - "input_cost_per_query": 5e-03, - "litellm_provider": "perplexity", - "mode": "search" - }, - "searxng/search": { - "litellm_provider": "searxng", - "mode": "search", - "input_cost_per_query": 0.0, - "metadata": { - "notes": "SearXNG is an open-source metasearch engine. Free to use when self-hosted or using public instances." 
- } - }, - "elevenlabs/scribe_v1": { - "input_cost_per_second": 6.11e-05, - "litellm_provider": "elevenlabs", - "metadata": { - "calculation": "$0.22/hour = $0.00366/minute = $0.0000611 per second (enterprise pricing)", - "notes": "ElevenLabs Scribe v1 - state-of-the-art speech recognition model with 99 language support", - "original_pricing_per_hour": 0.22 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://elevenlabs.io/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "elevenlabs/scribe_v1_experimental": { - "input_cost_per_second": 6.11e-05, - "litellm_provider": "elevenlabs", - "metadata": { - "calculation": "$0.22/hour = $0.00366/minute = $0.0000611 per second (enterprise pricing)", - "notes": "ElevenLabs Scribe v1 experimental - enhanced version of the main Scribe model", - "original_pricing_per_hour": 0.22 - }, - "mode": "audio_transcription", - "output_cost_per_second": 0.0, - "source": "https://elevenlabs.io/pricing", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "embed-english-light-v2.0": { - "input_cost_per_token": 1e-07, - "litellm_provider": "cohere", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "embed-english-light-v3.0": { - "input_cost_per_token": 1e-07, - "litellm_provider": "cohere", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "embed-english-v2.0": { - "input_cost_per_token": 1e-07, - "litellm_provider": "cohere", - "max_input_tokens": 4096, - "max_tokens": 4096, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "embed-english-v3.0": { - "input_cost_per_image": 0.0001, - "input_cost_per_token": 1e-07, - "litellm_provider": "cohere", - "max_input_tokens": 1024, - "max_tokens": 1024, - "metadata": { - "notes": "'supports_image_input' is a deprecated field. Use 'supports_embedding_image_input' instead." 
- }, - "mode": "embedding", - "output_cost_per_token": 0.0, - "supports_embedding_image_input": true, - "supports_image_input": true - }, - "embed-multilingual-v2.0": { - "input_cost_per_token": 1e-07, - "litellm_provider": "cohere", - "max_input_tokens": 768, - "max_tokens": 768, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "embed-multilingual-v3.0": { - "input_cost_per_token": 1e-07, - "litellm_provider": "cohere", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "embedding", - "output_cost_per_token": 0.0, - "supports_embedding_image_input": true - }, - "embed-multilingual-light-v3.0": { - "input_cost_per_token": 1e-04, - "litellm_provider": "cohere", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "embedding", - "output_cost_per_token": 0.0, - "supports_embedding_image_input": true - }, - "eu.amazon.nova-lite-v1:0": { - "input_cost_per_token": 7.8e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 3.12e-07, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "eu.amazon.nova-micro-v1:0": { - "input_cost_per_token": 4.6e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 1.84e-07, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "eu.amazon.nova-pro-v1:0": { - "input_cost_per_token": 1.05e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 4.2e-06, - "source": "https://aws.amazon.com/bedrock/pricing/", - "supports_function_calling": true, - "supports_pdf_input": true, - 
"supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "eu.anthropic.claude-3-5-haiku-20241022-v1:0": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "eu.anthropic.claude-haiku-4-5-20251001-v1:0": { - "cache_creation_input_token_cost": 1.375e-06, - "cache_read_input_token_cost": 1.1e-07, - "input_cost_per_token": 1.1e-06, - "deprecation_date": "2026-10-15", - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5.5e-06, - "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "eu.anthropic.claude-3-5-sonnet-20240620-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "eu.anthropic.claude-3-5-sonnet-20241022-v2:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 
8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "eu.anthropic.claude-3-7-sonnet-20250219-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "eu.anthropic.claude-3-haiku-20240307-v1:0": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "eu.anthropic.claude-3-opus-20240229-v1:0": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "eu.anthropic.claude-3-sonnet-20240229-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": 
true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "eu.anthropic.claude-opus-4-1-20250805-v1:0": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "eu.anthropic.claude-opus-4-20250514-v1:0": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "eu.anthropic.claude-sonnet-4-20250514-v1:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - 
"input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "eu.anthropic.claude-sonnet-4-5-20250929-v1:0": { - "cache_creation_input_token_cost": 4.125e-06, - "cache_read_input_token_cost": 3.3e-07, - "input_cost_per_token": 3.3e-06, - "input_cost_per_token_above_200k_tokens": 6.6e-06, - "output_cost_per_token_above_200k_tokens": 2.475e-05, - "cache_creation_input_token_cost_above_200k_tokens": 8.25e-06, - "cache_read_input_token_cost_above_200k_tokens": 6.6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.65e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - 
"eu.meta.llama3-2-1b-instruct-v1:0": { - "input_cost_per_token": 1.3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.3e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "eu.meta.llama3-2-3b-instruct-v1:0": { - "input_cost_per_token": 1.9e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.9e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "eu.mistral.pixtral-large-2502-v1:0": { - "input_cost_per_token": 2e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "fal_ai/bria/text-to-image/3.2": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.0398, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "fal_ai/fal-ai/flux-pro/v1.1": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "fal_ai/fal-ai/flux-pro/v1.1-ultra": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.06, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "fal_ai/fal-ai/flux/schnell": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.003, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "fal_ai/fal-ai/bytedance/seedream/v3/text-to-image": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.03, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - 
"fal_ai/fal-ai/bytedance/dreamina/v3.1/text-to-image": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.03, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "fal_ai/fal-ai/ideogram/v3": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.06, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "fal_ai/fal-ai/imagen4/preview": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.0398, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "fal_ai/fal-ai/imagen4/preview/fast": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.02, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "fal_ai/fal-ai/imagen4/preview/ultra": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.06, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "fal_ai/fal-ai/recraft/v3/text-to-image": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.0398, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "fal_ai/fal-ai/stable-diffusion-v35-medium": { - "litellm_provider": "fal_ai", - "mode": "image_generation", - "output_cost_per_image": 0.0398, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "featherless_ai/featherless-ai/Qwerky-72B": { - "litellm_provider": "featherless_ai", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 32768, - "mode": "chat" - }, - "featherless_ai/featherless-ai/Qwerky-QwQ-32B": { - "litellm_provider": "featherless_ai", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 32768, - "mode": "chat" - }, - "fireworks-ai-4.1b-to-16b": { - "input_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "output_cost_per_token": 2e-07 - }, - "fireworks-ai-56b-to-176b": { - 
"input_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "output_cost_per_token": 1.2e-06 - }, - "fireworks-ai-above-16b": { - "input_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "output_cost_per_token": 9e-07 - }, - "fireworks-ai-default": { - "input_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "output_cost_per_token": 0.0 - }, - "fireworks-ai-embedding-150m-to-350m": { - "input_cost_per_token": 1.6e-08, - "litellm_provider": "fireworks_ai-embedding-models", - "output_cost_per_token": 0.0 - }, - "fireworks-ai-embedding-up-to-150m": { - "input_cost_per_token": 8e-09, - "litellm_provider": "fireworks_ai-embedding-models", - "output_cost_per_token": 0.0 - }, - "fireworks-ai-moe-up-to-56b": { - "input_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "output_cost_per_token": 5e-07 - }, - "fireworks-ai-up-to-4b": { - "input_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "output_cost_per_token": 2e-07 - }, - "fireworks_ai/WhereIsAI/UAE-Large-V1": { - "input_cost_per_token": 1.6e-08, - "litellm_provider": "fireworks_ai-embedding-models", - "max_input_tokens": 512, - "max_tokens": 512, - "mode": "embedding", - "output_cost_per_token": 0.0, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-instruct": { - "input_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/deepseek-r1": { - "input_cost_per_token": 3e-06, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 128000, - "max_output_tokens": 20480, - "max_tokens": 20480, - "mode": "chat", - "output_cost_per_token": 8e-06, - "source": 
"https://fireworks.ai/pricing", - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/deepseek-r1-0528": { - "input_cost_per_token": 3e-06, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 160000, - "max_output_tokens": 160000, - "max_tokens": 160000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "source": "https://fireworks.ai/pricing", - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/deepseek-r1-basic": { - "input_cost_per_token": 5.5e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 128000, - "max_output_tokens": 20480, - "max_tokens": 20480, - "mode": "chat", - "output_cost_per_token": 2.19e-06, - "source": "https://fireworks.ai/pricing", - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/deepseek-v3": { - "input_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 9e-07, - "source": "https://fireworks.ai/pricing", - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/deepseek-v3-0324": { - "input_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "max_tokens": 163840, - "mode": "chat", - "output_cost_per_token": 9e-07, - "source": "https://fireworks.ai/models/fireworks/deepseek-v3-0324", - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/deepseek-v3p1": { - "input_cost_per_token": 5.6e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.68e-06, - "source": "https://fireworks.ai/pricing", - 
"supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/deepseek-v3p1-terminus": { - "input_cost_per_token": 5.6e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.68e-06, - "source": "https://fireworks.ai/pricing", - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/deepseek-v3p2": { - "input_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "max_tokens": 163840, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "source": "https://fireworks.ai/models/fireworks/deepseek-v3p2", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/firefunction-v2": { - "input_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 9e-07, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/glm-4p5": { - "input_cost_per_token": 5.5e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 128000, - "max_output_tokens": 96000, - "max_tokens": 96000, - "mode": "chat", - "output_cost_per_token": 2.19e-06, - "source": "https://fireworks.ai/models/fireworks/glm-4p5", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/glm-4p5-air": { - "input_cost_per_token": 2.2e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 128000, - "max_output_tokens": 96000, - "max_tokens": 96000, - "mode": "chat", - 
"output_cost_per_token": 8.8e-07, - "source": "https://artificialanalysis.ai/models/glm-4-5-air", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/glm-4p6": { - "input_cost_per_token": 0.55e-06, - "output_cost_per_token": 2.19e-06, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 202800, - "max_output_tokens": 202800, - "max_tokens": 202800, - "mode": "chat", - "source": "https://fireworks.ai/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/gpt-oss-120b": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 6e-07, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/gpt-oss-20b": { - "input_cost_per_token": 5e-08, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2e-07, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/kimi-k2-instruct": { - "input_cost_per_token": 6e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 131072, - "max_output_tokens": 16384, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "source": "https://fireworks.ai/models/fireworks/kimi-k2-instruct", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/kimi-k2-instruct-0905": { - 
"input_cost_per_token": 6e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 262144, - "max_output_tokens": 32768, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "source": "https://app.fireworks.ai/models/fireworks/kimi-k2-instruct-0905", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/kimi-k2-thinking": { - "input_cost_per_token": 6e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p1-405b-instruct": { - "input_cost_per_token": 3e-06, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 3e-06, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p1-8b-instruct": { - "input_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-07, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p2-11b-vision-instruct": { - "input_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 2e-07, - 
"source": "https://fireworks.ai/pricing", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p2-1b-instruct": { - "input_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-07, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p2-3b-instruct": { - "input_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-07, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p2-90b-vision-instruct": { - "input_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 9e-07, - "source": "https://fireworks.ai/pricing", - "supports_response_schema": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "fireworks_ai/accounts/fireworks/models/llama4-maverick-instruct-basic": { - "input_cost_per_token": 2.2e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 8.8e-07, - "source": "https://fireworks.ai/pricing", - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/llama4-scout-instruct-basic": { - "input_cost_per_token": 1.5e-07, - 
"litellm_provider": "fireworks_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 6e-07, - "source": "https://fireworks.ai/pricing", - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct-hf": { - "input_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "fireworks_ai/accounts/fireworks/models/qwen2-72b-instruct": { - "input_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 9e-07, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct": { - "input_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 9e-07, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "fireworks_ai/accounts/fireworks/models/yi-large": { - "input_cost_per_token": 3e-06, - "litellm_provider": "fireworks_ai", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-06, - "source": "https://fireworks.ai/pricing", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false - }, - 
"fireworks_ai/nomic-ai/nomic-embed-text-v1": { - "input_cost_per_token": 8e-09, - "litellm_provider": "fireworks_ai-embedding-models", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding", - "output_cost_per_token": 0.0, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/nomic-ai/nomic-embed-text-v1.5": { - "input_cost_per_token": 8e-09, - "litellm_provider": "fireworks_ai-embedding-models", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding", - "output_cost_per_token": 0.0, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/thenlper/gte-base": { - "input_cost_per_token": 8e-09, - "litellm_provider": "fireworks_ai-embedding-models", - "max_input_tokens": 512, - "max_tokens": 512, - "mode": "embedding", - "output_cost_per_token": 0.0, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/thenlper/gte-large": { - "input_cost_per_token": 1.6e-08, - "litellm_provider": "fireworks_ai-embedding-models", - "max_input_tokens": 512, - "max_tokens": 512, - "mode": "embedding", - "output_cost_per_token": 0.0, - "source": "https://fireworks.ai/pricing" - }, - "friendliai/meta-llama-3.1-70b-instruct": { - "input_cost_per_token": 6e-07, - "litellm_provider": "friendliai", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "friendliai/meta-llama-3.1-8b-instruct": { - "input_cost_per_token": 1e-07, - "litellm_provider": "friendliai", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": 
true - }, - "ft:babbage-002": { - "input_cost_per_token": 1.6e-06, - "input_cost_per_token_batches": 2e-07, - "litellm_provider": "text-completion-openai", - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "max_tokens": 16384, - "mode": "completion", - "output_cost_per_token": 1.6e-06, - "output_cost_per_token_batches": 2e-07 - }, - "ft:davinci-002": { - "input_cost_per_token": 1.2e-05, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "text-completion-openai", - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "max_tokens": 16384, - "mode": "completion", - "output_cost_per_token": 1.2e-05, - "output_cost_per_token_batches": 1e-06 - }, - "ft:gpt-3.5-turbo": { - "input_cost_per_token": 3e-06, - "input_cost_per_token_batches": 1.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-06, - "output_cost_per_token_batches": 3e-06, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "ft:gpt-3.5-turbo-0125": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openai", - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "ft:gpt-3.5-turbo-0613": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openai", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "ft:gpt-3.5-turbo-1106": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openai", - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "ft:gpt-4-0613": { - "input_cost_per_token": 
3e-05, - "litellm_provider": "openai", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-05, - "source": "OpenAI needs to add pricing for this ft model, will be updated when added by OpenAI. Defaulting to base model pricing", - "supports_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "ft:gpt-4o-2024-08-06": { - "cache_read_input_token_cost": 1.875e-06, - "input_cost_per_token": 3.75e-06, - "input_cost_per_token_batches": 1.875e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "output_cost_per_token_batches": 7.5e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "ft:gpt-4o-2024-11-20": { - "cache_creation_input_token_cost": 1.875e-06, - "input_cost_per_token": 3.75e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "ft:gpt-4o-mini-2024-07-18": { - "cache_read_input_token_cost": 1.5e-07, - "input_cost_per_token": 3e-07, - "input_cost_per_token_batches": 1.5e-07, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "output_cost_per_token_batches": 6e-07, - "supports_function_calling": true, - 
"supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "ft:gpt-4.1-2025-04-14": { - "cache_read_input_token_cost": 7.5e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_batches": 1.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "output_cost_per_token_batches": 6e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "ft:gpt-4.1-mini-2025-04-14": { - "cache_read_input_token_cost": 2e-07, - "input_cost_per_token": 8e-07, - "input_cost_per_token_batches": 4e-07, - "litellm_provider": "openai", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3.2e-06, - "output_cost_per_token_batches": 1.6e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "ft:gpt-4.1-nano-2025-04-14": { - "cache_read_input_token_cost": 5e-08, - "input_cost_per_token": 2e-07, - "input_cost_per_token_batches": 1e-07, - "litellm_provider": "openai", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 8e-07, - "output_cost_per_token_batches": 4e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - 
"ft:o4-mini-2025-04-16": { - "cache_read_input_token_cost": 1e-06, - "input_cost_per_token": 4e-06, - "input_cost_per_token_batches": 2e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 1.6e-05, - "output_cost_per_token_batches": 8e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "gemini-1.0-pro": { - "input_cost_per_character": 1.25e-07, - "input_cost_per_image": 0.0025, - "input_cost_per_token": 5e-07, - "input_cost_per_video_per_second": 0.002, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 3.75e-07, - "output_cost_per_token": 1.5e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#google_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "gemini-1.0-pro-001": { - "deprecation_date": "2025-04-09", - "input_cost_per_character": 1.25e-07, - "input_cost_per_image": 0.0025, - "input_cost_per_token": 5e-07, - "input_cost_per_video_per_second": 0.002, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 3.75e-07, - "output_cost_per_token": 1.5e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "gemini-1.0-pro-002": { - "deprecation_date": "2025-04-09", - "input_cost_per_character": 1.25e-07, - "input_cost_per_image": 0.0025, - "input_cost_per_token": 
5e-07, - "input_cost_per_video_per_second": 0.002, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 3.75e-07, - "output_cost_per_token": 1.5e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "gemini-1.0-pro-vision": { - "input_cost_per_image": 0.0025, - "input_cost_per_token": 5e-07, - "litellm_provider": "vertex_ai-vision-models", - "max_images_per_prompt": 16, - "max_input_tokens": 16384, - "max_output_tokens": 2048, - "max_tokens": 2048, - "max_video_length": 2, - "max_videos_per_prompt": 1, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini-1.0-pro-vision-001": { - "deprecation_date": "2025-04-09", - "input_cost_per_image": 0.0025, - "input_cost_per_token": 5e-07, - "litellm_provider": "vertex_ai-vision-models", - "max_images_per_prompt": 16, - "max_input_tokens": 16384, - "max_output_tokens": 2048, - "max_tokens": 2048, - "max_video_length": 2, - "max_videos_per_prompt": 1, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini-1.0-ultra": { - "input_cost_per_character": 1.25e-07, - "input_cost_per_image": 0.0025, - "input_cost_per_token": 5e-07, - "input_cost_per_video_per_second": 0.002, - "litellm_provider": "vertex_ai-language-models", - 
"max_input_tokens": 8192, - "max_output_tokens": 2048, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 3.75e-07, - "output_cost_per_token": 1.5e-06, - "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "gemini-1.0-ultra-001": { - "input_cost_per_character": 1.25e-07, - "input_cost_per_image": 0.0025, - "input_cost_per_token": 5e-07, - "input_cost_per_video_per_second": 0.002, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 8192, - "max_output_tokens": 2048, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 3.75e-07, - "output_cost_per_token": 1.5e-06, - "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. 
Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "gemini-1.5-flash": { - "input_cost_per_audio_per_second": 2e-06, - "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, - "input_cost_per_character": 1.875e-08, - "input_cost_per_character_above_128k_tokens": 2.5e-07, - "input_cost_per_image": 2e-05, - "input_cost_per_image_above_128k_tokens": 4e-05, - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1e-06, - "input_cost_per_video_per_second": 2e-05, - "input_cost_per_video_per_second_above_128k_tokens": 4e-05, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 7.5e-08, - "output_cost_per_character_above_128k_tokens": 1.5e-07, - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini-1.5-flash-001": { - "deprecation_date": "2025-05-24", - "input_cost_per_audio_per_second": 2e-06, - "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, - "input_cost_per_character": 1.875e-08, - "input_cost_per_character_above_128k_tokens": 2.5e-07, - "input_cost_per_image": 2e-05, - "input_cost_per_image_above_128k_tokens": 4e-05, - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 
1e-06, - "input_cost_per_video_per_second": 2e-05, - "input_cost_per_video_per_second_above_128k_tokens": 4e-05, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 7.5e-08, - "output_cost_per_character_above_128k_tokens": 1.5e-07, - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini-1.5-flash-002": { - "deprecation_date": "2025-09-24", - "input_cost_per_audio_per_second": 2e-06, - "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, - "input_cost_per_character": 1.875e-08, - "input_cost_per_character_above_128k_tokens": 2.5e-07, - "input_cost_per_image": 2e-05, - "input_cost_per_image_above_128k_tokens": 4e-05, - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1e-06, - "input_cost_per_video_per_second": 2e-05, - "input_cost_per_video_per_second_above_128k_tokens": 4e-05, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 7.5e-08, - "output_cost_per_character_above_128k_tokens": 1.5e-07, - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - 
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-flash", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini-1.5-flash-exp-0827": { - "input_cost_per_audio_per_second": 2e-06, - "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, - "input_cost_per_character": 1.875e-08, - "input_cost_per_character_above_128k_tokens": 2.5e-07, - "input_cost_per_image": 2e-05, - "input_cost_per_image_above_128k_tokens": 4e-05, - "input_cost_per_token": 4.688e-09, - "input_cost_per_token_above_128k_tokens": 1e-06, - "input_cost_per_video_per_second": 2e-05, - "input_cost_per_video_per_second_above_128k_tokens": 4e-05, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 1.875e-08, - "output_cost_per_character_above_128k_tokens": 3.75e-08, - "output_cost_per_token": 4.6875e-09, - "output_cost_per_token_above_128k_tokens": 9.375e-09, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini-1.5-flash-preview-0514": { - "input_cost_per_audio_per_second": 2e-06, - "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, - "input_cost_per_character": 1.875e-08, - "input_cost_per_character_above_128k_tokens": 2.5e-07, - "input_cost_per_image": 2e-05, - "input_cost_per_image_above_128k_tokens": 4e-05, - 
"input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1e-06, - "input_cost_per_video_per_second": 2e-05, - "input_cost_per_video_per_second_above_128k_tokens": 4e-05, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 1.875e-08, - "output_cost_per_character_above_128k_tokens": 3.75e-08, - "output_cost_per_token": 4.6875e-09, - "output_cost_per_token_above_128k_tokens": 9.375e-09, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini-1.5-pro": { - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_character": 3.125e-07, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "input_cost_per_image": 0.00032875, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_128k_tokens": 2.5e-06, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 1.25e-06, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "output_cost_per_token": 5e-06, - "output_cost_per_token_above_128k_tokens": 1e-05, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, 
- "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini-1.5-pro-001": { - "deprecation_date": "2025-05-24", - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_character": 3.125e-07, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "input_cost_per_image": 0.00032875, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_128k_tokens": 2.5e-06, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 1.25e-06, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "output_cost_per_token": 5e-06, - "output_cost_per_token_above_128k_tokens": 1e-05, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini-1.5-pro-002": { - "deprecation_date": "2025-09-24", - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_character": 3.125e-07, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "input_cost_per_image": 0.00032875, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_128k_tokens": 2.5e-06, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_video_per_second_above_128k_tokens": 
0.0006575, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 1.25e-06, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "output_cost_per_token": 5e-06, - "output_cost_per_token_above_128k_tokens": 1e-05, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-pro", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini-1.5-pro-preview-0215": { - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_character": 3.125e-07, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "input_cost_per_image": 0.00032875, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_token": 7.8125e-08, - "input_cost_per_token_above_128k_tokens": 1.5625e-07, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 1.25e-06, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "output_cost_per_token": 3.125e-07, - "output_cost_per_token_above_128k_tokens": 6.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gemini-1.5-pro-preview-0409": { - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - 
"input_cost_per_character": 3.125e-07, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "input_cost_per_image": 0.00032875, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_token": 7.8125e-08, - "input_cost_per_token_above_128k_tokens": 1.5625e-07, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 1.25e-06, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "output_cost_per_token": 3.125e-07, - "output_cost_per_token_above_128k_tokens": 6.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "gemini-1.5-pro-preview-0514": { - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_character": 3.125e-07, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "input_cost_per_image": 0.00032875, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_token": 7.8125e-08, - "input_cost_per_token_above_128k_tokens": 1.5625e-07, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 1.25e-06, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "output_cost_per_token": 3.125e-07, - "output_cost_per_token_above_128k_tokens": 6.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - 
"supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gemini-2.0-flash": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 4e-07, - "source": "https://ai.google.dev/pricing#2_0flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.0-flash-001": { - "cache_read_input_token_cost": 3.75e-08, - "deprecation_date": "2026-02-05", - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 6e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - 
"image" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.0-flash-exp": { - "cache_read_input_token_cost": 3.75e-08, - "input_cost_per_audio_per_second": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "input_cost_per_character": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image": 0, - "input_cost_per_image_above_128k_tokens": 0, - "input_cost_per_token": 1.5e-07, - "input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 0, - "output_cost_per_character_above_128k_tokens": 0, - "output_cost_per_token": 6e-07, - "output_cost_per_token_above_128k_tokens": 0, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.0-flash-lite": { - "cache_read_input_token_cost": 1.875e-08, - "input_cost_per_audio_token": 7.5e-08, - "input_cost_per_token": 
7.5e-08, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 50, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 3e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.0-flash-lite-001": { - "cache_read_input_token_cost": 1.875e-08, - "deprecation_date": "2026-02-25", - "input_cost_per_audio_token": 7.5e-08, - "input_cost_per_token": 7.5e-08, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 50, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 3e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.0-flash-live-preview-04-09": { 
- "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 3e-06, - "input_cost_per_image": 3e-06, - "input_cost_per_token": 5e-07, - "input_cost_per_video_per_second": 3e-06, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_audio_token": 1.2e-05, - "output_cost_per_token": 2e-06, - "rpm": 10, - "source": "https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/gemini#gemini-2-0-flash-live-preview-04-09", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini-2.0-flash-preview-image-generation": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 4e-07, - "source": "https://ai.google.dev/pricing#2_0flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - 
"supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.0-flash-thinking-exp": { - "cache_read_input_token_cost": 0.0, - "input_cost_per_audio_per_second": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "input_cost_per_character": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image": 0, - "input_cost_per_image_above_128k_tokens": 0, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 0, - "output_cost_per_character_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.0-flash-thinking-exp-01-21": { 
- "cache_read_input_token_cost": 0.0, - "input_cost_per_audio_per_second": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "input_cost_per_character": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image": 0, - "input_cost_per_image_above_128k_tokens": 0, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65536, - "max_pdf_size_mb": 30, - "max_tokens": 65536, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 0, - "output_cost_per_character_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": false, - "supports_function_calling": false, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": false, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.0-pro-exp-02-05": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - 
"max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_input": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_video_input": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-flash": { - "cache_read_input_token_cost": 3e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 2.5e-06, - "output_cost_per_token": 2.5e-06, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - 
"supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-flash-image": { - "cache_read_input_token_cost": 3e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "max_pdf_size_mb": 30, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "image_generation", - "output_cost_per_image": 0.039, - "output_cost_per_reasoning_token": 2.5e-06, - "output_cost_per_token": 2.5e-06, - "rpm": 100000, - "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-flash-image", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": false, - "tpm": 8000000 - }, - "gemini-2.5-flash-image-preview": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "image_generation", - "output_cost_per_image": 0.039, - "output_cost_per_reasoning_token": 3e-05, - "output_cost_per_token": 3e-05, - "rpm": 
100000, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 8000000 - }, - "gemini-3-pro-image-preview": { - "input_cost_per_image": 0.0011, - "input_cost_per_token": 2e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 65536, - "max_output_tokens": 32768, - "max_tokens": 65536, - "mode": "image_generation", - "output_cost_per_image": 0.134, - "output_cost_per_image_token": 1.2e-04, - "output_cost_per_token": 1.2e-05, - "output_cost_per_token_batches": 6e-06, - "source": "https://ai.google.dev/gemini-api/docs/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": false, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-flash-lite": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 5e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 
65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 4e-07, - "output_cost_per_token": 4e-07, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-flash-lite-preview-09-2025": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 3e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 4e-07, - "output_cost_per_token": 4e-07, - "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": 
true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-flash-preview-09-2025": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 2.5e-06, - "output_cost_per_token": 2.5e-06, - "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-live-2.5-flash-preview-native-audio-09-2025": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 3e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - 
"max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_audio_token": 1.2e-05, - "output_cost_per_token": 2e-06, - "source": "https://ai.google.dev/gemini-api/docs/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini/gemini-live-2.5-flash-preview-native-audio-09-2025": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 3e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_audio_token": 1.2e-05, - "output_cost_per_token": 2e-06, - "rpm": 100000, - "source": "https://ai.google.dev/gemini-api/docs/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - 
"supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 8000000 - }, - "gemini-2.5-flash-lite-preview-06-17": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 5e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 4e-07, - "output_cost_per_token": 4e-07, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-flash-preview-04-17": { - "cache_read_input_token_cost": 3.75e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 3.5e-06, - 
"output_cost_per_token": 6e-07, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-flash-preview-05-20": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 2.5e-06, - "output_cost_per_token": 2.5e-06, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true - }, - 
"gemini-2.5-pro": { - "cache_read_input_token_cost": 1.25e-07, - "cache_creation_input_token_cost_above_200k_tokens": 2.5e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_input": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_video_input": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-3-pro-preview": { - "cache_read_input_token_cost": 2e-07, - "cache_read_input_token_cost_above_200k_tokens": 4e-07, - "cache_creation_input_token_cost_above_200k_tokens": 2.5e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_above_200k_tokens": 4e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "output_cost_per_token_above_200k_tokens": 
1.8e-05, - "output_cost_per_token_batches": 6e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_input": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_video_input": true, - "supports_vision": true, - "supports_web_search": true - }, - "vertex_ai/gemini-3-pro-preview": { - "cache_read_input_token_cost": 2e-07, - "cache_read_input_token_cost_above_200k_tokens": 4e-07, - "cache_creation_input_token_cost_above_200k_tokens": 2.5e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_above_200k_tokens": 4e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "vertex_ai", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "output_cost_per_token_above_200k_tokens": 1.8e-05, - "output_cost_per_token_batches": 6e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_input": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - 
"supports_tool_choice": true, - "supports_video_input": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-pro-exp-03-25": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_input": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_video_input": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-pro-preview-03-25": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_audio_token": 1.25e-06, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "source": 
"https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-pro-preview-05-06": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_audio_token": 1.25e-06, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supported_regions": [ - "global" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - 
"supports_web_search": true - }, - "gemini-2.5-pro-preview-06-05": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_audio_token": 1.25e-06, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-2.5-pro-preview-tts": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "source": 
"https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "audio" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini-embedding-001": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 2048, - "max_tokens": 2048, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 3072, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" - }, - "gemini-flash-experimental": { - "input_cost_per_character": 0, - "input_cost_per_token": 0, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 0, - "output_cost_per_token": 0, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental", - "supports_function_calling": false, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "gemini-pro": { - "input_cost_per_character": 1.25e-07, - "input_cost_per_image": 0.0025, - "input_cost_per_token": 5e-07, - "input_cost_per_video_per_second": 0.002, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 3.75e-07, - "output_cost_per_token": 1.5e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "gemini-pro-experimental": { - "input_cost_per_character": 0, - 
"input_cost_per_token": 0, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 0, - "output_cost_per_token": 0, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental", - "supports_function_calling": false, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "gemini-pro-vision": { - "input_cost_per_image": 0.0025, - "input_cost_per_token": 5e-07, - "litellm_provider": "vertex_ai-vision-models", - "max_images_per_prompt": 16, - "max_input_tokens": 16384, - "max_output_tokens": 2048, - "max_tokens": 2048, - "max_video_length": 2, - "max_videos_per_prompt": 1, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini/gemini-embedding-001": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "gemini", - "max_input_tokens": 2048, - "max_tokens": 2048, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 3072, - "rpm": 10000, - "source": "https://ai.google.dev/gemini-api/docs/embeddings#model-versions", - "tpm": 10000000 - }, - "gemini/gemini-1.5-flash": { - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1.5e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - "rpm": 2000, - "source": "https://ai.google.dev/pricing", - 
"supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-flash-001": { - "cache_creation_input_token_cost": 1e-06, - "cache_read_input_token_cost": 1.875e-08, - "deprecation_date": "2025-05-24", - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1.5e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - "rpm": 2000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-flash-002": { - "cache_creation_input_token_cost": 1e-06, - "cache_read_input_token_cost": 1.875e-08, - "deprecation_date": "2025-09-24", - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1.5e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - "rpm": 2000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - 
"supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-flash-8b": { - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 4000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-flash-8b-exp-0827": { - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 4000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-flash-8b-exp-0924": { - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - 
"mode": "chat", - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 4000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-flash-exp-0827": { - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 2000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-flash-latest": { - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1.5e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - "rpm": 2000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-pro": { - "input_cost_per_token": 3.5e-06, 
- "input_cost_per_token_above_128k_tokens": 7e-06, - "litellm_provider": "gemini", - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.05e-05, - "output_cost_per_token_above_128k_tokens": 2.1e-05, - "rpm": 1000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-pro-001": { - "deprecation_date": "2025-05-24", - "input_cost_per_token": 3.5e-06, - "input_cost_per_token_above_128k_tokens": 7e-06, - "litellm_provider": "gemini", - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.05e-05, - "output_cost_per_token_above_128k_tokens": 2.1e-05, - "rpm": 1000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-pro-002": { - "deprecation_date": "2025-09-24", - "input_cost_per_token": 3.5e-06, - "input_cost_per_token_above_128k_tokens": 7e-06, - "litellm_provider": "gemini", - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.05e-05, - "output_cost_per_token_above_128k_tokens": 2.1e-05, - "rpm": 1000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-pro-exp-0801": { - "input_cost_per_token": 3.5e-06, - "input_cost_per_token_above_128k_tokens": 7e-06, - "litellm_provider": 
"gemini", - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.05e-05, - "output_cost_per_token_above_128k_tokens": 2.1e-05, - "rpm": 1000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-pro-exp-0827": { - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 1000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-1.5-pro-latest": { - "input_cost_per_token": 3.5e-06, - "input_cost_per_token_above_128k_tokens": 7e-06, - "litellm_provider": "gemini", - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.05e-06, - "output_cost_per_token_above_128k_tokens": 2.1e-05, - "rpm": 1000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-2.0-flash": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, 
- "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 4e-07, - "rpm": 10000, - "source": "https://ai.google.dev/pricing#2_0flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 10000000 - }, - "gemini/gemini-2.0-flash-001": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 4e-07, - "rpm": 10000, - "source": "https://ai.google.dev/pricing#2_0flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 10000000 - }, - "gemini/gemini-2.0-flash-exp": { - "cache_read_input_token_cost": 0.0, - "input_cost_per_audio_per_second": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "input_cost_per_character": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image": 0, - "input_cost_per_image_above_128k_tokens": 0, - 
"input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 0, - "output_cost_per_character_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 10, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 4000000 - }, - "gemini/gemini-2.0-flash-lite": { - "cache_read_input_token_cost": 1.875e-08, - "input_cost_per_audio_token": 7.5e-08, - "input_cost_per_token": 7.5e-08, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 50, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 3e-07, - "rpm": 4000, - "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.0-flash-lite", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - 
"supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 4000000 - }, - "gemini/gemini-2.0-flash-lite-preview-02-05": { - "cache_read_input_token_cost": 1.875e-08, - "input_cost_per_audio_token": 7.5e-08, - "input_cost_per_token": 7.5e-08, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 3e-07, - "rpm": 60000, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash-lite", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 10000000 - }, - "gemini/gemini-2.0-flash-live-001": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 2.1e-06, - "input_cost_per_image": 2.1e-06, - "input_cost_per_token": 3.5e-07, - "input_cost_per_video_per_second": 2.1e-06, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_audio_token": 8.5e-06, - "output_cost_per_token": 1.5e-06, - "rpm": 10, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2-0-flash-live-001", - "supported_endpoints": [ - 
"/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-2.0-flash-preview-image-generation": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 4e-07, - "rpm": 10000, - "source": "https://ai.google.dev/pricing#2_0flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 10000000 - }, - "gemini/gemini-2.0-flash-thinking-exp": { - "cache_read_input_token_cost": 0.0, - "input_cost_per_audio_per_second": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "input_cost_per_character": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image": 0, - "input_cost_per_image_above_128k_tokens": 0, - "input_cost_per_token": 0, - 
"input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65536, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 0, - "output_cost_per_character_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 10, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 4000000 - }, - "gemini/gemini-2.0-flash-thinking-exp-01-21": { - "cache_read_input_token_cost": 0.0, - "input_cost_per_audio_per_second": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "input_cost_per_character": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image": 0, - "input_cost_per_image_above_128k_tokens": 0, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65536, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - 
"output_cost_per_character": 0, - "output_cost_per_character_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 10, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 4000000 - }, - "gemini/gemini-2.0-pro-exp-02-05": { - "cache_read_input_token_cost": 0.0, - "input_cost_per_audio_per_second": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "input_cost_per_character": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image": 0, - "input_cost_per_image_above_128k_tokens": 0, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_character": 0, - "output_cost_per_character_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 2, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supports_audio_input": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, 
- "supports_tool_choice": true, - "supports_video_input": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 1000000 - }, - "gemini/gemini-2.5-flash": { - "cache_read_input_token_cost": 3e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 2.5e-06, - "output_cost_per_token": 2.5e-06, - "rpm": 100000, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 8000000 - }, - "gemini/gemini-2.5-flash-image": { - "cache_read_input_token_cost": 3e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "supports_reasoning": false, - "max_images_per_prompt": 3000, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "max_pdf_size_mb": 30, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "image_generation", - "output_cost_per_image": 0.039, - 
"output_cost_per_reasoning_token": 2.5e-06, - "output_cost_per_token": 2.5e-06, - "rpm": 100000, - "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-flash-image", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 8000000 - }, - "gemini/gemini-2.5-flash-image-preview": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "image_generation", - "output_cost_per_image": 0.039, - "output_cost_per_reasoning_token": 3e-05, - "output_cost_per_token": 3e-05, - "rpm": 100000, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - 
"supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 8000000 - }, - "gemini/gemini-3-pro-image-preview": { - "input_cost_per_image": 0.0011, - "input_cost_per_token": 2e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "gemini", - "max_input_tokens": 65536, - "max_output_tokens": 32768, - "max_tokens": 65536, - "mode": "image_generation", - "output_cost_per_image": 0.134, - "output_cost_per_image_token": 1.2e-04, - "output_cost_per_token": 1.2e-05, - "rpm": 1000, - "tpm": 4000000, - "output_cost_per_token_batches": 6e-06, - "source": "https://ai.google.dev/gemini-api/docs/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": false, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_vision": true, - "supports_web_search": true - }, - "gemini/gemini-2.5-flash-lite": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 5e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 4e-07, - "output_cost_per_token": 4e-07, - "rpm": 15, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-lite", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - 
"supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-2.5-flash-lite-preview-09-2025": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 3e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 4e-07, - "output_cost_per_token": 4e-07, - "rpm": 15, - "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-2.5-flash-preview-09-2025": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - 
"max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 2.5e-06, - "output_cost_per_token": 2.5e-06, - "rpm": 15, - "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-flash-latest": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 2.5e-06, - "output_cost_per_token": 2.5e-06, - "rpm": 15, - "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - 
"video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-flash-lite-latest": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 3e-07, - "input_cost_per_token": 1e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 4e-07, - "output_cost_per_token": 4e-07, - "rpm": 15, - "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-2.5-flash-lite-preview-06-17": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_audio_token": 5e-07, - "input_cost_per_token": 
1e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 4e-07, - "output_cost_per_token": 4e-07, - "rpm": 15, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-lite", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-2.5-flash-preview-04-17": { - "cache_read_input_token_cost": 3.75e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 1.5e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 3.5e-06, - "output_cost_per_token": 6e-07, - "rpm": 10, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - 
"supports_audio_output": false, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-2.5-flash-preview-05-20": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 2.5e-06, - "output_cost_per_token": 2.5e-06, - "rpm": 10, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-2.5-flash-preview-tts": { - "cache_read_input_token_cost": 3.75e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 1.5e-07, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, 
- "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_reasoning_token": 3.5e-06, - "output_cost_per_token": 6e-07, - "rpm": 10, - "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "audio" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-2.5-pro": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "rpm": 2000, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_input": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_video_input": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 800000 - }, - "gemini/gemini-2.5-computer-use-preview-10-2025": { - 
"input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "gemini", - "max_images_per_prompt": 3000, - "max_input_tokens": 128000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "rpm": 2000, - "source": "https://ai.google.dev/gemini-api/docs/computer-use", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_computer_use": true, - "supports_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 800000 - }, - "gemini/gemini-3-pro-preview": { - "cache_read_input_token_cost": 2e-07, - "cache_read_input_token_cost_above_200k_tokens": 4e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_above_200k_tokens": 4e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "output_cost_per_token_above_200k_tokens": 1.8e-05, - "output_cost_per_token_batches": 6e-06, - "rpm": 2000, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_input": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - 
"supports_system_messages": true, - "supports_tool_choice": true, - "supports_video_input": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 800000 - }, - "gemini/gemini-2.5-pro-exp-03-25": { - "cache_read_input_token_cost": 0.0, - "input_cost_per_token": 0.0, - "input_cost_per_token_above_200k_tokens": 0.0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 0.0, - "output_cost_per_token_above_200k_tokens": 0.0, - "rpm": 5, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_input": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_video_input": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 250000 - }, - "gemini/gemini-2.5-pro-preview-03-25": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "rpm": 10000, - 
"source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 10000000 - }, - "gemini/gemini-2.5-pro-preview-05-06": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "rpm": 10000, - "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 10000000 - }, - "gemini/gemini-2.5-pro-preview-06-05": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - 
"max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "rpm": 10000, - "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 10000000 - }, - "gemini/gemini-2.5-pro-preview-tts": { - "cache_read_input_token_cost": 3.125e-07, - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_above_200k_tokens": 2.5e-06, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_above_200k_tokens": 1.5e-05, - "rpm": 10000, - "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "audio" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true, - "tpm": 10000000 - 
}, - "gemini/gemini-exp-1114": { - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "metadata": { - "notes": "Rate limits not documented for gemini-exp-1114. Assuming same as gemini-1.5-pro.", - "supports_tool_choice": true - }, - "mode": "chat", - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 1000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-exp-1206": { - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "metadata": { - "notes": "Rate limits not documented for gemini-exp-1206. 
Assuming same as gemini-1.5-pro.", - "supports_tool_choice": true - }, - "mode": "chat", - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "rpm": 1000, - "source": "https://ai.google.dev/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 4000000 - }, - "gemini/gemini-gemma-2-27b-it": { - "input_cost_per_token": 3.5e-07, - "litellm_provider": "gemini", - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.05e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini/gemini-gemma-2-9b-it": { - "input_cost_per_token": 3.5e-07, - "litellm_provider": "gemini", - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.05e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini/gemini-pro": { - "input_cost_per_token": 3.5e-07, - "input_cost_per_token_above_128k_tokens": 7e-07, - "litellm_provider": "gemini", - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.05e-06, - "output_cost_per_token_above_128k_tokens": 2.1e-06, - "rpd": 30000, - "rpm": 360, - "source": "https://ai.google.dev/gemini-api/docs/models/gemini", - "supports_function_calling": true, - "supports_tool_choice": true, - "tpm": 120000 - }, - "gemini/gemini-pro-vision": { - "input_cost_per_token": 3.5e-07, - "input_cost_per_token_above_128k_tokens": 7e-07, - "litellm_provider": "gemini", - "max_input_tokens": 30720, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": 
"chat", - "output_cost_per_token": 1.05e-06, - "output_cost_per_token_above_128k_tokens": 2.1e-06, - "rpd": 30000, - "rpm": 360, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "tpm": 120000 - }, - "gemini/gemma-3-27b-it": { - "input_cost_per_audio_per_second": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "input_cost_per_character": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image": 0, - "input_cost_per_image_above_128k_tokens": 0, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_input_tokens": 131072, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 0, - "output_cost_per_character_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "source": "https://aistudio.google.com", - "supports_audio_output": false, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini/imagen-3.0-fast-generate-001": { - "litellm_provider": "gemini", - "mode": "image_generation", - "output_cost_per_image": 0.02, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "gemini/imagen-3.0-generate-001": { - "litellm_provider": "gemini", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "gemini/imagen-3.0-generate-002": { - "litellm_provider": "gemini", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - 
"gemini/imagen-4.0-fast-generate-001": { - "litellm_provider": "gemini", - "mode": "image_generation", - "output_cost_per_image": 0.02, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "gemini/imagen-4.0-generate-001": { - "litellm_provider": "gemini", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "gemini/imagen-4.0-ultra-generate-001": { - "litellm_provider": "gemini", - "mode": "image_generation", - "output_cost_per_image": 0.06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "gemini/learnlm-1.5-pro-experimental": { - "input_cost_per_audio_per_second": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "input_cost_per_character": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image": 0, - "input_cost_per_image_above_128k_tokens": 0, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "litellm_provider": "gemini", - "max_input_tokens": 32767, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 0, - "output_cost_per_character_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "source": "https://aistudio.google.com", - "supports_audio_output": false, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gemini/veo-2.0-generate-001": { - "litellm_provider": "gemini", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.35, - "source": "https://ai.google.dev/gemini-api/docs/video", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - 
"gemini/veo-3.0-fast-generate-preview": { - "litellm_provider": "gemini", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.4, - "source": "https://ai.google.dev/gemini-api/docs/video", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - "gemini/veo-3.0-generate-preview": { - "litellm_provider": "gemini", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.75, - "source": "https://ai.google.dev/gemini-api/docs/video", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - "gemini/veo-3.1-fast-generate-preview": { - "litellm_provider": "gemini", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.15, - "source": "https://ai.google.dev/gemini-api/docs/video", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - "gemini/veo-3.1-generate-preview": { - "litellm_provider": "gemini", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.40, - "source": "https://ai.google.dev/gemini-api/docs/video", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - "google.gemma-3-12b-it": { - "input_cost_per_token": 9e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.9e-07, - "supports_system_messages": true, - "supports_vision": true - }, - "google.gemma-3-27b-it": { - "input_cost_per_token": 2.3e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 3.8e-07, - "supports_system_messages": true, - "supports_vision": true - }, - 
"google.gemma-3-4b-it": { - "input_cost_per_token": 4e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 8e-08, - "supports_system_messages": true, - "supports_vision": true - }, - "google_pse/search": { - "input_cost_per_query": 0.005, - "litellm_provider": "google_pse", - "mode": "search" - }, - "global.anthropic.claude-sonnet-4-5-20250929-v1:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "global.anthropic.claude-sonnet-4-20250514-v1:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, 
- "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "global.anthropic.claude-haiku-4-5-20251001-v1:0": { - "cache_creation_input_token_cost": 1.25e-06, - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 1e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "global.amazon.nova-2-lite-v1:0": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_video_input": true, - "supports_vision": true - }, - "gpt-3.5-turbo": { - "input_cost_per_token": 0.5e-06, - "litellm_provider": "openai", - 
"max_input_tokens": 16385, - "max_output_tokens": 4096, - "max_tokens": 4097, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-3.5-turbo-0125": { - "input_cost_per_token": 5e-07, - "litellm_provider": "openai", - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "max_tokens": 16385, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-3.5-turbo-0301": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "max_tokens": 4097, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-3.5-turbo-0613": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "max_tokens": 4097, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-3.5-turbo-1106": { - "deprecation_date": "2026-09-28", - "input_cost_per_token": 1e-06, - "litellm_provider": "openai", - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "max_tokens": 16385, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-3.5-turbo-16k": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openai", - "max_input_tokens": 16385, - "max_output_tokens": 4096, - 
"max_tokens": 16385, - "mode": "chat", - "output_cost_per_token": 4e-06, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-3.5-turbo-16k-0613": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openai", - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "max_tokens": 16385, - "mode": "chat", - "output_cost_per_token": 4e-06, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-3.5-turbo-instruct": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "text-completion-openai", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "completion", - "output_cost_per_token": 2e-06 - }, - "gpt-3.5-turbo-instruct-0914": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "text-completion-openai", - "max_input_tokens": 8192, - "max_output_tokens": 4097, - "max_tokens": 4097, - "mode": "completion", - "output_cost_per_token": 2e-06 - }, - "gpt-4": { - "input_cost_per_token": 3e-05, - "litellm_provider": "openai", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4-0125-preview": { - "deprecation_date": "2026-03-26", - "input_cost_per_token": 1e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4-0314": { - "input_cost_per_token": 3e-05, - "litellm_provider": "openai", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - 
"mode": "chat", - "output_cost_per_token": 6e-05, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4-0613": { - "deprecation_date": "2025-06-06", - "input_cost_per_token": 3e-05, - "litellm_provider": "openai", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4-1106-preview": { - "deprecation_date": "2026-03-26", - "input_cost_per_token": 1e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4-1106-vision-preview": { - "deprecation_date": "2024-12-06", - "input_cost_per_token": 1e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-4-32k": { - "input_cost_per_token": 6e-05, - "litellm_provider": "openai", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.00012, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4-32k-0314": { - "input_cost_per_token": 6e-05, - "litellm_provider": "openai", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.00012, - 
"supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4-32k-0613": { - "input_cost_per_token": 6e-05, - "litellm_provider": "openai", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.00012, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4-turbo": { - "input_cost_per_token": 1e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-4-turbo-2024-04-09": { - "input_cost_per_token": 1e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-4-turbo-preview": { - "input_cost_per_token": 1e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4-vision-preview": { - "deprecation_date": "2024-12-06", - "input_cost_per_token": 1e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 
4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-4.1": { - "cache_read_input_token_cost": 5e-07, - "cache_read_input_token_cost_priority": 8.75e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_batches": 1e-06, - "input_cost_per_token_priority": 3.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 8e-06, - "output_cost_per_token_batches": 4e-06, - "output_cost_per_token_priority": 1.4e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-4.1-2025-04-14": { - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "openai", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 8e-06, - "output_cost_per_token_batches": 4e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - 
"supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-4.1-mini": { - "cache_read_input_token_cost": 1e-07, - "cache_read_input_token_cost_priority": 1.75e-07, - "input_cost_per_token": 4e-07, - "input_cost_per_token_batches": 2e-07, - "input_cost_per_token_priority": 7e-07, - "litellm_provider": "openai", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.6e-06, - "output_cost_per_token_batches": 8e-07, - "output_cost_per_token_priority": 2.8e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-4.1-mini-2025-04-14": { - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 4e-07, - "input_cost_per_token_batches": 2e-07, - "litellm_provider": "openai", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.6e-06, - "output_cost_per_token_batches": 8e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": 
true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-4.1-nano": { - "cache_read_input_token_cost": 2.5e-08, - "cache_read_input_token_cost_priority": 5e-08, - "input_cost_per_token": 1e-07, - "input_cost_per_token_batches": 5e-08, - "input_cost_per_token_priority": 2e-07, - "litellm_provider": "openai", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4e-07, - "output_cost_per_token_batches": 2e-07, - "output_cost_per_token_priority": 8e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-4.1-nano-2025-04-14": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 1e-07, - "input_cost_per_token_batches": 5e-08, - "litellm_provider": "openai", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4e-07, - "output_cost_per_token_batches": 2e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, 
- "supports_vision": true - }, - "gpt-4.5-preview": { - "cache_read_input_token_cost": 3.75e-05, - "input_cost_per_token": 7.5e-05, - "input_cost_per_token_batches": 3.75e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 0.00015, - "output_cost_per_token_batches": 7.5e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-4.5-preview-2025-02-27": { - "cache_read_input_token_cost": 3.75e-05, - "deprecation_date": "2025-07-14", - "input_cost_per_token": 7.5e-05, - "input_cost_per_token_batches": 3.75e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 0.00015, - "output_cost_per_token_batches": 7.5e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-4o": { - "cache_read_input_token_cost": 1.25e-06, - "cache_read_input_token_cost_priority": 2.125e-06, - "input_cost_per_token": 2.5e-06, - "input_cost_per_token_batches": 1.25e-06, - "input_cost_per_token_priority": 4.25e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_batches": 5e-06, - "output_cost_per_token_priority": 1.7e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - 
"supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-4o-2024-05-13": { - "input_cost_per_token": 5e-06, - "input_cost_per_token_batches": 2.5e-06, - "input_cost_per_token_priority": 8.75e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "output_cost_per_token_batches": 7.5e-06, - "output_cost_per_token_priority": 2.625e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-4o-2024-08-06": { - "cache_read_input_token_cost": 1.25e-06, - "input_cost_per_token": 2.5e-06, - "input_cost_per_token_batches": 1.25e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_batches": 5e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-4o-2024-11-20": { - "cache_read_input_token_cost": 1.25e-06, - "input_cost_per_token": 2.5e-06, - "input_cost_per_token_batches": 1.25e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_batches": 5e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - 
"supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-4o-audio-preview": { - "input_cost_per_audio_token": 0.0001, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_audio_token": 0.0002, - "output_cost_per_token": 1e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-audio-preview-2024-10-01": { - "input_cost_per_audio_token": 0.0001, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_audio_token": 0.0002, - "output_cost_per_token": 1e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-audio-preview-2024-12-17": { - "input_cost_per_audio_token": 4e-05, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 1e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-audio-preview-2025-06-03": { - "input_cost_per_audio_token": 4e-05, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 
16384, - "mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 1e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-mini": { - "cache_read_input_token_cost": 7.5e-08, - "cache_read_input_token_cost_priority": 1.25e-07, - "input_cost_per_token": 1.5e-07, - "input_cost_per_token_batches": 7.5e-08, - "input_cost_per_token_priority": 2.5e-07, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 6e-07, - "output_cost_per_token_batches": 3e-07, - "output_cost_per_token_priority": 1e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-4o-mini-2024-07-18": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_token": 1.5e-07, - "input_cost_per_token_batches": 7.5e-08, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 6e-07, - "output_cost_per_token_batches": 3e-07, - "search_context_cost_per_query": { - "search_context_size_high": 0.03, - "search_context_size_low": 0.025, - "search_context_size_medium": 0.0275 - }, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-4o-mini-audio-preview": { - 
"input_cost_per_audio_token": 1e-05, - "input_cost_per_token": 1.5e-07, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_audio_token": 2e-05, - "output_cost_per_token": 6e-07, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-mini-audio-preview-2024-12-17": { - "input_cost_per_audio_token": 1e-05, - "input_cost_per_token": 1.5e-07, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_audio_token": 2e-05, - "output_cost_per_token": 6e-07, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-mini-realtime-preview": { - "cache_creation_input_audio_token_cost": 3e-07, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_audio_token": 1e-05, - "input_cost_per_token": 6e-07, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 2e-05, - "output_cost_per_token": 2.4e-06, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-mini-realtime-preview-2024-12-17": { - "cache_creation_input_audio_token_cost": 3e-07, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_audio_token": 1e-05, - "input_cost_per_token": 6e-07, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - 
"mode": "chat", - "output_cost_per_audio_token": 2e-05, - "output_cost_per_token": 2.4e-06, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-mini-search-preview": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_token": 1.5e-07, - "input_cost_per_token_batches": 7.5e-08, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 6e-07, - "output_cost_per_token_batches": 3e-07, - "search_context_cost_per_query": { - "search_context_size_high": 0.03, - "search_context_size_low": 0.025, - "search_context_size_medium": 0.0275 - }, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gpt-4o-mini-search-preview-2025-03-11": { - "cache_read_input_token_cost": 7.5e-08, - "input_cost_per_token": 1.5e-07, - "input_cost_per_token_batches": 7.5e-08, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 6e-07, - "output_cost_per_token_batches": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-4o-mini-transcribe": { - "input_cost_per_audio_token": 3e-06, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "openai", - "max_input_tokens": 16000, - "max_output_tokens": 2000, - 
"mode": "audio_transcription", - "output_cost_per_token": 5e-06, - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "gpt-4o-mini-tts": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "openai", - "mode": "audio_speech", - "output_cost_per_audio_token": 1.2e-05, - "output_cost_per_second": 0.00025, - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/audio/speech" - ], - "supported_modalities": [ - "text", - "audio" - ], - "supported_output_modalities": [ - "audio" - ] - }, - "gpt-4o-realtime-preview": { - "cache_read_input_token_cost": 2.5e-06, - "input_cost_per_audio_token": 4e-05, - "input_cost_per_token": 5e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 2e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-realtime-preview-2024-10-01": { - "cache_creation_input_audio_token_cost": 2e-05, - "cache_read_input_token_cost": 2.5e-06, - "input_cost_per_audio_token": 0.0001, - "input_cost_per_token": 5e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 0.0002, - "output_cost_per_token": 2e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-realtime-preview-2024-12-17": { - "cache_read_input_token_cost": 2.5e-06, - "input_cost_per_audio_token": 4e-05, - "input_cost_per_token": 5e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - 
"mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 2e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-realtime-preview-2025-06-03": { - "cache_read_input_token_cost": 2.5e-06, - "input_cost_per_audio_token": 4e-05, - "input_cost_per_token": 5e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 8e-05, - "output_cost_per_token": 2e-05, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-4o-search-preview": { - "cache_read_input_token_cost": 1.25e-06, - "input_cost_per_token": 2.5e-06, - "input_cost_per_token_batches": 1.25e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_batches": 5e-06, - "search_context_cost_per_query": { - "search_context_size_high": 0.05, - "search_context_size_low": 0.03, - "search_context_size_medium": 0.035 - }, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gpt-4o-search-preview-2025-03-11": { - "cache_read_input_token_cost": 1.25e-06, - "input_cost_per_token": 2.5e-06, - "input_cost_per_token_batches": 1.25e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - 
"mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_batches": 5e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-4o-transcribe": { - "input_cost_per_audio_token": 6e-06, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 16000, - "max_output_tokens": 2000, - "mode": "audio_transcription", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "gpt-5": { - "cache_read_input_token_cost": 1.25e-07, - "cache_read_input_token_cost_flex": 6.25e-08, - "cache_read_input_token_cost_priority": 2.5e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_flex": 6.25e-07, - "input_cost_per_token_priority": 2.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_flex": 5e-06, - "output_cost_per_token_priority": 2e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-5.1": { - "cache_read_input_token_cost": 1.25e-07, - "cache_read_input_token_cost_priority": 2.5e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_priority": 2.5e-06, - 
"litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_priority": 2e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-5.1-2025-11-13": { - "cache_read_input_token_cost": 1.25e-07, - "cache_read_input_token_cost_priority": 2.5e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_priority": 2.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_priority": 2e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-5.1-chat-latest": { - "cache_read_input_token_cost": 1.25e-07, - "cache_read_input_token_cost_priority": 2.5e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_priority": 2.5e-06, - "litellm_provider": "openai", 
- "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_priority": 2e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": false, - "supports_native_streaming": true, - "supports_parallel_function_calling": false, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "gpt-5.2": { - "cache_read_input_token_cost": 1.75e-07, - "cache_read_input_token_cost_priority": 3.5e-07, - "input_cost_per_token": 1.75e-06, - "input_cost_per_token_priority": 3.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 400000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.4e-05, - "output_cost_per_token_priority": 2.8e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-5.2-2025-12-11": { - "cache_read_input_token_cost": 1.75e-07, - "cache_read_input_token_cost_priority": 3.5e-07, - "input_cost_per_token": 1.75e-06, - "input_cost_per_token_priority": 3.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 400000, - "max_output_tokens": 
128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.4e-05, - "output_cost_per_token_priority": 2.8e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-5.2-chat-latest": { - "cache_read_input_token_cost": 1.75e-07, - "cache_read_input_token_cost_priority": 3.5e-07, - "input_cost_per_token": 1.75e-06, - "input_cost_per_token_priority": 3.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.4e-05, - "output_cost_per_token_priority": 2.8e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-5.2-pro": { - "input_cost_per_token": 2.1e-05, - "litellm_provider": "openai", - "max_input_tokens": 400000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1.68e-04, - "supported_endpoints": [ - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - 
"image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gpt-5.2-pro-2025-12-11": { - "input_cost_per_token": 2.1e-05, - "litellm_provider": "openai", - "max_input_tokens": 400000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1.68e-04, - "supported_endpoints": [ - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gpt-5-pro": { - "input_cost_per_token": 1.5e-05, - "input_cost_per_token_batches": 7.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 400000, - "max_output_tokens": 272000, - "max_tokens": 272000, - "mode": "responses", - "output_cost_per_token": 1.2e-04, - "output_cost_per_token_batches": 6e-05, - "supported_endpoints": [ - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": false, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": 
true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gpt-5-pro-2025-10-06": { - "input_cost_per_token": 1.5e-05, - "input_cost_per_token_batches": 7.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 400000, - "max_output_tokens": 272000, - "max_tokens": 272000, - "mode": "responses", - "output_cost_per_token": 1.2e-04, - "output_cost_per_token_batches": 6e-05, - "supported_endpoints": [ - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": false, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "gpt-5-2025-08-07": { - "cache_read_input_token_cost": 1.25e-07, - "cache_read_input_token_cost_flex": 6.25e-08, - "cache_read_input_token_cost_priority": 2.5e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_flex": 6.25e-07, - "input_cost_per_token_priority": 2.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "output_cost_per_token_flex": 5e-06, - "output_cost_per_token_priority": 2e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - 
"supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-5-chat": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": false, - "supports_native_streaming": true, - "supports_parallel_function_calling": false, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "gpt-5-chat-latest": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": false, - "supports_native_streaming": true, - "supports_parallel_function_calling": false, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "gpt-5-codex": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1e-05, - 
"supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-5.1-codex": { - "cache_read_input_token_cost": 1.25e-07, - "cache_read_input_token_cost_priority": 2.5e-07, - "input_cost_per_token": 1.25e-06, - "input_cost_per_token_priority": 2.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1e-05, - "output_cost_per_token_priority": 2e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-5.1-codex-max": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "openai", - "max_input_tokens": 400000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 1e-05, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - 
"supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-5.1-codex-mini": { - "cache_read_input_token_cost": 2.5e-08, - "cache_read_input_token_cost_priority": 4.5e-08, - "input_cost_per_token": 2.5e-07, - "input_cost_per_token_priority": 4.5e-07, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "responses", - "output_cost_per_token": 2e-06, - "output_cost_per_token_priority": 3.6e-06, - "supported_endpoints": [ - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-5-mini": { - "cache_read_input_token_cost": 2.5e-08, - "cache_read_input_token_cost_flex": 1.25e-08, - "cache_read_input_token_cost_priority": 4.5e-08, - "input_cost_per_token": 2.5e-07, - "input_cost_per_token_flex": 1.25e-07, - "input_cost_per_token_priority": 4.5e-07, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "output_cost_per_token_flex": 1e-06, - "output_cost_per_token_priority": 3.6e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - 
"supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-5-mini-2025-08-07": { - "cache_read_input_token_cost": 2.5e-08, - "cache_read_input_token_cost_flex": 1.25e-08, - "cache_read_input_token_cost_priority": 4.5e-08, - "input_cost_per_token": 2.5e-07, - "input_cost_per_token_flex": 1.25e-07, - "input_cost_per_token_priority": 4.5e-07, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "output_cost_per_token_flex": 1e-06, - "output_cost_per_token_priority": 3.6e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "gpt-5-nano": { - "cache_read_input_token_cost": 5e-09, - "cache_read_input_token_cost_flex": 2.5e-09, - "input_cost_per_token": 5e-08, - "input_cost_per_token_flex": 2.5e-08, - "input_cost_per_token_priority": 2.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "output_cost_per_token_flex": 2e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - 
"supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-5-nano-2025-08-07": { - "cache_read_input_token_cost": 5e-09, - "cache_read_input_token_cost_flex": 2.5e-09, - "input_cost_per_token": 5e-08, - "input_cost_per_token_flex": 2.5e-08, - "litellm_provider": "openai", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "output_cost_per_token_flex": 2e-07, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "gpt-image-1": { - "input_cost_per_pixel": 4.0054321e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "gpt-image-1-mini": { - "cache_read_input_image_token_cost": 2.5e-07, - "cache_read_input_token_cost": 2e-07, - "input_cost_per_image_token": 2.5e-06, - "input_cost_per_token": 2e-06, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_image_token": 8e-06, - "supported_endpoints": [ - "/v1/images/generations", - "/v1/images/edits" - ] - }, - "gpt-realtime": { - "cache_creation_input_audio_token_cost": 4e-07, - "cache_read_input_token_cost": 4e-07, - 
"input_cost_per_audio_token": 3.2e-05, - "input_cost_per_image": 5e-06, - "input_cost_per_token": 4e-06, - "litellm_provider": "openai", - "max_input_tokens": 32000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 6.4e-05, - "output_cost_per_token": 1.6e-05, - "supported_endpoints": [ - "/v1/realtime" - ], - "supported_modalities": [ - "text", - "image", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-realtime-mini": { - "cache_creation_input_audio_token_cost": 3e-07, - "cache_read_input_audio_token_cost": 3e-07, - "input_cost_per_audio_token": 1e-05, - "input_cost_per_token": 6e-07, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 2e-05, - "output_cost_per_token": 2.4e-06, - "supported_endpoints": [ - "/v1/realtime" - ], - "supported_modalities": [ - "text", - "image", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gpt-realtime-2025-08-28": { - "cache_creation_input_audio_token_cost": 4e-07, - "cache_read_input_token_cost": 4e-07, - "input_cost_per_audio_token": 3.2e-05, - "input_cost_per_image": 5e-06, - "input_cost_per_token": 4e-06, - "litellm_provider": "openai", - "max_input_tokens": 32000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_audio_token": 6.4e-05, - "output_cost_per_token": 1.6e-05, - "supported_endpoints": [ - "/v1/realtime" - ], - 
"supported_modalities": [ - "text", - "image", - "audio" - ], - "supported_output_modalities": [ - "text", - "audio" - ], - "supports_audio_input": true, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "gradient_ai/alibaba-qwen3-32b": { - "litellm_provider": "gradient_ai", - "max_tokens": 2048, - "mode": "chat", - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/anthropic-claude-3-opus": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "gradient_ai", - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/anthropic-claude-3.5-haiku": { - "input_cost_per_token": 8e-07, - "litellm_provider": "gradient_ai", - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_token": 4e-06, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/anthropic-claude-3.5-sonnet": { - "input_cost_per_token": 3e-06, - "litellm_provider": "gradient_ai", - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/anthropic-claude-3.7-sonnet": { - "input_cost_per_token": 3e-06, - "litellm_provider": "gradient_ai", - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/deepseek-r1-distill-llama-70b": { - "input_cost_per_token": 9.9e-07, - 
"litellm_provider": "gradient_ai", - "max_tokens": 8000, - "mode": "chat", - "output_cost_per_token": 9.9e-07, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/llama3-8b-instruct": { - "input_cost_per_token": 2e-07, - "litellm_provider": "gradient_ai", - "max_tokens": 512, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/llama3.3-70b-instruct": { - "input_cost_per_token": 6.5e-07, - "litellm_provider": "gradient_ai", - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 6.5e-07, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/mistral-nemo-instruct-2407": { - "input_cost_per_token": 3e-07, - "litellm_provider": "gradient_ai", - "max_tokens": 512, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/openai-gpt-4o": { - "litellm_provider": "gradient_ai", - "max_tokens": 16384, - "mode": "chat", - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/openai-gpt-4o-mini": { - "litellm_provider": "gradient_ai", - "max_tokens": 16384, - "mode": "chat", - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "gradient_ai/openai-o3": { - "input_cost_per_token": 2e-06, - "litellm_provider": "gradient_ai", - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - 
"supports_tool_choice": false - }, - "gradient_ai/openai-o3-mini": { - "input_cost_per_token": 1.1e-06, - "litellm_provider": "gradient_ai", - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "supported_endpoints": [ - "/v1/chat/completions" - ], - "supported_modalities": [ - "text" - ], - "supports_tool_choice": false - }, - "lemonade/Qwen3-Coder-30B-A3B-Instruct-GGUF": { - "input_cost_per_token": 0, - "litellm_provider": "lemonade", - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "lemonade/gpt-oss-20b-mxfp4-GGUF": { - "input_cost_per_token": 0, - "litellm_provider": "lemonade", - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "lemonade/gpt-oss-120b-mxfp-GGUF": { - "input_cost_per_token": 0, - "litellm_provider": "lemonade", - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "lemonade/Gemma-3-4b-it-GGUF": { - "input_cost_per_token": 0, - "litellm_provider": "lemonade", - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "lemonade/Qwen3-4B-Instruct-2507-GGUF": { - "input_cost_per_token": 0, - "litellm_provider": "lemonade", - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0, - 
"supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "amazon-nova/nova-micro-v1": { - "input_cost_per_token": 3.5e-08, - "litellm_provider": "amazon_nova", - "max_input_tokens": 128000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 1.4e-07, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "amazon-nova/nova-lite-v1": { - "input_cost_per_token": 6e-08, - "litellm_provider": "amazon_nova", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 2.4e-07, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "amazon-nova/nova-premier-v1": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "amazon_nova", - "max_input_tokens": 1000000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 1.25e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": false, - "supports_response_schema": true, - "supports_vision": true - }, - "amazon-nova/nova-pro-v1": { - "input_cost_per_token": 8e-07, - "litellm_provider": "amazon_nova", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 3.2e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "groq/deepseek-r1-distill-llama-70b": { - "input_cost_per_token": 7.5e-07, - "litellm_provider": "groq", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 9.9e-07, - "supports_function_calling": true, - "supports_reasoning": 
true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/distil-whisper-large-v3-en": { - "input_cost_per_second": 5.56e-06, - "litellm_provider": "groq", - "mode": "audio_transcription", - "output_cost_per_second": 0.0 - }, - "groq/gemma-7b-it": { - "deprecation_date": "2024-12-18", - "input_cost_per_token": 7e-08, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 7e-08, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/gemma2-9b-it": { - "input_cost_per_token": 2e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_function_calling": false, - "supports_response_schema": false, - "supports_tool_choice": false - }, - "groq/llama-3.1-405b-reasoning": { - "input_cost_per_token": 5.9e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 7.9e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/llama-3.1-70b-versatile": { - "deprecation_date": "2025-01-24", - "input_cost_per_token": 5.9e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 7.9e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/llama-3.1-8b-instant": { - "input_cost_per_token": 5e-08, - "litellm_provider": "groq", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 8e-08, - "supports_function_calling": true, - "supports_response_schema": false, - 
"supports_tool_choice": true - }, - "groq/llama-3.2-11b-text-preview": { - "deprecation_date": "2024-10-28", - "input_cost_per_token": 1.8e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.8e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/llama-3.2-11b-vision-preview": { - "deprecation_date": "2025-04-14", - "input_cost_per_token": 1.8e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.8e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "groq/llama-3.2-1b-preview": { - "deprecation_date": "2025-04-14", - "input_cost_per_token": 4e-08, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4e-08, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/llama-3.2-3b-preview": { - "deprecation_date": "2025-04-14", - "input_cost_per_token": 6e-08, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-08, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/llama-3.2-90b-text-preview": { - "deprecation_date": "2024-11-25", - "input_cost_per_token": 9e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 9e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/llama-3.2-90b-vision-preview": { - 
"deprecation_date": "2025-04-14", - "input_cost_per_token": 9e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 9e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_vision": true - }, - "groq/llama-3.3-70b-specdec": { - "deprecation_date": "2025-04-14", - "input_cost_per_token": 5.9e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 9.9e-07, - "supports_tool_choice": true - }, - "groq/llama-3.3-70b-versatile": { - "input_cost_per_token": 5.9e-07, - "litellm_provider": "groq", - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 7.9e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/llama-guard-3-8b": { - "input_cost_per_token": 2e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2e-07 - }, - "groq/llama2-70b-4096": { - "input_cost_per_token": 7e-07, - "litellm_provider": "groq", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 8e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/llama3-groq-70b-8192-tool-use-preview": { - "deprecation_date": "2025-01-06", - "input_cost_per_token": 8.9e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 8.9e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - 
"groq/llama3-groq-8b-8192-tool-use-preview": { - "deprecation_date": "2025-01-06", - "input_cost_per_token": 1.9e-07, - "litellm_provider": "groq", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.9e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/meta-llama/llama-4-maverick-17b-128e-instruct": { - "input_cost_per_token": 2e-07, - "litellm_provider": "groq", - "max_input_tokens": 131072, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "groq/meta-llama/llama-4-scout-17b-16e-instruct": { - "input_cost_per_token": 1.1e-07, - "litellm_provider": "groq", - "max_input_tokens": 131072, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 3.4e-07, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "groq/mistral-saba-24b": { - "input_cost_per_token": 7.9e-07, - "litellm_provider": "groq", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.9e-07 - }, - "groq/mixtral-8x7b-32768": { - "deprecation_date": "2025-03-20", - "input_cost_per_token": 2.4e-07, - "litellm_provider": "groq", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 2.4e-07, - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/moonshotai/kimi-k2-instruct": { - "input_cost_per_token": 1e-06, - "litellm_provider": "groq", - "max_input_tokens": 131072, - "max_output_tokens": 16384, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 3e-06, - "supports_function_calling": 
true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "groq/moonshotai/kimi-k2-instruct-0905": { - "input_cost_per_token": 1e-06, - "output_cost_per_token": 3e-06, - "cache_read_input_token_cost": 0.5e-06, - "litellm_provider": "groq", - "max_input_tokens": 262144, - "max_output_tokens": 16384, - "max_tokens": 278528, - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "groq/openai/gpt-oss-120b": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "groq", - "max_input_tokens": 131072, - "max_output_tokens": 32766, - "max_tokens": 32766, - "mode": "chat", - "output_cost_per_token": 7.5e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "groq/openai/gpt-oss-20b": { - "input_cost_per_token": 1e-07, - "litellm_provider": "groq", - "max_input_tokens": 131072, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 5e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "groq/playai-tts": { - "input_cost_per_character": 5e-05, - "litellm_provider": "groq", - "max_input_tokens": 10000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "audio_speech" - }, - "groq/qwen/qwen3-32b": { - "input_cost_per_token": 2.9e-07, - "litellm_provider": "groq", - "max_input_tokens": 131000, - "max_output_tokens": 131000, - "max_tokens": 131000, - "mode": "chat", - "output_cost_per_token": 5.9e-07, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": false, - "supports_tool_choice": true - }, - "groq/whisper-large-v3": { - "input_cost_per_second": 
3.083e-05, - "litellm_provider": "groq", - "mode": "audio_transcription", - "output_cost_per_second": 0.0 - }, - "groq/whisper-large-v3-turbo": { - "input_cost_per_second": 1.111e-05, - "litellm_provider": "groq", - "mode": "audio_transcription", - "output_cost_per_second": 0.0 - }, - "hd/1024-x-1024/dall-e-3": { - "input_cost_per_pixel": 7.629e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0 - }, - "hd/1024-x-1792/dall-e-3": { - "input_cost_per_pixel": 6.539e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0 - }, - "hd/1792-x-1024/dall-e-3": { - "input_cost_per_pixel": 6.539e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0 - }, - "heroku/claude-3-5-haiku": { - "litellm_provider": "heroku", - "max_tokens": 4096, - "mode": "chat", - "supports_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "heroku/claude-3-5-sonnet-latest": { - "litellm_provider": "heroku", - "max_tokens": 8192, - "mode": "chat", - "supports_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "heroku/claude-3-7-sonnet": { - "litellm_provider": "heroku", - "max_tokens": 8192, - "mode": "chat", - "supports_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "heroku/claude-4-sonnet": { - "litellm_provider": "heroku", - "max_tokens": 8192, - "mode": "chat", - "supports_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "high/1024-x-1024/gpt-image-1": { - "input_cost_per_pixel": 1.59263611e-07, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "high/1024-x-1536/gpt-image-1": { - "input_cost_per_pixel": 1.58945719e-07, - "litellm_provider": "openai", - "mode": 
"image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "high/1536-x-1024/gpt-image-1": { - "input_cost_per_pixel": 1.58945719e-07, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "hyperbolic/NousResearch/Hermes-3-Llama-3.1-70B": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/Qwen/QwQ-32B": { - "input_cost_per_token": 2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/Qwen/Qwen2.5-72B-Instruct": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/Qwen/Qwen2.5-Coder-32B-Instruct": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - 
"hyperbolic/Qwen/Qwen3-235B-A22B": { - "input_cost_per_token": 2e-06, - "litellm_provider": "hyperbolic", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/deepseek-ai/DeepSeek-R1": { - "input_cost_per_token": 4e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/deepseek-ai/DeepSeek-R1-0528": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/deepseek-ai/DeepSeek-V3": { - "input_cost_per_token": 2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/deepseek-ai/DeepSeek-V3-0324": { - "input_cost_per_token": 4e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - 
"supports_tool_choice": true - }, - "hyperbolic/meta-llama/Llama-3.2-3B-Instruct": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/meta-llama/Llama-3.3-70B-Instruct": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/meta-llama/Meta-Llama-3-70B-Instruct": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/meta-llama/Meta-Llama-3.1-405B-Instruct": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/meta-llama/Meta-Llama-3.1-70B-Instruct": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - 
"supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/meta-llama/Meta-Llama-3.1-8B-Instruct": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "hyperbolic", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "hyperbolic/moonshotai/Kimi-K2-Instruct": { - "input_cost_per_token": 2e-06, - "litellm_provider": "hyperbolic", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "j2-light": { - "input_cost_per_token": 3e-06, - "litellm_provider": "ai21", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "completion", - "output_cost_per_token": 3e-06 - }, - "j2-mid": { - "input_cost_per_token": 1e-05, - "litellm_provider": "ai21", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "completion", - "output_cost_per_token": 1e-05 - }, - "j2-ultra": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "ai21", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "completion", - "output_cost_per_token": 1.5e-05 - }, - "jamba-1.5": { - "input_cost_per_token": 2e-07, - "litellm_provider": "ai21", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_tool_choice": true - }, - "jamba-1.5-large": { - "input_cost_per_token": 2e-06, - "litellm_provider": "ai21", - "max_input_tokens": 256000, - "max_output_tokens": 
256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supports_tool_choice": true - }, - "jamba-1.5-large@001": { - "input_cost_per_token": 2e-06, - "litellm_provider": "ai21", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supports_tool_choice": true - }, - "jamba-1.5-mini": { - "input_cost_per_token": 2e-07, - "litellm_provider": "ai21", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_tool_choice": true - }, - "jamba-1.5-mini@001": { - "input_cost_per_token": 2e-07, - "litellm_provider": "ai21", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_tool_choice": true - }, - "jamba-large-1.6": { - "input_cost_per_token": 2e-06, - "litellm_provider": "ai21", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supports_tool_choice": true - }, - "jamba-large-1.7": { - "input_cost_per_token": 2e-06, - "litellm_provider": "ai21", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supports_tool_choice": true - }, - "jamba-mini-1.6": { - "input_cost_per_token": 2e-07, - "litellm_provider": "ai21", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_tool_choice": true - }, - "jamba-mini-1.7": { - "input_cost_per_token": 2e-07, - "litellm_provider": "ai21", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_tool_choice": true - }, - "jina-reranker-v2-base-multilingual": { - 
"input_cost_per_token": 1.8e-08, - "litellm_provider": "jina_ai", - "max_document_chunks_per_query": 2048, - "max_input_tokens": 1024, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "rerank", - "output_cost_per_token": 1.8e-08 - }, - "jp.anthropic.claude-sonnet-4-5-20250929-v1:0": { - "cache_creation_input_token_cost": 4.125e-06, - "cache_read_input_token_cost": 3.3e-07, - "input_cost_per_token": 3.3e-06, - "input_cost_per_token_above_200k_tokens": 6.6e-06, - "output_cost_per_token_above_200k_tokens": 2.475e-05, - "cache_creation_input_token_cost_above_200k_tokens": 8.25e-06, - "cache_read_input_token_cost_above_200k_tokens": 6.6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.65e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "jp.anthropic.claude-haiku-4-5-20251001-v1:0": { - "cache_creation_input_token_cost": 1.375e-06, - "cache_read_input_token_cost": 1.1e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5.5e-06, - "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - 
"supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "lambda_ai/deepseek-llama3.3-70b": { - "input_cost_per_token": 2e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/deepseek-r1-0528": { - "input_cost_per_token": 2e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/deepseek-r1-671b": { - "input_cost_per_token": 8e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 8e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/deepseek-v3-0324": { - "input_cost_per_token": 2e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/hermes3-405b": { - "input_cost_per_token": 8e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - 
"mode": "chat", - "output_cost_per_token": 8e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/hermes3-70b": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/hermes3-8b": { - "input_cost_per_token": 2.5e-08, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 4e-08, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/lfm-40b": { - "input_cost_per_token": 1e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/lfm-7b": { - "input_cost_per_token": 2.5e-08, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 4e-08, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/llama-4-maverick-17b-128e-instruct-fp8": { - "input_cost_per_token": 5e-08, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 8192, - "max_tokens": 131072, - "mode": "chat", - 
"output_cost_per_token": 1e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/llama-4-scout-17b-16e-instruct": { - "input_cost_per_token": 5e-08, - "litellm_provider": "lambda_ai", - "max_input_tokens": 16384, - "max_output_tokens": 8192, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/llama3.1-405b-instruct-fp8": { - "input_cost_per_token": 8e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 8e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/llama3.1-70b-instruct-fp8": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/llama3.1-8b-instruct": { - "input_cost_per_token": 2.5e-08, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 4e-08, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/llama3.1-nemotron-70b-instruct-fp8": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 
131072, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/llama3.2-11b-vision-instruct": { - "input_cost_per_token": 1.5e-08, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.5e-08, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "lambda_ai/llama3.2-3b-instruct": { - "input_cost_per_token": 1.5e-08, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.5e-08, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/llama3.3-70b-instruct-fp8": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/qwen25-coder-32b-instruct": { - "input_cost_per_token": 5e-08, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "lambda_ai/qwen3-32b-fp8": { - "input_cost_per_token": 5e-08, - "litellm_provider": "lambda_ai", - "max_input_tokens": 131072, - 
"max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_system_messages": true, - "supports_tool_choice": true - }, - "low/1024-x-1024/gpt-image-1": { - "input_cost_per_pixel": 1.0490417e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "low/1024-x-1536/gpt-image-1": { - "input_cost_per_pixel": 1.0172526e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "low/1536-x-1024/gpt-image-1": { - "input_cost_per_pixel": 1.0172526e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "luminous-base": { - "input_cost_per_token": 3e-05, - "litellm_provider": "aleph_alpha", - "max_tokens": 2048, - "mode": "completion", - "output_cost_per_token": 3.3e-05 - }, - "luminous-base-control": { - "input_cost_per_token": 3.75e-05, - "litellm_provider": "aleph_alpha", - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 4.125e-05 - }, - "luminous-extended": { - "input_cost_per_token": 4.5e-05, - "litellm_provider": "aleph_alpha", - "max_tokens": 2048, - "mode": "completion", - "output_cost_per_token": 4.95e-05 - }, - "luminous-extended-control": { - "input_cost_per_token": 5.625e-05, - "litellm_provider": "aleph_alpha", - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 6.1875e-05 - }, - "luminous-supreme": { - "input_cost_per_token": 0.000175, - "litellm_provider": "aleph_alpha", - "max_tokens": 2048, - "mode": "completion", - "output_cost_per_token": 0.0001925 - }, - "luminous-supreme-control": { - "input_cost_per_token": 0.00021875, - 
"litellm_provider": "aleph_alpha", - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 0.000240625 - }, - "max-x-max/50-steps/stability.stable-diffusion-xl-v0": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "image_generation", - "output_cost_per_image": 0.036 - }, - "max-x-max/max-steps/stability.stable-diffusion-xl-v0": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "image_generation", - "output_cost_per_image": 0.072 - }, - "medium/1024-x-1024/gpt-image-1": { - "input_cost_per_pixel": 4.0054321e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "medium/1024-x-1536/gpt-image-1": { - "input_cost_per_pixel": 4.0054321e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "medium/1536-x-1024/gpt-image-1": { - "input_cost_per_pixel": 4.0054321e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "low/1024-x-1024/gpt-image-1-mini": { - "input_cost_per_image": 0.005, - "litellm_provider": "openai", - "mode": "image_generation", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "low/1024-x-1536/gpt-image-1-mini": { - "input_cost_per_image": 0.006, - "litellm_provider": "openai", - "mode": "image_generation", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "low/1536-x-1024/gpt-image-1-mini": { - "input_cost_per_image": 0.006, - "litellm_provider": "openai", - "mode": "image_generation", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "medium/1024-x-1024/gpt-image-1-mini": { - "input_cost_per_image": 0.011, - "litellm_provider": "openai", - "mode": "image_generation", - "supported_endpoints": [ 
- "/v1/images/generations" - ] - }, - "medium/1024-x-1536/gpt-image-1-mini": { - "input_cost_per_image": 0.015, - "litellm_provider": "openai", - "mode": "image_generation", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "medium/1536-x-1024/gpt-image-1-mini": { - "input_cost_per_image": 0.015, - "litellm_provider": "openai", - "mode": "image_generation", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "medlm-large": { - "input_cost_per_character": 5e-06, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "chat", - "output_cost_per_character": 1.5e-05, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "medlm-medium": { - "input_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_character": 1e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", - "supports_tool_choice": true - }, - "meta.llama2-13b-chat-v1": { - "input_cost_per_token": 7.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1e-06 - }, - "meta.llama2-70b-chat-v1": { - "input_cost_per_token": 1.95e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.56e-06 - }, - "meta.llama3-1-405b-instruct-v1:0": { - "input_cost_per_token": 5.32e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.6e-05, - "supports_function_calling": true, - "supports_tool_choice": false - }, - 
"meta.llama3-1-70b-instruct-v1:0": { - "input_cost_per_token": 9.9e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 9.9e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-1-8b-instruct-v1:0": { - "input_cost_per_token": 2.2e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2.2e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-2-11b-instruct-v1:0": { - "input_cost_per_token": 3.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3.5e-07, - "supports_function_calling": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "meta.llama3-2-1b-instruct-v1:0": { - "input_cost_per_token": 1e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-2-3b-instruct-v1:0": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-2-90b-instruct-v1:0": { - "input_cost_per_token": 2e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "meta.llama3-3-70b-instruct-v1:0": { - 
"input_cost_per_token": 7.2e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7.2e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-70b-instruct-v1:0": { - "input_cost_per_token": 2.65e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 3.5e-06 - }, - "meta.llama3-8b-instruct-v1:0": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-07 - }, - "meta.llama4-maverick-17b-instruct-v1:0": { - "input_cost_per_token": 2.4e-07, - "input_cost_per_token_batches": 1.2e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 9.7e-07, - "output_cost_per_token_batches": 4.85e-07, - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "code" - ], - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama4-scout-17b-instruct-v1:0": { - "input_cost_per_token": 1.7e-07, - "input_cost_per_token_batches": 8.5e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6.6e-07, - "output_cost_per_token_batches": 3.3e-07, - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "code" - ], - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta_llama/Llama-3.3-70B-Instruct": { - "litellm_provider": "meta_llama", - "max_input_tokens": 128000, - "max_output_tokens": 4028, - "max_tokens": 128000, - "mode": 
"chat", - "source": "https://llama.developer.meta.com/docs/models", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_tool_choice": true - }, - "meta_llama/Llama-3.3-8B-Instruct": { - "litellm_provider": "meta_llama", - "max_input_tokens": 128000, - "max_output_tokens": 4028, - "max_tokens": 128000, - "mode": "chat", - "source": "https://llama.developer.meta.com/docs/models", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_tool_choice": true - }, - "meta_llama/Llama-4-Maverick-17B-128E-Instruct-FP8": { - "litellm_provider": "meta_llama", - "max_input_tokens": 1000000, - "max_output_tokens": 4028, - "max_tokens": 128000, - "mode": "chat", - "source": "https://llama.developer.meta.com/docs/models", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_tool_choice": true - }, - "meta_llama/Llama-4-Scout-17B-16E-Instruct-FP8": { - "litellm_provider": "meta_llama", - "max_input_tokens": 10000000, - "max_output_tokens": 4028, - "max_tokens": 128000, - "mode": "chat", - "source": "https://llama.developer.meta.com/docs/models", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_tool_choice": true - }, - "minimax.minimax-m2": { - "input_cost_per_token": 3e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "supports_system_messages": true - }, - "mistral.magistral-small-2509": { - "input_cost_per_token": 5e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - 
"output_cost_per_token": 1.5e-06, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_system_messages": true - }, - "mistral.ministral-3-14b-instruct": { - "input_cost_per_token": 2e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_function_calling": true, - "supports_system_messages": true - }, - "mistral.ministral-3-3b-instruct": { - "input_cost_per_token": 1e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1e-07, - "supports_function_calling": true, - "supports_system_messages": true - }, - "mistral.ministral-3-8b-instruct": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "supports_function_calling": true, - "supports_system_messages": true - }, - "mistral.mistral-7b-instruct-v0:2": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_tool_choice": true - }, - "mistral.mistral-large-2402-v1:0": { - "input_cost_per_token": 8e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.4e-05, - "supports_function_calling": true - }, - "mistral.mistral-large-2407-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 9e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - 
"mistral.mistral-large-3-675b-instruct": { - "input_cost_per_token": 5e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_function_calling": true, - "supports_system_messages": true - }, - "mistral.mistral-small-2402-v1:0": { - "input_cost_per_token": 1e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 3e-06, - "supports_function_calling": true - }, - "mistral.mixtral-8x7b-instruct-v0:1": { - "input_cost_per_token": 4.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 7e-07, - "supports_tool_choice": true - }, - "mistral.voxtral-mini-3b-2507": { - "input_cost_per_token": 4e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4e-08, - "supports_audio_input": true, - "supports_system_messages": true - }, - "mistral.voxtral-small-24b-2507": { - "input_cost_per_token": 1e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_audio_input": true, - "supports_system_messages": true - }, - "mistral/codestral-2405": { - "input_cost_per_token": 1e-06, - "litellm_provider": "mistral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 3e-06, - "supports_assistant_prefill": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/codestral-2508": { - "input_cost_per_token": 3e-07, - "litellm_provider": "mistral", - "max_input_tokens": 256000, - 
"max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 9e-07, - "source": "https://mistral.ai/news/codestral-25-08", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/codestral-latest": { - "input_cost_per_token": 1e-06, - "litellm_provider": "mistral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 3e-06, - "supports_assistant_prefill": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/codestral-mamba-latest": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "mistral", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true, - "supports_tool_choice": true - }, - "mistral/devstral-medium-2507": { - "input_cost_per_token": 4e-07, - "litellm_provider": "mistral", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "source": "https://mistral.ai/news/devstral", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/devstral-small-2505": { - "input_cost_per_token": 1e-07, - "litellm_provider": "mistral", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3e-07, - "source": "https://mistral.ai/news/devstral", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/devstral-small-2507": { - "input_cost_per_token": 1e-07, - "litellm_provider": "mistral", - 
"max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3e-07, - "source": "https://mistral.ai/news/devstral", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/labs-devstral-small-2512": { - "input_cost_per_token": 1e-07, - "litellm_provider": "mistral", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 3e-07, - "source": "https://docs.mistral.ai/models/devstral-small-2-25-12", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/devstral-2512": { - "input_cost_per_token": 4e-07, - "litellm_provider": "mistral", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "source": "https://mistral.ai/news/devstral-2-vibe-cli", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/magistral-medium-2506": { - "input_cost_per_token": 2e-06, - "litellm_provider": "mistral", - "max_input_tokens": 40000, - "max_output_tokens": 40000, - "max_tokens": 40000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://mistral.ai/news/magistral", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/magistral-medium-2509": { - "input_cost_per_token": 2e-06, - "litellm_provider": "mistral", - "max_input_tokens": 40000, - "max_output_tokens": 40000, - "max_tokens": 40000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://mistral.ai/news/magistral", - 
"supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-ocr-latest": { - "litellm_provider": "mistral", - "ocr_cost_per_page": 1e-3, - "annotation_cost_per_page": 3e-3, - "mode": "ocr", - "supported_endpoints": [ - "/v1/ocr" - ], - "source": "https://mistral.ai/pricing#api-pricing" - }, - "mistral/mistral-ocr-2505-completion": { - "litellm_provider": "mistral", - "ocr_cost_per_page": 1e-3, - "annotation_cost_per_page": 3e-3, - "mode": "ocr", - "supported_endpoints": [ - "/v1/ocr" - ], - "source": "https://mistral.ai/pricing#api-pricing" - }, - "mistral/magistral-medium-latest": { - "input_cost_per_token": 2e-06, - "litellm_provider": "mistral", - "max_input_tokens": 40000, - "max_output_tokens": 40000, - "max_tokens": 40000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://mistral.ai/news/magistral", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/magistral-small-2506": { - "input_cost_per_token": 5e-07, - "litellm_provider": "mistral", - "max_input_tokens": 40000, - "max_output_tokens": 40000, - "max_tokens": 40000, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://mistral.ai/pricing#api-pricing", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/magistral-small-latest": { - "input_cost_per_token": 5e-07, - "litellm_provider": "mistral", - "max_input_tokens": 40000, - "max_output_tokens": 40000, - "max_tokens": 40000, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://mistral.ai/pricing#api-pricing", - "supports_assistant_prefill": true, - "supports_function_calling": true, - 
"supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-embed": { - "input_cost_per_token": 1e-07, - "litellm_provider": "mistral", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding" - }, - "mistral/codestral-embed": { - "input_cost_per_token": 0.15e-06, - "litellm_provider": "mistral", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding" - }, - "mistral/codestral-embed-2505": { - "input_cost_per_token": 0.15e-06, - "litellm_provider": "mistral", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding" - }, - "mistral/mistral-large-2402": { - "input_cost_per_token": 4e-06, - "litellm_provider": "mistral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-large-2407": { - "input_cost_per_token": 3e-06, - "litellm_provider": "mistral", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 9e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-large-2411": { - "input_cost_per_token": 2e-06, - "litellm_provider": "mistral", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-large-latest": { - "input_cost_per_token": 2e-06, - "litellm_provider": "mistral", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - 
"output_cost_per_token": 6e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-large-3": { - "input_cost_per_token": 5e-07, - "litellm_provider": "mistral", - "max_input_tokens": 256000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://docs.mistral.ai/models/mistral-large-3-25-12", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "mistral/mistral-medium": { - "input_cost_per_token": 2.7e-06, - "litellm_provider": "mistral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 8.1e-06, - "supports_assistant_prefill": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-medium-2312": { - "input_cost_per_token": 2.7e-06, - "litellm_provider": "mistral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 8.1e-06, - "supports_assistant_prefill": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-medium-2505": { - "input_cost_per_token": 4e-07, - "litellm_provider": "mistral", - "max_input_tokens": 131072, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-medium-latest": { - "input_cost_per_token": 4e-07, - "litellm_provider": "mistral", - "max_input_tokens": 131072, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_assistant_prefill": true, - 
"supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-small": { - "input_cost_per_token": 1e-07, - "litellm_provider": "mistral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-small-latest": { - "input_cost_per_token": 1e-07, - "litellm_provider": "mistral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/mistral-tiny": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "mistral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "supports_assistant_prefill": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/open-codestral-mamba": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "mistral", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true, - "supports_tool_choice": true - }, - "mistral/open-mistral-7b": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "mistral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "supports_assistant_prefill": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/open-mistral-nemo": { - "input_cost_per_token": 3e-07, - "litellm_provider": 
"mistral", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3e-07, - "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/open-mistral-nemo-2407": { - "input_cost_per_token": 3e-07, - "litellm_provider": "mistral", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3e-07, - "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/open-mixtral-8x22b": { - "input_cost_per_token": 2e-06, - "litellm_provider": "mistral", - "max_input_tokens": 65336, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/open-mixtral-8x7b": { - "input_cost_per_token": 7e-07, - "litellm_provider": "mistral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 7e-07, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "mistral/pixtral-12b-2409": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "mistral", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "mistral/pixtral-large-2411": { - "input_cost_per_token": 2e-06, - "litellm_provider": "mistral", - "max_input_tokens": 
128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "mistral/pixtral-large-latest": { - "input_cost_per_token": 2e-06, - "litellm_provider": "mistral", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "moonshot.kimi-k2-thinking": { - "input_cost_per_token": 6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "supports_reasoning": true, - "supports_system_messages": true - }, - "moonshot/kimi-k2-0711-preview": { - "cache_read_input_token_cost": 1.5e-07, - "input_cost_per_token": 6e-07, - "litellm_provider": "moonshot", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "moonshot/kimi-k2-0905-preview": { - "cache_read_input_token_cost": 1.5e-07, - "input_cost_per_token": 6e-07, - "litellm_provider": "moonshot", - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - 
"moonshot/kimi-k2-turbo-preview": { - "cache_read_input_token_cost": 1.5e-07, - "input_cost_per_token": 1.15e-06, - "litellm_provider": "moonshot", - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 8e-06, - "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "moonshot/kimi-latest": { - "cache_read_input_token_cost": 1.5e-07, - "input_cost_per_token": 2e-06, - "litellm_provider": "moonshot", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "moonshot/kimi-latest-128k": { - "cache_read_input_token_cost": 1.5e-07, - "input_cost_per_token": 2e-06, - "litellm_provider": "moonshot", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "moonshot/kimi-latest-32k": { - "cache_read_input_token_cost": 1.5e-07, - "input_cost_per_token": 1e-06, - "litellm_provider": "moonshot", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "moonshot/kimi-latest-8k": { - "cache_read_input_token_cost": 1.5e-07, - "input_cost_per_token": 2e-07, - "litellm_provider": "moonshot", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - 
"mode": "chat", - "output_cost_per_token": 2e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "moonshot/kimi-thinking-preview": { - "cache_read_input_token_cost": 1.5e-07, - "input_cost_per_token": 6e-07, - "litellm_provider": "moonshot", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", - "supports_vision": true - }, - "moonshot/kimi-k2-thinking": { - "cache_read_input_token_cost": 1.5e-7, - "input_cost_per_token": 6e-7, - "litellm_provider": "moonshot", - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 2.5e-6, - "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "moonshot/kimi-k2-thinking-turbo": { - "cache_read_input_token_cost": 1.5e-7, - "input_cost_per_token": 1.15e-6, - "litellm_provider": "moonshot", - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 8e-6, - "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "moonshot/moonshot-v1-128k": { - "input_cost_per_token": 2e-06, - "litellm_provider": "moonshot", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "moonshot/moonshot-v1-128k-0430": { - "input_cost_per_token": 2e-06, - 
"litellm_provider": "moonshot", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "moonshot/moonshot-v1-128k-vision-preview": { - "input_cost_per_token": 2e-06, - "litellm_provider": "moonshot", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "moonshot/moonshot-v1-32k": { - "input_cost_per_token": 1e-06, - "litellm_provider": "moonshot", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "moonshot/moonshot-v1-32k-0430": { - "input_cost_per_token": 1e-06, - "litellm_provider": "moonshot", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "moonshot/moonshot-v1-32k-vision-preview": { - "input_cost_per_token": 1e-06, - "litellm_provider": "moonshot", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "moonshot/moonshot-v1-8k": { - "input_cost_per_token": 2e-07, - "litellm_provider": "moonshot", - "max_input_tokens": 8192, - "max_output_tokens": 8192, 
- "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "moonshot/moonshot-v1-8k-0430": { - "input_cost_per_token": 2e-07, - "litellm_provider": "moonshot", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "moonshot/moonshot-v1-8k-vision-preview": { - "input_cost_per_token": 2e-07, - "litellm_provider": "moonshot", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "moonshot/moonshot-v1-auto": { - "input_cost_per_token": 2e-06, - "litellm_provider": "moonshot", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://platform.moonshot.ai/docs/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "morph/morph-v3-fast": { - "input_cost_per_token": 8e-07, - "litellm_provider": "morph", - "max_input_tokens": 16000, - "max_output_tokens": 16000, - "max_tokens": 16000, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_system_messages": true, - "supports_tool_choice": false, - "supports_vision": false - }, - "morph/morph-v3-large": { - "input_cost_per_token": 9e-07, - "litellm_provider": "morph", - "max_input_tokens": 16000, - "max_output_tokens": 16000, - "max_tokens": 16000, - "mode": "chat", - "output_cost_per_token": 1.9e-06, - 
"supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_system_messages": true, - "supports_tool_choice": false, - "supports_vision": false - }, - "multimodalembedding": { - "input_cost_per_character": 2e-07, - "input_cost_per_image": 0.0001, - "input_cost_per_token": 8e-07, - "input_cost_per_video_per_second": 0.0005, - "input_cost_per_video_per_second_above_15s_interval": 0.002, - "input_cost_per_video_per_second_above_8s_interval": 0.001, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 2048, - "max_tokens": 2048, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models", - "supported_endpoints": [ - "/v1/embeddings" - ], - "supported_modalities": [ - "text", - "image", - "video" - ] - }, - "multimodalembedding@001": { - "input_cost_per_character": 2e-07, - "input_cost_per_image": 0.0001, - "input_cost_per_token": 8e-07, - "input_cost_per_video_per_second": 0.0005, - "input_cost_per_video_per_second_above_15s_interval": 0.002, - "input_cost_per_video_per_second_above_8s_interval": 0.001, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 2048, - "max_tokens": 2048, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models", - "supported_endpoints": [ - "/v1/embeddings" - ], - "supported_modalities": [ - "text", - "image", - "video" - ] - }, - "nscale/Qwen/QwQ-32B": { - "input_cost_per_token": 1.8e-07, - "litellm_provider": "nscale", - "mode": "chat", - "output_cost_per_token": 2e-07, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/Qwen/Qwen2.5-Coder-32B-Instruct": { - "input_cost_per_token": 6e-08, - "litellm_provider": "nscale", - "mode": "chat", - "output_cost_per_token": 2e-07, - "source": 
"https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/Qwen/Qwen2.5-Coder-3B-Instruct": { - "input_cost_per_token": 1e-08, - "litellm_provider": "nscale", - "mode": "chat", - "output_cost_per_token": 3e-08, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/Qwen/Qwen2.5-Coder-7B-Instruct": { - "input_cost_per_token": 1e-08, - "litellm_provider": "nscale", - "mode": "chat", - "output_cost_per_token": 3e-08, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/black-forest-labs/FLUX.1-schnell": { - "input_cost_per_pixel": 1.3e-09, - "litellm_provider": "nscale", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#image-models", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "nscale/deepseek-ai/DeepSeek-R1-Distill-Llama-70B": { - "input_cost_per_token": 3.75e-07, - "litellm_provider": "nscale", - "metadata": { - "notes": "Pricing listed as $0.75/1M tokens total. Assumed 50/50 split for input/output." - }, - "mode": "chat", - "output_cost_per_token": 3.75e-07, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/deepseek-ai/DeepSeek-R1-Distill-Llama-8B": { - "input_cost_per_token": 2.5e-08, - "litellm_provider": "nscale", - "metadata": { - "notes": "Pricing listed as $0.05/1M tokens total. Assumed 50/50 split for input/output." - }, - "mode": "chat", - "output_cost_per_token": 2.5e-08, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B": { - "input_cost_per_token": 9e-08, - "litellm_provider": "nscale", - "metadata": { - "notes": "Pricing listed as $0.18/1M tokens total. Assumed 50/50 split for input/output." 
- }, - "mode": "chat", - "output_cost_per_token": 9e-08, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-14B": { - "input_cost_per_token": 7e-08, - "litellm_provider": "nscale", - "metadata": { - "notes": "Pricing listed as $0.14/1M tokens total. Assumed 50/50 split for input/output." - }, - "mode": "chat", - "output_cost_per_token": 7e-08, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "nscale", - "metadata": { - "notes": "Pricing listed as $0.30/1M tokens total. Assumed 50/50 split for input/output." - }, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B": { - "input_cost_per_token": 2e-07, - "litellm_provider": "nscale", - "metadata": { - "notes": "Pricing listed as $0.40/1M tokens total. Assumed 50/50 split for input/output." - }, - "mode": "chat", - "output_cost_per_token": 2e-07, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/meta-llama/Llama-3.1-8B-Instruct": { - "input_cost_per_token": 3e-08, - "litellm_provider": "nscale", - "metadata": { - "notes": "Pricing listed as $0.06/1M tokens total. Assumed 50/50 split for input/output." - }, - "mode": "chat", - "output_cost_per_token": 3e-08, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/meta-llama/Llama-3.3-70B-Instruct": { - "input_cost_per_token": 2e-07, - "litellm_provider": "nscale", - "metadata": { - "notes": "Pricing listed as $0.40/1M tokens total. Assumed 50/50 split for input/output." 
- }, - "mode": "chat", - "output_cost_per_token": 2e-07, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/meta-llama/Llama-4-Scout-17B-16E-Instruct": { - "input_cost_per_token": 9e-08, - "litellm_provider": "nscale", - "mode": "chat", - "output_cost_per_token": 2.9e-07, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/mistralai/mixtral-8x22b-instruct-v0.1": { - "input_cost_per_token": 6e-07, - "litellm_provider": "nscale", - "metadata": { - "notes": "Pricing listed as $1.20/1M tokens total. Assumed 50/50 split for input/output." - }, - "mode": "chat", - "output_cost_per_token": 6e-07, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" - }, - "nscale/stabilityai/stable-diffusion-xl-base-1.0": { - "input_cost_per_pixel": 3e-09, - "litellm_provider": "nscale", - "mode": "image_generation", - "output_cost_per_pixel": 0.0, - "source": "https://docs.nscale.com/docs/inference/serverless-models/current#image-models", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "nvidia.nemotron-nano-12b-v2": { - "input_cost_per_token": 2e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_system_messages": true, - "supports_vision": true - }, - "nvidia.nemotron-nano-9b-v2": { - "input_cost_per_token": 6e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.3e-07, - "supports_system_messages": true - }, - "o1": { - "cache_read_input_token_cost": 7.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 6e-05, - 
"supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "o1-2024-12-17": { - "cache_read_input_token_cost": 7.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "o1-mini": { - "cache_read_input_token_cost": 5.5e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_vision": true - }, - "o1-mini-2024-09-12": { - "deprecation_date": "2025-10-27", - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 3e-06, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_vision": true - }, - "o1-preview": { - "cache_read_input_token_cost": 7.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_pdf_input": true, - 
"supports_prompt_caching": true, - "supports_reasoning": true, - "supports_vision": true - }, - "o1-preview-2024-09-12": { - "cache_read_input_token_cost": 7.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "openai", - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_vision": true - }, - "o1-pro": { - "input_cost_per_token": 0.00015, - "input_cost_per_token_batches": 7.5e-05, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 0.0006, - "output_cost_per_token_batches": 0.0003, - "supported_endpoints": [ - "/v1/responses", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": false, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "o1-pro-2025-03-19": { - "input_cost_per_token": 0.00015, - "input_cost_per_token_batches": 7.5e-05, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 0.0006, - "output_cost_per_token_batches": 0.0003, - "supported_endpoints": [ - "/v1/responses", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": false, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - 
"supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "o3": { - "cache_read_input_token_cost": 5e-07, - "cache_read_input_token_cost_flex": 2.5e-07, - "cache_read_input_token_cost_priority": 8.75e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_flex": 1e-06, - "input_cost_per_token_priority": 3.5e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "output_cost_per_token_flex": 4e-06, - "output_cost_per_token_priority": 1.4e-05, - "supported_endpoints": [ - "/v1/responses", - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "o3-2025-04-16": { - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supported_endpoints": [ - "/v1/responses", - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - 
"o3-deep-research": { - "cache_read_input_token_cost": 2.5e-06, - "input_cost_per_token": 1e-05, - "input_cost_per_token_batches": 5e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 4e-05, - "output_cost_per_token_batches": 2e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "o3-deep-research-2025-06-26": { - "cache_read_input_token_cost": 2.5e-06, - "input_cost_per_token": 1e-05, - "input_cost_per_token_batches": 5e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 4e-05, - "output_cost_per_token_batches": 2e-05, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "o3-mini": { - "cache_read_input_token_cost": 5.5e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - 
"output_cost_per_token": 4.4e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "o3-mini-2025-01-31": { - "cache_read_input_token_cost": 5.5e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "o3-pro": { - "input_cost_per_token": 2e-05, - "input_cost_per_token_batches": 1e-05, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 8e-05, - "output_cost_per_token_batches": 4e-05, - "supported_endpoints": [ - "/v1/responses", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "o3-pro-2025-06-10": { - "input_cost_per_token": 2e-05, - "input_cost_per_token_batches": 1e-05, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 8e-05, - "output_cost_per_token_batches": 4e-05, - "supported_endpoints": [ - "/v1/responses", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image" - ], - 
"supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "o4-mini": { - "cache_read_input_token_cost": 2.75e-07, - "cache_read_input_token_cost_flex": 1.375e-07, - "cache_read_input_token_cost_priority": 5e-07, - "input_cost_per_token": 1.1e-06, - "input_cost_per_token_flex": 5.5e-07, - "input_cost_per_token_priority": 2e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "output_cost_per_token_flex": 2.2e-06, - "output_cost_per_token_priority": 8e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "o4-mini-2025-04-16": { - "cache_read_input_token_cost": 2.75e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_service_tier": true, - "supports_vision": true - }, - "o4-mini-deep-research": { - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - 
"max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 8e-06, - "output_cost_per_token_batches": 4e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "o4-mini-deep-research-2025-06-26": { - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "openai", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "responses", - "output_cost_per_token": 8e-06, - "output_cost_per_token_batches": 4e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/batch", - "/v1/responses" - ], - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_function_calling": true, - "supports_native_streaming": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "oci/meta.llama-3.1-405b-instruct": { - "input_cost_per_token": 1.068e-05, - "litellm_provider": "oci", - "max_input_tokens": 128000, - "max_output_tokens": 4000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.068e-05, - "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/meta.llama-3.2-90b-vision-instruct": { - 
"input_cost_per_token": 2e-06, - "litellm_provider": "oci", - "max_input_tokens": 128000, - "max_output_tokens": 4000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/meta.llama-3.3-70b-instruct": { - "input_cost_per_token": 7.2e-07, - "litellm_provider": "oci", - "max_input_tokens": 128000, - "max_output_tokens": 4000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 7.2e-07, - "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/meta.llama-4-maverick-17b-128e-instruct-fp8": { - "input_cost_per_token": 7.2e-07, - "litellm_provider": "oci", - "max_input_tokens": 512000, - "max_output_tokens": 4000, - "max_tokens": 512000, - "mode": "chat", - "output_cost_per_token": 7.2e-07, - "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/meta.llama-4-scout-17b-16e-instruct": { - "input_cost_per_token": 7.2e-07, - "litellm_provider": "oci", - "max_input_tokens": 192000, - "max_output_tokens": 4000, - "max_tokens": 192000, - "mode": "chat", - "output_cost_per_token": 7.2e-07, - "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/xai.grok-3": { - "input_cost_per_token": 3e-06, - "litellm_provider": "oci", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "source": 
"https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/xai.grok-3-fast": { - "input_cost_per_token": 5e-06, - "litellm_provider": "oci", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.5e-05, - "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/xai.grok-3-mini": { - "input_cost_per_token": 3e-07, - "litellm_provider": "oci", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-07, - "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/xai.grok-3-mini-fast": { - "input_cost_per_token": 6e-07, - "litellm_provider": "oci", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 4e-06, - "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/xai.grok-4": { - "input_cost_per_token": 3e-06, - "litellm_provider": "oci", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/cohere.command-latest": { - "input_cost_per_token": 1.56e-06, - "litellm_provider": "oci", - "max_input_tokens": 128000, - "max_output_tokens": 4000, - "max_tokens": 128000, - 
"mode": "chat", - "output_cost_per_token": 1.56e-06, - "source": "https://www.oracle.com/cloud/ai/generative-ai/pricing/", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/cohere.command-a-03-2025": { - "input_cost_per_token": 1.56e-06, - "litellm_provider": "oci", - "max_input_tokens": 256000, - "max_output_tokens": 4000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1.56e-06, - "source": "https://www.oracle.com/cloud/ai/generative-ai/pricing/", - "supports_function_calling": true, - "supports_response_schema": false - }, - "oci/cohere.command-plus-latest": { - "input_cost_per_token": 1.56e-06, - "litellm_provider": "oci", - "max_input_tokens": 128000, - "max_output_tokens": 4000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.56e-06, - "source": "https://www.oracle.com/cloud/ai/generative-ai/pricing/", - "supports_function_calling": true, - "supports_response_schema": false - }, - "ollama/codegeex4": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": false - }, - "ollama/codegemma": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "completion", - "output_cost_per_token": 0.0 - }, - "ollama/codellama": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "completion", - "output_cost_per_token": 0.0 - }, - "ollama/deepseek-coder-v2-base": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "completion", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - 
"ollama/deepseek-coder-v2-instruct": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/deepseek-coder-v2-lite-base": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "completion", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/deepseek-coder-v2-lite-instruct": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/deepseek-v3.1:671b-cloud" : { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "max_tokens": 163840, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/gpt-oss:120b-cloud" : { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/gpt-oss:20b-cloud" : { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/internlm2_5-20b-chat": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/llama2": { - "input_cost_per_token": 0.0, - 
"litellm_provider": "ollama", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "ollama/llama2-uncensored": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "completion", - "output_cost_per_token": 0.0 - }, - "ollama/llama2:13b": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "ollama/llama2:70b": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "ollama/llama2:7b": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "ollama/llama3": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "ollama/llama3.1": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/llama3:70b": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "ollama/llama3:8b": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "ollama/mistral": { - 
"input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "completion", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/mistral-7B-Instruct-v0.1": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/mistral-7B-Instruct-v0.2": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/mistral-large-instruct-2407": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 65536, - "max_output_tokens": 8192, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/mixtral-8x22B-Instruct-v0.1": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/mixtral-8x7B-Instruct-v0.1": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/orca-mini": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "completion", - "output_cost_per_token": 0.0 - }, - "ollama/qwen3-coder:480b-cloud": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 262144, - "max_output_tokens": 
262144, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_function_calling": true - }, - "ollama/vicuna": { - "input_cost_per_token": 0.0, - "litellm_provider": "ollama", - "max_input_tokens": 2048, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "completion", - "output_cost_per_token": 0.0 - }, - "omni-moderation-2024-09-26": { - "input_cost_per_token": 0.0, - "litellm_provider": "openai", - "max_input_tokens": 32768, - "max_output_tokens": 0, - "max_tokens": 32768, - "mode": "moderation", - "output_cost_per_token": 0.0 - }, - "omni-moderation-latest": { - "input_cost_per_token": 0.0, - "litellm_provider": "openai", - "max_input_tokens": 32768, - "max_output_tokens": 0, - "max_tokens": 32768, - "mode": "moderation", - "output_cost_per_token": 0.0 - }, - "omni-moderation-latest-intents": { - "input_cost_per_token": 0.0, - "litellm_provider": "openai", - "max_input_tokens": 32768, - "max_output_tokens": 0, - "max_tokens": 32768, - "mode": "moderation", - "output_cost_per_token": 0.0 - }, - "openai.gpt-oss-120b-1:0": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "openai.gpt-oss-20b-1:0": { - "input_cost_per_token": 7e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "openai.gpt-oss-safeguard-120b": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - 
"max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_system_messages": true - }, - "openai.gpt-oss-safeguard-20b": { - "input_cost_per_token": 7e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_system_messages": true - }, - "openrouter/anthropic/claude-2": { - "input_cost_per_token": 1.102e-05, - "litellm_provider": "openrouter", - "max_output_tokens": 8191, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 3.268e-05, - "supports_tool_choice": true - }, - "openrouter/anthropic/claude-3-5-haiku": { - "input_cost_per_token": 1e-06, - "litellm_provider": "openrouter", - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "openrouter/anthropic/claude-3-5-haiku-20241022": { - "input_cost_per_token": 1e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 5e-06, - "supports_function_calling": true, - "supports_tool_choice": true, - "tool_use_system_prompt_tokens": 264 - }, - "openrouter/anthropic/claude-3-haiku": { - "input_cost_per_image": 0.0004, - "input_cost_per_token": 2.5e-07, - "litellm_provider": "openrouter", - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/anthropic/claude-3-haiku-20240307": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 
264 - }, - "openrouter/anthropic/claude-3-opus": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 395 - }, - "openrouter/anthropic/claude-3-sonnet": { - "input_cost_per_image": 0.0048, - "input_cost_per_token": 3e-06, - "litellm_provider": "openrouter", - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/anthropic/claude-3.5-sonnet": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "openrouter/anthropic/claude-3.5-sonnet:beta": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "openrouter/anthropic/claude-3.7-sonnet": { - "input_cost_per_image": 0.0048, - "input_cost_per_token": 3e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - 
"supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "openrouter/anthropic/claude-3.7-sonnet:beta": { - "input_cost_per_image": 0.0048, - "input_cost_per_token": 3e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "openrouter/anthropic/claude-instant-v1": { - "input_cost_per_token": 1.63e-06, - "litellm_provider": "openrouter", - "max_output_tokens": 8191, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 5.51e-06, - "supports_tool_choice": true - }, - "openrouter/anthropic/claude-opus-4": { - "input_cost_per_image": 0.0048, - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "openrouter/anthropic/claude-opus-4.1": { - "input_cost_per_image": 0.0048, - "cache_creation_input_token_cost": 1.875e-05, - "cache_creation_input_token_cost_above_1hr": 3e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 
7.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "openrouter/anthropic/claude-sonnet-4": { - "input_cost_per_image": 0.0048, - "cache_creation_input_token_cost": 3.75e-06, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost": 3e-07, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "litellm_provider": "openrouter", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "openrouter/anthropic/claude-opus-4.5": { - "cache_creation_input_token_cost": 6.25e-06, - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 5e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 2.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "openrouter/anthropic/claude-sonnet-4.5": { - "input_cost_per_image": 0.0048, - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - 
"input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 1000000, - "max_output_tokens": 1000000, - "max_tokens": 1000000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "openrouter/anthropic/claude-haiku-4.5": { - "cache_creation_input_token_cost": 1.25e-06, - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 1e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 200000, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "openrouter/bytedance/ui-tars-1.5-7b": { - "input_cost_per_token": 1e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 131072, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "chat", - "output_cost_per_token": 2e-07, - "source": "https://openrouter.ai/api/v1/models/bytedance/ui-tars-1.5-7b", - "supports_tool_choice": true - }, - "openrouter/cognitivecomputations/dolphin-mixtral-8x7b": { - "input_cost_per_token": 5e-07, - "litellm_provider": "openrouter", - "max_tokens": 32769, - "mode": "chat", - "output_cost_per_token": 5e-07, - "supports_tool_choice": true - }, - "openrouter/cohere/command-r-plus": { - "input_cost_per_token": 3e-06, - "litellm_provider": 
"openrouter", - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_tool_choice": true - }, - "openrouter/databricks/dbrx-instruct": { - "input_cost_per_token": 6e-07, - "litellm_provider": "openrouter", - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_tool_choice": true - }, - "openrouter/deepseek/deepseek-chat": { - "input_cost_per_token": 1.4e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 65536, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.8e-07, - "supports_prompt_caching": true, - "supports_tool_choice": true - }, - "openrouter/deepseek/deepseek-chat-v3-0324": { - "input_cost_per_token": 1.4e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 65536, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.8e-07, - "supports_prompt_caching": true, - "supports_tool_choice": true - }, - "openrouter/deepseek/deepseek-chat-v3.1": { - "input_cost_per_token": 2e-07, - "input_cost_per_token_cache_hit": 2e-08, - "litellm_provider": "openrouter", - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 8e-07, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/deepseek/deepseek-v3.2": { - "input_cost_per_token": 2.8e-07, - "input_cost_per_token_cache_hit": 2.8e-08, - "litellm_provider": "openrouter", - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/deepseek/deepseek-v3.2-exp": { - 
"input_cost_per_token": 2e-07, - "input_cost_per_token_cache_hit": 2e-08, - "litellm_provider": "openrouter", - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": false, - "supports_tool_choice": true - }, - "openrouter/deepseek/deepseek-coder": { - "input_cost_per_token": 1.4e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 66000, - "max_output_tokens": 4096, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.8e-07, - "supports_prompt_caching": true, - "supports_tool_choice": true - }, - "openrouter/deepseek/deepseek-r1": { - "input_cost_per_token": 5.5e-07, - "input_cost_per_token_cache_hit": 1.4e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 65336, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.19e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/deepseek/deepseek-r1-0528": { - "input_cost_per_token": 5e-07, - "input_cost_per_token_cache_hit": 1.4e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 65336, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.15e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/fireworks/firellava-13b": { - "input_cost_per_token": 2e-07, - "litellm_provider": "openrouter", - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_tool_choice": true - }, - "openrouter/google/gemini-2.0-flash-001": { - "input_cost_per_audio_token": 7e-07, - 
"input_cost_per_token": 1e-07, - "litellm_provider": "openrouter", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/google/gemini-2.5-flash": { - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 3e-07, - "litellm_provider": "openrouter", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/google/gemini-2.5-pro": { - "input_cost_per_audio_token": 7e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "openrouter", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_pdf_size_mb": 30, - "max_tokens": 8192, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_audio_output": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/google/gemini-3-pro-preview": { - "cache_read_input_token_cost": 
2e-07, - "cache_read_input_token_cost_above_200k_tokens": 4e-07, - "cache_creation_input_token_cost_above_200k_tokens": 2.5e-07, - "input_cost_per_token": 2e-06, - "input_cost_per_token_above_200k_tokens": 4e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "openrouter", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 1048576, - "max_output_tokens": 65535, - "max_pdf_size_mb": 30, - "max_tokens": 65535, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "output_cost_per_token_above_200k_tokens": 1.8e-05, - "output_cost_per_token_batches": 6e-06, - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text" - ], - "supports_audio_input": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_video_input": true, - "supports_vision": true, - "supports_web_search": true - }, - "openrouter/google/gemini-pro-1.5": { - "input_cost_per_image": 0.00265, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 7.5e-06, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/google/gemini-pro-vision": { - "input_cost_per_image": 0.0025, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "openrouter", - "max_tokens": 45875, - "mode": "chat", - "output_cost_per_token": 3.75e-07, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - 
"openrouter/google/palm-2-chat-bison": { - "input_cost_per_token": 5e-07, - "litellm_provider": "openrouter", - "max_tokens": 25804, - "mode": "chat", - "output_cost_per_token": 5e-07, - "supports_tool_choice": true - }, - "openrouter/google/palm-2-codechat-bison": { - "input_cost_per_token": 5e-07, - "litellm_provider": "openrouter", - "max_tokens": 20070, - "mode": "chat", - "output_cost_per_token": 5e-07, - "supports_tool_choice": true - }, - "openrouter/gryphe/mythomax-l2-13b": { - "input_cost_per_token": 1.875e-06, - "litellm_provider": "openrouter", - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.875e-06, - "supports_tool_choice": true - }, - "openrouter/jondurbin/airoboros-l2-70b-2.1": { - "input_cost_per_token": 1.3875e-05, - "litellm_provider": "openrouter", - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.3875e-05, - "supports_tool_choice": true - }, - "openrouter/mancer/weaver": { - "input_cost_per_token": 5.625e-06, - "litellm_provider": "openrouter", - "max_tokens": 8000, - "mode": "chat", - "output_cost_per_token": 5.625e-06, - "supports_tool_choice": true - }, - "openrouter/meta-llama/codellama-34b-instruct": { - "input_cost_per_token": 5e-07, - "litellm_provider": "openrouter", - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 5e-07, - "supports_tool_choice": true - }, - "openrouter/meta-llama/llama-2-13b-chat": { - "input_cost_per_token": 2e-07, - "litellm_provider": "openrouter", - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_tool_choice": true - }, - "openrouter/meta-llama/llama-2-70b-chat": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "openrouter", - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "supports_tool_choice": true - }, - "openrouter/meta-llama/llama-3-70b-instruct": { - "input_cost_per_token": 5.9e-07, - "litellm_provider": "openrouter", - "max_tokens": 8192, - "mode": "chat", - 
"output_cost_per_token": 7.9e-07, - "supports_tool_choice": true - }, - "openrouter/meta-llama/llama-3-70b-instruct:nitro": { - "input_cost_per_token": 9e-07, - "litellm_provider": "openrouter", - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 9e-07, - "supports_tool_choice": true - }, - "openrouter/meta-llama/llama-3-8b-instruct:extended": { - "input_cost_per_token": 2.25e-07, - "litellm_provider": "openrouter", - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 2.25e-06, - "supports_tool_choice": true - }, - "openrouter/meta-llama/llama-3-8b-instruct:free": { - "input_cost_per_token": 0.0, - "litellm_provider": "openrouter", - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_tool_choice": true - }, - "openrouter/microsoft/wizardlm-2-8x22b:nitro": { - "input_cost_per_token": 1e-06, - "litellm_provider": "openrouter", - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 1e-06, - "supports_tool_choice": true - }, - "openrouter/minimax/minimax-m2": { - "input_cost_per_token": 2.55e-7, - "litellm_provider": "openrouter", - "max_input_tokens": 204800, - "max_output_tokens": 204800, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.02e-6, - "supports_function_calling": true, - "supports_prompt_caching": false, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/mistralai/mistral-7b-instruct": { - "input_cost_per_token": 1.3e-07, - "litellm_provider": "openrouter", - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.3e-07, - "supports_tool_choice": true - }, - "openrouter/mistralai/mistral-7b-instruct:free": { - "input_cost_per_token": 0.0, - "litellm_provider": "openrouter", - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.0, - "supports_tool_choice": true - }, - "openrouter/mistralai/mistral-large": { - "input_cost_per_token": 8e-06, - "litellm_provider": "openrouter", - "max_tokens": 32000, - "mode": 
"chat", - "output_cost_per_token": 2.4e-05, - "supports_tool_choice": true - }, - "openrouter/mistralai/mistral-small-3.1-24b-instruct": { - "input_cost_per_token": 1e-07, - "litellm_provider": "openrouter", - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_tool_choice": true - }, - "openrouter/mistralai/mistral-small-3.2-24b-instruct": { - "input_cost_per_token": 1e-07, - "litellm_provider": "openrouter", - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 3e-07, - "supports_tool_choice": true - }, - "openrouter/mistralai/mixtral-8x22b-instruct": { - "input_cost_per_token": 6.5e-07, - "litellm_provider": "openrouter", - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 6.5e-07, - "supports_tool_choice": true - }, - "openrouter/nousresearch/nous-hermes-llama2-13b": { - "input_cost_per_token": 2e-07, - "litellm_provider": "openrouter", - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2e-07, - "supports_tool_choice": true - }, - "openrouter/openai/gpt-3.5-turbo": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "openrouter", - "max_tokens": 4095, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_tool_choice": true - }, - "openrouter/openai/gpt-3.5-turbo-16k": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openrouter", - "max_tokens": 16383, - "mode": "chat", - "output_cost_per_token": 4e-06, - "supports_tool_choice": true - }, - "openrouter/openai/gpt-4": { - "input_cost_per_token": 3e-05, - "litellm_provider": "openrouter", - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_tool_choice": true - }, - "openrouter/openai/gpt-4-vision-preview": { - "input_cost_per_image": 0.01445, - "input_cost_per_token": 1e-05, - "litellm_provider": "openrouter", - "max_tokens": 130000, - "mode": "chat", - "output_cost_per_token": 3e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true 
- }, - "openrouter/openai/gpt-4.1": { - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/openai/gpt-4.1-2025-04-14": { - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/openai/gpt-4.1-mini": { - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 4e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.6e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/openai/gpt-4.1-mini-2025-04-14": { - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 4e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.6e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - 
"supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/openai/gpt-4.1-nano": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/openai/gpt-4.1-nano-2025-04-14": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/openai/gpt-4o": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/openai/gpt-4o-2024-05-13": { - "input_cost_per_token": 5e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true, 
- "supports_vision": true - }, - "openrouter/openai/gpt-5-chat": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/openai/gpt-5-codex": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/openai/gpt-5": { - "cache_read_input_token_cost": 1.25e-07, - "input_cost_per_token": 1.25e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/openai/gpt-5-mini": { - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 2.5e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/openai/gpt-5-nano": { - "cache_read_input_token_cost": 5e-09, - "input_cost_per_token": 5e-08, - "litellm_provider": "openrouter", - 
"max_input_tokens": 272000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text" - ], - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/openai/gpt-oss-120b": { - "input_cost_per_token": 1.8e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 131072, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 8e-07, - "source": "https://openrouter.ai/openai/gpt-oss-120b", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "openrouter/openai/gpt-oss-20b": { - "input_cost_per_token": 1.8e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 131072, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 8e-07, - "source": "https://openrouter.ai/openai/gpt-oss-20b", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "openrouter/openai/o1": { - "cache_read_input_token_cost": 7.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 100000, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/openai/o1-mini": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": 
"chat", - "output_cost_per_token": 1.2e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "openrouter/openai/o1-mini-2024-09-12": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 1.2e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "openrouter/openai/o1-preview": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "openrouter/openai/o1-preview-2024-09-12": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "openrouter/openai/o3-mini": { - "input_cost_per_token": 1.1e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 4.4e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "openrouter/openai/o3-mini-high": { - "input_cost_per_token": 1.1e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "max_tokens": 65536, - "mode": 
"chat", - "output_cost_per_token": 4.4e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "openrouter/pygmalionai/mythalion-13b": { - "input_cost_per_token": 1.875e-06, - "litellm_provider": "openrouter", - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.875e-06, - "supports_tool_choice": true - }, - "openrouter/qwen/qwen-2.5-coder-32b-instruct": { - "input_cost_per_token": 1.8e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 33792, - "max_output_tokens": 33792, - "max_tokens": 33792, - "mode": "chat", - "output_cost_per_token": 1.8e-07, - "supports_tool_choice": true - }, - "openrouter/qwen/qwen-vl-plus": { - "input_cost_per_token": 2.1e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 8192, - "max_output_tokens": 2048, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 6.3e-07, - "supports_tool_choice": true, - "supports_vision": true - }, - "openrouter/qwen/qwen3-coder": { - "input_cost_per_token": 2.2e-7, - "litellm_provider": "openrouter", - "max_input_tokens": 262100, - "max_output_tokens": 262100, - "max_tokens": 262100, - "mode": "chat", - "output_cost_per_token": 9.5e-7, - "source": "https://openrouter.ai/qwen/qwen3-coder", - "supports_tool_choice": true, - "supports_function_calling": true - }, - "openrouter/switchpoint/router": { - "input_cost_per_token": 8.5e-07, - "litellm_provider": "openrouter", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 3.4e-06, - "source": "https://openrouter.ai/switchpoint/router", - "supports_tool_choice": true - }, - "openrouter/undi95/remm-slerp-l2-13b": { - "input_cost_per_token": 1.875e-06, - "litellm_provider": "openrouter", - "max_tokens": 6144, - "mode": "chat", - "output_cost_per_token": 1.875e-06, - "supports_tool_choice": true - }, - 
"openrouter/x-ai/grok-4": { - "input_cost_per_token": 3e-06, - "litellm_provider": "openrouter", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "source": "https://openrouter.ai/x-ai/grok-4", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "openrouter/x-ai/grok-4-fast:free": { - "input_cost_per_token": 0, - "litellm_provider": "openrouter", - "max_input_tokens": 2000000, - "max_output_tokens": 30000, - "max_tokens": 2000000, - "mode": "chat", - "output_cost_per_token": 0, - "source": "https://openrouter.ai/x-ai/grok-4-fast:free", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true, - "supports_web_search": false - }, - "openrouter/z-ai/glm-4.6": { - "input_cost_per_token": 4.0e-7, - "litellm_provider": "openrouter", - "max_input_tokens": 202800, - "max_output_tokens": 131000, - "max_tokens": 202800, - "mode": "chat", - "output_cost_per_token": 1.75e-6, - "source": "https://openrouter.ai/z-ai/glm-4.6", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "openrouter/z-ai/glm-4.6:exacto": { - "input_cost_per_token": 4.5e-7, - "litellm_provider": "openrouter", - "max_input_tokens": 202800, - "max_output_tokens": 131000, - "max_tokens": 202800, - "mode": "chat", - "output_cost_per_token": 1.9e-6, - "source": "https://openrouter.ai/z-ai/glm-4.6:exacto", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "ovhcloud/DeepSeek-R1-Distill-Llama-70B": { - "input_cost_per_token": 6.7e-07, - "litellm_provider": "ovhcloud", - "max_input_tokens": 131000, - "max_output_tokens": 131000, - "max_tokens": 131000, - "mode": "chat", - "output_cost_per_token": 6.7e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/deepseek-r1-distill-llama-70b", - 
"supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "ovhcloud/Llama-3.1-8B-Instruct": { - "input_cost_per_token": 1e-07, - "litellm_provider": "ovhcloud", - "max_input_tokens": 131000, - "max_output_tokens": 131000, - "max_tokens": 131000, - "mode": "chat", - "output_cost_per_token": 1e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/llama-3-1-8b-instruct", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "ovhcloud/Meta-Llama-3_1-70B-Instruct": { - "input_cost_per_token": 6.7e-07, - "litellm_provider": "ovhcloud", - "max_input_tokens": 131000, - "max_output_tokens": 131000, - "max_tokens": 131000, - "mode": "chat", - "output_cost_per_token": 6.7e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/meta-llama-3-1-70b-instruct", - "supports_function_calling": false, - "supports_response_schema": false, - "supports_tool_choice": false - }, - "ovhcloud/Meta-Llama-3_3-70B-Instruct": { - "input_cost_per_token": 6.7e-07, - "litellm_provider": "ovhcloud", - "max_input_tokens": 131000, - "max_output_tokens": 131000, - "max_tokens": 131000, - "mode": "chat", - "output_cost_per_token": 6.7e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/meta-llama-3-3-70b-instruct", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "ovhcloud/Mistral-7B-Instruct-v0.3": { - "input_cost_per_token": 1e-07, - "litellm_provider": "ovhcloud", - "max_input_tokens": 127000, - "max_output_tokens": 127000, - "max_tokens": 127000, - "mode": "chat", - "output_cost_per_token": 1e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/mistral-7b-instruct-v0-3", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "ovhcloud/Mistral-Nemo-Instruct-2407": { - "input_cost_per_token": 1.3e-07, - "litellm_provider": 
"ovhcloud", - "max_input_tokens": 118000, - "max_output_tokens": 118000, - "max_tokens": 118000, - "mode": "chat", - "output_cost_per_token": 1.3e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/mistral-nemo-instruct-2407", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "ovhcloud/Mistral-Small-3.2-24B-Instruct-2506": { - "input_cost_per_token": 9e-08, - "litellm_provider": "ovhcloud", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2.8e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/mistral-small-3-2-24b-instruct-2506", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "ovhcloud/Mixtral-8x7B-Instruct-v0.1": { - "input_cost_per_token": 6.3e-07, - "litellm_provider": "ovhcloud", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 6.3e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/mixtral-8x7b-instruct-v0-1", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "ovhcloud/Qwen2.5-Coder-32B-Instruct": { - "input_cost_per_token": 8.7e-07, - "litellm_provider": "ovhcloud", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 8.7e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/qwen2-5-coder-32b-instruct", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "ovhcloud/Qwen2.5-VL-72B-Instruct": { - "input_cost_per_token": 9.1e-07, - "litellm_provider": "ovhcloud", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 9.1e-07, - "source": 
"https://endpoints.ai.cloud.ovh.net/models/qwen2-5-vl-72b-instruct", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "ovhcloud/Qwen3-32B": { - "input_cost_per_token": 8e-08, - "litellm_provider": "ovhcloud", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 2.3e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/qwen3-32b", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "ovhcloud/gpt-oss-120b": { - "input_cost_per_token": 8e-08, - "litellm_provider": "ovhcloud", - "max_input_tokens": 131000, - "max_output_tokens": 131000, - "max_tokens": 131000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/gpt-oss-120b", - "supports_function_calling": false, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "ovhcloud/gpt-oss-20b": { - "input_cost_per_token": 4e-08, - "litellm_provider": "ovhcloud", - "max_input_tokens": 131000, - "max_output_tokens": 131000, - "max_tokens": 131000, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/gpt-oss-20b", - "supports_function_calling": false, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "ovhcloud/llava-v1.6-mistral-7b-hf": { - "input_cost_per_token": 2.9e-07, - "litellm_provider": "ovhcloud", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 2.9e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/llava-next-mistral-7b", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false, - "supports_vision": true - }, - 
"ovhcloud/mamba-codestral-7B-v0.1": { - "input_cost_per_token": 1.9e-07, - "litellm_provider": "ovhcloud", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1.9e-07, - "source": "https://endpoints.ai.cloud.ovh.net/models/mamba-codestral-7b-v0-1", - "supports_function_calling": false, - "supports_response_schema": true, - "supports_tool_choice": false - }, - "palm/chat-bison": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/chat-bison-001": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/text-bison": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/text-bison-001": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/text-bison-safety-off": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_token": 
1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/text-bison-safety-recitation-off": { - "input_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "parallel_ai/search": { - "input_cost_per_query": 0.004, - "litellm_provider": "parallel_ai", - "mode": "search" - }, - "parallel_ai/search-pro": { - "input_cost_per_query": 0.009, - "litellm_provider": "parallel_ai", - "mode": "search" - }, - "perplexity/codellama-34b-instruct": { - "input_cost_per_token": 3.5e-07, - "litellm_provider": "perplexity", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.4e-06 - }, - "perplexity/codellama-70b-instruct": { - "input_cost_per_token": 7e-07, - "litellm_provider": "perplexity", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 2.8e-06 - }, - "perplexity/llama-2-70b-chat": { - "input_cost_per_token": 7e-07, - "litellm_provider": "perplexity", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.8e-06 - }, - "perplexity/llama-3.1-70b-instruct": { - "input_cost_per_token": 1e-06, - "litellm_provider": "perplexity", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1e-06 - }, - "perplexity/llama-3.1-8b-instruct": { - "input_cost_per_token": 2e-07, - "litellm_provider": "perplexity", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2e-07 - }, - 
"perplexity/llama-3.1-sonar-huge-128k-online": { - "deprecation_date": "2025-02-22", - "input_cost_per_token": 5e-06, - "litellm_provider": "perplexity", - "max_input_tokens": 127072, - "max_output_tokens": 127072, - "max_tokens": 127072, - "mode": "chat", - "output_cost_per_token": 5e-06 - }, - "perplexity/llama-3.1-sonar-large-128k-chat": { - "deprecation_date": "2025-02-22", - "input_cost_per_token": 1e-06, - "litellm_provider": "perplexity", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1e-06 - }, - "perplexity/llama-3.1-sonar-large-128k-online": { - "deprecation_date": "2025-02-22", - "input_cost_per_token": 1e-06, - "litellm_provider": "perplexity", - "max_input_tokens": 127072, - "max_output_tokens": 127072, - "max_tokens": 127072, - "mode": "chat", - "output_cost_per_token": 1e-06 - }, - "perplexity/llama-3.1-sonar-small-128k-chat": { - "deprecation_date": "2025-02-22", - "input_cost_per_token": 2e-07, - "litellm_provider": "perplexity", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2e-07 - }, - "perplexity/llama-3.1-sonar-small-128k-online": { - "deprecation_date": "2025-02-22", - "input_cost_per_token": 2e-07, - "litellm_provider": "perplexity", - "max_input_tokens": 127072, - "max_output_tokens": 127072, - "max_tokens": 127072, - "mode": "chat", - "output_cost_per_token": 2e-07 - }, - "perplexity/mistral-7b-instruct": { - "input_cost_per_token": 7e-08, - "litellm_provider": "perplexity", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.8e-07 - }, - "perplexity/mixtral-8x7b-instruct": { - "input_cost_per_token": 7e-08, - "litellm_provider": "perplexity", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.8e-07 - }, - 
"perplexity/pplx-70b-chat": { - "input_cost_per_token": 7e-07, - "litellm_provider": "perplexity", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.8e-06 - }, - "perplexity/pplx-70b-online": { - "input_cost_per_request": 0.005, - "input_cost_per_token": 0.0, - "litellm_provider": "perplexity", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.8e-06 - }, - "perplexity/pplx-7b-chat": { - "input_cost_per_token": 7e-08, - "litellm_provider": "perplexity", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.8e-07 - }, - "perplexity/pplx-7b-online": { - "input_cost_per_request": 0.005, - "input_cost_per_token": 0.0, - "litellm_provider": "perplexity", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.8e-07 - }, - "perplexity/sonar": { - "input_cost_per_token": 1e-06, - "litellm_provider": "perplexity", - "max_input_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-06, - "search_context_cost_per_query": { - "search_context_size_high": 0.012, - "search_context_size_low": 0.005, - "search_context_size_medium": 0.008 - }, - "supports_web_search": true - }, - "perplexity/sonar-deep-research": { - "citation_cost_per_token": 2e-06, - "input_cost_per_token": 2e-06, - "litellm_provider": "perplexity", - "max_input_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_reasoning_token": 3e-06, - "output_cost_per_token": 8e-06, - "search_context_cost_per_query": { - "search_context_size_high": 0.005, - "search_context_size_low": 0.005, - "search_context_size_medium": 0.005 - }, - "supports_reasoning": true, - "supports_web_search": true - }, - "perplexity/sonar-medium-chat": { - "input_cost_per_token": 6e-07, - 
"litellm_provider": "perplexity", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1.8e-06 - }, - "perplexity/sonar-medium-online": { - "input_cost_per_request": 0.005, - "input_cost_per_token": 0, - "litellm_provider": "perplexity", - "max_input_tokens": 12000, - "max_output_tokens": 12000, - "max_tokens": 12000, - "mode": "chat", - "output_cost_per_token": 1.8e-06 - }, - "perplexity/sonar-pro": { - "input_cost_per_token": 3e-06, - "litellm_provider": "perplexity", - "max_input_tokens": 200000, - "max_output_tokens": 8000, - "max_tokens": 8000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.014, - "search_context_size_low": 0.006, - "search_context_size_medium": 0.01 - }, - "supports_web_search": true - }, - "perplexity/sonar-reasoning": { - "input_cost_per_token": 1e-06, - "litellm_provider": "perplexity", - "max_input_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 5e-06, - "search_context_cost_per_query": { - "search_context_size_high": 0.014, - "search_context_size_low": 0.005, - "search_context_size_medium": 0.008 - }, - "supports_reasoning": true, - "supports_web_search": true - }, - "perplexity/sonar-reasoning-pro": { - "input_cost_per_token": 2e-06, - "litellm_provider": "perplexity", - "max_input_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "search_context_cost_per_query": { - "search_context_size_high": 0.014, - "search_context_size_low": 0.006, - "search_context_size_medium": 0.01 - }, - "supports_reasoning": true, - "supports_web_search": true - }, - "perplexity/sonar-small-chat": { - "input_cost_per_token": 7e-08, - "litellm_provider": "perplexity", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 2.8e-07 - }, - 
"perplexity/sonar-small-online": { - "input_cost_per_request": 0.005, - "input_cost_per_token": 0, - "litellm_provider": "perplexity", - "max_input_tokens": 12000, - "max_output_tokens": 12000, - "max_tokens": 12000, - "mode": "chat", - "output_cost_per_token": 2.8e-07 - }, - "publicai/swiss-ai/apertus-8b-instruct": { - "input_cost_per_token": 0.0, - "litellm_provider": "publicai", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://platform.publicai.co/docs", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "publicai/swiss-ai/apertus-70b-instruct": { - "input_cost_per_token": 0.0, - "litellm_provider": "publicai", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://platform.publicai.co/docs", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "publicai/aisingapore/Gemma-SEA-LION-v4-27B-IT": { - "input_cost_per_token": 0.0, - "litellm_provider": "publicai", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://platform.publicai.co/docs", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "publicai/BSC-LT/salamandra-7b-instruct-tools-16k": { - "input_cost_per_token": 0.0, - "litellm_provider": "publicai", - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://platform.publicai.co/docs", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "publicai/BSC-LT/ALIA-40b-instruct_Q8_0": { - "input_cost_per_token": 0.0, - "litellm_provider": "publicai", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": 
"https://platform.publicai.co/docs", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "publicai/allenai/Olmo-3-7B-Instruct": { - "input_cost_per_token": 0.0, - "litellm_provider": "publicai", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://platform.publicai.co/docs", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "publicai/aisingapore/Qwen-SEA-LION-v4-32B-IT": { - "input_cost_per_token": 0.0, - "litellm_provider": "publicai", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://platform.publicai.co/docs", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "publicai/allenai/Olmo-3-7B-Think": { - "input_cost_per_token": 0.0, - "litellm_provider": "publicai", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://platform.publicai.co/docs", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_reasoning": true - }, - "publicai/allenai/Olmo-3-32B-Think": { - "input_cost_per_token": 0.0, - "litellm_provider": "publicai", - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://platform.publicai.co/docs", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_reasoning": true - }, - "qwen.qwen3-coder-480b-a35b-v1:0": { - "input_cost_per_token": 2.2e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 262000, - "max_output_tokens": 65536, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 1.8e-06, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - 
"qwen.qwen3-235b-a22b-2507-v1:0": { - "input_cost_per_token": 2.2e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 262144, - "max_output_tokens": 131072, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 8.8e-07, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "qwen.qwen3-coder-30b-a3b-v1:0": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 262144, - "max_output_tokens": 131072, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 6.0e-07, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "qwen.qwen3-32b-v1:0": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 131072, - "max_output_tokens": 16384, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 6.0e-07, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "qwen.qwen3-next-80b-a3b": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "supports_function_calling": true, - "supports_system_messages": true - }, - "qwen.qwen3-vl-235b-a22b": { - "input_cost_per_token": 5.3e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.66e-06, - "supports_function_calling": true, - "supports_system_messages": true, - "supports_vision": true - }, - "recraft/recraftv2": { - "litellm_provider": "recraft", - "mode": "image_generation", - "output_cost_per_image": 0.022, - "source": "https://www.recraft.ai/docs#pricing", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "recraft/recraftv3": { - 
"litellm_provider": "recraft", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "source": "https://www.recraft.ai/docs#pricing", - "supported_endpoints": [ - "/v1/images/generations" - ] - }, - "replicate/meta/llama-2-13b": { - "input_cost_per_token": 1e-07, - "litellm_provider": "replicate", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 5e-07, - "supports_tool_choice": true - }, - "replicate/meta/llama-2-13b-chat": { - "input_cost_per_token": 1e-07, - "litellm_provider": "replicate", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 5e-07, - "supports_tool_choice": true - }, - "replicate/meta/llama-2-70b": { - "input_cost_per_token": 6.5e-07, - "litellm_provider": "replicate", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.75e-06, - "supports_tool_choice": true - }, - "replicate/meta/llama-2-70b-chat": { - "input_cost_per_token": 6.5e-07, - "litellm_provider": "replicate", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.75e-06, - "supports_tool_choice": true - }, - "replicate/meta/llama-2-7b": { - "input_cost_per_token": 5e-08, - "litellm_provider": "replicate", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "supports_tool_choice": true - }, - "replicate/meta/llama-2-7b-chat": { - "input_cost_per_token": 5e-08, - "litellm_provider": "replicate", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "supports_tool_choice": true - }, - "replicate/meta/llama-3-70b": { - "input_cost_per_token": 6.5e-07, - "litellm_provider": "replicate", - "max_input_tokens": 8192, - 
"max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.75e-06, - "supports_tool_choice": true - }, - "replicate/meta/llama-3-70b-instruct": { - "input_cost_per_token": 6.5e-07, - "litellm_provider": "replicate", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2.75e-06, - "supports_tool_choice": true - }, - "replicate/meta/llama-3-8b": { - "input_cost_per_token": 5e-08, - "litellm_provider": "replicate", - "max_input_tokens": 8086, - "max_output_tokens": 8086, - "max_tokens": 8086, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "supports_tool_choice": true - }, - "replicate/meta/llama-3-8b-instruct": { - "input_cost_per_token": 5e-08, - "litellm_provider": "replicate", - "max_input_tokens": 8086, - "max_output_tokens": 8086, - "max_tokens": 8086, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "supports_tool_choice": true - }, - "replicate/mistralai/mistral-7b-instruct-v0.2": { - "input_cost_per_token": 5e-08, - "litellm_provider": "replicate", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "supports_tool_choice": true - }, - "replicate/mistralai/mistral-7b-v0.1": { - "input_cost_per_token": 5e-08, - "litellm_provider": "replicate", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 2.5e-07, - "supports_tool_choice": true - }, - "replicate/mistralai/mixtral-8x7b-instruct-v0.1": { - "input_cost_per_token": 3e-07, - "litellm_provider": "replicate", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1e-06, - "supports_tool_choice": true - }, - "rerank-english-v2.0": { - "input_cost_per_query": 0.002, - "input_cost_per_token": 0.0, - "litellm_provider": "cohere", - "max_input_tokens": 4096, - 
"max_output_tokens": 4096, - "max_query_tokens": 2048, - "max_tokens": 4096, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "rerank-english-v3.0": { - "input_cost_per_query": 0.002, - "input_cost_per_token": 0.0, - "litellm_provider": "cohere", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "max_tokens": 4096, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "rerank-multilingual-v2.0": { - "input_cost_per_query": 0.002, - "input_cost_per_token": 0.0, - "litellm_provider": "cohere", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "max_tokens": 4096, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "rerank-multilingual-v3.0": { - "input_cost_per_query": 0.002, - "input_cost_per_token": 0.0, - "litellm_provider": "cohere", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "max_tokens": 4096, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "rerank-v3.5": { - "input_cost_per_query": 0.002, - "input_cost_per_token": 0.0, - "litellm_provider": "cohere", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "max_tokens": 4096, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "nvidia_nim/nvidia/nv-rerankqa-mistral-4b-v3": { - "input_cost_per_query": 0.0, - "input_cost_per_token": 0.0, - "litellm_provider": "nvidia_nim", - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "nvidia_nim/nvidia/llama-3_2-nv-rerankqa-1b-v2": { - "input_cost_per_query": 0.0, - "input_cost_per_token": 0.0, - "litellm_provider": "nvidia_nim", - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "nvidia_nim/ranking/nvidia/llama-3.2-nv-rerankqa-1b-v2": { - "input_cost_per_query": 0.0, - "input_cost_per_token": 0.0, - "litellm_provider": "nvidia_nim", - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "sagemaker/meta-textgeneration-llama-2-13b": { - "input_cost_per_token": 0.0, - 
"litellm_provider": "sagemaker", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "completion", - "output_cost_per_token": 0.0 - }, - "sagemaker/meta-textgeneration-llama-2-13b-f": { - "input_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "sagemaker/meta-textgeneration-llama-2-70b": { - "input_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "completion", - "output_cost_per_token": 0.0 - }, - "sagemaker/meta-textgeneration-llama-2-70b-b-f": { - "input_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "sagemaker/meta-textgeneration-llama-2-7b": { - "input_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "completion", - "output_cost_per_token": 0.0 - }, - "sagemaker/meta-textgeneration-llama-2-7b-f": { - "input_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "sambanova/DeepSeek-R1": { - "input_cost_per_token": 5e-06, - "litellm_provider": "sambanova", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 7e-06, - "source": "https://cloud.sambanova.ai/plans/pricing" - }, - "sambanova/DeepSeek-R1-Distill-Llama-70B": { - "input_cost_per_token": 7e-07, - "litellm_provider": "sambanova", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.4e-06, - "source": 
"https://cloud.sambanova.ai/plans/pricing" - }, - "sambanova/DeepSeek-V3-0324": { - "input_cost_per_token": 3e-06, - "litellm_provider": "sambanova", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4.5e-06, - "source": "https://cloud.sambanova.ai/plans/pricing", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "sambanova/Llama-4-Maverick-17B-128E-Instruct": { - "input_cost_per_token": 6.3e-07, - "litellm_provider": "sambanova", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "metadata": { - "notes": "For vision models, images are converted to 6432 input tokens and are billed at that amount" - }, - "mode": "chat", - "output_cost_per_token": 1.8e-06, - "source": "https://cloud.sambanova.ai/plans/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "sambanova/Llama-4-Scout-17B-16E-Instruct": { - "input_cost_per_token": 4e-07, - "litellm_provider": "sambanova", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "metadata": { - "notes": "For vision models, images are converted to 6432 input tokens and are billed at that amount" - }, - "mode": "chat", - "output_cost_per_token": 7e-07, - "source": "https://cloud.sambanova.ai/plans/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "sambanova/Meta-Llama-3.1-405B-Instruct": { - "input_cost_per_token": 5e-06, - "litellm_provider": "sambanova", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-05, - "source": "https://cloud.sambanova.ai/plans/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - 
"sambanova/Meta-Llama-3.1-8B-Instruct": { - "input_cost_per_token": 1e-07, - "litellm_provider": "sambanova", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 2e-07, - "source": "https://cloud.sambanova.ai/plans/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "sambanova/Meta-Llama-3.2-1B-Instruct": { - "input_cost_per_token": 4e-08, - "litellm_provider": "sambanova", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 8e-08, - "source": "https://cloud.sambanova.ai/plans/pricing" - }, - "sambanova/Meta-Llama-3.2-3B-Instruct": { - "input_cost_per_token": 8e-08, - "litellm_provider": "sambanova", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.6e-07, - "source": "https://cloud.sambanova.ai/plans/pricing" - }, - "sambanova/Meta-Llama-3.3-70B-Instruct": { - "input_cost_per_token": 6e-07, - "litellm_provider": "sambanova", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "source": "https://cloud.sambanova.ai/plans/pricing", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "sambanova/Meta-Llama-Guard-3-8B": { - "input_cost_per_token": 3e-07, - "litellm_provider": "sambanova", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 3e-07, - "source": "https://cloud.sambanova.ai/plans/pricing" - }, - "sambanova/QwQ-32B": { - "input_cost_per_token": 5e-07, - "litellm_provider": "sambanova", - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-06, - "source": 
"https://cloud.sambanova.ai/plans/pricing" - }, - "sambanova/Qwen2-Audio-7B-Instruct": { - "input_cost_per_token": 5e-07, - "litellm_provider": "sambanova", - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 0.0001, - "source": "https://cloud.sambanova.ai/plans/pricing", - "supports_audio_input": true - }, - "sambanova/Qwen3-32B": { - "input_cost_per_token": 4e-07, - "litellm_provider": "sambanova", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 8e-07, - "source": "https://cloud.sambanova.ai/plans/pricing", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "sambanova/DeepSeek-V3.1": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 4.5e-06, - "litellm_provider": "sambanova", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_reasoning": true, - "source": "https://cloud.sambanova.ai/plans/pricing" - }, - "sambanova/gpt-oss-120b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 4.5e-06, - "litellm_provider": "sambanova", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_reasoning": true, - "source": "https://cloud.sambanova.ai/plans/pricing" - }, - - "snowflake/claude-3-5-sonnet": { - "litellm_provider": "snowflake", - "max_input_tokens": 18000, - "max_output_tokens": 8192, - "max_tokens": 18000, - "mode": "chat", - "supports_computer_use": true - }, - "snowflake/deepseek-r1": { - "litellm_provider": "snowflake", - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "max_tokens": 32768, - "mode": "chat", - "supports_reasoning": true - }, - "snowflake/gemma-7b": { - 
"litellm_provider": "snowflake", - "max_input_tokens": 8000, - "max_output_tokens": 8192, - "max_tokens": 8000, - "mode": "chat" - }, - "snowflake/jamba-1.5-large": { - "litellm_provider": "snowflake", - "max_input_tokens": 256000, - "max_output_tokens": 8192, - "max_tokens": 256000, - "mode": "chat" - }, - "snowflake/jamba-1.5-mini": { - "litellm_provider": "snowflake", - "max_input_tokens": 256000, - "max_output_tokens": 8192, - "max_tokens": 256000, - "mode": "chat" - }, - "snowflake/jamba-instruct": { - "litellm_provider": "snowflake", - "max_input_tokens": 256000, - "max_output_tokens": 8192, - "max_tokens": 256000, - "mode": "chat" - }, - "snowflake/llama2-70b-chat": { - "litellm_provider": "snowflake", - "max_input_tokens": 4096, - "max_output_tokens": 8192, - "max_tokens": 4096, - "mode": "chat" - }, - "snowflake/llama3-70b": { - "litellm_provider": "snowflake", - "max_input_tokens": 8000, - "max_output_tokens": 8192, - "max_tokens": 8000, - "mode": "chat" - }, - "snowflake/llama3-8b": { - "litellm_provider": "snowflake", - "max_input_tokens": 8000, - "max_output_tokens": 8192, - "max_tokens": 8000, - "mode": "chat" - }, - "snowflake/llama3.1-405b": { - "litellm_provider": "snowflake", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat" - }, - "snowflake/llama3.1-70b": { - "litellm_provider": "snowflake", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat" - }, - "snowflake/llama3.1-8b": { - "litellm_provider": "snowflake", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat" - }, - "snowflake/llama3.2-1b": { - "litellm_provider": "snowflake", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat" - }, - "snowflake/llama3.2-3b": { - "litellm_provider": "snowflake", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat" - }, - 
"snowflake/llama3.3-70b": { - "litellm_provider": "snowflake", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat" - }, - "snowflake/mistral-7b": { - "litellm_provider": "snowflake", - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "max_tokens": 32000, - "mode": "chat" - }, - "snowflake/mistral-large": { - "litellm_provider": "snowflake", - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "max_tokens": 32000, - "mode": "chat" - }, - "snowflake/mistral-large2": { - "litellm_provider": "snowflake", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat" - }, - "snowflake/mixtral-8x7b": { - "litellm_provider": "snowflake", - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "max_tokens": 32000, - "mode": "chat" - }, - "snowflake/reka-core": { - "litellm_provider": "snowflake", - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "max_tokens": 32000, - "mode": "chat" - }, - "snowflake/reka-flash": { - "litellm_provider": "snowflake", - "max_input_tokens": 100000, - "max_output_tokens": 8192, - "max_tokens": 100000, - "mode": "chat" - }, - "snowflake/snowflake-arctic": { - "litellm_provider": "snowflake", - "max_input_tokens": 4096, - "max_output_tokens": 8192, - "max_tokens": 4096, - "mode": "chat" - }, - "snowflake/snowflake-llama-3.1-405b": { - "litellm_provider": "snowflake", - "max_input_tokens": 8000, - "max_output_tokens": 8192, - "max_tokens": 8000, - "mode": "chat" - }, - "snowflake/snowflake-llama-3.3-70b": { - "litellm_provider": "snowflake", - "max_input_tokens": 8000, - "max_output_tokens": 8192, - "max_tokens": 8000, - "mode": "chat" - }, - "stability.sd3-5-large-v1:0": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "image_generation", - "output_cost_per_image": 0.08 - }, - "stability.sd3-large-v1:0": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - 
"mode": "image_generation", - "output_cost_per_image": 0.08 - }, - "stability.stable-image-core-v1:0": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "image_generation", - "output_cost_per_image": 0.04 - }, - "stability.stable-image-core-v1:1": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "image_generation", - "output_cost_per_image": 0.04 - }, - "stability.stable-image-ultra-v1:0": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "image_generation", - "output_cost_per_image": 0.14 - }, - "stability.stable-image-ultra-v1:1": { - "litellm_provider": "bedrock", - "max_input_tokens": 77, - "max_tokens": 77, - "mode": "image_generation", - "output_cost_per_image": 0.14 - }, - "standard/1024-x-1024/dall-e-3": { - "input_cost_per_pixel": 3.81469e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0 - }, - "standard/1024-x-1792/dall-e-3": { - "input_cost_per_pixel": 4.359e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0 - }, - "standard/1792-x-1024/dall-e-3": { - "input_cost_per_pixel": 4.359e-08, - "litellm_provider": "openai", - "mode": "image_generation", - "output_cost_per_pixel": 0.0 - }, - "tavily/search": { - "input_cost_per_query": 0.008, - "litellm_provider": "tavily", - "mode": "search" - }, - "tavily/search-advanced": { - "input_cost_per_query": 0.016, - "litellm_provider": "tavily", - "mode": "search" - }, - "text-bison": { - "input_cost_per_character": 2.5e-07, - "litellm_provider": "vertex_ai-text-models", - "max_input_tokens": 8192, - "max_output_tokens": 2048, - "max_tokens": 2048, - "mode": "completion", - "output_cost_per_character": 5e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-bison32k": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 
1.25e-07, - "litellm_provider": "vertex_ai-text-models", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-bison32k@002": { - "input_cost_per_character": 2.5e-07, - "input_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-text-models", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_character": 5e-07, - "output_cost_per_token": 1.25e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-bison@001": { - "input_cost_per_character": 2.5e-07, - "litellm_provider": "vertex_ai-text-models", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_character": 5e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-bison@002": { - "input_cost_per_character": 2.5e-07, - "litellm_provider": "vertex_ai-text-models", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_character": 5e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-completion-codestral/codestral-2405": { - "input_cost_per_token": 0.0, - "litellm_provider": "text-completion-codestral", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "completion", - "output_cost_per_token": 0.0, - "source": "https://docs.mistral.ai/capabilities/code_generation/" - }, - "text-completion-codestral/codestral-latest": { - "input_cost_per_token": 0.0, - "litellm_provider": "text-completion-codestral", - "max_input_tokens": 32000, - "max_output_tokens": 
8191, - "max_tokens": 8191, - "mode": "completion", - "output_cost_per_token": 0.0, - "source": "https://docs.mistral.ai/capabilities/code_generation/" - }, - "text-embedding-004": { - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 2048, - "max_tokens": 2048, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" - }, - "text-embedding-005": { - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 2048, - "max_tokens": 2048, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" - }, - "text-embedding-3-large": { - "input_cost_per_token": 1.3e-07, - "input_cost_per_token_batches": 6.5e-08, - "litellm_provider": "openai", - "max_input_tokens": 8191, - "max_tokens": 8191, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_cost_per_token_batches": 0.0, - "output_vector_size": 3072 - }, - "text-embedding-3-small": { - "input_cost_per_token": 2e-08, - "input_cost_per_token_batches": 1e-08, - "litellm_provider": "openai", - "max_input_tokens": 8191, - "max_tokens": 8191, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_cost_per_token_batches": 0.0, - "output_vector_size": 1536 - }, - "text-embedding-ada-002": { - "input_cost_per_token": 1e-07, - "litellm_provider": "openai", - "max_input_tokens": 8191, - "max_tokens": 8191, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 1536 - }, - "text-embedding-ada-002-v2": { - "input_cost_per_token": 1e-07, - "input_cost_per_token_batches": 5e-08, - "litellm_provider": "openai", - "max_input_tokens": 8191, - "max_tokens": 8191, - "mode": "embedding", - "output_cost_per_token": 
0.0, - "output_cost_per_token_batches": 0.0 - }, - "text-embedding-large-exp-03-07": { - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 8192, - "max_tokens": 8192, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 3072, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" - }, - "text-embedding-preview-0409": { - "input_cost_per_token": 6.25e-09, - "input_cost_per_token_batch_requests": 5e-09, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 3072, - "max_tokens": 3072, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "text-moderation-007": { - "input_cost_per_token": 0.0, - "litellm_provider": "openai", - "max_input_tokens": 32768, - "max_output_tokens": 0, - "max_tokens": 32768, - "mode": "moderation", - "output_cost_per_token": 0.0 - }, - "text-moderation-latest": { - "input_cost_per_token": 0.0, - "litellm_provider": "openai", - "max_input_tokens": 32768, - "max_output_tokens": 0, - "max_tokens": 32768, - "mode": "moderation", - "output_cost_per_token": 0.0 - }, - "text-moderation-stable": { - "input_cost_per_token": 0.0, - "litellm_provider": "openai", - "max_input_tokens": 32768, - "max_output_tokens": 0, - "max_tokens": 32768, - "mode": "moderation", - "output_cost_per_token": 0.0 - }, - "text-multilingual-embedding-002": { - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 2048, - "max_tokens": 2048, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" - }, - "text-multilingual-embedding-preview-0409": { - "input_cost_per_token": 6.25e-09, - "litellm_provider": 
"vertex_ai-embedding-models", - "max_input_tokens": 3072, - "max_tokens": 3072, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-unicorn": { - "input_cost_per_token": 1e-05, - "litellm_provider": "vertex_ai-text-models", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_token": 2.8e-05, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-unicorn@001": { - "input_cost_per_token": 1e-05, - "litellm_provider": "vertex_ai-text-models", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 1024, - "mode": "completion", - "output_cost_per_token": 2.8e-05, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "textembedding-gecko": { - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 3072, - "max_tokens": 3072, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "textembedding-gecko-multilingual": { - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 3072, - "max_tokens": 3072, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "textembedding-gecko-multilingual@001": { - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 3072, - "max_tokens": 3072, - "mode": "embedding", - 
"output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "textembedding-gecko@001": { - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 3072, - "max_tokens": 3072, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "textembedding-gecko@003": { - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "vertex_ai-embedding-models", - "max_input_tokens": 3072, - "max_tokens": 3072, - "mode": "embedding", - "output_cost_per_token": 0, - "output_vector_size": 768, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "together-ai-21.1b-41b": { - "input_cost_per_token": 8e-07, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 8e-07 - }, - "together-ai-4.1b-8b": { - "input_cost_per_token": 2e-07, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 2e-07 - }, - "together-ai-41.1b-80b": { - "input_cost_per_token": 9e-07, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 9e-07 - }, - "together-ai-8.1b-21b": { - "input_cost_per_token": 3e-07, - "litellm_provider": "together_ai", - "max_tokens": 1000, - "mode": "chat", - "output_cost_per_token": 3e-07 - }, - "together-ai-81.1b-110b": { - "input_cost_per_token": 1.8e-06, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 1.8e-06 - }, - "together-ai-embedding-151m-to-350m": { - "input_cost_per_token": 1.6e-08, - "litellm_provider": "together_ai", - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "together-ai-embedding-up-to-150m": { - "input_cost_per_token": 8e-09, - 
"litellm_provider": "together_ai", - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "together_ai/baai/bge-base-en-v1.5": { - "input_cost_per_token": 8e-09, - "litellm_provider": "together_ai", - "max_input_tokens": 512, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 768 - }, - "together_ai/BAAI/bge-base-en-v1.5": { - "input_cost_per_token": 8e-09, - "litellm_provider": "together_ai", - "max_input_tokens": 512, - "mode": "embedding", - "output_cost_per_token": 0.0, - "output_vector_size": 768 - }, - "together-ai-up-to-4b": { - "input_cost_per_token": 1e-07, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 1e-07 - }, - "together_ai/Qwen/Qwen2.5-72B-Instruct-Turbo": { - "litellm_provider": "together_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/Qwen/Qwen2.5-7B-Instruct-Turbo": { - "litellm_provider": "together_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/Qwen/Qwen3-235B-A22B-Instruct-2507-tput": { - "input_cost_per_token": 2e-07, - "litellm_provider": "together_ai", - "max_input_tokens": 262000, - "mode": "chat", - "output_cost_per_token": 6e-06, - "source": "https://www.together.ai/models/qwen3-235b-a22b-instruct-2507-fp8", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/Qwen/Qwen3-235B-A22B-Thinking-2507": { - "input_cost_per_token": 6.5e-07, - "litellm_provider": "together_ai", - "max_input_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 3e-06, - "source": "https://www.together.ai/models/qwen3-235b-a22b-thinking-2507", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - 
"together_ai/Qwen/Qwen3-235B-A22B-fp8-tput": { - "input_cost_per_token": 2e-07, - "litellm_provider": "together_ai", - "max_input_tokens": 40000, - "mode": "chat", - "output_cost_per_token": 6e-07, - "source": "https://www.together.ai/models/qwen3-235b-a22b-fp8-tput", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_tool_choice": false - }, - "together_ai/Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8": { - "input_cost_per_token": 2e-06, - "litellm_provider": "together_ai", - "max_input_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "source": "https://www.together.ai/models/qwen3-coder-480b-a35b-instruct", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/deepseek-ai/DeepSeek-R1": { - "input_cost_per_token": 3e-06, - "litellm_provider": "together_ai", - "max_input_tokens": 128000, - "max_output_tokens": 20480, - "max_tokens": 20480, - "mode": "chat", - "output_cost_per_token": 7e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/deepseek-ai/DeepSeek-R1-0528-tput": { - "input_cost_per_token": 5.5e-07, - "litellm_provider": "together_ai", - "max_input_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2.19e-06, - "source": "https://www.together.ai/models/deepseek-r1-0528-throughput", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/deepseek-ai/DeepSeek-V3": { - "input_cost_per_token": 1.25e-06, - "litellm_provider": "together_ai", - "max_input_tokens": 65536, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/deepseek-ai/DeepSeek-V3.1": { - 
"input_cost_per_token": 6e-07, - "litellm_provider": "together_ai", - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.7e-06, - "source": "https://www.together.ai/models/deepseek-v3-1", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "together_ai/meta-llama/Llama-3.2-3B-Instruct-Turbo": { - "litellm_provider": "together_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/meta-llama/Llama-3.3-70B-Instruct-Turbo": { - "input_cost_per_token": 8.8e-07, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 8.8e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "together_ai/meta-llama/Llama-3.3-70B-Instruct-Turbo-Free": { - "input_cost_per_token": 0, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 0, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "together_ai/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": { - "input_cost_per_token": 2.7e-07, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 8.5e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/meta-llama/Llama-4-Scout-17B-16E-Instruct": { - "input_cost_per_token": 1.8e-07, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 5.9e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": { - "input_cost_per_token": 3.5e-06, - 
"litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 3.5e-06, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": { - "input_cost_per_token": 8.8e-07, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 8.8e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "together_ai/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": { - "input_cost_per_token": 1.8e-07, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 1.8e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "together_ai/mistralai/Mistral-7B-Instruct-v0.1": { - "litellm_provider": "together_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "together_ai/mistralai/Mistral-Small-24B-Instruct-2501": { - "litellm_provider": "together_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1": { - "input_cost_per_token": 6e-07, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "together_ai/moonshotai/Kimi-K2-Instruct": { - "input_cost_per_token": 1e-06, - "litellm_provider": "together_ai", - "mode": "chat", - "output_cost_per_token": 3e-06, - "source": "https://www.together.ai/models/kimi-k2-instruct", - "supports_function_calling": true, - 
"supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/openai/gpt-oss-120b": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "together_ai", - "max_input_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-07, - "source": "https://www.together.ai/models/gpt-oss-120b", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/openai/gpt-oss-20b": { - "input_cost_per_token": 5e-08, - "litellm_provider": "together_ai", - "max_input_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2e-07, - "source": "https://www.together.ai/models/gpt-oss-20b", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/togethercomputer/CodeLlama-34b-Instruct": { - "litellm_provider": "together_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/zai-org/GLM-4.5-Air-FP8": { - "input_cost_per_token": 2e-07, - "litellm_provider": "together_ai", - "max_input_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.1e-06, - "source": "https://www.together.ai/models/glm-4-5-air", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/zai-org/GLM-4.6": { - "input_cost_per_token": 0.6e-06, - "litellm_provider": "together_ai", - "max_input_tokens": 200000, - "max_output_tokens": 200000, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 2.2e-06, - "source": "https://www.together.ai/models/glm-4-6", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "together_ai/moonshotai/Kimi-K2-Instruct-0905": { - "input_cost_per_token": 1e-06, - "litellm_provider": "together_ai", - 
"max_input_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 3e-06, - "source": "https://www.together.ai/models/kimi-k2-0905", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/Qwen/Qwen3-Next-80B-A3B-Instruct": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "together_ai", - "max_input_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://www.together.ai/models/qwen3-next-80b-a3b-instruct", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "together_ai/Qwen/Qwen3-Next-80B-A3B-Thinking": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "together_ai", - "max_input_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://www.together.ai/models/qwen3-next-80b-a3b-thinking", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "tts-1": { - "input_cost_per_character": 1.5e-05, - "litellm_provider": "openai", - "mode": "audio_speech", - "supported_endpoints": [ - "/v1/audio/speech" - ] - }, - "tts-1-hd": { - "input_cost_per_character": 3e-05, - "litellm_provider": "openai", - "mode": "audio_speech", - "supported_endpoints": [ - "/v1/audio/speech" - ] - }, - "us.amazon.nova-lite-v1:0": { - "input_cost_per_token": 6e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 2.4e-07, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "us.amazon.nova-micro-v1:0": { - "input_cost_per_token": 3.5e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 10000, - "max_tokens": 
10000, - "mode": "chat", - "output_cost_per_token": 1.4e-07, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "us.amazon.nova-premier-v1:0": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 1.25e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": false, - "supports_response_schema": true, - "supports_vision": true - }, - "us.amazon.nova-pro-v1:0": { - "input_cost_per_token": 8e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 300000, - "max_output_tokens": 10000, - "max_tokens": 10000, - "mode": "chat", - "output_cost_per_token": 3.2e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_vision": true - }, - "us.anthropic.claude-3-5-haiku-20241022-v1:0": { - "cache_creation_input_token_cost": 1e-06, - "cache_read_input_token_cost": 8e-08, - "input_cost_per_token": 8e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 4e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "us.anthropic.claude-haiku-4-5-20251001-v1:0": { - "cache_creation_input_token_cost": 1.375e-06, - "cache_read_input_token_cost": 1.1e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5.5e-06, - "source": 
"https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "us.anthropic.claude-3-5-sonnet-20240620-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "us.anthropic.claude-3-5-sonnet-20241022-v2:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "us.anthropic.claude-3-7-sonnet-20250219-v1:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - 
"supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "us.anthropic.claude-3-haiku-20240307-v1:0": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "us.anthropic.claude-3-opus-20240229-v1:0": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "us.anthropic.claude-3-sonnet-20240229-v1:0": { - "input_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "us.anthropic.claude-opus-4-1-20250805-v1:0": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - 
"supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "us.anthropic.claude-sonnet-4-5-20250929-v1:0": { - "cache_creation_input_token_cost": 4.125e-06, - "cache_read_input_token_cost": 3.3e-07, - "input_cost_per_token": 3.3e-06, - "input_cost_per_token_above_200k_tokens": 6.6e-06, - "output_cost_per_token_above_200k_tokens": 2.475e-05, - "cache_creation_input_token_cost_above_200k_tokens": 8.25e-06, - "cache_read_input_token_cost_above_200k_tokens": 6.6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.65e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - "au.anthropic.claude-haiku-4-5-20251001-v1:0": { - "cache_creation_input_token_cost": 1.375e-06, - "cache_read_input_token_cost": 1.1e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 5.5e-06, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 346 - }, - 
"us.anthropic.claude-opus-4-20250514-v1:0": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "us.anthropic.claude-opus-4-5-20251101-v1:0": { - "cache_creation_input_token_cost": 6.25e-06, - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 5e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 2.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "global.anthropic.claude-opus-4-5-20251101-v1:0": { - "cache_creation_input_token_cost": 6.25e-06, - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 5e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", 
- "output_cost_per_token": 2.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "us.anthropic.claude-sonnet-4-20250514-v1:0": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "us.deepseek.r1-v1:0": { - "input_cost_per_token": 1.35e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 5.4e-06, - "supports_function_calling": false, - "supports_reasoning": true, - "supports_tool_choice": false - }, - "us.meta.llama3-1-405b-instruct-v1:0": { - 
"input_cost_per_token": 5.32e-06, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.6e-05, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-1-70b-instruct-v1:0": { - "input_cost_per_token": 9.9e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 9.9e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-1-8b-instruct-v1:0": { - "input_cost_per_token": 2.2e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2.2e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-2-11b-instruct-v1:0": { - "input_cost_per_token": 3.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3.5e-07, - "supports_function_calling": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "us.meta.llama3-2-1b-instruct-v1:0": { - "input_cost_per_token": 1e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-2-3b-instruct-v1:0": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-2-90b-instruct-v1:0": { - "input_cost_per_token": 2e-06, - "litellm_provider": 
"bedrock", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_tool_choice": false, - "supports_vision": true - }, - "us.meta.llama3-3-70b-instruct-v1:0": { - "input_cost_per_token": 7.2e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7.2e-07, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama4-maverick-17b-instruct-v1:0": { - "input_cost_per_token": 2.4e-07, - "input_cost_per_token_batches": 1.2e-07, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 9.7e-07, - "output_cost_per_token_batches": 4.85e-07, - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "code" - ], - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama4-scout-17b-instruct-v1:0": { - "input_cost_per_token": 1.7e-07, - "input_cost_per_token_batches": 8.5e-08, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 6.6e-07, - "output_cost_per_token_batches": 3.3e-07, - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "code" - ], - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.mistral.pixtral-large-2502-v1:0": { - "input_cost_per_token": 2e-06, - "litellm_provider": "bedrock_converse", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_function_calling": true, - "supports_tool_choice": false - }, - "v0/v0-1.0-md": { - 
"input_cost_per_token": 3e-06, - "litellm_provider": "v0", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "v0/v0-1.5-lg": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "v0", - "max_input_tokens": 512000, - "max_output_tokens": 512000, - "max_tokens": 512000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "v0/v0-1.5-md": { - "input_cost_per_token": 3e-06, - "litellm_provider": "v0", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vercel_ai_gateway/alibaba/qwen-3-14b": { - "input_cost_per_token": 8e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 40960, - "max_output_tokens": 16384, - "max_tokens": 40960, - "mode": "chat", - "output_cost_per_token": 2.4e-07 - }, - "vercel_ai_gateway/alibaba/qwen-3-235b": { - "input_cost_per_token": 2e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 40960, - "max_output_tokens": 16384, - "max_tokens": 40960, - "mode": "chat", - "output_cost_per_token": 6e-07 - }, - "vercel_ai_gateway/alibaba/qwen-3-30b": { - "input_cost_per_token": 1e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 40960, - "max_output_tokens": 16384, - "max_tokens": 40960, - "mode": "chat", - "output_cost_per_token": 3e-07 - }, - 
"vercel_ai_gateway/alibaba/qwen-3-32b": { - "input_cost_per_token": 1e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 40960, - "max_output_tokens": 16384, - "max_tokens": 40960, - "mode": "chat", - "output_cost_per_token": 3e-07 - }, - "vercel_ai_gateway/alibaba/qwen3-coder": { - "input_cost_per_token": 4e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 262144, - "max_output_tokens": 66536, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 1.6e-06 - }, - "vercel_ai_gateway/amazon/nova-lite": { - "input_cost_per_token": 6e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 300000, - "max_output_tokens": 8192, - "max_tokens": 300000, - "mode": "chat", - "output_cost_per_token": 2.4e-07 - }, - "vercel_ai_gateway/amazon/nova-micro": { - "input_cost_per_token": 3.5e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.4e-07 - }, - "vercel_ai_gateway/amazon/nova-pro": { - "input_cost_per_token": 8e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 300000, - "max_output_tokens": 8192, - "max_tokens": 300000, - "mode": "chat", - "output_cost_per_token": 3.2e-06 - }, - "vercel_ai_gateway/amazon/titan-embed-text-v2": { - "input_cost_per_token": 2e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 0, - "max_output_tokens": 0, - "max_tokens": 0, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "vercel_ai_gateway/anthropic/claude-3-haiku": { - "cache_creation_input_token_cost": 3e-07, - "cache_read_input_token_cost": 3e-08, - "input_cost_per_token": 2.5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 1.25e-06 - }, - "vercel_ai_gateway/anthropic/claude-3-opus": { - "cache_creation_input_token_cost": 
1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 7.5e-05 - }, - "vercel_ai_gateway/anthropic/claude-3.5-haiku": { - "cache_creation_input_token_cost": 1e-06, - "cache_read_input_token_cost": 8e-08, - "input_cost_per_token": 8e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 4e-06 - }, - "vercel_ai_gateway/anthropic/claude-3.5-sonnet": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 1.5e-05 - }, - "vercel_ai_gateway/anthropic/claude-3.7-sonnet": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 1.5e-05 - }, - "vercel_ai_gateway/anthropic/claude-4-opus": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 7.5e-05 - }, - "vercel_ai_gateway/anthropic/claude-4-sonnet": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 200000, - 
"mode": "chat", - "output_cost_per_token": 1.5e-05 - }, - "vercel_ai_gateway/cohere/command-a": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 256000, - "max_output_tokens": 8000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1e-05 - }, - "vercel_ai_gateway/cohere/command-r": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-07 - }, - "vercel_ai_gateway/cohere/command-r-plus": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05 - }, - "vercel_ai_gateway/cohere/embed-v4.0": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 0, - "max_output_tokens": 0, - "max_tokens": 0, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "vercel_ai_gateway/deepseek/deepseek-r1": { - "input_cost_per_token": 5.5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2.19e-06 - }, - "vercel_ai_gateway/deepseek/deepseek-r1-distill-llama-70b": { - "input_cost_per_token": 7.5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 9.9e-07 - }, - "vercel_ai_gateway/deepseek/deepseek-v3": { - "input_cost_per_token": 9e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 9e-07 - }, - "vercel_ai_gateway/google/gemini-2.0-flash": { - "input_cost_per_token": 1.5e-07, - 
"litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_tokens": 1048576, - "mode": "chat", - "output_cost_per_token": 6e-07 - }, - "vercel_ai_gateway/google/gemini-2.0-flash-lite": { - "input_cost_per_token": 7.5e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_tokens": 1048576, - "mode": "chat", - "output_cost_per_token": 3e-07 - }, - "vercel_ai_gateway/google/gemini-2.5-flash": { - "input_cost_per_token": 3e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 1000000, - "max_output_tokens": 65536, - "max_tokens": 1000000, - "mode": "chat", - "output_cost_per_token": 2.5e-06 - }, - "vercel_ai_gateway/google/gemini-2.5-pro": { - "input_cost_per_token": 2.5e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 1048576, - "max_output_tokens": 65536, - "max_tokens": 1048576, - "mode": "chat", - "output_cost_per_token": 1e-05 - }, - "vercel_ai_gateway/google/gemini-embedding-001": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 0, - "max_output_tokens": 0, - "max_tokens": 0, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "vercel_ai_gateway/google/gemma-2-9b": { - "input_cost_per_token": 2e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2e-07 - }, - "vercel_ai_gateway/google/text-embedding-005": { - "input_cost_per_token": 2.5e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 0, - "max_output_tokens": 0, - "max_tokens": 0, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "vercel_ai_gateway/google/text-multilingual-embedding-002": { - "input_cost_per_token": 2.5e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 0, - "max_output_tokens": 0, - "max_tokens": 0, - "mode": "embedding", - 
"output_cost_per_token": 0.0 - }, - "vercel_ai_gateway/inception/mercury-coder-small": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 32000, - "max_output_tokens": 16384, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 1e-06 - }, - "vercel_ai_gateway/meta/llama-3-70b": { - "input_cost_per_token": 5.9e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 7.9e-07 - }, - "vercel_ai_gateway/meta/llama-3-8b": { - "input_cost_per_token": 5e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 8e-08 - }, - "vercel_ai_gateway/meta/llama-3.1-70b": { - "input_cost_per_token": 7.2e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 7.2e-07 - }, - "vercel_ai_gateway/meta/llama-3.1-8b": { - "input_cost_per_token": 5e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131000, - "max_output_tokens": 131072, - "max_tokens": 131000, - "mode": "chat", - "output_cost_per_token": 8e-08 - }, - "vercel_ai_gateway/meta/llama-3.2-11b": { - "input_cost_per_token": 1.6e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.6e-07 - }, - "vercel_ai_gateway/meta/llama-3.2-1b": { - "input_cost_per_token": 1e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-07 - }, - "vercel_ai_gateway/meta/llama-3.2-3b": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vercel_ai_gateway", - 
"max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-07 - }, - "vercel_ai_gateway/meta/llama-3.2-90b": { - "input_cost_per_token": 7.2e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 7.2e-07 - }, - "vercel_ai_gateway/meta/llama-3.3-70b": { - "input_cost_per_token": 7.2e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 7.2e-07 - }, - "vercel_ai_gateway/meta/llama-4-maverick": { - "input_cost_per_token": 2e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131072, - "max_output_tokens": 8192, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 6e-07 - }, - "vercel_ai_gateway/meta/llama-4-scout": { - "input_cost_per_token": 1e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131072, - "max_output_tokens": 8192, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 3e-07 - }, - "vercel_ai_gateway/mistral/codestral": { - "input_cost_per_token": 3e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 256000, - "max_output_tokens": 4000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 9e-07 - }, - "vercel_ai_gateway/mistral/codestral-embed": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 0, - "max_output_tokens": 0, - "max_tokens": 0, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "vercel_ai_gateway/mistral/devstral-small": { - "input_cost_per_token": 7e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 2.8e-07 - }, - 
"vercel_ai_gateway/mistral/magistral-medium": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 64000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 5e-06 - }, - "vercel_ai_gateway/mistral/magistral-small": { - "input_cost_per_token": 5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 64000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-06 - }, - "vercel_ai_gateway/mistral/ministral-3b": { - "input_cost_per_token": 4e-08, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 4000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 4e-08 - }, - "vercel_ai_gateway/mistral/ministral-8b": { - "input_cost_per_token": 1e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 4000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-07 - }, - "vercel_ai_gateway/mistral/mistral-embed": { - "input_cost_per_token": 1e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 0, - "max_output_tokens": 0, - "max_tokens": 0, - "mode": "chat", - "output_cost_per_token": 0.0 - }, - "vercel_ai_gateway/mistral/mistral-large": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 32000, - "max_output_tokens": 4000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 6e-06 - }, - "vercel_ai_gateway/mistral/mistral-saba-24b": { - "input_cost_per_token": 7.9e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 7.9e-07 - }, - "vercel_ai_gateway/mistral/mistral-small": { - "input_cost_per_token": 1e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 32000, - 
"max_output_tokens": 4000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 3e-07 - }, - "vercel_ai_gateway/mistral/mixtral-8x22b-instruct": { - "input_cost_per_token": 1.2e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 65536, - "max_output_tokens": 2048, - "max_tokens": 65536, - "mode": "chat", - "output_cost_per_token": 1.2e-06 - }, - "vercel_ai_gateway/mistral/pixtral-12b": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 4000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-07 - }, - "vercel_ai_gateway/mistral/pixtral-large": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 4000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-06 - }, - "vercel_ai_gateway/moonshotai/kimi-k2": { - "input_cost_per_token": 5.5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131072, - "max_output_tokens": 16384, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.2e-06 - }, - "vercel_ai_gateway/morph/morph-v3-fast": { - "input_cost_per_token": 8e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 32768, - "max_output_tokens": 16384, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.2e-06 - }, - "vercel_ai_gateway/morph/morph-v3-large": { - "input_cost_per_token": 9e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 32768, - "max_output_tokens": 16384, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1.9e-06 - }, - "vercel_ai_gateway/openai/gpt-3.5-turbo": { - "input_cost_per_token": 5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "max_tokens": 16385, - "mode": "chat", - "output_cost_per_token": 1.5e-06 - }, - 
"vercel_ai_gateway/openai/gpt-3.5-turbo-instruct": { - "input_cost_per_token": 1.5e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 2e-06 - }, - "vercel_ai_gateway/openai/gpt-4-turbo": { - "input_cost_per_token": 1e-05, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3e-05 - }, - "vercel_ai_gateway/openai/gpt-4.1": { - "cache_creation_input_token_cost": 0.0, - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 1047576, - "mode": "chat", - "output_cost_per_token": 8e-06 - }, - "vercel_ai_gateway/openai/gpt-4.1-mini": { - "cache_creation_input_token_cost": 0.0, - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 4e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 1047576, - "mode": "chat", - "output_cost_per_token": 1.6e-06 - }, - "vercel_ai_gateway/openai/gpt-4.1-nano": { - "cache_creation_input_token_cost": 0.0, - "cache_read_input_token_cost": 2.5e-08, - "input_cost_per_token": 1e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 1047576, - "max_output_tokens": 32768, - "max_tokens": 1047576, - "mode": "chat", - "output_cost_per_token": 4e-07 - }, - "vercel_ai_gateway/openai/gpt-4o": { - "cache_creation_input_token_cost": 0.0, - "cache_read_input_token_cost": 1.25e-06, - "input_cost_per_token": 2.5e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1e-05 - }, - "vercel_ai_gateway/openai/gpt-4o-mini": { - "cache_creation_input_token_cost": 0.0, - 
"cache_read_input_token_cost": 7.5e-08, - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-07 - }, - "vercel_ai_gateway/openai/o1": { - "cache_creation_input_token_cost": 0.0, - "cache_read_input_token_cost": 7.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 6e-05 - }, - "vercel_ai_gateway/openai/o3": { - "cache_creation_input_token_cost": 0.0, - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 2e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 8e-06 - }, - "vercel_ai_gateway/openai/o3-mini": { - "cache_creation_input_token_cost": 0.0, - "cache_read_input_token_cost": 5.5e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 4.4e-06 - }, - "vercel_ai_gateway/openai/o4-mini": { - "cache_creation_input_token_cost": 0.0, - "cache_read_input_token_cost": 2.75e-07, - "input_cost_per_token": 1.1e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 4.4e-06 - }, - "vercel_ai_gateway/openai/text-embedding-3-large": { - "input_cost_per_token": 1.3e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 0, - "max_output_tokens": 0, - "max_tokens": 0, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "vercel_ai_gateway/openai/text-embedding-3-small": { - "input_cost_per_token": 2e-08, - "litellm_provider": 
"vercel_ai_gateway", - "max_input_tokens": 0, - "max_output_tokens": 0, - "max_tokens": 0, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "vercel_ai_gateway/openai/text-embedding-ada-002": { - "input_cost_per_token": 1e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 0, - "max_output_tokens": 0, - "max_tokens": 0, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "vercel_ai_gateway/perplexity/sonar": { - "input_cost_per_token": 1e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 127000, - "max_output_tokens": 8000, - "max_tokens": 127000, - "mode": "chat", - "output_cost_per_token": 1e-06 - }, - "vercel_ai_gateway/perplexity/sonar-pro": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 200000, - "max_output_tokens": 8000, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 1.5e-05 - }, - "vercel_ai_gateway/perplexity/sonar-reasoning": { - "input_cost_per_token": 1e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 127000, - "max_output_tokens": 8000, - "max_tokens": 127000, - "mode": "chat", - "output_cost_per_token": 5e-06 - }, - "vercel_ai_gateway/perplexity/sonar-reasoning-pro": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 127000, - "max_output_tokens": 8000, - "max_tokens": 127000, - "mode": "chat", - "output_cost_per_token": 8e-06 - }, - "vercel_ai_gateway/vercel/v0-1.0-md": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 32000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-05 - }, - "vercel_ai_gateway/vercel/v0-1.5-md": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-05 - }, - 
"vercel_ai_gateway/xai/grok-2": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131072, - "max_output_tokens": 4000, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1e-05 - }, - "vercel_ai_gateway/xai/grok-2-vision": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1e-05 - }, - "vercel_ai_gateway/xai/grok-3": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.5e-05 - }, - "vercel_ai_gateway/xai/grok-3-fast": { - "input_cost_per_token": 5e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.5e-05 - }, - "vercel_ai_gateway/xai/grok-3-mini": { - "input_cost_per_token": 3e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-07 - }, - "vercel_ai_gateway/xai/grok-3-mini-fast": { - "input_cost_per_token": 6e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 4e-06 - }, - "vercel_ai_gateway/xai/grok-4": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1.5e-05 - }, - "vercel_ai_gateway/zai/glm-4.5": { - "input_cost_per_token": 6e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - 
"mode": "chat", - "output_cost_per_token": 2.2e-06 - }, - "vercel_ai_gateway/zai/glm-4.5-air": { - "input_cost_per_token": 2e-07, - "litellm_provider": "vercel_ai_gateway", - "max_input_tokens": 128000, - "max_output_tokens": 96000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.1e-06 - }, - "vercel_ai_gateway/zai/glm-4.6": { - "litellm_provider": "vercel_ai_gateway", - "cache_read_input_token_cost": 1.1e-07, - "input_cost_per_token": 4.5e-07, - "max_input_tokens": 200000, - "max_output_tokens": 200000, - "max_tokens": 200000, - "mode": "chat", - "output_cost_per_token": 1.8e-06, - "source": "https://vercel.com/ai-gateway/models/glm-4.6", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/chirp": { - "input_cost_per_character": 30e-06, - "litellm_provider": "vertex_ai", - "mode": "audio_speech", - "source": "https://cloud.google.com/text-to-speech/pricing", - "supported_endpoints": [ - "/v1/audio/speech" - ] - }, - "vertex_ai/claude-3-5-haiku": { - "input_cost_per_token": 1e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 5e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_tool_choice": true - }, - "vertex_ai/claude-3-5-haiku@20241022": { - "input_cost_per_token": 1e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 5e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_tool_choice": true - }, - "vertex_ai/claude-haiku-4-5@20251001": { - "cache_creation_input_token_cost": 1.25e-06, - "cache_read_input_token_cost": 1e-07, - "input_cost_per_token": 1e-06, - 
"litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 5e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/partner-models/claude/haiku-4-5", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true - }, - "vertex_ai/claude-3-5-sonnet": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-3-5-sonnet-v2": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-3-5-sonnet-v2@20241022": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-3-5-sonnet@20240620": { - "input_cost_per_token": 3e-06, - 
"litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-3-7-sonnet@20250219": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "deprecation_date": "2025-06-01", - "input_cost_per_token": 3e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "vertex_ai/claude-3-haiku": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-3-haiku@20240307": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.25e-06, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-3-opus": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - 
"max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-3-opus@20240229": { - "input_cost_per_token": 1.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-3-sonnet": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-3-sonnet@20240229": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "max_tokens": 4096, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-opus-4": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - 
"supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "vertex_ai/claude-opus-4-1": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "input_cost_per_token_batches": 7.5e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "output_cost_per_token_batches": 3.75e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-opus-4-1@20250805": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "input_cost_per_token_batches": 7.5e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "output_cost_per_token_batches": 3.75e-05, - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-opus-4-5": { - "cache_creation_input_token_cost": 6.25e-06, - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 5e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 2.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - 
"supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "vertex_ai/claude-opus-4-5@20251101": { - "cache_creation_input_token_cost": 6.25e-06, - "cache_read_input_token_cost": 5e-07, - "input_cost_per_token": 5e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 2.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "vertex_ai/claude-sonnet-4-5": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "input_cost_per_token_batches": 1.5e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "output_cost_per_token_batches": 7.5e-06, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - 
"supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-sonnet-4-5@20250929": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "input_cost_per_token_batches": 1.5e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "output_cost_per_token_batches": 7.5e-06, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/claude-opus-4@20250514": { - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "input_cost_per_token": 1.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 200000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 7.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "vertex_ai/claude-sonnet-4": { - 
"cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "vertex_ai/claude-sonnet-4@20250514": { - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_200k_tokens": 6e-06, - "output_cost_per_token_above_200k_tokens": 2.25e-05, - "cache_creation_input_token_cost_above_200k_tokens": 7.5e-06, - "cache_read_input_token_cost_above_200k_tokens": 6e-07, - "litellm_provider": "vertex_ai-anthropic_models", - "max_input_tokens": 1000000, - "max_output_tokens": 64000, - "max_tokens": 64000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "search_context_cost_per_query": { - "search_context_size_high": 0.01, - "search_context_size_low": 0.01, - "search_context_size_medium": 0.01 - }, - "supports_assistant_prefill": true, - "supports_computer_use": true, - "supports_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_response_schema": true, - 
"supports_tool_choice": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "vertex_ai/mistralai/codestral-2@001": { - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 9e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/codestral-2": { - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 9e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/codestral-2@001": { - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 9e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistralai/codestral-2": { - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 9e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/codestral-2501": { - "input_cost_per_token": 2e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/codestral@2405": { - "input_cost_per_token": 2e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-07, - 
"supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/codestral@latest": { - "input_cost_per_token": 2e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 6e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/deepseek-ai/deepseek-v3.1-maas": { - "input_cost_per_token": 1.35e-06, - "litellm_provider": "vertex_ai-deepseek_models", - "max_input_tokens": 163840, - "max_output_tokens": 32768, - "max_tokens": 163840, - "mode": "chat", - "output_cost_per_token": 5.4e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supported_regions": [ - "us-west2" - ], - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "vertex_ai/deepseek-ai/deepseek-v3.2-maas": { - "input_cost_per_token": 5.6e-07, - "input_cost_per_token_batches": 2.8e-07, - "litellm_provider": "vertex_ai-deepseek_models", - "max_input_tokens": 163840, - "max_output_tokens": 32768, - "max_tokens": 163840, - "mode": "chat", - "output_cost_per_token": 1.68e-06, - "output_cost_per_token_batches": 8.4e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supported_regions": [ - "us-west2" - ], - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "vertex_ai/deepseek-ai/deepseek-r1-0528-maas": { - "input_cost_per_token": 1.35e-06, - "litellm_provider": "vertex_ai-deepseek_models", - "max_input_tokens": 65336, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 5.4e-06, - "source": 
"https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "vertex_ai/gemini-2.5-flash-image": { - "cache_read_input_token_cost": 3e-08, - "input_cost_per_audio_token": 1e-06, - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-language-models", - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_images_per_prompt": 3000, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "max_pdf_size_mb": 30, - "max_video_length": 1, - "max_videos_per_prompt": 10, - "mode": "image_generation", - "output_cost_per_image": 0.039, - "output_cost_per_reasoning_token": 2.5e-06, - "output_cost_per_token": 2.5e-06, - "rpm": 100000, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/image-generation#edit-an-image", - "supported_endpoints": [ - "/v1/chat/completions", - "/v1/completions", - "/v1/batch" - ], - "supported_modalities": [ - "text", - "image", - "audio", - "video" - ], - "supported_output_modalities": [ - "text", - "image" - ], - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_url_context": true, - "supports_vision": true, - "supports_web_search": false, - "tpm": 8000000 - }, - "vertex_ai/gemini-3-pro-image-preview": { - "input_cost_per_image": 0.0011, - "input_cost_per_token": 2e-06, - "input_cost_per_token_batches": 1e-06, - "litellm_provider": "vertex_ai-language-models", - "max_input_tokens": 65536, - "max_output_tokens": 32768, - "max_tokens": 65536, - "mode": "image_generation", - "output_cost_per_image": 0.134, - "output_cost_per_image_token": 
1.2e-04, - "output_cost_per_token": 1.2e-05, - "output_cost_per_token_batches": 6e-06, - "source": "https://docs.cloud.google.com/vertex-ai/generative-ai/docs/models/gemini/3-pro-image" - }, - "vertex_ai/imagegeneration@006": { - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "output_cost_per_image": 0.02, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "vertex_ai/imagen-3.0-fast-generate-001": { - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "output_cost_per_image": 0.02, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "vertex_ai/imagen-3.0-generate-001": { - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "vertex_ai/imagen-3.0-generate-002": { - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "vertex_ai/imagen-3.0-capability-001": { - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/image/edit-insert-objects" - }, - "vertex_ai/imagen-4.0-fast-generate-001": { - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "output_cost_per_image": 0.02, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "vertex_ai/imagen-4.0-generate-001": { - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "output_cost_per_image": 0.04, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "vertex_ai/imagen-4.0-ultra-generate-001": { - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "output_cost_per_image": 0.06, - "source": 
"https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "vertex_ai/jamba-1.5": { - "input_cost_per_token": 2e-07, - "litellm_provider": "vertex_ai-ai21_models", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_tool_choice": true - }, - "vertex_ai/jamba-1.5-large": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vertex_ai-ai21_models", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supports_tool_choice": true - }, - "vertex_ai/jamba-1.5-large@001": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vertex_ai-ai21_models", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 8e-06, - "supports_tool_choice": true - }, - "vertex_ai/jamba-1.5-mini": { - "input_cost_per_token": 2e-07, - "litellm_provider": "vertex_ai-ai21_models", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_tool_choice": true - }, - "vertex_ai/jamba-1.5-mini@001": { - "input_cost_per_token": 2e-07, - "litellm_provider": "vertex_ai-ai21_models", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 4e-07, - "supports_tool_choice": true - }, - "vertex_ai/meta/llama-3.1-405b-instruct-maas": { - "input_cost_per_token": 5e-06, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.6e-05, - "source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas", - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - 
"vertex_ai/meta/llama-3.1-70b-instruct-maas": { - "input_cost_per_token": 0.0, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas", - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/meta/llama-3.1-8b-instruct-maas": { - "input_cost_per_token": 0.0, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 128000, - "metadata": { - "notes": "VertexAI states that The Llama 3.1 API service for llama-3.1-70b-instruct-maas and llama-3.1-8b-instruct-maas are in public preview and at no cost." - }, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas", - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/meta/llama-3.2-90b-vision-instruct-maas": { - "input_cost_per_token": 0.0, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "max_tokens": 128000, - "metadata": { - "notes": "VertexAI states that The Llama 3.2 API service is at no cost during public preview, and will be priced as per dollar-per-1M-tokens at GA." 
- }, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas", - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/meta/llama-4-maverick-17b-128e-instruct-maas": { - "input_cost_per_token": 3.5e-07, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 1000000, - "max_output_tokens": 1000000, - "max_tokens": 1000000, - "mode": "chat", - "output_cost_per_token": 1.15e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "code" - ], - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/meta/llama-4-maverick-17b-16e-instruct-maas": { - "input_cost_per_token": 3.5e-07, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 1000000, - "max_output_tokens": 1000000, - "max_tokens": 1000000, - "mode": "chat", - "output_cost_per_token": 1.15e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "code" - ], - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/meta/llama-4-scout-17b-128e-instruct-maas": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 10000000, - "max_output_tokens": 10000000, - "max_tokens": 10000000, - "mode": "chat", - "output_cost_per_token": 7e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "code" - ], - "supports_function_calling": true, - "supports_tool_choice": true - }, - 
"vertex_ai/meta/llama-4-scout-17b-16e-instruct-maas": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 10000000, - "max_output_tokens": 10000000, - "max_tokens": 10000000, - "mode": "chat", - "output_cost_per_token": 7e-07, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "text", - "code" - ], - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/meta/llama3-405b-instruct-maas": { - "input_cost_per_token": 0.0, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supports_tool_choice": true - }, - "vertex_ai/meta/llama3-70b-instruct-maas": { - "input_cost_per_token": 0.0, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supports_tool_choice": true - }, - "vertex_ai/meta/llama3-8b-instruct-maas": { - "input_cost_per_token": 0.0, - "litellm_provider": "vertex_ai-llama_models", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_tokens": 32000, - "mode": "chat", - "output_cost_per_token": 0.0, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supports_tool_choice": true - }, - "vertex_ai/minimaxai/minimax-m2-maas": { - "input_cost_per_token": 3e-07, - "litellm_provider": "vertex_ai-minimax_models", - "max_input_tokens": 196608, - "max_output_tokens": 196608, - "max_tokens": 196608, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "source": 
"https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/moonshotai/kimi-k2-thinking-maas": { - "input_cost_per_token": 6e-07, - "litellm_provider": "vertex_ai-moonshot_models", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 2.5e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "vertex_ai/mistral-medium-3": { - "input_cost_per_token": 4e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistral-medium-3@001": { - "input_cost_per_token": 4e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistralai/mistral-medium-3": { - "input_cost_per_token": 4e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistralai/mistral-medium-3@001": { - "input_cost_per_token": 4e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 2e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistral-large-2411": { - "input_cost_per_token": 
2e-06, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistral-large@2407": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistral-large@2411-001": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistral-large@latest": { - "input_cost_per_token": 2e-06, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 6e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistral-nemo@2407": { - "input_cost_per_token": 3e-06, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistral-nemo@latest": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 1.5e-07, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistral-small-2503": { - "input_cost_per_token": 1e-06, - "litellm_provider": 
"vertex_ai-mistral_models", - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "max_tokens": 128000, - "mode": "chat", - "output_cost_per_token": 3e-06, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true - }, - "vertex_ai/mistral-small-2503@001": { - "input_cost_per_token": 1e-06, - "litellm_provider": "vertex_ai-mistral_models", - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "max_tokens": 8191, - "mode": "chat", - "output_cost_per_token": 3e-06, - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/mistral-ocr-2505": { - "litellm_provider": "vertex_ai", - "mode": "ocr", - "ocr_cost_per_page": 5e-4, - "supported_endpoints": [ - "/v1/ocr" - ], - "source": "https://cloud.google.com/generative-ai-app-builder/pricing" - }, - "vertex_ai/openai/gpt-oss-120b-maas": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vertex_ai-openai_models", - "max_input_tokens": 131072, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 6e-07, - "source": "https://console.cloud.google.com/vertex-ai/publishers/openai/model-garden/gpt-oss-120b-maas", - "supports_reasoning": true - }, - "vertex_ai/openai/gpt-oss-20b-maas": { - "input_cost_per_token": 7.5e-08, - "litellm_provider": "vertex_ai-openai_models", - "max_input_tokens": 131072, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 3e-07, - "source": "https://console.cloud.google.com/vertex-ai/publishers/openai/model-garden/gpt-oss-120b-maas", - "supports_reasoning": true - }, - "vertex_ai/qwen/qwen3-235b-a22b-instruct-2507-maas": { - "input_cost_per_token": 2.5e-07, - "litellm_provider": "vertex_ai-qwen_models", - "max_input_tokens": 262144, - "max_output_tokens": 16384, - "max_tokens": 16384, - "mode": "chat", - "output_cost_per_token": 1e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - 
"supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/qwen/qwen3-coder-480b-a35b-instruct-maas": { - "input_cost_per_token": 1e-06, - "litellm_provider": "vertex_ai-qwen_models", - "max_input_tokens": 262144, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 4e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/qwen/qwen3-next-80b-a3b-instruct-maas": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vertex_ai-qwen_models", - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/qwen/qwen3-next-80b-a3b-thinking-maas": { - "input_cost_per_token": 1.5e-07, - "litellm_provider": "vertex_ai-qwen_models", - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "max_tokens": 262144, - "mode": "chat", - "output_cost_per_token": 1.2e-06, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", - "supports_function_calling": true, - "supports_tool_choice": true - }, - "vertex_ai/veo-2.0-generate-001": { - "litellm_provider": "vertex_ai-video-models", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.35, - "source": "https://ai.google.dev/gemini-api/docs/video", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - "vertex_ai/veo-3.0-fast-generate-preview": { - "litellm_provider": "vertex_ai-video-models", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.15, - "source": "https://ai.google.dev/gemini-api/docs/video", - "supported_modalities": [ - "text" - ], - 
"supported_output_modalities": [ - "video" - ] - }, - "vertex_ai/veo-3.0-generate-preview": { - "litellm_provider": "vertex_ai-video-models", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.4, - "source": "https://ai.google.dev/gemini-api/docs/video", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - "vertex_ai/veo-3.0-fast-generate-001": { - "litellm_provider": "vertex_ai-video-models", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.15, - "source": "https://ai.google.dev/gemini-api/docs/video", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - "vertex_ai/veo-3.0-generate-001": { - "litellm_provider": "vertex_ai-video-models", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.4, - "source": "https://ai.google.dev/gemini-api/docs/video", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - "vertex_ai/veo-3.1-generate-preview": { - "litellm_provider": "vertex_ai-video-models", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.4, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/veo", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - "vertex_ai/veo-3.1-fast-generate-preview": { - "litellm_provider": "vertex_ai-video-models", - "max_input_tokens": 1024, - "max_tokens": 1024, - "mode": "video_generation", - "output_cost_per_second": 0.15, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/veo", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ] - }, - "voyage/rerank-2": { - "input_cost_per_token": 5e-08, - "litellm_provider": "voyage", - 
"max_input_tokens": 16000, - "max_output_tokens": 16000, - "max_query_tokens": 16000, - "max_tokens": 16000, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "voyage/rerank-2-lite": { - "input_cost_per_token": 2e-08, - "litellm_provider": "voyage", - "max_input_tokens": 8000, - "max_output_tokens": 8000, - "max_query_tokens": 8000, - "max_tokens": 8000, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "voyage/rerank-2.5": { - "input_cost_per_token": 5e-08, - "litellm_provider": "voyage", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_query_tokens": 32000, - "max_tokens": 32000, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "voyage/rerank-2.5-lite": { - "input_cost_per_token": 2e-08, - "litellm_provider": "voyage", - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "max_query_tokens": 32000, - "max_tokens": 32000, - "mode": "rerank", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-2": { - "input_cost_per_token": 1e-07, - "litellm_provider": "voyage", - "max_input_tokens": 4000, - "max_tokens": 4000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-3": { - "input_cost_per_token": 6e-08, - "litellm_provider": "voyage", - "max_input_tokens": 32000, - "max_tokens": 32000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-3-large": { - "input_cost_per_token": 1.8e-07, - "litellm_provider": "voyage", - "max_input_tokens": 32000, - "max_tokens": 32000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-3-lite": { - "input_cost_per_token": 2e-08, - "litellm_provider": "voyage", - "max_input_tokens": 32000, - "max_tokens": 32000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-3.5": { - "input_cost_per_token": 6e-08, - "litellm_provider": "voyage", - "max_input_tokens": 32000, - "max_tokens": 32000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-3.5-lite": { - 
"input_cost_per_token": 2e-08, - "litellm_provider": "voyage", - "max_input_tokens": 32000, - "max_tokens": 32000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-code-2": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "voyage", - "max_input_tokens": 16000, - "max_tokens": 16000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-code-3": { - "input_cost_per_token": 1.8e-07, - "litellm_provider": "voyage", - "max_input_tokens": 32000, - "max_tokens": 32000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-context-3": { - "input_cost_per_token": 1.8e-07, - "litellm_provider": "voyage", - "max_input_tokens": 120000, - "max_tokens": 120000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-finance-2": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "voyage", - "max_input_tokens": 32000, - "max_tokens": 32000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-large-2": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "voyage", - "max_input_tokens": 16000, - "max_tokens": 16000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-law-2": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "voyage", - "max_input_tokens": 16000, - "max_tokens": 16000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-lite-01": { - "input_cost_per_token": 1e-07, - "litellm_provider": "voyage", - "max_input_tokens": 4096, - "max_tokens": 4096, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-lite-02-instruct": { - "input_cost_per_token": 1e-07, - "litellm_provider": "voyage", - "max_input_tokens": 4000, - "max_tokens": 4000, - "mode": "embedding", - "output_cost_per_token": 0.0 - }, - "voyage/voyage-multimodal-3": { - "input_cost_per_token": 1.2e-07, - "litellm_provider": "voyage", - "max_input_tokens": 32000, - "max_tokens": 32000, - "mode": 
"embedding", - "output_cost_per_token": 0.0 - }, - "wandb/openai/gpt-oss-120b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 0.015, - "output_cost_per_token": 0.06, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/openai/gpt-oss-20b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 0.005, - "output_cost_per_token": 0.02, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/zai-org/GLM-4.5": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 0.055, - "output_cost_per_token": 0.2, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/Qwen/Qwen3-235B-A22B-Instruct-2507": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 0.01, - "output_cost_per_token": 0.01, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/Qwen/Qwen3-Coder-480B-A35B-Instruct": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 0.1, - "output_cost_per_token": 0.15, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/Qwen/Qwen3-235B-A22B-Thinking-2507": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 0.01, - "output_cost_per_token": 0.01, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/moonshotai/Kimi-K2-Instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 2.5e-06, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/meta-llama/Llama-3.1-8B-Instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.022, - "output_cost_per_token": 0.022, - "litellm_provider": "wandb", - "mode": 
"chat" - }, - "wandb/deepseek-ai/DeepSeek-V3.1": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.055, - "output_cost_per_token": 0.165, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/deepseek-ai/DeepSeek-R1-0528": { - "max_tokens": 161000, - "max_input_tokens": 161000, - "max_output_tokens": 161000, - "input_cost_per_token": 0.135, - "output_cost_per_token": 0.54, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/deepseek-ai/DeepSeek-V3-0324": { - "max_tokens": 161000, - "max_input_tokens": 161000, - "max_output_tokens": 161000, - "input_cost_per_token": 0.114, - "output_cost_per_token": 0.275, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/meta-llama/Llama-3.3-70B-Instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.071, - "output_cost_per_token": 0.071, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/meta-llama/Llama-4-Scout-17B-16E-Instruct": { - "max_tokens": 64000, - "max_input_tokens": 64000, - "max_output_tokens": 64000, - "input_cost_per_token": 0.017, - "output_cost_per_token": 0.066, - "litellm_provider": "wandb", - "mode": "chat" - }, - "wandb/microsoft/Phi-4-mini-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.008, - "output_cost_per_token": 0.035, - "litellm_provider": "wandb", - "mode": "chat" - }, - "watsonx/ibm/granite-3-8b-instruct": { - "input_cost_per_token": 0.2e-06, - "litellm_provider": "watsonx", - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 0.2e-06, - "supports_audio_input": false, - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": 
true, - "supports_tool_choice": true, - "supports_vision": false - }, - "watsonx/mistralai/mistral-large": { - "input_cost_per_token": 3e-06, - "litellm_provider": "watsonx", - "max_input_tokens": 131072, - "max_output_tokens": 16384, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 10e-06, - "supports_audio_input": false, - "supports_audio_output": false, - "supports_function_calling": true, - "supports_parallel_function_calling": false, - "supports_prompt_caching": true, - "supports_response_schema": true, - "supports_system_messages": true, - "supports_tool_choice": true, - "supports_vision": false - }, - "watsonx/bigscience/mt0-xxl-13b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0005, - "output_cost_per_token": 0.002, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/core42/jais-13b-chat": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0005, - "output_cost_per_token": 0.002, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/google/flan-t5-xl-3b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.6e-06, - "output_cost_per_token": 0.6e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/ibm/granite-13b-chat-v2": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.6e-06, - "output_cost_per_token": 0.6e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - 
"supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/ibm/granite-13b-instruct-v2": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.6e-06, - "output_cost_per_token": 0.6e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/ibm/granite-3-3-8b-instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.2e-06, - "output_cost_per_token": 0.2e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "watsonx/ibm/granite-4-h-small": { - "max_tokens": 20480, - "max_input_tokens": 20480, - "max_output_tokens": 20480, - "input_cost_per_token": 0.06e-06, - "output_cost_per_token": 0.25e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "watsonx/ibm/granite-guardian-3-2-2b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.1e-06, - "output_cost_per_token": 0.1e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/ibm/granite-guardian-3-3-8b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.2e-06, - "output_cost_per_token": 0.2e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/ibm/granite-ttm-1024-96-r2": { - "max_tokens": 512, - "max_input_tokens": 512, - "max_output_tokens": 512, - 
"input_cost_per_token": 0.38e-06, - "output_cost_per_token": 0.38e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/ibm/granite-ttm-1536-96-r2": { - "max_tokens": 512, - "max_input_tokens": 512, - "max_output_tokens": 512, - "input_cost_per_token": 0.38e-06, - "output_cost_per_token": 0.38e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/ibm/granite-ttm-512-96-r2": { - "max_tokens": 512, - "max_input_tokens": 512, - "max_output_tokens": 512, - "input_cost_per_token": 0.38e-06, - "output_cost_per_token": 0.38e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/ibm/granite-vision-3-2-2b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.1e-06, - "output_cost_per_token": 0.1e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": true - }, - "watsonx/meta-llama/llama-3-2-11b-vision-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.35e-06, - "output_cost_per_token": 0.35e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true - }, - "watsonx/meta-llama/llama-3-2-1b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.1e-06, - "output_cost_per_token": 0.1e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - 
"supports_parallel_function_calling": true, - "supports_vision": false - }, - "watsonx/meta-llama/llama-3-2-3b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.15e-06, - "output_cost_per_token": 0.15e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "watsonx/meta-llama/llama-3-2-90b-vision-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true - }, - "watsonx/meta-llama/llama-3-3-70b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.71e-06, - "output_cost_per_token": 0.71e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "watsonx/meta-llama/llama-4-maverick-17b": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.35e-06, - "output_cost_per_token": 1.4e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "watsonx/meta-llama/llama-guard-3-11b-vision": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.35e-06, - "output_cost_per_token": 0.35e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": true - }, - "watsonx/mistralai/mistral-medium-2505": { - 
"max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 10e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "watsonx/mistralai/mistral-small-2503": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "input_cost_per_token": 0.1e-06, - "output_cost_per_token": 0.3e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "watsonx/mistralai/mistral-small-3-1-24b-instruct-2503": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "input_cost_per_token": 0.1e-06, - "output_cost_per_token": 0.3e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "watsonx/mistralai/pixtral-12b-2409": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 0.35e-06, - "output_cost_per_token": 0.35e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": true - }, - "watsonx/openai/gpt-oss-120b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.15e-06, - "output_cost_per_token": 0.6e-06, - "litellm_provider": "watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/sdaia/allam-1-13b-instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1.8e-06, - "output_cost_per_token": 1.8e-06, - "litellm_provider": 
"watsonx", - "mode": "chat", - "supports_function_calling": false, - "supports_parallel_function_calling": false, - "supports_vision": false - }, - "watsonx/whisper-large-v3-turbo": { - "input_cost_per_second": 0.0001, - "output_cost_per_second": 0.0001, - "litellm_provider": "watsonx", - "mode": "audio_transcription", - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "whisper-1": { - "input_cost_per_second": 0.0001, - "litellm_provider": "openai", - "mode": "audio_transcription", - "output_cost_per_second": 0.0001, - "supported_endpoints": [ - "/v1/audio/transcriptions" - ] - }, - "xai/grok-2": { - "input_cost_per_token": 2e-06, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-2-1212": { - "input_cost_per_token": 2e-06, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-2-latest": { - "input_cost_per_token": 2e-06, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-2-vision": { - "input_cost_per_image": 2e-06, - "input_cost_per_token": 2e-06, - "litellm_provider": "xai", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "xai/grok-2-vision-1212": { - 
"input_cost_per_image": 2e-06, - "input_cost_per_token": 2e-06, - "litellm_provider": "xai", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "xai/grok-2-vision-latest": { - "input_cost_per_image": 2e-06, - "input_cost_per_token": 2e-06, - "litellm_provider": "xai", - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "max_tokens": 32768, - "mode": "chat", - "output_cost_per_token": 1e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "xai/grok-3": { - "input_cost_per_token": 3e-06, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-3-beta": { - "input_cost_per_token": 3e-06, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-3-fast-beta": { - "input_cost_per_token": 5e-06, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.5e-05, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-3-fast-latest": { - 
"input_cost_per_token": 5e-06, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 2.5e-05, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-3-latest": { - "input_cost_per_token": 3e-06, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-3-mini": { - "input_cost_per_token": 3e-07, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-07, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-3-mini-beta": { - "input_cost_per_token": 3e-07, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-07, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-3-mini-fast": { - "input_cost_per_token": 6e-07, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 4e-06, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_reasoning": true, - 
"supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-3-mini-fast-beta": { - "input_cost_per_token": 6e-07, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 4e-06, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-3-mini-fast-latest": { - "input_cost_per_token": 6e-07, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 4e-06, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-3-mini-latest": { - "input_cost_per_token": 3e-07, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 5e-07, - "source": "https://x.ai/api#pricing", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": false, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-4": { - "input_cost_per_token": 3e-06, - "litellm_provider": "xai", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "source": "https://docs.x.ai/docs/models", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-4-fast-reasoning": { - "litellm_provider": "xai", - "max_input_tokens": 2e6, - "max_output_tokens": 2e6, - "max_tokens": 2e6, - "mode": "chat", - "input_cost_per_token": 0.2e-06, - 
"input_cost_per_token_above_128k_tokens": 0.4e-06, - "output_cost_per_token": 0.5e-06, - "output_cost_per_token_above_128k_tokens": 1e-06, - "cache_read_input_token_cost": 0.05e-06, - "source": "https://docs.x.ai/docs/models", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-4-fast-non-reasoning": { - "litellm_provider": "xai", - "max_input_tokens": 2e6, - "max_output_tokens": 2e6, - "cache_read_input_token_cost": 0.05e-06, - "max_tokens": 2e6, - "mode": "chat", - "input_cost_per_token": 0.2e-06, - "input_cost_per_token_above_128k_tokens": 0.4e-06, - "output_cost_per_token": 0.5e-06, - "output_cost_per_token_above_128k_tokens": 1e-06, - "source": "https://docs.x.ai/docs/models", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-4-0709": { - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_128k_tokens": 6e-06, - "litellm_provider": "xai", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "output_cost_per_token_above_128k_tokens": 30e-06, - "source": "https://docs.x.ai/docs/models", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-4-latest": { - "input_cost_per_token": 3e-06, - "input_cost_per_token_above_128k_tokens": 6e-06, - "litellm_provider": "xai", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "output_cost_per_token_above_128k_tokens": 30e-06, - "source": "https://docs.x.ai/docs/models", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_web_search": true - }, - "xai/grok-4-1-fast": { - "cache_read_input_token_cost": 0.05e-06, - "input_cost_per_token": 0.2e-06, - "input_cost_per_token_above_128k_tokens": 0.4e-06, - "litellm_provider": "xai", 
- "max_input_tokens": 2e6, - "max_output_tokens": 2e6, - "max_tokens": 2e6, - "mode": "chat", - "output_cost_per_token": 0.5e-06, - "output_cost_per_token_above_128k_tokens": 1e-06, - "source": "https://docs.x.ai/docs/models/grok-4-1-fast-reasoning", - "supports_audio_input": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "xai/grok-4-1-fast-reasoning": { - "cache_read_input_token_cost": 0.05e-06, - "input_cost_per_token": 0.2e-06, - "input_cost_per_token_above_128k_tokens": 0.4e-06, - "litellm_provider": "xai", - "max_input_tokens": 2e6, - "max_output_tokens": 2e6, - "max_tokens": 2e6, - "mode": "chat", - "output_cost_per_token": 0.5e-06, - "output_cost_per_token_above_128k_tokens": 1e-06, - "source": "https://docs.x.ai/docs/models/grok-4-1-fast-reasoning", - "supports_audio_input": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "xai/grok-4-1-fast-reasoning-latest": { - "cache_read_input_token_cost": 0.05e-06, - "input_cost_per_token": 0.2e-06, - "input_cost_per_token_above_128k_tokens": 0.4e-06, - "litellm_provider": "xai", - "max_input_tokens": 2e6, - "max_output_tokens": 2e6, - "max_tokens": 2e6, - "mode": "chat", - "output_cost_per_token": 0.5e-06, - "output_cost_per_token_above_128k_tokens": 1e-06, - "source": "https://docs.x.ai/docs/models/grok-4-1-fast-reasoning", - "supports_audio_input": true, - "supports_function_calling": true, - "supports_reasoning": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "xai/grok-4-1-fast-non-reasoning": { - "cache_read_input_token_cost": 0.05e-06, - "input_cost_per_token": 0.2e-06, - "input_cost_per_token_above_128k_tokens": 
0.4e-06, - "litellm_provider": "xai", - "max_input_tokens": 2e6, - "max_output_tokens": 2e6, - "max_tokens": 2e6, - "mode": "chat", - "output_cost_per_token": 0.5e-06, - "output_cost_per_token_above_128k_tokens": 1e-06, - "source": "https://docs.x.ai/docs/models/grok-4-1-fast-non-reasoning", - "supports_audio_input": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "xai/grok-4-1-fast-non-reasoning-latest": { - "cache_read_input_token_cost": 0.05e-06, - "input_cost_per_token": 0.2e-06, - "input_cost_per_token_above_128k_tokens": 0.4e-06, - "litellm_provider": "xai", - "max_input_tokens": 2e6, - "max_output_tokens": 2e6, - "max_tokens": 2e6, - "mode": "chat", - "output_cost_per_token": 0.5e-06, - "output_cost_per_token_above_128k_tokens": 1e-06, - "source": "https://docs.x.ai/docs/models/grok-4-1-fast-non-reasoning", - "supports_audio_input": true, - "supports_function_calling": true, - "supports_response_schema": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "xai/grok-beta": { - "input_cost_per_token": 5e-06, - "litellm_provider": "xai", - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "max_tokens": 131072, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "xai/grok-code-fast": { - "cache_read_input_token_cost": 2e-08, - "input_cost_per_token": 2e-07, - "litellm_provider": "xai", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://docs.x.ai/docs/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "xai/grok-code-fast-1": { - "cache_read_input_token_cost": 2e-08, - 
"input_cost_per_token": 2e-07, - "litellm_provider": "xai", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://docs.x.ai/docs/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "xai/grok-code-fast-1-0825": { - "cache_read_input_token_cost": 2e-08, - "input_cost_per_token": 2e-07, - "litellm_provider": "xai", - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "max_tokens": 256000, - "mode": "chat", - "output_cost_per_token": 1.5e-06, - "source": "https://docs.x.ai/docs/models", - "supports_function_calling": true, - "supports_reasoning": true, - "supports_tool_choice": true - }, - "xai/grok-vision-beta": { - "input_cost_per_image": 5e-06, - "input_cost_per_token": 5e-06, - "litellm_provider": "xai", - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "max_tokens": 8192, - "mode": "chat", - "output_cost_per_token": 1.5e-05, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "supports_web_search": true - }, - "zai/glm-4.6": { - "input_cost_per_token": 6e-07, - "output_cost_per_token": 2.2e-06, - "litellm_provider": "zai", - "max_input_tokens": 200000, - "max_output_tokens": 128000, - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "source": "https://docs.z.ai/guides/overview/pricing" - }, - "zai/glm-4.5": { - "input_cost_per_token": 6e-07, - "output_cost_per_token": 2.2e-06, - "litellm_provider": "zai", - "max_input_tokens": 128000, - "max_output_tokens": 32000, - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "source": "https://docs.z.ai/guides/overview/pricing" - }, - "zai/glm-4.5v": { - "input_cost_per_token": 6e-07, - "output_cost_per_token": 1.8e-06, - "litellm_provider": "zai", - "max_input_tokens": 128000, - "max_output_tokens": 32000, - "mode": 
"chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_vision": true, - "source": "https://docs.z.ai/guides/overview/pricing" - }, - "zai/glm-4.5-x": { - "input_cost_per_token": 2.2e-06, - "output_cost_per_token": 8.9e-06, - "litellm_provider": "zai", - "max_input_tokens": 128000, - "max_output_tokens": 32000, - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "source": "https://docs.z.ai/guides/overview/pricing" - }, - "zai/glm-4.5-air": { - "input_cost_per_token": 2e-07, - "output_cost_per_token": 1.1e-06, - "litellm_provider": "zai", - "max_input_tokens": 128000, - "max_output_tokens": 32000, - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "source": "https://docs.z.ai/guides/overview/pricing" - }, - "zai/glm-4.5-airx": { - "input_cost_per_token": 1.1e-06, - "output_cost_per_token": 4.5e-06, - "litellm_provider": "zai", - "max_input_tokens": 128000, - "max_output_tokens": 32000, - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "source": "https://docs.z.ai/guides/overview/pricing" - }, - "zai/glm-4-32b-0414-128k": { - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "zai", - "max_input_tokens": 128000, - "max_output_tokens": 32000, - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "source": "https://docs.z.ai/guides/overview/pricing" - }, - "zai/glm-4.5-flash": { - "input_cost_per_token": 0, - "output_cost_per_token": 0, - "litellm_provider": "zai", - "max_input_tokens": 128000, - "max_output_tokens": 32000, - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "source": "https://docs.z.ai/guides/overview/pricing" - }, - "vertex_ai/search_api": { - "input_cost_per_query": 1.5e-03, - "litellm_provider": "vertex_ai", - "mode": "vector_store" - }, - "openai/container": { - 
"code_interpreter_cost_per_session": 0.03, - "litellm_provider": "openai", - "mode": "chat" - }, - "openai/sora-2": { - "litellm_provider": "openai", - "mode": "video_generation", - "output_cost_per_video_per_second": 0.10, - "source": "https://platform.openai.com/docs/api-reference/videos", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "video" - ], - "supported_resolutions": [ - "720x1280", - "1280x720" - ] - }, - "openai/sora-2-pro": { - "litellm_provider": "openai", - "mode": "video_generation", - "output_cost_per_video_per_second": 0.30, - "source": "https://platform.openai.com/docs/api-reference/videos", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "video" - ], - "supported_resolutions": [ - "720x1280", - "1280x720" - ] - }, - "azure/sora-2": { - "litellm_provider": "azure", - "mode": "video_generation", - "output_cost_per_video_per_second": 0.10, - "source": "https://azure.microsoft.com/en-us/products/ai-services/video-generation", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ], - "supported_resolutions": [ - "720x1280", - "1280x720" - ] - }, - "azure/sora-2-pro": { - "litellm_provider": "azure", - "mode": "video_generation", - "output_cost_per_video_per_second": 0.30, - "source": "https://azure.microsoft.com/en-us/products/ai-services/video-generation", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ], - "supported_resolutions": [ - "720x1280", - "1280x720" - ] - }, - "azure/sora-2-pro-high-res": { - "litellm_provider": "azure", - "mode": "video_generation", - "output_cost_per_video_per_second": 0.50, - "source": "https://azure.microsoft.com/en-us/products/ai-services/video-generation", - "supported_modalities": [ - "text" - ], - "supported_output_modalities": [ - "video" - ], - "supported_resolutions": [ - "1024x1792", - "1792x1024" - ] - }, - "runwayml/gen4_turbo": { - 
"litellm_provider": "runwayml", - "mode": "video_generation", - "output_cost_per_video_per_second": 0.05, - "source": "https://docs.dev.runwayml.com/guides/pricing/", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "video" - ], - "supported_resolutions": [ - "1280x720", - "720x1280" - ], - "metadata": { - "comment": "5 credits per second @ $0.01 per credit = $0.05 per second" - } - }, - "runwayml/gen4_aleph": { - "litellm_provider": "runwayml", - "mode": "video_generation", - "output_cost_per_video_per_second": 0.15, - "source": "https://docs.dev.runwayml.com/guides/pricing/", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "video" - ], - "supported_resolutions": [ - "1280x720", - "720x1280" - ], - "metadata": { - "comment": "15 credits per second @ $0.01 per credit = $0.15 per second" - } - }, - "runwayml/gen3a_turbo": { - "litellm_provider": "runwayml", - "mode": "video_generation", - "output_cost_per_video_per_second": 0.05, - "source": "https://docs.dev.runwayml.com/guides/pricing/", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "video" - ], - "supported_resolutions": [ - "1280x720", - "720x1280" - ], - "metadata": { - "comment": "5 credits per second @ $0.01 per credit = $0.05 per second" - } - }, - "runwayml/gen4_image": { - "litellm_provider": "runwayml", - "mode": "image_generation", - "input_cost_per_image": 0.05, - "output_cost_per_image": 0.05, - "source": "https://docs.dev.runwayml.com/guides/pricing/", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "image" - ], - "supported_resolutions": [ - "1280x720", - "1920x1080" - ], - "metadata": { - "comment": "5 credits per 720p image or 8 credits per 1080p image @ $0.01 per credit. 
Using 5 credits ($0.05) as base cost" - } - }, - "runwayml/gen4_image_turbo": { - "litellm_provider": "runwayml", - "mode": "image_generation", - "input_cost_per_image": 0.02, - "output_cost_per_image": 0.02, - "source": "https://docs.dev.runwayml.com/guides/pricing/", - "supported_modalities": [ - "text", - "image" - ], - "supported_output_modalities": [ - "image" - ], - "supported_resolutions": [ - "1280x720", - "1920x1080" - ], - "metadata": { - "comment": "2 credits per image (any resolution) @ $0.01 per credit = $0.02 per image" - } - }, - "runwayml/eleven_multilingual_v2": { - "litellm_provider": "runwayml", - "mode": "audio_speech", - "input_cost_per_character": 3e-07, - "source": "https://docs.dev.runwayml.com/guides/pricing/", - "metadata": { - "comment": "Estimated cost based on standard TTS pricing. RunwayML uses ElevenLabs models." - } - }, - "fireworks_ai/accounts/fireworks/models/qwen3-coder-480b-a35b-instruct": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 4.5e-07, - "output_cost_per_token": 1.8e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/flux-kontext-pro": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 4e-08, - "output_cost_per_token": 4e-08, - "litellm_provider": "fireworks_ai", - "mode": "image_generation" - }, - "fireworks_ai/accounts/fireworks/models/SSD-1B": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-10, - "output_cost_per_token": 1.3e-10, - "litellm_provider": "fireworks_ai", - "mode": "image_generation" - }, - "fireworks_ai/accounts/fireworks/models/chronos-hermes-13b-v2": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - 
"fireworks_ai/accounts/fireworks/models/code-llama-13b": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-13b-instruct": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-13b-python": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-34b": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-34b-instruct": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-34b-python": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-70b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-70b-instruct": { - 
"max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-70b-python": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-7b": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-7b-instruct": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-llama-7b-python": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/code-qwen-1p5-7b": { - "max_tokens": 65536, - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/codegemma-2b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/codegemma-7b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - 
"input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/cogito-671b-v2-p1": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/cogito-v1-preview-llama-3b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/cogito-v1-preview-llama-70b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/cogito-v1-preview-llama-8b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/cogito-v1-preview-qwen-14b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/cogito-v1-preview-qwen-32b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/flux-kontext-max": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 
8e-08, - "output_cost_per_token": 8e-08, - "litellm_provider": "fireworks_ai", - "mode": "image_generation" - }, - "fireworks_ai/accounts/fireworks/models/dbrx-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-coder-1b-base": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-coder-33b-instruct": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-coder-7b-base": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-coder-7b-base-v1p5": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-coder-7b-instruct-v1p5": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-lite-base": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 5e-07, - 
"output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-lite-instruct": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-prover-v2": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-r1-0528-distill-qwen3-8b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-llama-70b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-llama-8b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-14b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-1p5b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - 
"input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-32b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-7b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-v2-lite-chat": { - "max_tokens": 163840, - "max_input_tokens": 163840, - "max_output_tokens": 163840, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-v2p5": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/devstral-small-2505": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/dobby-mini-unhinged-plus-llama-3-1-8b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/dobby-unhinged-llama-3-3-70b-new": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - 
"input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/dolphin-2-9-2-qwen2-72b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/dolphin-2p6-mixtral-8x7b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/ernie-4p5-21b-a3b-pt": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/ernie-4p5-300b-a47b-pt": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/fare-20b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/firefunction-v1": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/firellava-13b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - 
"litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/firesearch-ocr-v6": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/fireworks-asr-large": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "mode": "audio_transcription" - }, - "fireworks_ai/accounts/fireworks/models/fireworks-asr-v2": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "mode": "audio_transcription" - }, - "fireworks_ai/accounts/fireworks/models/flux-1-dev": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/flux-1-dev-controlnet-union": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-09, - "output_cost_per_token": 1e-09, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/flux-1-dev-fp8": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-10, - "output_cost_per_token": 5e-10, - "litellm_provider": "fireworks_ai", - "mode": "image_generation" - }, - "fireworks_ai/accounts/fireworks/models/flux-1-schnell": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - 
"fireworks_ai/accounts/fireworks/models/flux-1-schnell-fp8": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 3.5e-10, - "output_cost_per_token": 3.5e-10, - "litellm_provider": "fireworks_ai", - "mode": "image_generation" - }, - "fireworks_ai/accounts/fireworks/models/gemma-2b-it": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/gemma-3-27b-it": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/gemma-7b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/gemma-7b-it": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/gemma2-9b-it": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/glm-4p5v": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/gpt-oss-safeguard-120b": { - "max_tokens": 131072, - "max_input_tokens": 
131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/gpt-oss-safeguard-20b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/hermes-2-pro-mistral-7b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/internvl3-38b": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/internvl3-78b": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/internvl3-8b": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/japanese-stable-diffusion-xl": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-10, - "output_cost_per_token": 1.3e-10, - "litellm_provider": "fireworks_ai", - "mode": "image_generation" - }, - "fireworks_ai/accounts/fireworks/models/kat-coder": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 
9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/kat-dev-32b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/kat-dev-72b-exp": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-guard-2-8b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-guard-3-1b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-guard-3-8b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v2-13b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v2-13b-chat": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, 
- "fireworks_ai/accounts/fireworks/models/llama-v2-70b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v2-70b-chat": { - "max_tokens": 2048, - "max_input_tokens": 2048, - "max_output_tokens": 2048, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v2-7b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v2-7b-chat": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3-70b-instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3-70b-instruct-hf": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3-8b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3-8b-instruct-hf": { - "max_tokens": 8192, - "max_input_tokens": 
8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p1-405b-instruct-long": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p1-70b-instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p1-70b-instruct-1b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p1-nemotron-70b-instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p2-1b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p2-3b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p3-70b-instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - 
"input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llamaguard-7b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llava-yi-34b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/minimax-m1-80k": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/minimax-m2": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/ministral-3-14b-instruct-2512": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/ministral-3-3b-instruct-2512": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/ministral-3-8b-instruct-2512": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - 
"litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mistral-7b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mistral-7b-instruct-4k": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mistral-7b-instruct-v0p2": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mistral-7b-instruct-v3": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mistral-7b-v0p2": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mistral-large-3-fp8": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mistral-nemo-base-2407": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - 
"fireworks_ai/accounts/fireworks/models/mistral-nemo-instruct-2407": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mistral-small-24b-instruct-2501": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mixtral-8x22b": { - "max_tokens": 65536, - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct": { - "max_tokens": 65536, - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mixtral-8x7b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mixtral-8x7b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/mixtral-8x7b-instruct-hf": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - 
"fireworks_ai/accounts/fireworks/models/mythomax-l2-13b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/nemotron-nano-v2-12b-vl": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/nous-capybara-7b-v1p9": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/nous-hermes-2-mixtral-8x7b-dpo": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/nous-hermes-2-yi-34b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/nous-hermes-llama2-13b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/nous-hermes-llama2-70b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/nous-hermes-llama2-7b": { - 
"max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/nvidia-nemotron-nano-12b-v2": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/nvidia-nemotron-nano-9b-v2": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/openchat-3p5-0106-7b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/openhermes-2-mistral-7b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/openhermes-2p5-mistral-7b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/openorca-7b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/phi-2-3b": { - "max_tokens": 2048, - "max_input_tokens": 2048, - 
"max_output_tokens": 2048, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/phi-3-mini-128k-instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/phi-3-vision-128k-instruct": { - "max_tokens": 32064, - "max_input_tokens": 32064, - "max_output_tokens": 32064, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/phind-code-llama-34b-python-v1": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/phind-code-llama-34b-v1": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/phind-code-llama-34b-v2": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/playground-v2-1024px-aesthetic": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-10, - "output_cost_per_token": 1.3e-10, - "litellm_provider": "fireworks_ai", - "mode": "image_generation" - }, - "fireworks_ai/accounts/fireworks/models/playground-v2-5-1024px-aesthetic": { - "max_tokens": 4096, - "max_input_tokens": 4096, - 
"max_output_tokens": 4096, - "input_cost_per_token": 1.3e-10, - "output_cost_per_token": 1.3e-10, - "litellm_provider": "fireworks_ai", - "mode": "image_generation" - }, - "fireworks_ai/accounts/fireworks/models/pythia-12b": { - "max_tokens": 2048, - "max_input_tokens": 2048, - "max_output_tokens": 2048, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen-qwq-32b-preview": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen-v2p5-14b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen-v2p5-7b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen1p5-72b-chat": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2-7b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2-vl-2b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1e-07, - 
"output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2-vl-72b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2-vl-7b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-0p5b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-14b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-1p5b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-32b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-32b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - 
"mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-72b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-72b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-7b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-0p5b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-0p5b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-14b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-14b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - 
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-1p5b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-1p5b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct-128k": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct-32k-rope": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct-64k": { - "max_tokens": 65536, - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-3b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - 
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-3b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-7b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-7b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-math-72b-instruct": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-vl-32b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-vl-3b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-vl-72b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - 
"fireworks_ai/accounts/fireworks/models/qwen2p5-vl-7b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-0p6b": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-14b": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-1p7b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-1p7b-fp8-draft": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-1p7b-fp8-draft-131072": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-1p7b-fp8-draft-40960": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-235b-a22b": { - 
"max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2.2e-07, - "output_cost_per_token": 8.8e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-235b-a22b-instruct-2507": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 2.2e-07, - "output_cost_per_token": 8.8e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-235b-a22b-thinking-2507": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 2.2e-07, - "output_cost_per_token": 8.8e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-30b-a3b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-30b-a3b-instruct-2507": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-30b-a3b-thinking-2507": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-32b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-4b": { - "max_tokens": 40960, - 
"max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-4b-instruct-2507": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-8b": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-coder-30b-a3b-instruct": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-coder-480b-instruct-bf16": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-embedding-0p6b": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "mode": "embedding" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-embedding-4b": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "mode": "embedding" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-embedding-8b": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, 
- "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "mode": "embedding" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-next-80b-a3b-instruct": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-next-80b-a3b-thinking": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-reranker-0p6b": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "mode": "rerank" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-reranker-4b": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "mode": "rerank" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-reranker-8b": { - "max_tokens": 40960, - "max_input_tokens": 40960, - "max_output_tokens": 40960, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "mode": "rerank" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-vl-235b-a22b-instruct": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 2.2e-07, - "output_cost_per_token": 8.8e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-vl-235b-a22b-thinking": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 2.2e-07, - 
"output_cost_per_token": 8.8e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-vl-30b-a3b-instruct": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-vl-30b-a3b-thinking": { - "max_tokens": 262144, - "max_input_tokens": 262144, - "max_output_tokens": 262144, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-vl-32b-instruct": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwen3-vl-8b-instruct": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/qwq-32b": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/rolm-ocr": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/snorkel-mistral-7b-pairrm-dpo": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": 
"fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/stable-diffusion-xl-1024-v1-0": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-10, - "output_cost_per_token": 1.3e-10, - "litellm_provider": "fireworks_ai", - "mode": "image_generation" - }, - "fireworks_ai/accounts/fireworks/models/stablecode-3b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/starcoder-16b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/starcoder-7b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/starcoder2-15b": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/starcoder2-3b": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/starcoder2-7b": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/toppy-m-7b": { - 
"max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/whisper-v3": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "mode": "audio_transcription" - }, - "fireworks_ai/accounts/fireworks/models/whisper-v3-turbo": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai", - "mode": "audio_transcription" - }, - "fireworks_ai/accounts/fireworks/models/yi-34b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/yi-34b-200k-capybara": { - "max_tokens": 200000, - "max_input_tokens": 200000, - "max_output_tokens": 200000, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/yi-34b-chat": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/yi-6b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/zephyr-7b-beta": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2e-07, - 
"output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat" - } - + } + ] + }, + "dashscope/qwq-plus": { + "input_cost_per_token": 8e-7, + "litellm_provider": "dashscope", + "max_input_tokens": 98304, + "max_output_tokens": 8192, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000024, + "source": "https://www.alibabacloud.com/help/en/model-studio/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "databricks/databricks-claude-3-7-sonnet": { + "input_cost_per_token": 0.0000029999900000000002, + "input_dbu_cost_per_token": 0.000042857, + "litellm_provider": "databricks", + "max_input_tokens": 200000, + "max_output_tokens": 128000, + "max_tokens": 200000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.000015000020000000002, + "output_dbu_cost_per_token": 0.000214286, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "databricks/databricks-claude-haiku-4-5": { + "input_cost_per_token": 0.00000100002, + "input_dbu_cost_per_token": 0.000014286, + "litellm_provider": "databricks", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 200000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ }, + "mode": "chat", + "output_cost_per_token": 0.00000500003, + "output_dbu_cost_per_token": 0.000071429, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "databricks/databricks-claude-opus-4": { + "input_cost_per_token": 0.000015000020000000002, + "input_dbu_cost_per_token": 0.000214286, + "litellm_provider": "databricks", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 200000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.00007500003000000001, + "output_dbu_cost_per_token": 0.001071429, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "databricks/databricks-claude-opus-4-1": { + "input_cost_per_token": 0.000015000020000000002, + "input_dbu_cost_per_token": 0.000214286, + "litellm_provider": "databricks", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 200000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ }, + "mode": "chat", + "output_cost_per_token": 0.00007500003000000001, + "output_dbu_cost_per_token": 0.001071429, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "databricks/databricks-claude-opus-4-5": { + "input_cost_per_token": 0.00000500003, + "input_dbu_cost_per_token": 0.000071429, + "litellm_provider": "databricks", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 200000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.000025000010000000002, + "output_dbu_cost_per_token": 0.000357143, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "databricks/databricks-claude-sonnet-4": { + "input_cost_per_token": 0.0000029999900000000002, + "input_dbu_cost_per_token": 0.000042857, + "litellm_provider": "databricks", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 200000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ }, + "mode": "chat", + "output_cost_per_token": 0.000015000020000000002, + "output_dbu_cost_per_token": 0.000214286, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "databricks/databricks-claude-sonnet-4-1": { + "input_cost_per_token": 0.0000029999900000000002, + "input_dbu_cost_per_token": 0.000042857, + "litellm_provider": "databricks", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 200000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.000015000020000000002, + "output_dbu_cost_per_token": 0.000214286, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "databricks/databricks-claude-sonnet-4-5": { + "input_cost_per_token": 0.0000029999900000000002, + "input_dbu_cost_per_token": 0.000042857, + "litellm_provider": "databricks", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 200000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ }, + "mode": "chat", + "output_cost_per_token": 0.000015000020000000002, + "output_dbu_cost_per_token": 0.000214286, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "databricks/databricks-gemini-2-5-flash": { + "input_cost_per_token": 3.0001999999999996e-7, + "input_dbu_cost_per_token": 0.000004285999999999999, + "litellm_provider": "databricks", + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_tokens": 1048576, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.00000249998, + "output_dbu_cost_per_token": 0.000035714, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "databricks/databricks-gemini-2-5-pro": { + "input_cost_per_token": 0.00000124999, + "input_dbu_cost_per_token": 0.000017857, + "litellm_provider": "databricks", + "max_input_tokens": 1048576, + "max_output_tokens": 65536, + "max_tokens": 1048576, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ }, + "mode": "chat", + "output_cost_per_token": 0.000009999990000000002, + "output_dbu_cost_per_token": 0.000142857, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "databricks/databricks-gemma-3-12b": { + "input_cost_per_token": 1.5000999999999998e-7, + "input_dbu_cost_per_token": 0.0000021429999999999996, + "litellm_provider": "databricks", + "max_input_tokens": 128000, + "max_output_tokens": 32000, + "max_tokens": 128000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 5.0001e-7, + "output_dbu_cost_per_token": 0.000007143, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving" + }, + "databricks/databricks-gpt-5": { + "input_cost_per_token": 0.00000124999, + "input_dbu_cost_per_token": 0.000017857, + "litellm_provider": "databricks", + "max_input_tokens": 400000, + "max_output_tokens": 128000, + "max_tokens": 400000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.000009999990000000002, + "output_dbu_cost_per_token": 0.000142857, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving" + }, + "databricks/databricks-gpt-5-1": { + "input_cost_per_token": 0.00000124999, + "input_dbu_cost_per_token": 0.000017857, + "litellm_provider": "databricks", + "max_input_tokens": 400000, + "max_output_tokens": 128000, + "max_tokens": 400000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ }, + "mode": "chat", + "output_cost_per_token": 0.000009999990000000002, + "output_dbu_cost_per_token": 0.000142857, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving" + }, + "databricks/databricks-gpt-5-mini": { + "input_cost_per_token": 2.4997000000000006e-7, + "input_dbu_cost_per_token": 0.000003571, + "litellm_provider": "databricks", + "max_input_tokens": 400000, + "max_output_tokens": 128000, + "max_tokens": 400000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.0000019999700000000004, + "output_dbu_cost_per_token": 0.000028571, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving" + }, + "databricks/databricks-gpt-5-nano": { + "input_cost_per_token": 4.998e-8, + "input_dbu_cost_per_token": 7.14e-7, + "litellm_provider": "databricks", + "max_input_tokens": 400000, + "max_output_tokens": 128000, + "max_tokens": 400000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 3.9998000000000007e-7, + "output_dbu_cost_per_token": 0.000005714000000000001, + "source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving" + }, + "databricks/databricks-gpt-oss-120b": { + "input_cost_per_token": 1.5000999999999998e-7, + "input_dbu_cost_per_token": 0.0000021429999999999996, + "litellm_provider": "databricks", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ }, + "mode": "chat", + "output_cost_per_token": 5.9997e-7, + "output_dbu_cost_per_token": 0.000008571, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving" + }, + "databricks/databricks-gpt-oss-20b": { + "input_cost_per_token": 7e-8, + "input_dbu_cost_per_token": 0.000001, + "litellm_provider": "databricks", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 3.0001999999999996e-7, + "output_dbu_cost_per_token": 0.000004285999999999999, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving" + }, + "databricks/databricks-llama-2-70b-chat": { + "input_cost_per_token": 5.0001e-7, + "input_dbu_cost_per_token": 0.000007143, + "litellm_provider": "databricks", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.0000015000300000000002, + "output_dbu_cost_per_token": 0.000021429, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "supports_tool_choice": true + }, + "databricks/databricks-llama-4-maverick": { + "input_cost_per_token": 5.0001e-7, + "input_dbu_cost_per_token": 0.000007143, + "litellm_provider": "databricks", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "metadata": { + "notes": "Databricks documentation now provides both DBU costs (_dbu_cost_per_token) and dollar costs(_cost_per_token)." 
+ }, + "mode": "chat", + "output_cost_per_token": 0.0000015000300000000002, + "output_dbu_cost_per_token": 0.000021429, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "supports_tool_choice": true + }, + "databricks/databricks-meta-llama-3-1-405b-instruct": { + "input_cost_per_token": 0.00000500003, + "input_dbu_cost_per_token": 0.000071429, + "litellm_provider": "databricks", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.000015000020000000002, + "output_dbu_cost_per_token": 0.000214286, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "supports_tool_choice": true + }, + "databricks/databricks-meta-llama-3-1-8b-instruct": { + "input_cost_per_token": 1.5000999999999998e-7, + "input_dbu_cost_per_token": 0.0000021429999999999996, + "litellm_provider": "databricks", + "max_input_tokens": 200000, + "max_output_tokens": 128000, + "max_tokens": 200000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 4.5003000000000007e-7, + "output_dbu_cost_per_token": 0.000006429000000000001, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving" + }, + "databricks/databricks-meta-llama-3-3-70b-instruct": { + "input_cost_per_token": 5.0001e-7, + "input_dbu_cost_per_token": 0.000007143, + "litellm_provider": "databricks", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. 
Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.0000015000300000000002, + "output_dbu_cost_per_token": 0.000021429, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "supports_tool_choice": true + }, + "databricks/databricks-meta-llama-3-70b-instruct": { + "input_cost_per_token": 0.00000100002, + "input_dbu_cost_per_token": 0.000014286, + "litellm_provider": "databricks", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.0000029999900000000002, + "output_dbu_cost_per_token": 0.000042857, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "supports_tool_choice": true + }, + "databricks/databricks-mixtral-8x7b-instruct": { + "input_cost_per_token": 5.0001e-7, + "input_dbu_cost_per_token": 0.000007143, + "litellm_provider": "databricks", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ }, + "mode": "chat", + "output_cost_per_token": 0.00000100002, + "output_dbu_cost_per_token": 0.000014286, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "supports_tool_choice": true + }, + "databricks/databricks-mpt-30b-instruct": { + "input_cost_per_token": 0.00000100002, + "input_dbu_cost_per_token": 0.000014286, + "litellm_provider": "databricks", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + }, + "mode": "chat", + "output_cost_per_token": 0.00000100002, + "output_dbu_cost_per_token": 0.000014286, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "supports_tool_choice": true + }, + "databricks/databricks-mpt-7b-instruct": { + "input_cost_per_token": 5.0001e-7, + "input_dbu_cost_per_token": 0.000007143, + "litellm_provider": "databricks", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ }, + "mode": "chat", + "output_cost_per_token": 0, + "output_dbu_cost_per_token": 0, + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "supports_tool_choice": true + }, + "deepinfra/Gryphe/MythoMax-L2-13b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 8e-8, + "output_cost_per_token": 9e-8, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/NousResearch/Hermes-3-Llama-3.1-405B": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.000001, + "output_cost_per_token": 0.000001, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/NousResearch/Hermes-3-Llama-3.1-70B": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 3e-7, + "output_cost_per_token": 3e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/Qwen/QwQ-32B": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1.5e-7, + "output_cost_per_token": 4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen2.5-72B-Instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 1.2e-7, + "output_cost_per_token": 3.9e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen2.5-7B-Instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 4e-8, + "output_cost_per_token": 1e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/Qwen/Qwen2.5-VL-32B-Instruct": { + "max_tokens": 128000, + "max_input_tokens": 
128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true, + "supports_vision": true + }, + "deepinfra/Qwen/Qwen3-14B": { + "max_tokens": 40960, + "max_input_tokens": 40960, + "max_output_tokens": 40960, + "input_cost_per_token": 6e-8, + "output_cost_per_token": 2.4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen3-235B-A22B": { + "max_tokens": 40960, + "max_input_tokens": 40960, + "max_output_tokens": 40960, + "input_cost_per_token": 1.8e-7, + "output_cost_per_token": 5.4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen3-235B-A22B-Instruct-2507": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 9e-8, + "output_cost_per_token": 6e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen3-235B-A22B-Thinking-2507": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 3e-7, + "output_cost_per_token": 0.0000029, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen3-30B-A3B": { + "max_tokens": 40960, + "max_input_tokens": 40960, + "max_output_tokens": 40960, + "input_cost_per_token": 8e-8, + "output_cost_per_token": 2.9e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen3-32B": { + "max_tokens": 40960, + "max_input_tokens": 40960, + "max_output_tokens": 40960, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 2.8e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen3-Coder-480B-A35B-Instruct": { + "max_tokens": 262144, + "max_input_tokens": 
262144, + "max_output_tokens": 262144, + "input_cost_per_token": 4e-7, + "output_cost_per_token": 0.0000016, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen3-Coder-480B-A35B-Instruct-Turbo": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 2.9e-7, + "output_cost_per_token": 0.0000012, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen3-Next-80B-A3B-Instruct": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 1.4e-7, + "output_cost_per_token": 0.0000014, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Qwen/Qwen3-Next-80B-A3B-Thinking": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 1.4e-7, + "output_cost_per_token": 0.0000014, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/Sao10K/L3-8B-Lunaris-v1-Turbo": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 4e-8, + "output_cost_per_token": 5e-8, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/Sao10K/L3.1-70B-Euryale-v2.2": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 6.5e-7, + "output_cost_per_token": 7.5e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/Sao10K/L3.3-70B-Euryale-v2.3": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 6.5e-7, + "output_cost_per_token": 7.5e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/allenai/olmOCR-7B-0725-FP8": { 
+ "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2.7e-7, + "output_cost_per_token": 0.0000015, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/anthropic/claude-3-7-sonnet-latest": { + "max_tokens": 200000, + "max_input_tokens": 200000, + "max_output_tokens": 200000, + "input_cost_per_token": 0.0000033, + "output_cost_per_token": 0.0000165, + "cache_read_input_token_cost": 3.3e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/anthropic/claude-4-opus": { + "max_tokens": 200000, + "max_input_tokens": 200000, + "max_output_tokens": 200000, + "input_cost_per_token": 0.0000165, + "output_cost_per_token": 0.0000825, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/anthropic/claude-4-sonnet": { + "max_tokens": 200000, + "max_input_tokens": 200000, + "max_output_tokens": 200000, + "input_cost_per_token": 0.0000033, + "output_cost_per_token": 0.0000165, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/deepseek-ai/DeepSeek-R1": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 7e-7, + "output_cost_per_token": 0.0000024, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/deepseek-ai/DeepSeek-R1-0528": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 0.00000215, + "cache_read_input_token_cost": 4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/deepseek-ai/DeepSeek-R1-0528-Turbo": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 0.000001, + "output_cost_per_token": 0.000003, + 
"litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/deepseek-ai/DeepSeek-R1-Distill-Llama-70B": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2.7e-7, + "output_cost_per_token": 2.7e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/deepseek-ai/DeepSeek-R1-Turbo": { + "max_tokens": 40960, + "max_input_tokens": 40960, + "max_output_tokens": 40960, + "input_cost_per_token": 0.000001, + "output_cost_per_token": 0.000003, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/deepseek-ai/DeepSeek-V3": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 3.8e-7, + "output_cost_per_token": 8.9e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/deepseek-ai/DeepSeek-V3-0324": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 2.5e-7, + "output_cost_per_token": 8.8e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/deepseek-ai/DeepSeek-V3.1": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 2.7e-7, + "output_cost_per_token": 0.000001, + "cache_read_input_token_cost": 2.16e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true, + "supports_reasoning": true + }, + "deepinfra/deepseek-ai/DeepSeek-V3.1-Terminus": { + "max_tokens": 163840, + 
"max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 2.7e-7, + "output_cost_per_token": 0.000001, + "cache_read_input_token_cost": 2.16e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/google/gemini-2.0-flash-001": { + "max_tokens": 1000000, + "max_input_tokens": 1000000, + "max_output_tokens": 1000000, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/google/gemini-2.5-flash": { + "max_tokens": 1000000, + "max_input_tokens": 1000000, + "max_output_tokens": 1000000, + "input_cost_per_token": 3e-7, + "output_cost_per_token": 0.0000025, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/google/gemini-2.5-pro": { + "max_tokens": 1000000, + "max_input_tokens": 1000000, + "max_output_tokens": 1000000, + "input_cost_per_token": 0.00000125, + "output_cost_per_token": 0.00001, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/google/gemma-3-12b-it": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 5e-8, + "output_cost_per_token": 1e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/google/gemma-3-27b-it": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-8, + "output_cost_per_token": 1.6e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/google/gemma-3-4b-it": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 4e-8, + "output_cost_per_token": 8e-8, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + 
"deepinfra/meta-llama/Llama-3.2-11B-Vision-Instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 4.9e-8, + "output_cost_per_token": 4.9e-8, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/meta-llama/Llama-3.2-3B-Instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-8, + "output_cost_per_token": 2e-8, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/meta-llama/Llama-3.3-70B-Instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2.3e-7, + "output_cost_per_token": 4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/meta-llama/Llama-3.3-70B-Instruct-Turbo": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1.3e-7, + "output_cost_per_token": 3.9e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": { + "max_tokens": 1048576, + "max_input_tokens": 1048576, + "max_output_tokens": 1048576, + "input_cost_per_token": 1.5e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/meta-llama/Llama-4-Scout-17B-16E-Instruct": { + "max_tokens": 327680, + "max_input_tokens": 327680, + "max_output_tokens": 327680, + "input_cost_per_token": 8e-8, + "output_cost_per_token": 3e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/meta-llama/Llama-Guard-3-8B": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 5.5e-8, + "output_cost_per_token": 5.5e-8, + 
"litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/meta-llama/Llama-Guard-4-12B": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 1.8e-7, + "output_cost_per_token": 1.8e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/meta-llama/Meta-Llama-3-8B-Instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-8, + "output_cost_per_token": 6e-8, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/meta-llama/Meta-Llama-3.1-70B-Instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 4e-7, + "output_cost_per_token": 4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 2.8e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/meta-llama/Meta-Llama-3.1-8B-Instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 3e-8, + "output_cost_per_token": 5e-8, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-8, + "output_cost_per_token": 3e-8, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/microsoft/WizardLM-2-8x22B": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 65536, + 
"input_cost_per_token": 4.8e-7, + "output_cost_per_token": 4.8e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": false + }, + "deepinfra/microsoft/phi-4": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 7e-8, + "output_cost_per_token": 1.4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/mistralai/Mistral-Nemo-Instruct-2407": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-8, + "output_cost_per_token": 4e-8, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/mistralai/Mistral-Small-24B-Instruct-2501": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 5e-8, + "output_cost_per_token": 8e-8, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/mistralai/Mistral-Small-3.2-24B-Instruct-2506": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 7.5e-8, + "output_cost_per_token": 2e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/mistralai/Mixtral-8x7B-Instruct-v0.1": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 4e-7, + "output_cost_per_token": 4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/moonshotai/Kimi-K2-Instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 0.000002, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/moonshotai/Kimi-K2-Instruct-0905": { + "max_tokens": 262144, + "max_input_tokens": 
262144, + "max_output_tokens": 262144, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 0.000002, + "cache_read_input_token_cost": 4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/nvidia/Llama-3.1-Nemotron-70B-Instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 6e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/nvidia/Llama-3.3-Nemotron-Super-49B-v1.5": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 4e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/nvidia/NVIDIA-Nemotron-Nano-9B-v2": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 4e-8, + "output_cost_per_token": 1.6e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/openai/gpt-oss-120b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 5e-8, + "output_cost_per_token": 4.5e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/openai/gpt-oss-20b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 4e-8, + "output_cost_per_token": 1.5e-7, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepinfra/zai-org/GLM-4.5": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 4e-7, + "output_cost_per_token": 0.0000016, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_tool_choice": true + }, + "deepseek/deepseek-chat": { + 
"cache_read_input_token_cost": 3e-8, + "input_cost_per_token": 3e-7, + "litellm_provider": "deepseek", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 5e-7, + "source": "https://api-docs.deepseek.com/quick_start/pricing", + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "deepseek/deepseek-coder": { + "input_cost_per_token": 1.4e-7, + "input_cost_per_token_cache_hit": 1.4e-8, + "litellm_provider": "deepseek", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2.8e-7, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_tool_choice": true + }, + "deepseek/deepseek-r1": { + "input_cost_per_token": 5.5e-7, + "input_cost_per_token_cache_hit": 1.4e-7, + "litellm_provider": "deepseek", + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000219, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "deepseek/deepseek-reasoner": { + "input_cost_per_token": 5.5e-7, + "input_cost_per_token_cache_hit": 1.4e-7, + "litellm_provider": "deepseek", + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000219, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "deepseek/deepseek-v3": { + 
"cache_creation_input_token_cost": 0, + "cache_read_input_token_cost": 7e-8, + "input_cost_per_token": 2.7e-7, + "input_cost_per_token_cache_hit": 7e-8, + "litellm_provider": "deepseek", + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000011, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_tool_choice": true + }, + "deepseek/deepseek-v3.2": { + "input_cost_per_token": 2.8e-7, + "input_cost_per_token_cache_hit": 2.8e-8, + "litellm_provider": "deepseek", + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "deepseek.v3-v1:0": { + "input_cost_per_token": 5.8e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 163840, + "max_output_tokens": 81920, + "max_tokens": 163840, + "mode": "chat", + "output_cost_per_token": 0.00000168, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "eu.amazon.nova-lite-v1:0": { + "input_cost_per_token": 7.8e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 3.12e-7, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "eu.amazon.nova-micro-v1:0": { + "input_cost_per_token": 4.6e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 1.84e-7, + "supports_function_calling": true, + "supports_prompt_caching": true, + 
"supports_response_schema": true + }, + "eu.amazon.nova-pro-v1:0": { + "input_cost_per_token": 0.00000105, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 0.0000042, + "source": "https://aws.amazon.com/bedrock/pricing/", + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "eu.anthropic.claude-3-5-haiku-20241022-v1:0": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000125, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "eu.anthropic.claude-haiku-4-5-20251001-v1:0": { + "cache_creation_input_token_cost": 0.000001375, + "cache_read_input_token_cost": 1.1e-7, + "input_cost_per_token": 0.0000011, + "deprecation_date": "2026-10-15", + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.0000055, + "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "eu.anthropic.claude-3-5-sonnet-20240620-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 
4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "eu.anthropic.claude-3-5-sonnet-20241022-v2:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "eu.anthropic.claude-3-7-sonnet-20250219-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "eu.anthropic.claude-3-haiku-20240307-v1:0": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000125, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "eu.anthropic.claude-3-opus-20240229-v1:0": { + "input_cost_per_token": 0.000015, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 
0.000075, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "eu.anthropic.claude-3-sonnet-20240229-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "eu.anthropic.claude-opus-4-1-20250805-v1:0": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "eu.anthropic.claude-opus-4-20250514-v1:0": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + 
"supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "eu.anthropic.claude-sonnet-4-20250514-v1:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "eu.anthropic.claude-sonnet-4-5-20250929-v1:0": { + "cache_creation_input_token_cost": 0.000004125, + "cache_read_input_token_cost": 3.3e-7, + "input_cost_per_token": 0.0000033, + "input_cost_per_token_above_200k_tokens": 0.0000066, + "output_cost_per_token_above_200k_tokens": 0.00002475, + "cache_creation_input_token_cost_above_200k_tokens": 0.00000825, + "cache_read_input_token_cost_above_200k_tokens": 6.6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.0000165, + "search_context_cost_per_query": { + 
"search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "eu.meta.llama3-2-1b-instruct-v1:0": { + "input_cost_per_token": 1.3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.3e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "eu.meta.llama3-2-3b-instruct-v1:0": { + "input_cost_per_token": 1.9e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.9e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "eu.mistral.pixtral-large-2502-v1:0": { + "input_cost_per_token": 0.000002, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "featherless_ai/featherless-ai/Qwerky-72B": { + "litellm_provider": "featherless_ai", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 32768, + "mode": "chat" + }, + "featherless_ai/featherless-ai/Qwerky-QwQ-32B": { + "litellm_provider": "featherless_ai", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 32768, + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-instruct": { + "input_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 65536, + 
"max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/deepseek-r1": { + "input_cost_per_token": 0.000003, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 128000, + "max_output_tokens": 20480, + "max_tokens": 20480, + "mode": "chat", + "output_cost_per_token": 0.000008, + "source": "https://fireworks.ai/pricing", + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/deepseek-r1-0528": { + "input_cost_per_token": 0.000003, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 160000, + "max_output_tokens": 160000, + "max_tokens": 160000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "source": "https://fireworks.ai/pricing", + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/deepseek-r1-basic": { + "input_cost_per_token": 5.5e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 128000, + "max_output_tokens": 20480, + "max_tokens": 20480, + "mode": "chat", + "output_cost_per_token": 0.00000219, + "source": "https://fireworks.ai/pricing", + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/deepseek-v3": { + "input_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 9e-7, + "source": "https://fireworks.ai/pricing", + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/deepseek-v3-0324": { + "input_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 163840, + "max_output_tokens": 
163840, + "max_tokens": 163840, + "mode": "chat", + "output_cost_per_token": 9e-7, + "source": "https://fireworks.ai/models/fireworks/deepseek-v3-0324", + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/deepseek-v3p1": { + "input_cost_per_token": 5.6e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000168, + "source": "https://fireworks.ai/pricing", + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/deepseek-v3p1-terminus": { + "input_cost_per_token": 5.6e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000168, + "source": "https://fireworks.ai/pricing", + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/deepseek-v3p2": { + "input_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "max_tokens": 163840, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "source": "https://fireworks.ai/models/fireworks/deepseek-v3p2", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/firefunction-v2": { + "input_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 9e-7, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + 
"fireworks_ai/accounts/fireworks/models/glm-4p5": { + "input_cost_per_token": 5.5e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 128000, + "max_output_tokens": 96000, + "max_tokens": 96000, + "mode": "chat", + "output_cost_per_token": 0.00000219, + "source": "https://fireworks.ai/models/fireworks/glm-4p5", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/glm-4p5-air": { + "input_cost_per_token": 2.2e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 128000, + "max_output_tokens": 96000, + "max_tokens": 96000, + "mode": "chat", + "output_cost_per_token": 8.8e-7, + "source": "https://artificialanalysis.ai/models/glm-4-5-air", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/glm-4p6": { + "input_cost_per_token": 5.5e-7, + "output_cost_per_token": 0.00000219, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 202800, + "max_output_tokens": 202800, + "max_tokens": 202800, + "mode": "chat", + "source": "https://fireworks.ai/pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/gpt-oss-120b": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 6e-7, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/gpt-oss-20b": { + "input_cost_per_token": 5e-8, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 131072, + 
"max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 2e-7, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/kimi-k2-instruct": { + "input_cost_per_token": 6e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 131072, + "max_output_tokens": 16384, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "source": "https://fireworks.ai/models/fireworks/kimi-k2-instruct", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/kimi-k2-instruct-0905": { + "input_cost_per_token": 6e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 262144, + "max_output_tokens": 32768, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "source": "https://app.fireworks.ai/models/fireworks/kimi-k2-instruct-0905", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/kimi-k2-thinking": { + "input_cost_per_token": 6e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p1-405b-instruct": { + "input_cost_per_token": 0.000003, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000003, + "source": "https://fireworks.ai/pricing", + 
"supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p1-8b-instruct": { + "input_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 1e-7, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-11b-vision-instruct": { + "input_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 2e-7, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-1b-instruct": { + "input_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 1e-7, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-3b-instruct": { + "input_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 1e-7, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-90b-vision-instruct": { + "input_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + 
"max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 9e-7, + "source": "https://fireworks.ai/pricing", + "supports_response_schema": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "fireworks_ai/accounts/fireworks/models/llama4-maverick-instruct-basic": { + "input_cost_per_token": 2.2e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 8.8e-7, + "source": "https://fireworks.ai/pricing", + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/llama4-scout-instruct-basic": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 6e-7, + "source": "https://fireworks.ai/pricing", + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct-hf": { + "input_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "fireworks_ai/accounts/fireworks/models/qwen2-72b-instruct": { + "input_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 9e-7, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false + }, + 
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct": { + "input_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 9e-7, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false + }, + "fireworks_ai/accounts/fireworks/models/yi-large": { + "input_cost_per_token": 0.000003, + "litellm_provider": "fireworks_ai", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000003, + "source": "https://fireworks.ai/pricing", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false + }, + "friendliai/meta-llama-3.1-70b-instruct": { + "input_cost_per_token": 6e-7, + "litellm_provider": "friendliai", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "friendliai/meta-llama-3.1-8b-instruct": { + "input_cost_per_token": 1e-7, + "litellm_provider": "friendliai", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 1e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:gpt-3.5-turbo": { + "input_cost_per_token": 0.000003, + "input_cost_per_token_batches": 0.0000015, + "litellm_provider": "openai", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000006, + 
"output_cost_per_token_batches": 0.000003, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:gpt-3.5-turbo-0125": { + "input_cost_per_token": 0.000003, + "litellm_provider": "openai", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:gpt-3.5-turbo-0613": { + "input_cost_per_token": 0.000003, + "litellm_provider": "openai", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:gpt-3.5-turbo-1106": { + "input_cost_per_token": 0.000003, + "litellm_provider": "openai", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:gpt-4-0613": { + "input_cost_per_token": 0.00003, + "litellm_provider": "openai", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00006, + "source": "OpenAI needs to add pricing for this ft model, will be updated when added by OpenAI. 
Defaulting to base model pricing", + "supports_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:gpt-4o-2024-08-06": { + "cache_read_input_token_cost": 0.000001875, + "input_cost_per_token": 0.00000375, + "input_cost_per_token_batches": 0.000001875, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000015, + "output_cost_per_token_batches": 0.0000075, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "ft:gpt-4o-2024-11-20": { + "cache_creation_input_token_cost": 0.000001875, + "input_cost_per_token": 0.00000375, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:gpt-4o-mini-2024-07-18": { + "cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 3e-7, + "input_cost_per_token_batches": 1.5e-7, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "output_cost_per_token_batches": 6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:gpt-4.1-2025-04-14": { + 
"cache_read_input_token_cost": 7.5e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_batches": 0.0000015, + "litellm_provider": "openai", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000012, + "output_cost_per_token_batches": 0.000006, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:gpt-4.1-mini-2025-04-14": { + "cache_read_input_token_cost": 2e-7, + "input_cost_per_token": 8e-7, + "input_cost_per_token_batches": 4e-7, + "litellm_provider": "openai", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000032, + "output_cost_per_token_batches": 0.0000016, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:gpt-4.1-nano-2025-04-14": { + "cache_read_input_token_cost": 5e-8, + "input_cost_per_token": 2e-7, + "input_cost_per_token_batches": 1e-7, + "litellm_provider": "openai", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 8e-7, + "output_cost_per_token_batches": 4e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "ft:o4-mini-2025-04-16": { + "cache_read_input_token_cost": 0.000001, + "input_cost_per_token": 0.000004, + "input_cost_per_token_batches": 0.000002, + "litellm_provider": "openai", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, 
+ "mode": "chat", + "output_cost_per_token": 0.000016, + "output_cost_per_token_batches": 0.000008, + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "gemini-1.0-pro": { + "input_cost_per_character": 1.25e-7, + "input_cost_per_image": 0.0025, + "input_cost_per_token": 5e-7, + "input_cost_per_video_per_second": 0.002, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 32760, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 3.75e-7, + "output_cost_per_token": 0.0000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#google_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "gemini-1.0-pro-001": { + "deprecation_date": "2025-04-09", + "input_cost_per_character": 1.25e-7, + "input_cost_per_image": 0.0025, + "input_cost_per_token": 5e-7, + "input_cost_per_video_per_second": 0.002, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 32760, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 3.75e-7, + "output_cost_per_token": 0.0000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "gemini-1.0-pro-002": { + "deprecation_date": "2025-04-09", + "input_cost_per_character": 1.25e-7, + "input_cost_per_image": 0.0025, + "input_cost_per_token": 5e-7, + "input_cost_per_video_per_second": 0.002, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 32760, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 3.75e-7, + 
"output_cost_per_token": 0.0000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "gemini-1.0-pro-vision": { + "input_cost_per_image": 0.0025, + "input_cost_per_token": 5e-7, + "litellm_provider": "vertex_ai-vision-models", + "max_images_per_prompt": 16, + "max_input_tokens": 16384, + "max_output_tokens": 2048, + "max_tokens": 2048, + "max_video_length": 2, + "max_videos_per_prompt": 1, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini-1.0-pro-vision-001": { + "deprecation_date": "2025-04-09", + "input_cost_per_image": 0.0025, + "input_cost_per_token": 5e-7, + "litellm_provider": "vertex_ai-vision-models", + "max_images_per_prompt": 16, + "max_input_tokens": 16384, + "max_output_tokens": 2048, + "max_tokens": 2048, + "max_video_length": 2, + "max_videos_per_prompt": 1, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini-1.0-ultra": { + "input_cost_per_character": 1.25e-7, + "input_cost_per_image": 0.0025, + "input_cost_per_token": 5e-7, + "input_cost_per_video_per_second": 0.002, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 8192, + "max_output_tokens": 2048, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 3.75e-7, + "output_cost_per_token": 0.0000015, + "source": "As of Jun, 2024. 
There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "gemini-1.0-ultra-001": { + "input_cost_per_character": 1.25e-7, + "input_cost_per_image": 0.0025, + "input_cost_per_token": 5e-7, + "input_cost_per_video_per_second": 0.002, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 8192, + "max_output_tokens": 2048, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 3.75e-7, + "output_cost_per_token": 0.0000015, + "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "gemini-1.5-flash": { + "input_cost_per_audio_per_second": 0.000002, + "input_cost_per_audio_per_second_above_128k_tokens": 0.000004, + "input_cost_per_character": 1.875e-8, + "input_cost_per_character_above_128k_tokens": 2.5e-7, + "input_cost_per_image": 0.00002, + "input_cost_per_image_above_128k_tokens": 0.00004, + "input_cost_per_token": 7.5e-8, + "input_cost_per_token_above_128k_tokens": 0.000001, + "input_cost_per_video_per_second": 0.00002, + "input_cost_per_video_per_second_above_128k_tokens": 0.00004, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 7.5e-8, + 
"output_cost_per_character_above_128k_tokens": 1.5e-7, + "output_cost_per_token": 3e-7, + "output_cost_per_token_above_128k_tokens": 6e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini-1.5-flash-001": { + "deprecation_date": "2025-05-24", + "input_cost_per_audio_per_second": 0.000002, + "input_cost_per_audio_per_second_above_128k_tokens": 0.000004, + "input_cost_per_character": 1.875e-8, + "input_cost_per_character_above_128k_tokens": 2.5e-7, + "input_cost_per_image": 0.00002, + "input_cost_per_image_above_128k_tokens": 0.00004, + "input_cost_per_token": 7.5e-8, + "input_cost_per_token_above_128k_tokens": 0.000001, + "input_cost_per_video_per_second": 0.00002, + "input_cost_per_video_per_second_above_128k_tokens": 0.00004, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 7.5e-8, + "output_cost_per_character_above_128k_tokens": 1.5e-7, + "output_cost_per_token": 3e-7, + "output_cost_per_token_above_128k_tokens": 6e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini-1.5-flash-002": { + "deprecation_date": "2025-09-24", + "input_cost_per_audio_per_second": 0.000002, + 
"input_cost_per_audio_per_second_above_128k_tokens": 0.000004, + "input_cost_per_character": 1.875e-8, + "input_cost_per_character_above_128k_tokens": 2.5e-7, + "input_cost_per_image": 0.00002, + "input_cost_per_image_above_128k_tokens": 0.00004, + "input_cost_per_token": 7.5e-8, + "input_cost_per_token_above_128k_tokens": 0.000001, + "input_cost_per_video_per_second": 0.00002, + "input_cost_per_video_per_second_above_128k_tokens": 0.00004, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 7.5e-8, + "output_cost_per_character_above_128k_tokens": 1.5e-7, + "output_cost_per_token": 3e-7, + "output_cost_per_token_above_128k_tokens": 6e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-flash", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini-1.5-flash-exp-0827": { + "input_cost_per_audio_per_second": 0.000002, + "input_cost_per_audio_per_second_above_128k_tokens": 0.000004, + "input_cost_per_character": 1.875e-8, + "input_cost_per_character_above_128k_tokens": 2.5e-7, + "input_cost_per_image": 0.00002, + "input_cost_per_image_above_128k_tokens": 0.00004, + "input_cost_per_token": 4.688e-9, + "input_cost_per_token_above_128k_tokens": 0.000001, + "input_cost_per_video_per_second": 0.00002, + "input_cost_per_video_per_second_above_128k_tokens": 0.00004, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1000000, + "max_output_tokens": 
8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 1.875e-8, + "output_cost_per_character_above_128k_tokens": 3.75e-8, + "output_cost_per_token": 4.6875e-9, + "output_cost_per_token_above_128k_tokens": 9.375e-9, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini-1.5-flash-preview-0514": { + "input_cost_per_audio_per_second": 0.000002, + "input_cost_per_audio_per_second_above_128k_tokens": 0.000004, + "input_cost_per_character": 1.875e-8, + "input_cost_per_character_above_128k_tokens": 2.5e-7, + "input_cost_per_image": 0.00002, + "input_cost_per_image_above_128k_tokens": 0.00004, + "input_cost_per_token": 7.5e-8, + "input_cost_per_token_above_128k_tokens": 0.000001, + "input_cost_per_video_per_second": 0.00002, + "input_cost_per_video_per_second_above_128k_tokens": 0.00004, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 1.875e-8, + "output_cost_per_character_above_128k_tokens": 3.75e-8, + "output_cost_per_token": 4.6875e-9, + "output_cost_per_token_above_128k_tokens": 9.375e-9, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini-1.5-pro": { + 
"input_cost_per_audio_per_second": 0.00003125, + "input_cost_per_audio_per_second_above_128k_tokens": 0.0000625, + "input_cost_per_character": 3.125e-7, + "input_cost_per_character_above_128k_tokens": 6.25e-7, + "input_cost_per_image": 0.00032875, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_128k_tokens": 0.0000025, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0.00000125, + "output_cost_per_character_above_128k_tokens": 0.0000025, + "output_cost_per_token": 0.000005, + "output_cost_per_token_above_128k_tokens": 0.00001, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini-1.5-pro-001": { + "deprecation_date": "2025-05-24", + "input_cost_per_audio_per_second": 0.00003125, + "input_cost_per_audio_per_second_above_128k_tokens": 0.0000625, + "input_cost_per_character": 3.125e-7, + "input_cost_per_character_above_128k_tokens": 6.25e-7, + "input_cost_per_image": 0.00032875, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_128k_tokens": 0.0000025, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0.00000125, + 
"output_cost_per_character_above_128k_tokens": 0.0000025, + "output_cost_per_token": 0.000005, + "output_cost_per_token_above_128k_tokens": 0.00001, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini-1.5-pro-002": { + "deprecation_date": "2025-09-24", + "input_cost_per_audio_per_second": 0.00003125, + "input_cost_per_audio_per_second_above_128k_tokens": 0.0000625, + "input_cost_per_character": 3.125e-7, + "input_cost_per_character_above_128k_tokens": 6.25e-7, + "input_cost_per_image": 0.00032875, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_128k_tokens": 0.0000025, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0.00000125, + "output_cost_per_character_above_128k_tokens": 0.0000025, + "output_cost_per_token": 0.000005, + "output_cost_per_token_above_128k_tokens": 0.00001, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-pro", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini-1.5-pro-preview-0215": { + "input_cost_per_audio_per_second": 0.00003125, + "input_cost_per_audio_per_second_above_128k_tokens": 0.0000625, + "input_cost_per_character": 3.125e-7, + "input_cost_per_character_above_128k_tokens": 6.25e-7, + "input_cost_per_image": 0.00032875, + 
"input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_token": 7.8125e-8, + "input_cost_per_token_above_128k_tokens": 1.5625e-7, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0.00000125, + "output_cost_per_character_above_128k_tokens": 0.0000025, + "output_cost_per_token": 3.125e-7, + "output_cost_per_token_above_128k_tokens": 6.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gemini-1.5-pro-preview-0409": { + "input_cost_per_audio_per_second": 0.00003125, + "input_cost_per_audio_per_second_above_128k_tokens": 0.0000625, + "input_cost_per_character": 3.125e-7, + "input_cost_per_character_above_128k_tokens": 6.25e-7, + "input_cost_per_image": 0.00032875, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_token": 7.8125e-8, + "input_cost_per_token_above_128k_tokens": 1.5625e-7, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0.00000125, + "output_cost_per_character_above_128k_tokens": 0.0000025, + "output_cost_per_token": 3.125e-7, + "output_cost_per_token_above_128k_tokens": 6.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + 
"supports_response_schema": true, + "supports_tool_choice": true + }, + "gemini-1.5-pro-preview-0514": { + "input_cost_per_audio_per_second": 0.00003125, + "input_cost_per_audio_per_second_above_128k_tokens": 0.0000625, + "input_cost_per_character": 3.125e-7, + "input_cost_per_character_above_128k_tokens": 6.25e-7, + "input_cost_per_image": 0.00032875, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_token": 7.8125e-8, + "input_cost_per_token_above_128k_tokens": 1.5625e-7, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0.00000125, + "output_cost_per_character_above_128k_tokens": 0.0000025, + "output_cost_per_token": 3.125e-7, + "output_cost_per_token_above_128k_tokens": 6.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gemini-2.0-flash": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 4e-7, + "source": "https://ai.google.dev/pricing#2_0flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_input": true, + "supports_audio_output": 
true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.0-flash-001": { + "cache_read_input_token_cost": 3.75e-8, + "deprecation_date": "2026-02-05", + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 6e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.0-flash-exp": { + "cache_read_input_token_cost": 3.75e-8, + "input_cost_per_audio_per_second": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "input_cost_per_character": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_token": 1.5e-7, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + 
"max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_character_above_128k_tokens": 0, + "output_cost_per_token": 6e-7, + "output_cost_per_token_above_128k_tokens": 0, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.0-flash-lite": { + "cache_read_input_token_cost": 1.875e-8, + "input_cost_per_audio_token": 7.5e-8, + "input_cost_per_token": 7.5e-8, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 50, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 3e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.0-flash-lite-001": { + 
"cache_read_input_token_cost": 1.875e-8, + "deprecation_date": "2026-02-25", + "input_cost_per_audio_token": 7.5e-8, + "input_cost_per_token": 7.5e-8, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 50, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 3e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.0-flash-live-preview-04-09": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_audio_token": 0.000003, + "input_cost_per_image": 0.000003, + "input_cost_per_token": 5e-7, + "input_cost_per_video_per_second": 0.000003, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_audio_token": 0.000012, + "output_cost_per_token": 0.000002, + "rpm": 10, + "source": "https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/gemini#gemini-2-0-flash-live-preview-04-09", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + 
"supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_output": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini-2.0-flash-preview-image-generation": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 4e-7, + "source": "https://ai.google.dev/pricing#2_0flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.0-flash-thinking-exp": { + "cache_read_input_token_cost": 0, + "input_cost_per_audio_per_second": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "input_cost_per_character": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_video_per_second_above_128k_tokens": 
0, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_character_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.0-flash-thinking-exp-01-21": { + "cache_read_input_token_cost": 0, + "input_cost_per_audio_per_second": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "input_cost_per_character": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65536, + "max_pdf_size_mb": 30, + "max_tokens": 65536, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_character_above_128k_tokens": 0, + "output_cost_per_token": 
0, + "output_cost_per_token_above_128k_tokens": 0, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_output": false, + "supports_function_calling": false, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": false, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.0-pro-exp-02-05": { + "cache_read_input_token_cost": 3.125e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_input": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-flash": { + "cache_read_input_token_cost": 3e-8, + "input_cost_per_audio_token": 0.000001, + 
"input_cost_per_token": 3e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 0.0000025, + "output_cost_per_token": 0.0000025, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-flash-lite": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 5e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 4e-7, + "output_cost_per_token": 4e-7, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" 
+ ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-flash-lite-preview-09-2025": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 3e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 4e-7, + "output_cost_per_token": 4e-7, + "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-flash-preview-09-2025": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 3e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + 
"max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 0.0000025, + "output_cost_per_token": 0.0000025, + "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-live-2.5-flash-preview-native-audio-09-2025": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_audio_token": 0.000003, + "input_cost_per_token": 3e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_audio_token": 0.000012, + "output_cost_per_token": 0.000002, + "source": "https://ai.google.dev/gemini-api/docs/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_input": true, + 
"supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini/gemini-live-2.5-flash-preview-native-audio-09-2025": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_audio_token": 0.000003, + "input_cost_per_token": 3e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_audio_token": 0.000012, + "output_cost_per_token": 0.000002, + "rpm": 100000, + "source": "https://ai.google.dev/gemini-api/docs/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 8000000 + }, + "gemini-2.5-flash-lite-preview-06-17": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 5e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + 
"max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 4e-7, + "output_cost_per_token": 4e-7, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-flash-preview-04-17": { + "cache_read_input_token_cost": 3.75e-8, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 0.0000035, + "output_cost_per_token": 6e-7, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + 
"supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-flash-preview-05-20": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 3e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 0.0000025, + "output_cost_per_token": 0.0000025, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-pro": { + "cache_read_input_token_cost": 1.25e-7, + "cache_creation_input_token_cost_above_200k_tokens": 2.5e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + 
"max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_input": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-3-pro-preview": { + "cache_read_input_token_cost": 2e-7, + "cache_read_input_token_cost_above_200k_tokens": 4e-7, + "cache_creation_input_token_cost_above_200k_tokens": 2.5e-7, + "input_cost_per_token": 0.000002, + "input_cost_per_token_above_200k_tokens": 0.000004, + "input_cost_per_token_batches": 0.000001, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.000012, + "output_cost_per_token_above_200k_tokens": 0.000018, + "output_cost_per_token_batches": 0.000006, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_input": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + 
"supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + "supports_vision": true, + "supports_web_search": true + }, + "vertex_ai/gemini-3-pro-preview": { + "cache_read_input_token_cost": 2e-7, + "cache_read_input_token_cost_above_200k_tokens": 4e-7, + "cache_creation_input_token_cost_above_200k_tokens": 2.5e-7, + "input_cost_per_token": 0.000002, + "input_cost_per_token_above_200k_tokens": 0.000004, + "input_cost_per_token_batches": 0.000001, + "litellm_provider": "vertex_ai", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.000012, + "output_cost_per_token_above_200k_tokens": 0.000018, + "output_cost_per_token_batches": 0.000006, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_input": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + "supports_vision": true, + "supports_web_search": true + }, + "vertex_ai/gemini-3-flash-preview": { + "cache_read_input_token_cost": 5e-8, + "input_cost_per_token": 5e-7, + "input_cost_per_audio_token": 0.000001, + "litellm_provider": "vertex_ai", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 
65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.000003, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_input": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-pro-exp-03-25": { + "cache_read_input_token_cost": 3.125e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_input": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + 
"supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-pro-preview-03-25": { + "cache_read_input_token_cost": 3.125e-7, + "input_cost_per_audio_token": 0.00000125, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-pro-preview-05-06": { + "cache_read_input_token_cost": 3.125e-7, + "input_cost_per_audio_token": 0.00000125, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + 
"output_cost_per_token_above_200k_tokens": 0.000015, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supported_regions": [ + "global" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-pro-preview-06-05": { + "cache_read_input_token_cost": 3.125e-7, + "input_cost_per_audio_token": 0.00000125, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + 
"supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-2.5-pro-preview-tts": { + "cache_read_input_token_cost": 3.125e-7, + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", + "supported_modalities": [ + "text" + ], + "supported_output_modalities": [ + "audio" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini-flash-experimental": { + "input_cost_per_character": 0, + "input_cost_per_token": 0, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_token": 0, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental", + "supports_function_calling": false, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "gemini-pro": { + "input_cost_per_character": 1.25e-7, + "input_cost_per_image": 0.0025, + "input_cost_per_token": 5e-7, + "input_cost_per_video_per_second": 0.002, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 32760, + 
"max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 3.75e-7, + "output_cost_per_token": 0.0000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "gemini-pro-experimental": { + "input_cost_per_character": 0, + "input_cost_per_token": 0, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_token": 0, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental", + "supports_function_calling": false, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "gemini-pro-vision": { + "input_cost_per_image": 0.0025, + "input_cost_per_token": 5e-7, + "litellm_provider": "vertex_ai-vision-models", + "max_images_per_prompt": 16, + "max_input_tokens": 16384, + "max_output_tokens": 2048, + "max_tokens": 2048, + "max_video_length": 2, + "max_videos_per_prompt": 1, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini/gemini-1.5-flash": { + "input_cost_per_token": 7.5e-8, + "input_cost_per_token_above_128k_tokens": 1.5e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 3e-7, + "output_cost_per_token_above_128k_tokens": 6e-7, + "rpm": 2000, + 
"source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-flash-001": { + "cache_creation_input_token_cost": 0.000001, + "cache_read_input_token_cost": 1.875e-8, + "deprecation_date": "2025-05-24", + "input_cost_per_token": 7.5e-8, + "input_cost_per_token_above_128k_tokens": 1.5e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 3e-7, + "output_cost_per_token_above_128k_tokens": 6e-7, + "rpm": 2000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-flash-002": { + "cache_creation_input_token_cost": 0.000001, + "cache_read_input_token_cost": 1.875e-8, + "deprecation_date": "2025-09-24", + "input_cost_per_token": 7.5e-8, + "input_cost_per_token_above_128k_tokens": 1.5e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 3e-7, + "output_cost_per_token_above_128k_tokens": 6e-7, + "rpm": 2000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, 
+ "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-flash-8b": { + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 4000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-flash-8b-exp-0827": { + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 4000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-flash-8b-exp-0924": { + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + 
"max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 4000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-flash-exp-0827": { + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 2000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-flash-latest": { + "input_cost_per_token": 7.5e-8, + "input_cost_per_token_above_128k_tokens": 1.5e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 3e-7, + "output_cost_per_token_above_128k_tokens": 6e-7, + "rpm": 2000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-pro": { + 
"input_cost_per_token": 0.0000035, + "input_cost_per_token_above_128k_tokens": 0.000007, + "litellm_provider": "gemini", + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000105, + "output_cost_per_token_above_128k_tokens": 0.000021, + "rpm": 1000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-pro-001": { + "deprecation_date": "2025-05-24", + "input_cost_per_token": 0.0000035, + "input_cost_per_token_above_128k_tokens": 0.000007, + "litellm_provider": "gemini", + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000105, + "output_cost_per_token_above_128k_tokens": 0.000021, + "rpm": 1000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-pro-002": { + "deprecation_date": "2025-09-24", + "input_cost_per_token": 0.0000035, + "input_cost_per_token_above_128k_tokens": 0.000007, + "litellm_provider": "gemini", + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000105, + "output_cost_per_token_above_128k_tokens": 0.000021, + "rpm": 1000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-pro-exp-0801": { + "input_cost_per_token": 0.0000035, + 
"input_cost_per_token_above_128k_tokens": 0.000007, + "litellm_provider": "gemini", + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000105, + "output_cost_per_token_above_128k_tokens": 0.000021, + "rpm": 1000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-pro-exp-0827": { + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 1000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-1.5-pro-latest": { + "input_cost_per_token": 0.0000035, + "input_cost_per_token_above_128k_tokens": 0.000007, + "litellm_provider": "gemini", + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000105, + "output_cost_per_token_above_128k_tokens": 0.000021, + "rpm": 1000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-2.0-flash": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 
1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 4e-7, + "rpm": 10000, + "source": "https://ai.google.dev/pricing#2_0flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 10000000 + }, + "gemini/gemini-2.0-flash-001": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 4e-7, + "rpm": 10000, + "source": "https://ai.google.dev/pricing#2_0flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 10000000 + }, + "gemini/gemini-2.0-flash-exp": { + "cache_read_input_token_cost": 0, + "input_cost_per_audio_per_second": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "input_cost_per_character": 0, + "input_cost_per_character_above_128k_tokens": 0, + 
"input_cost_per_image": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_character_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 10, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_output": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 4000000 + }, + "gemini/gemini-2.0-flash-lite": { + "cache_read_input_token_cost": 1.875e-8, + "input_cost_per_audio_token": 7.5e-8, + "input_cost_per_token": 7.5e-8, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 50, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 3e-7, + "rpm": 4000, + "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.0-flash-lite", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": true, + 
"supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 4000000 + }, + "gemini/gemini-2.0-flash-lite-preview-02-05": { + "cache_read_input_token_cost": 1.875e-8, + "input_cost_per_audio_token": 7.5e-8, + "input_cost_per_token": 7.5e-8, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 3e-7, + "rpm": 60000, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash-lite", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 10000000 + }, + "gemini/gemini-2.0-flash-live-001": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_audio_token": 0.0000021, + "input_cost_per_image": 0.0000021, + "input_cost_per_token": 3.5e-7, + "input_cost_per_video_per_second": 0.0000021, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_audio_token": 0.0000085, + "output_cost_per_token": 0.0000015, + "rpm": 10, + "source": 
"https://ai.google.dev/gemini-api/docs/models#gemini-2-0-flash-live-001", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_output": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini/gemini-2.0-flash-preview-image-generation": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 4e-7, + "rpm": 10000, + "source": "https://ai.google.dev/pricing#2_0flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 10000000 + }, + "gemini/gemini-2.0-flash-thinking-exp": { + "cache_read_input_token_cost": 0, + "input_cost_per_audio_per_second": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "input_cost_per_character": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image": 0, + 
"input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65536, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_character_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 10, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_output": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 4000000 + }, + "gemini/gemini-2.0-flash-thinking-exp-01-21": { + "cache_read_input_token_cost": 0, + "input_cost_per_audio_per_second": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "input_cost_per_character": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65536, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + 
"max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_character_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 10, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_audio_output": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 4000000 + }, + "gemini/gemini-2.0-pro-exp-02-05": { + "cache_read_input_token_cost": 0, + "input_cost_per_audio_per_second": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "input_cost_per_character": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_character_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 2, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supports_audio_input": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, 
+ "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 1000000 + }, + "gemini/gemini-2.5-flash": { + "cache_read_input_token_cost": 3e-8, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 3e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 0.0000025, + "output_cost_per_token": 0.0000025, + "rpm": 100000, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 8000000 + }, + "gemini/gemini-2.5-flash-lite": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 5e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 
4e-7, + "output_cost_per_token": 4e-7, + "rpm": 15, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-lite", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini/gemini-2.5-flash-lite-preview-09-2025": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 3e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 4e-7, + "output_cost_per_token": 4e-7, + "rpm": 15, + "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + 
"supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini/gemini-2.5-flash-preview-09-2025": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 3e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 0.0000025, + "output_cost_per_token": 0.0000025, + "rpm": 15, + "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini/gemini-flash-latest": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 3e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + 
"output_cost_per_reasoning_token": 0.0000025, + "output_cost_per_token": 0.0000025, + "rpm": 15, + "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini/gemini-flash-lite-latest": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 3e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 4e-7, + "output_cost_per_token": 4e-7, + "rpm": 15, + "source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + 
"supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini/gemini-2.5-flash-lite-preview-06-17": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_audio_token": 5e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 4e-7, + "output_cost_per_token": 4e-7, + "rpm": 15, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-lite", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini/gemini-2.5-flash-preview-04-17": { + "cache_read_input_token_cost": 3.75e-8, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 1.5e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": 
"chat", + "output_cost_per_reasoning_token": 0.0000035, + "output_cost_per_token": 6e-7, + "rpm": 10, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini/gemini-2.5-flash-preview-05-20": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 3e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 0.0000025, + "output_cost_per_token": 0.0000025, + "rpm": 10, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + 
"gemini/gemini-2.5-flash-preview-tts": { + "cache_read_input_token_cost": 3.75e-8, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 1.5e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 0.0000035, + "output_cost_per_token": 6e-7, + "rpm": 10, + "source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text" + ], + "supported_output_modalities": [ + "audio" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini/gemini-2.5-pro": { + "cache_read_input_token_cost": 3.125e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "rpm": 2000, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + 
"supports_audio_input": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 800000 + }, + "gemini/gemini-2.5-computer-use-preview-10-2025": { + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "gemini", + "max_images_per_prompt": 3000, + "max_input_tokens": 128000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "rpm": 2000, + "source": "https://ai.google.dev/gemini-api/docs/computer-use", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_computer_use": true, + "supports_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 800000 + }, + "gemini/gemini-3-pro-preview": { + "cache_read_input_token_cost": 2e-7, + "cache_read_input_token_cost_above_200k_tokens": 4e-7, + "input_cost_per_token": 0.000002, + "input_cost_per_token_above_200k_tokens": 0.000004, + "input_cost_per_token_batches": 0.000001, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.000012, + "output_cost_per_token_above_200k_tokens": 0.000018, + "output_cost_per_token_batches": 0.000006, + "rpm": 2000, + "source": 
"https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_input": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 800000 + }, + "gemini/gemini-3-flash-preview": { + "cache_read_input_token_cost": 5e-8, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 5e-7, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 0.000003, + "output_cost_per_token": 0.000003, + "rpm": 2000, + "source": "https://ai.google.dev/pricing/gemini-3", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 800000 + }, + "gemini-3-flash-preview": { + "cache_read_input_token_cost": 5e-8, + 
"input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 5e-7, + "litellm_provider": "vertex_ai-language-models", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_reasoning_token": 0.000003, + "output_cost_per_token": 0.000003, + "source": "https://ai.google.dev/pricing/gemini-3", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true + }, + "gemini/gemini-2.5-pro-exp-03-25": { + "cache_read_input_token_cost": 0, + "input_cost_per_token": 0, + "input_cost_per_token_above_200k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0, + "output_cost_per_token_above_200k_tokens": 0, + "rpm": 5, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + 
"supports_audio_input": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 250000 + }, + "gemini/gemini-2.5-pro-preview-03-25": { + "cache_read_input_token_cost": 3.125e-7, + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "rpm": 10000, + "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 10000000 + }, + "gemini/gemini-2.5-pro-preview-05-06": { + "cache_read_input_token_cost": 3.125e-7, + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + 
"max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "rpm": 10000, + "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 10000000 + }, + "gemini/gemini-2.5-pro-preview-06-05": { + "cache_read_input_token_cost": 3.125e-7, + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "rpm": 10000, + "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_url_context": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 10000000 + }, + "gemini/gemini-2.5-pro-preview-tts": { + "cache_read_input_token_cost": 3.125e-7, + 
"input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_above_200k_tokens": 0.0000025, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_above_200k_tokens": 0.000015, + "rpm": 10000, + "source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview", + "supported_modalities": [ + "text" + ], + "supported_output_modalities": [ + "audio" + ], + "supports_audio_output": false, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true, + "tpm": 10000000 + }, + "gemini/gemini-exp-1114": { + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "metadata": { + "notes": "Rate limits not documented for gemini-exp-1114. 
Assuming same as gemini-1.5-pro.", + "supports_tool_choice": true + }, + "mode": "chat", + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 1000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-exp-1206": { + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "metadata": { + "notes": "Rate limits not documented for gemini-exp-1206. Assuming same as gemini-1.5-pro.", + "supports_tool_choice": true + }, + "mode": "chat", + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "rpm": 1000, + "source": "https://ai.google.dev/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 4000000 + }, + "gemini/gemini-gemma-2-27b-it": { + "input_cost_per_token": 3.5e-7, + "litellm_provider": "gemini", + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000105, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini/gemini-gemma-2-9b-it": { + "input_cost_per_token": 3.5e-7, + "litellm_provider": "gemini", + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000105, + "source": 
"https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini/gemini-pro": { + "input_cost_per_token": 3.5e-7, + "input_cost_per_token_above_128k_tokens": 7e-7, + "litellm_provider": "gemini", + "max_input_tokens": 32760, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000105, + "output_cost_per_token_above_128k_tokens": 0.0000021, + "rpd": 30000, + "rpm": 360, + "source": "https://ai.google.dev/gemini-api/docs/models/gemini", + "supports_function_calling": true, + "supports_tool_choice": true, + "tpm": 120000 + }, + "gemini/gemini-pro-vision": { + "input_cost_per_token": 3.5e-7, + "input_cost_per_token_above_128k_tokens": 7e-7, + "litellm_provider": "gemini", + "max_input_tokens": 30720, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 0.00000105, + "output_cost_per_token_above_128k_tokens": 0.0000021, + "rpd": 30000, + "rpm": 360, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "tpm": 120000 + }, + "gemini/gemma-3-27b-it": { + "input_cost_per_audio_per_second": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "input_cost_per_character": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_input_tokens": 131072, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_character_above_128k_tokens": 0, + 
"output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "source": "https://aistudio.google.com", + "supports_audio_output": false, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": false, + "supports_tool_choice": true, + "supports_vision": true + }, + "gemini/learnlm-1.5-pro-experimental": { + "input_cost_per_audio_per_second": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "input_cost_per_character": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "litellm_provider": "gemini", + "max_input_tokens": 32767, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0, + "output_cost_per_character_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "source": "https://aistudio.google.com", + "supports_audio_output": false, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "github_copilot/claude-haiku-4.5": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 16000, + "max_tokens": 16000, + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "github_copilot/claude-opus-4.5": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 16000, + "max_tokens": 16000, + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supports_function_calling": true, + 
"supports_parallel_function_calling": true, + "supports_vision": true + }, + "github_copilot/claude-opus-41": { + "litellm_provider": "github_copilot", + "max_input_tokens": 80000, + "max_output_tokens": 16000, + "max_tokens": 16000, + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supports_vision": true + }, + "github_copilot/claude-sonnet-4": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 16000, + "max_tokens": 16000, + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "github_copilot/claude-sonnet-4.5": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 16000, + "max_tokens": 16000, + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "github_copilot/gemini-2.5-pro": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "github_copilot/gemini-3-pro-preview": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "github_copilot/gpt-3.5-turbo": { + "litellm_provider": "github_copilot", + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "supports_function_calling": true + }, + "github_copilot/gpt-3.5-turbo-0613": { + "litellm_provider": "github_copilot", + "max_input_tokens": 16384, + "max_output_tokens": 4096, + 
"max_tokens": 4096, + "mode": "chat", + "supports_function_calling": true + }, + "github_copilot/gpt-4": { + "litellm_provider": "github_copilot", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "supports_function_calling": true + }, + "github_copilot/gpt-4-0613": { + "litellm_provider": "github_copilot", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "supports_function_calling": true + }, + "github_copilot/gpt-4-o-preview": { + "litellm_provider": "github_copilot", + "max_input_tokens": 64000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "github_copilot/gpt-4.1": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "github_copilot/gpt-4.1-2025-04-14": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "github_copilot/gpt-4o": { + "litellm_provider": "github_copilot", + "max_input_tokens": 64000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "github_copilot/gpt-4o-2024-05-13": { + "litellm_provider": "github_copilot", + "max_input_tokens": 64000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + 
"github_copilot/gpt-4o-2024-08-06": { + "litellm_provider": "github_copilot", + "max_input_tokens": 64000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "github_copilot/gpt-4o-2024-11-20": { + "litellm_provider": "github_copilot", + "max_input_tokens": 64000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "github_copilot/gpt-4o-mini": { + "litellm_provider": "github_copilot", + "max_input_tokens": 64000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "github_copilot/gpt-4o-mini-2024-07-18": { + "litellm_provider": "github_copilot", + "max_input_tokens": 64000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "github_copilot/gpt-5": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/responses" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "github_copilot/gpt-5-mini": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "github_copilot/gpt-5.1": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + 
"supported_endpoints": [ + "/v1/chat/completions", + "/v1/responses" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "github_copilot/gpt-5.2": { + "litellm_provider": "github_copilot", + "max_input_tokens": 128000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/responses" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "google.gemma-3-12b-it": { + "input_cost_per_token": 9e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2.9e-7, + "supports_system_messages": true, + "supports_vision": true + }, + "google.gemma-3-27b-it": { + "input_cost_per_token": 2.3e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 3.8e-7, + "supports_system_messages": true, + "supports_vision": true + }, + "google.gemma-3-4b-it": { + "input_cost_per_token": 4e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 8e-8, + "supports_system_messages": true, + "supports_vision": true + }, + "global.anthropic.claude-sonnet-4-5-20250929-v1:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "bedrock_converse", + 
"max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "global.anthropic.claude-sonnet-4-20250514-v1:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "global.anthropic.claude-haiku-4-5-20251001-v1:0": { + "cache_creation_input_token_cost": 0.00000125, + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 0.000001, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + 
"max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "global.amazon.nova-2-lite-v1:0": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_video_input": true, + "supports_vision": true + }, + "gpt-3.5-turbo": { + "input_cost_per_token": 5e-7, + "litellm_provider": "openai", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 4097, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-3.5-turbo-0125": { + "input_cost_per_token": 5e-7, + "litellm_provider": "openai", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 16385, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-3.5-turbo-0301": { + "input_cost_per_token": 0.0000015, + "litellm_provider": "openai", + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "max_tokens": 
4097, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-3.5-turbo-0613": { + "input_cost_per_token": 0.0000015, + "litellm_provider": "openai", + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "max_tokens": 4097, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-3.5-turbo-1106": { + "deprecation_date": "2026-09-28", + "input_cost_per_token": 0.000001, + "litellm_provider": "openai", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 16385, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-3.5-turbo-16k": { + "input_cost_per_token": 0.000003, + "litellm_provider": "openai", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 16385, + "mode": "chat", + "output_cost_per_token": 0.000004, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-3.5-turbo-16k-0613": { + "input_cost_per_token": 0.000003, + "litellm_provider": "openai", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 16385, + "mode": "chat", + "output_cost_per_token": 0.000004, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4": { + "input_cost_per_token": 0.00003, + "litellm_provider": "openai", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + 
"supports_tool_choice": true + }, + "gpt-4-0125-preview": { + "deprecation_date": "2026-03-26", + "input_cost_per_token": 0.00001, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4-0314": { + "input_cost_per_token": 0.00003, + "litellm_provider": "openai", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4-0613": { + "deprecation_date": "2025-06-06", + "input_cost_per_token": 0.00003, + "litellm_provider": "openai", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4-1106-preview": { + "deprecation_date": "2026-03-26", + "input_cost_per_token": 0.00001, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4-1106-vision-preview": { + "deprecation_date": "2024-12-06", + "input_cost_per_token": 0.00001, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_pdf_input": true, + "supports_prompt_caching": 
true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-4-32k": { + "input_cost_per_token": 0.00006, + "litellm_provider": "openai", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00012, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4-32k-0314": { + "input_cost_per_token": 0.00006, + "litellm_provider": "openai", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00012, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4-32k-0613": { + "input_cost_per_token": 0.00006, + "litellm_provider": "openai", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00012, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4-turbo": { + "input_cost_per_token": 0.00001, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-4-turbo-2024-04-09": { + "input_cost_per_token": 0.00001, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": 
true, + "supports_vision": true + }, + "gpt-4-turbo-preview": { + "input_cost_per_token": 0.00001, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4-vision-preview": { + "deprecation_date": "2024-12-06", + "input_cost_per_token": 0.00001, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-4.1": { + "cache_read_input_token_cost": 5e-7, + "cache_read_input_token_cost_priority": 8.75e-7, + "input_cost_per_token": 0.000002, + "input_cost_per_token_batches": 0.000001, + "input_cost_per_token_priority": 0.0000035, + "litellm_provider": "openai", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000008, + "output_cost_per_token_batches": 0.000004, + "output_cost_per_token_priority": 0.000014, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4.1-2025-04-14": { + 
"cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000002, + "input_cost_per_token_batches": 0.000001, + "litellm_provider": "openai", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000008, + "output_cost_per_token_batches": 0.000004, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4.1-mini": { + "cache_read_input_token_cost": 1e-7, + "cache_read_input_token_cost_priority": 1.75e-7, + "input_cost_per_token": 4e-7, + "input_cost_per_token_batches": 2e-7, + "input_cost_per_token_priority": 7e-7, + "litellm_provider": "openai", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000016, + "output_cost_per_token_batches": 8e-7, + "output_cost_per_token_priority": 0.0000028, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4.1-mini-2025-04-14": { + "cache_read_input_token_cost": 1e-7, + 
"input_cost_per_token": 4e-7, + "input_cost_per_token_batches": 2e-7, + "litellm_provider": "openai", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000016, + "output_cost_per_token_batches": 8e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4.1-nano": { + "cache_read_input_token_cost": 2.5e-8, + "cache_read_input_token_cost_priority": 5e-8, + "input_cost_per_token": 1e-7, + "input_cost_per_token_batches": 5e-8, + "input_cost_per_token_priority": 2e-7, + "litellm_provider": "openai", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 4e-7, + "output_cost_per_token_batches": 2e-7, + "output_cost_per_token_priority": 8e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4.1-nano-2025-04-14": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_token": 1e-7, + "input_cost_per_token_batches": 5e-8, + 
"litellm_provider": "openai", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 4e-7, + "output_cost_per_token_batches": 2e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4.5-preview": { + "cache_read_input_token_cost": 0.0000375, + "input_cost_per_token": 0.000075, + "input_cost_per_token_batches": 0.0000375, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00015, + "output_cost_per_token_batches": 0.000075, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-4.5-preview-2025-02-27": { + "cache_read_input_token_cost": 0.0000375, + "deprecation_date": "2025-07-14", + "input_cost_per_token": 0.000075, + "input_cost_per_token_batches": 0.0000375, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00015, + "output_cost_per_token_batches": 0.000075, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + 
"supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-4o": { + "cache_read_input_token_cost": 0.00000125, + "cache_read_input_token_cost_priority": 0.000002125, + "input_cost_per_token": 0.0000025, + "input_cost_per_token_batches": 0.00000125, + "input_cost_per_token_priority": 0.00000425, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_batches": 0.000005, + "output_cost_per_token_priority": 0.000017, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4o-2024-05-13": { + "input_cost_per_token": 0.000005, + "input_cost_per_token_batches": 0.0000025, + "input_cost_per_token_priority": 0.00000875, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "output_cost_per_token_batches": 0.0000075, + "output_cost_per_token_priority": 0.00002625, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-4o-2024-08-06": { + "cache_read_input_token_cost": 0.00000125, + "input_cost_per_token": 0.0000025, + "input_cost_per_token_batches": 0.00000125, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_batches": 0.000005, + "supports_function_calling": true, + 
"supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4o-2024-11-20": { + "cache_read_input_token_cost": 0.00000125, + "input_cost_per_token": 0.0000025, + "input_cost_per_token_batches": 0.00000125, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_batches": 0.000005, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4o-audio-preview": { + "input_cost_per_audio_token": 0.0001, + "input_cost_per_token": 0.0000025, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_audio_token": 0.0002, + "output_cost_per_token": 0.00001, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-audio-preview-2024-10-01": { + "input_cost_per_audio_token": 0.0001, + "input_cost_per_token": 0.0000025, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_audio_token": 0.0002, + "output_cost_per_token": 0.00001, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": 
true, + "supports_tool_choice": true + }, + "gpt-4o-audio-preview-2024-12-17": { + "input_cost_per_audio_token": 0.00004, + "input_cost_per_token": 0.0000025, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.00001, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-audio-preview-2025-06-03": { + "input_cost_per_audio_token": 0.00004, + "input_cost_per_token": 0.0000025, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.00001, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-mini": { + "cache_read_input_token_cost": 7.5e-8, + "cache_read_input_token_cost_priority": 1.25e-7, + "input_cost_per_token": 1.5e-7, + "input_cost_per_token_batches": 7.5e-8, + "input_cost_per_token_priority": 2.5e-7, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 6e-7, + "output_cost_per_token_batches": 3e-7, + "output_cost_per_token_priority": 0.000001, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4o-mini-2024-07-18": { + "cache_read_input_token_cost": 
7.5e-8, + "input_cost_per_token": 1.5e-7, + "input_cost_per_token_batches": 7.5e-8, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 6e-7, + "output_cost_per_token_batches": 3e-7, + "search_context_cost_per_query": { + "search_context_size_high": 0.03, + "search_context_size_low": 0.025, + "search_context_size_medium": 0.0275 + }, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-4o-mini-audio-preview": { + "input_cost_per_audio_token": 0.00001, + "input_cost_per_token": 1.5e-7, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_audio_token": 0.00002, + "output_cost_per_token": 6e-7, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-mini-audio-preview-2024-12-17": { + "input_cost_per_audio_token": 0.00001, + "input_cost_per_token": 1.5e-7, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_audio_token": 0.00002, + "output_cost_per_token": 6e-7, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-mini-realtime-preview": { + "cache_creation_input_audio_token_cost": 3e-7, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_audio_token": 
0.00001, + "input_cost_per_token": 6e-7, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00002, + "output_cost_per_token": 0.0000024, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-mini-realtime-preview-2024-12-17": { + "cache_creation_input_audio_token_cost": 3e-7, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_audio_token": 0.00001, + "input_cost_per_token": 6e-7, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00002, + "output_cost_per_token": 0.0000024, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-mini-search-preview": { + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_token": 1.5e-7, + "input_cost_per_token_batches": 7.5e-8, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 6e-7, + "output_cost_per_token_batches": 3e-7, + "search_context_cost_per_query": { + "search_context_size_high": 0.03, + "search_context_size_low": 0.025, + "search_context_size_medium": 0.0275 + }, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gpt-4o-mini-search-preview-2025-03-11": { + 
"cache_read_input_token_cost": 7.5e-8, + "input_cost_per_token": 1.5e-7, + "input_cost_per_token_batches": 7.5e-8, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 6e-7, + "output_cost_per_token_batches": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-4o-realtime-preview": { + "cache_read_input_token_cost": 0.0000025, + "input_cost_per_audio_token": 0.00004, + "input_cost_per_token": 0.000005, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.00002, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-realtime-preview-2024-10-01": { + "cache_creation_input_audio_token_cost": 0.00002, + "cache_read_input_token_cost": 0.0000025, + "input_cost_per_audio_token": 0.0001, + "input_cost_per_token": 0.000005, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.0002, + "output_cost_per_token": 0.00002, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-realtime-preview-2024-12-17": { + "cache_read_input_token_cost": 0.0000025, + "input_cost_per_audio_token": 0.00004, + "input_cost_per_token": 0.000005, + 
"litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.00002, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-realtime-preview-2025-06-03": { + "cache_read_input_token_cost": 0.0000025, + "input_cost_per_audio_token": 0.00004, + "input_cost_per_token": 0.000005, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00008, + "output_cost_per_token": 0.00002, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4o-search-preview": { + "cache_read_input_token_cost": 0.00000125, + "input_cost_per_token": 0.0000025, + "input_cost_per_token_batches": 0.00000125, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_batches": 0.000005, + "search_context_cost_per_query": { + "search_context_size_high": 0.05, + "search_context_size_low": 0.03, + "search_context_size_medium": 0.035 + }, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "gpt-4o-search-preview-2025-03-11": { + "cache_read_input_token_cost": 0.00000125, + "input_cost_per_token": 0.0000025, + 
"input_cost_per_token_batches": 0.00000125, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_batches": 0.000005, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-5": { + "cache_read_input_token_cost": 1.25e-7, + "cache_read_input_token_cost_flex": 6.25e-8, + "cache_read_input_token_cost_priority": 2.5e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_flex": 6.25e-7, + "input_cost_per_token_priority": 0.0000025, + "litellm_provider": "openai", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_flex": 0.000005, + "output_cost_per_token_priority": 0.00002, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-5.1": { + "cache_read_input_token_cost": 1.25e-7, + "cache_read_input_token_cost_priority": 2.5e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_priority": 0.0000025, + "litellm_provider": "openai", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + 
"output_cost_per_token": 0.00001, + "output_cost_per_token_priority": 0.00002, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-5.1-2025-11-13": { + "cache_read_input_token_cost": 1.25e-7, + "cache_read_input_token_cost_priority": 2.5e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_priority": 0.0000025, + "litellm_provider": "openai", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "output_cost_per_token_priority": 0.00002, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-5.1-chat-latest": { + "cache_read_input_token_cost": 1.25e-7, + "cache_read_input_token_cost_priority": 2.5e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_priority": 0.0000025, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + 
"output_cost_per_token": 0.00001, + "output_cost_per_token_priority": 0.00002, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": false, + "supports_native_streaming": true, + "supports_parallel_function_calling": false, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "gpt-5.2": { + "cache_read_input_token_cost": 1.75e-7, + "cache_read_input_token_cost_priority": 3.5e-7, + "input_cost_per_token": 0.00000175, + "input_cost_per_token_priority": 0.0000035, + "litellm_provider": "openai", + "max_input_tokens": 400000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000014, + "output_cost_per_token_priority": 0.000028, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-5.2-2025-12-11": { + "cache_read_input_token_cost": 1.75e-7, + "cache_read_input_token_cost_priority": 3.5e-7, + "input_cost_per_token": 0.00000175, + "input_cost_per_token_priority": 0.0000035, + "litellm_provider": "openai", + "max_input_tokens": 400000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000014, + 
"output_cost_per_token_priority": 0.000028, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "image" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-5.2-chat-latest": { + "cache_read_input_token_cost": 1.75e-7, + "cache_read_input_token_cost_priority": 3.5e-7, + "input_cost_per_token": 0.00000175, + "input_cost_per_token_priority": 0.0000035, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000014, + "output_cost_per_token_priority": 0.000028, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-5-2025-08-07": { + "cache_read_input_token_cost": 1.25e-7, + "cache_read_input_token_cost_flex": 6.25e-8, + "cache_read_input_token_cost_priority": 2.5e-7, + "input_cost_per_token": 0.00000125, + "input_cost_per_token_flex": 6.25e-7, + "input_cost_per_token_priority": 0.0000025, + "litellm_provider": "openai", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", 
+ "output_cost_per_token": 0.00001, + "output_cost_per_token_flex": 0.000005, + "output_cost_per_token_priority": 0.00002, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-5-chat": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "openai", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": false, + "supports_native_streaming": true, + "supports_parallel_function_calling": false, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "gpt-5-chat-latest": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + 
"supports_function_calling": false, + "supports_native_streaming": true, + "supports_parallel_function_calling": false, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "gpt-5-mini": { + "cache_read_input_token_cost": 2.5e-8, + "cache_read_input_token_cost_flex": 1.25e-8, + "cache_read_input_token_cost_priority": 4.5e-8, + "input_cost_per_token": 2.5e-7, + "input_cost_per_token_flex": 1.25e-7, + "input_cost_per_token_priority": 4.5e-7, + "litellm_provider": "openai", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "output_cost_per_token_flex": 0.000001, + "output_cost_per_token_priority": 0.0000036, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-5-mini-2025-08-07": { + "cache_read_input_token_cost": 2.5e-8, + "cache_read_input_token_cost_flex": 1.25e-8, + "cache_read_input_token_cost_priority": 4.5e-8, + "input_cost_per_token": 2.5e-7, + "input_cost_per_token_flex": 1.25e-7, + "input_cost_per_token_priority": 4.5e-7, + "litellm_provider": "openai", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "output_cost_per_token_flex": 0.000001, + "output_cost_per_token_priority": 
0.0000036, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "gpt-5-nano": { + "cache_read_input_token_cost": 5e-9, + "cache_read_input_token_cost_flex": 2.5e-9, + "input_cost_per_token": 5e-8, + "input_cost_per_token_flex": 2.5e-8, + "input_cost_per_token_priority": 0.0000025, + "litellm_provider": "openai", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "output_cost_per_token_flex": 2e-7, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-5-nano-2025-08-07": { + "cache_read_input_token_cost": 5e-9, + "cache_read_input_token_cost_flex": 2.5e-9, + "input_cost_per_token": 5e-8, + "input_cost_per_token_flex": 2.5e-8, + "litellm_provider": "openai", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "output_cost_per_token_flex": 2e-7, + "supported_endpoints": [ + "/v1/chat/completions", 
+ "/v1/batch", + "/v1/responses" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_native_streaming": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "gpt-realtime": { + "cache_creation_input_audio_token_cost": 4e-7, + "cache_read_input_token_cost": 4e-7, + "input_cost_per_audio_token": 0.000032, + "input_cost_per_image": 0.000005, + "input_cost_per_token": 0.000004, + "litellm_provider": "openai", + "max_input_tokens": 32000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.000064, + "output_cost_per_token": 0.000016, + "supported_endpoints": [ + "/v1/realtime" + ], + "supported_modalities": [ + "text", + "image", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-realtime-mini": { + "cache_creation_input_audio_token_cost": 3e-7, + "cache_read_input_audio_token_cost": 3e-7, + "input_cost_per_audio_token": 0.00001, + "input_cost_per_token": 6e-7, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.00002, + "output_cost_per_token": 0.0000024, + "supported_endpoints": [ + "/v1/realtime" + ], + "supported_modalities": [ + "text", + "image", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + 
"supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-realtime-2025-08-28": { + "cache_creation_input_audio_token_cost": 4e-7, + "cache_read_input_token_cost": 4e-7, + "input_cost_per_audio_token": 0.000032, + "input_cost_per_image": 0.000005, + "input_cost_per_token": 0.000004, + "litellm_provider": "openai", + "max_input_tokens": 32000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_audio_token": 0.000064, + "output_cost_per_token": 0.000016, + "supported_endpoints": [ + "/v1/realtime" + ], + "supported_modalities": [ + "text", + "image", + "audio" + ], + "supported_output_modalities": [ + "text", + "audio" + ], + "supports_audio_input": true, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gradient_ai/alibaba-qwen3-32b": { + "litellm_provider": "gradient_ai", + "max_tokens": 2048, + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/anthropic-claude-3-opus": { + "input_cost_per_token": 0.000015, + "litellm_provider": "gradient_ai", + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/anthropic-claude-3.5-haiku": { + "input_cost_per_token": 8e-7, + "litellm_provider": "gradient_ai", + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_token": 0.000004, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/anthropic-claude-3.5-sonnet": { + "input_cost_per_token": 0.000003, + "litellm_provider": "gradient_ai", + "max_tokens": 
1024, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/anthropic-claude-3.7-sonnet": { + "input_cost_per_token": 0.000003, + "litellm_provider": "gradient_ai", + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/deepseek-r1-distill-llama-70b": { + "input_cost_per_token": 9.9e-7, + "litellm_provider": "gradient_ai", + "max_tokens": 8000, + "mode": "chat", + "output_cost_per_token": 9.9e-7, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/llama3-8b-instruct": { + "input_cost_per_token": 2e-7, + "litellm_provider": "gradient_ai", + "max_tokens": 512, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/llama3.3-70b-instruct": { + "input_cost_per_token": 6.5e-7, + "litellm_provider": "gradient_ai", + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 6.5e-7, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/mistral-nemo-instruct-2407": { + "input_cost_per_token": 3e-7, + "litellm_provider": "gradient_ai", + "max_tokens": 512, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/openai-gpt-4o": { + "litellm_provider": "gradient_ai", + "max_tokens": 16384, + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions" + ], + 
"supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/openai-gpt-4o-mini": { + "litellm_provider": "gradient_ai", + "max_tokens": 16384, + "mode": "chat", + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/openai-o3": { + "input_cost_per_token": 0.000002, + "litellm_provider": "gradient_ai", + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "gradient_ai/openai-o3-mini": { + "input_cost_per_token": 0.0000011, + "litellm_provider": "gradient_ai", + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supported_endpoints": [ + "/v1/chat/completions" + ], + "supported_modalities": [ + "text" + ], + "supports_tool_choice": false + }, + "lemonade/Qwen3-Coder-30B-A3B-Instruct-GGUF": { + "input_cost_per_token": 0, + "litellm_provider": "lemonade", + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "lemonade/gpt-oss-20b-mxfp4-GGUF": { + "input_cost_per_token": 0, + "litellm_provider": "lemonade", + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "lemonade/gpt-oss-120b-mxfp-GGUF": { + "input_cost_per_token": 0, + "litellm_provider": "lemonade", + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": 
true + }, + "lemonade/Gemma-3-4b-it-GGUF": { + "input_cost_per_token": 0, + "litellm_provider": "lemonade", + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "lemonade/Qwen3-4B-Instruct-2507-GGUF": { + "input_cost_per_token": 0, + "litellm_provider": "lemonade", + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "amazon-nova/nova-micro-v1": { + "input_cost_per_token": 3.5e-8, + "litellm_provider": "amazon_nova", + "max_input_tokens": 128000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 1.4e-7, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "amazon-nova/nova-lite-v1": { + "input_cost_per_token": 6e-8, + "litellm_provider": "amazon_nova", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 2.4e-7, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "amazon-nova/nova-premier-v1": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "amazon_nova", + "max_input_tokens": 1000000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 0.0000125, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": false, + "supports_response_schema": true, + "supports_vision": true + }, + "amazon-nova/nova-pro-v1": { + "input_cost_per_token": 8e-7, + "litellm_provider": "amazon_nova", + "max_input_tokens": 
300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 0.0000032, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "groq/deepseek-r1-distill-llama-70b": { + "input_cost_per_token": 7.5e-7, + "litellm_provider": "groq", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 9.9e-7, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/gemma-7b-it": { + "deprecation_date": "2024-12-18", + "input_cost_per_token": 7e-8, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 7e-8, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/gemma2-9b-it": { + "input_cost_per_token": 2e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_function_calling": false, + "supports_response_schema": false, + "supports_tool_choice": false + }, + "groq/llama-3.1-405b-reasoning": { + "input_cost_per_token": 5.9e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 7.9e-7, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/llama-3.1-70b-versatile": { + "deprecation_date": "2025-01-24", + "input_cost_per_token": 5.9e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 7.9e-7, + 
"supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/llama-3.1-8b-instant": { + "input_cost_per_token": 5e-8, + "litellm_provider": "groq", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 8e-8, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/llama-3.2-11b-text-preview": { + "deprecation_date": "2024-10-28", + "input_cost_per_token": 1.8e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 1.8e-7, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/llama-3.2-11b-vision-preview": { + "deprecation_date": "2025-04-14", + "input_cost_per_token": 1.8e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 1.8e-7, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_vision": true + }, + "groq/llama-3.2-1b-preview": { + "deprecation_date": "2025-04-14", + "input_cost_per_token": 4e-8, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 4e-8, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/llama-3.2-3b-preview": { + "deprecation_date": "2025-04-14", + "input_cost_per_token": 6e-8, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6e-8, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + 
"groq/llama-3.2-90b-text-preview": { + "deprecation_date": "2024-11-25", + "input_cost_per_token": 9e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 9e-7, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/llama-3.2-90b-vision-preview": { + "deprecation_date": "2025-04-14", + "input_cost_per_token": 9e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 9e-7, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_vision": true + }, + "groq/llama-3.3-70b-specdec": { + "deprecation_date": "2025-04-14", + "input_cost_per_token": 5.9e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 9.9e-7, + "supports_tool_choice": true + }, + "groq/llama-3.3-70b-versatile": { + "input_cost_per_token": 5.9e-7, + "litellm_provider": "groq", + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 7.9e-7, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/llama-guard-3-8b": { + "input_cost_per_token": 2e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2e-7 + }, + "groq/llama2-70b-4096": { + "input_cost_per_token": 7e-7, + "litellm_provider": "groq", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 8e-7, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + 
"groq/llama3-groq-70b-8192-tool-use-preview": { + "deprecation_date": "2025-01-06", + "input_cost_per_token": 8.9e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 8.9e-7, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/llama3-groq-8b-8192-tool-use-preview": { + "deprecation_date": "2025-01-06", + "input_cost_per_token": 1.9e-7, + "litellm_provider": "groq", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 1.9e-7, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/meta-llama/llama-4-maverick-17b-128e-instruct": { + "input_cost_per_token": 2e-7, + "litellm_provider": "groq", + "max_input_tokens": 131072, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "groq/meta-llama/llama-4-scout-17b-16e-instruct": { + "input_cost_per_token": 1.1e-7, + "litellm_provider": "groq", + "max_input_tokens": 131072, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 3.4e-7, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "groq/mistral-saba-24b": { + "input_cost_per_token": 7.9e-7, + "litellm_provider": "groq", + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 7.9e-7 + }, + "groq/mixtral-8x7b-32768": { + "deprecation_date": "2025-03-20", + "input_cost_per_token": 2.4e-7, + "litellm_provider": "groq", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 
2.4e-7, + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "groq/moonshotai/kimi-k2-instruct": { + "input_cost_per_token": 0.000001, + "litellm_provider": "groq", + "max_input_tokens": 131072, + "max_output_tokens": 16384, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000003, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "groq/moonshotai/kimi-k2-instruct-0905": { + "input_cost_per_token": 0.000001, + "output_cost_per_token": 0.000003, + "cache_read_input_token_cost": 5e-7, + "litellm_provider": "groq", + "max_input_tokens": 262144, + "max_output_tokens": 16384, + "max_tokens": 278528, + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "groq/openai/gpt-oss-120b": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "groq", + "max_input_tokens": 131072, + "max_output_tokens": 32766, + "max_tokens": 32766, + "mode": "chat", + "output_cost_per_token": 7.5e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "groq/openai/gpt-oss-20b": { + "input_cost_per_token": 1e-7, + "litellm_provider": "groq", + "max_input_tokens": 131072, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 5e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "groq/qwen/qwen3-32b": { + "input_cost_per_token": 2.9e-7, + "litellm_provider": "groq", + "max_input_tokens": 131000, + "max_output_tokens": 131000, + "max_tokens": 131000, + "mode": "chat", + "output_cost_per_token": 5.9e-7, + 
"supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": false, + "supports_tool_choice": true + }, + "heroku/claude-3-5-haiku": { + "litellm_provider": "heroku", + "max_tokens": 4096, + "mode": "chat", + "supports_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "heroku/claude-3-5-sonnet-latest": { + "litellm_provider": "heroku", + "max_tokens": 8192, + "mode": "chat", + "supports_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "heroku/claude-3-7-sonnet": { + "litellm_provider": "heroku", + "max_tokens": 8192, + "mode": "chat", + "supports_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "heroku/claude-4-sonnet": { + "litellm_provider": "heroku", + "max_tokens": 8192, + "mode": "chat", + "supports_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/NousResearch/Hermes-3-Llama-3.1-70B": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/Qwen/QwQ-32B": { + "input_cost_per_token": 2e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/Qwen/Qwen2.5-72B-Instruct": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + 
"mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/Qwen/Qwen2.5-Coder-32B-Instruct": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/Qwen/Qwen3-235B-A22B": { + "input_cost_per_token": 0.000002, + "litellm_provider": "hyperbolic", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/deepseek-ai/DeepSeek-R1": { + "input_cost_per_token": 4e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/deepseek-ai/DeepSeek-R1-0528": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/deepseek-ai/DeepSeek-V3": { + "input_cost_per_token": 2e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + 
"max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/deepseek-ai/DeepSeek-V3-0324": { + "input_cost_per_token": 4e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/meta-llama/Llama-3.2-3B-Instruct": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/meta-llama/Llama-3.3-70B-Instruct": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/meta-llama/Meta-Llama-3-70B-Instruct": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/meta-llama/Meta-Llama-3.1-405B-Instruct": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "hyperbolic", + 
"max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/meta-llama/Meta-Llama-3.1-70B-Instruct": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/meta-llama/Meta-Llama-3.1-8B-Instruct": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "hyperbolic", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "hyperbolic/moonshotai/Kimi-K2-Instruct": { + "input_cost_per_token": 0.000002, + "litellm_provider": "hyperbolic", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "jamba-1.5": { + "input_cost_per_token": 2e-7, + "litellm_provider": "ai21", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_tool_choice": true + }, + "jamba-1.5-large": { + "input_cost_per_token": 0.000002, + "litellm_provider": "ai21", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 
0.000008, + "supports_tool_choice": true + }, + "jamba-1.5-large@001": { + "input_cost_per_token": 0.000002, + "litellm_provider": "ai21", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supports_tool_choice": true + }, + "jamba-1.5-mini": { + "input_cost_per_token": 2e-7, + "litellm_provider": "ai21", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_tool_choice": true + }, + "jamba-1.5-mini@001": { + "input_cost_per_token": 2e-7, + "litellm_provider": "ai21", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_tool_choice": true + }, + "jamba-large-1.6": { + "input_cost_per_token": 0.000002, + "litellm_provider": "ai21", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supports_tool_choice": true + }, + "jamba-large-1.7": { + "input_cost_per_token": 0.000002, + "litellm_provider": "ai21", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supports_tool_choice": true + }, + "jamba-mini-1.6": { + "input_cost_per_token": 2e-7, + "litellm_provider": "ai21", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_tool_choice": true + }, + "jamba-mini-1.7": { + "input_cost_per_token": 2e-7, + "litellm_provider": "ai21", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_tool_choice": true + }, + "jp.anthropic.claude-sonnet-4-5-20250929-v1:0": { + "cache_creation_input_token_cost": 0.000004125, + 
"cache_read_input_token_cost": 3.3e-7, + "input_cost_per_token": 0.0000033, + "input_cost_per_token_above_200k_tokens": 0.0000066, + "output_cost_per_token_above_200k_tokens": 0.00002475, + "cache_creation_input_token_cost_above_200k_tokens": 0.00000825, + "cache_read_input_token_cost_above_200k_tokens": 6.6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.0000165, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "jp.anthropic.claude-haiku-4-5-20251001-v1:0": { + "cache_creation_input_token_cost": 0.000001375, + "cache_read_input_token_cost": 1.1e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.0000055, + "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "lambda_ai/deepseek-llama3.3-70b": { + "input_cost_per_token": 2e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": 
"chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/deepseek-r1-0528": { + "input_cost_per_token": 2e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/deepseek-r1-671b": { + "input_cost_per_token": 8e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 8e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/deepseek-v3-0324": { + "input_cost_per_token": 2e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/hermes3-405b": { + "input_cost_per_token": 8e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 8e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/hermes3-70b": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + 
"max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/hermes3-8b": { + "input_cost_per_token": 2.5e-8, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 4e-8, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/lfm-40b": { + "input_cost_per_token": 1e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/lfm-7b": { + "input_cost_per_token": 2.5e-8, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 4e-8, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/llama-4-maverick-17b-128e-instruct-fp8": { + "input_cost_per_token": 5e-8, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 8192, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 1e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/llama-4-scout-17b-16e-instruct": { + "input_cost_per_token": 5e-8, + "litellm_provider": "lambda_ai", + "max_input_tokens": 16384, + "max_output_tokens": 8192, 
+ "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 1e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/llama3.1-405b-instruct-fp8": { + "input_cost_per_token": 8e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 8e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/llama3.1-70b-instruct-fp8": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/llama3.1-8b-instruct": { + "input_cost_per_token": 2.5e-8, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 4e-8, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/llama3.1-nemotron-70b-instruct-fp8": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/llama3.2-11b-vision-instruct": { + "input_cost_per_token": 1.5e-8, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + 
"max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 2.5e-8, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "lambda_ai/llama3.2-3b-instruct": { + "input_cost_per_token": 1.5e-8, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 2.5e-8, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/llama3.3-70b-instruct-fp8": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/qwen25-coder-32b-instruct": { + "input_cost_per_token": 5e-8, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 1e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "lambda_ai/qwen3-32b-fp8": { + "input_cost_per_token": 5e-8, + "litellm_provider": "lambda_ai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 1e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "luminous-base-control": { + "input_cost_per_token": 0.0000375, + "litellm_provider": 
"aleph_alpha", + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 0.00004125 + }, + "luminous-extended-control": { + "input_cost_per_token": 0.00005625, + "litellm_provider": "aleph_alpha", + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 0.000061875 + }, + "luminous-supreme-control": { + "input_cost_per_token": 0.00021875, + "litellm_provider": "aleph_alpha", + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 0.000240625 + }, + "medlm-large": { + "input_cost_per_character": 0.000005, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "max_tokens": 1024, + "mode": "chat", + "output_cost_per_character": 0.000015, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "medlm-medium": { + "input_cost_per_character": 5e-7, + "litellm_provider": "vertex_ai-language-models", + "max_input_tokens": 32768, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_character": 0.000001, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "meta.llama2-13b-chat-v1": { + "input_cost_per_token": 7.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000001 + }, + "meta.llama2-70b-chat-v1": { + "input_cost_per_token": 0.00000195, + "litellm_provider": "bedrock", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000256 + }, + "meta.llama3-1-405b-instruct-v1:0": { + "input_cost_per_token": 0.00000532, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000016, + 
"supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-1-70b-instruct-v1:0": { + "input_cost_per_token": 9.9e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 9.9e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-1-8b-instruct-v1:0": { + "input_cost_per_token": 2.2e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 2.2e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-2-11b-instruct-v1:0": { + "input_cost_per_token": 3.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 3.5e-7, + "supports_function_calling": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "meta.llama3-2-1b-instruct-v1:0": { + "input_cost_per_token": 1e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-2-3b-instruct-v1:0": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-2-90b-instruct-v1:0": { + "input_cost_per_token": 0.000002, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_tool_choice": false, + 
"supports_vision": true + }, + "meta.llama3-3-70b-instruct-v1:0": { + "input_cost_per_token": 7.2e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 7.2e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-70b-instruct-v1:0": { + "input_cost_per_token": 0.00000265, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000035 + }, + "meta.llama3-8b-instruct-v1:0": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6e-7 + }, + "meta.llama4-maverick-17b-instruct-v1:0": { + "input_cost_per_token": 2.4e-7, + "input_cost_per_token_batches": 1.2e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 9.7e-7, + "output_cost_per_token_batches": 4.85e-7, + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "code" + ], + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama4-scout-17b-instruct-v1:0": { + "input_cost_per_token": 1.7e-7, + "input_cost_per_token_batches": 8.5e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6.6e-7, + "output_cost_per_token_batches": 3.3e-7, + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "code" + ], + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta_llama/Llama-3.3-70B-Instruct": { + "litellm_provider": "meta_llama", + "max_input_tokens": 128000, + 
"max_output_tokens": 4028, + "max_tokens": 128000, + "mode": "chat", + "source": "https://llama.developer.meta.com/docs/models", + "supported_modalities": [ + "text" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_tool_choice": true + }, + "meta_llama/Llama-3.3-8B-Instruct": { + "litellm_provider": "meta_llama", + "max_input_tokens": 128000, + "max_output_tokens": 4028, + "max_tokens": 128000, + "mode": "chat", + "source": "https://llama.developer.meta.com/docs/models", + "supported_modalities": [ + "text" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_tool_choice": true + }, + "meta_llama/Llama-4-Maverick-17B-128E-Instruct-FP8": { + "litellm_provider": "meta_llama", + "max_input_tokens": 1000000, + "max_output_tokens": 4028, + "max_tokens": 128000, + "mode": "chat", + "source": "https://llama.developer.meta.com/docs/models", + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_tool_choice": true + }, + "meta_llama/Llama-4-Scout-17B-16E-Instruct-FP8": { + "litellm_provider": "meta_llama", + "max_input_tokens": 10000000, + "max_output_tokens": 4028, + "max_tokens": 128000, + "mode": "chat", + "source": "https://llama.developer.meta.com/docs/models", + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_tool_choice": true + }, + "minimax.minimax-m2": { + "input_cost_per_token": 3e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "supports_system_messages": true + }, + "mistral.magistral-small-2509": { + "input_cost_per_token": 5e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + 
"max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_system_messages": true + }, + "mistral.ministral-3-14b-instruct": { + "input_cost_per_token": 2e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_function_calling": true, + "supports_system_messages": true + }, + "mistral.ministral-3-3b-instruct": { + "input_cost_per_token": 1e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 1e-7, + "supports_function_calling": true, + "supports_system_messages": true + }, + "mistral.ministral-3-8b-instruct": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "supports_function_calling": true, + "supports_system_messages": true + }, + "mistral.mistral-7b-instruct-v0:2": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_tool_choice": true + }, + "mistral.mistral-large-2402-v1:0": { + "input_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_function_calling": true + }, + "mistral.mistral-large-2407-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000009, + "supports_function_calling": true, + 
"supports_tool_choice": true + }, + "mistral.mistral-large-3-675b-instruct": { + "input_cost_per_token": 5e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + "supports_system_messages": true + }, + "mistral.mistral-small-2402-v1:0": { + "input_cost_per_token": 0.000001, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000003, + "supports_function_calling": true + }, + "mistral.mixtral-8x7b-instruct-v0:1": { + "input_cost_per_token": 4.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 7e-7, + "supports_tool_choice": true + }, + "mistral.voxtral-mini-3b-2507": { + "input_cost_per_token": 4e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 4e-8, + "supports_audio_input": true, + "supports_system_messages": true + }, + "mistral.voxtral-small-24b-2507": { + "input_cost_per_token": 1e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_audio_input": true, + "supports_system_messages": true + }, + "mistral/codestral-2405": { + "input_cost_per_token": 0.000001, + "litellm_provider": "mistral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000003, + "supports_assistant_prefill": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/codestral-2508": { + "input_cost_per_token": 3e-7, + "litellm_provider": "mistral", + 
"max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 9e-7, + "source": "https://mistral.ai/news/codestral-25-08", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/codestral-latest": { + "input_cost_per_token": 0.000001, + "litellm_provider": "mistral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000003, + "supports_assistant_prefill": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/codestral-mamba-latest": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "mistral", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "source": "https://mistral.ai/technology/", + "supports_assistant_prefill": true, + "supports_tool_choice": true + }, + "mistral/devstral-medium-2507": { + "input_cost_per_token": 4e-7, + "litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "source": "https://mistral.ai/news/devstral", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/devstral-small-2505": { + "input_cost_per_token": 1e-7, + "litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 3e-7, + "source": "https://mistral.ai/news/devstral", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/devstral-small-2507": { + "input_cost_per_token": 1e-7, + 
"litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 3e-7, + "source": "https://mistral.ai/news/devstral", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/labs-devstral-small-2512": { + "input_cost_per_token": 1e-7, + "litellm_provider": "mistral", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 3e-7, + "source": "https://docs.mistral.ai/models/devstral-small-2-25-12", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/devstral-2512": { + "input_cost_per_token": 4e-7, + "litellm_provider": "mistral", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "source": "https://mistral.ai/news/devstral-2-vibe-cli", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/magistral-medium-2506": { + "input_cost_per_token": 0.000002, + "litellm_provider": "mistral", + "max_input_tokens": 40000, + "max_output_tokens": 40000, + "max_tokens": 40000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://mistral.ai/news/magistral", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/magistral-medium-2509": { + "input_cost_per_token": 0.000002, + "litellm_provider": "mistral", + "max_input_tokens": 40000, + "max_output_tokens": 40000, + "max_tokens": 40000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": 
"https://mistral.ai/news/magistral", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/magistral-medium-latest": { + "input_cost_per_token": 0.000002, + "litellm_provider": "mistral", + "max_input_tokens": 40000, + "max_output_tokens": 40000, + "max_tokens": 40000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://mistral.ai/news/magistral", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/magistral-small-2506": { + "input_cost_per_token": 5e-7, + "litellm_provider": "mistral", + "max_input_tokens": 40000, + "max_output_tokens": 40000, + "max_tokens": 40000, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://mistral.ai/pricing#api-pricing", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/magistral-small-latest": { + "input_cost_per_token": 5e-7, + "litellm_provider": "mistral", + "max_input_tokens": 40000, + "max_output_tokens": 40000, + "max_tokens": 40000, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://mistral.ai/pricing#api-pricing", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/mistral-large-2402": { + "input_cost_per_token": 0.000004, + "litellm_provider": "mistral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000012, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + 
"supports_tool_choice": true + }, + "mistral/mistral-large-2407": { + "input_cost_per_token": 0.000003, + "litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000009, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/mistral-large-2411": { + "input_cost_per_token": 0.000002, + "litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/mistral-large-latest": { + "input_cost_per_token": 0.000002, + "litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/mistral-large-3": { + "input_cost_per_token": 5e-7, + "litellm_provider": "mistral", + "max_input_tokens": 256000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://docs.mistral.ai/models/mistral-large-3-25-12", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "mistral/mistral-medium": { + "input_cost_per_token": 0.0000027, + "litellm_provider": "mistral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.0000081, + "supports_assistant_prefill": true, + "supports_response_schema": true, + 
"supports_tool_choice": true + }, + "mistral/mistral-medium-2312": { + "input_cost_per_token": 0.0000027, + "litellm_provider": "mistral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.0000081, + "supports_assistant_prefill": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/mistral-medium-2505": { + "input_cost_per_token": 4e-7, + "litellm_provider": "mistral", + "max_input_tokens": 131072, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/mistral-medium-latest": { + "input_cost_per_token": 4e-7, + "litellm_provider": "mistral", + "max_input_tokens": 131072, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/mistral-small": { + "input_cost_per_token": 1e-7, + "litellm_provider": "mistral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/mistral-small-latest": { + "input_cost_per_token": 1e-7, + "litellm_provider": "mistral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/mistral-tiny": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "mistral", 
+ "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "supports_assistant_prefill": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/open-codestral-mamba": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "mistral", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "source": "https://mistral.ai/technology/", + "supports_assistant_prefill": true, + "supports_tool_choice": true + }, + "mistral/open-mistral-7b": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "mistral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "supports_assistant_prefill": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/open-mistral-nemo": { + "input_cost_per_token": 3e-7, + "litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 3e-7, + "source": "https://mistral.ai/technology/", + "supports_assistant_prefill": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/open-mistral-nemo-2407": { + "input_cost_per_token": 3e-7, + "litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 3e-7, + "source": "https://mistral.ai/technology/", + "supports_assistant_prefill": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/open-mixtral-8x22b": { + "input_cost_per_token": 0.000002, + "litellm_provider": "mistral", + "max_input_tokens": 65336, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_assistant_prefill": 
true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/open-mixtral-8x7b": { + "input_cost_per_token": 7e-7, + "litellm_provider": "mistral", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 7e-7, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "mistral/pixtral-12b-2409": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "mistral/pixtral-large-2411": { + "input_cost_per_token": 0.000002, + "litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "mistral/pixtral-large-latest": { + "input_cost_per_token": 0.000002, + "litellm_provider": "mistral", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "moonshot.kimi-k2-thinking": { + "input_cost_per_token": 6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000025, + 
"supports_reasoning": true, + "supports_system_messages": true + }, + "moonshot/kimi-k2-0711-preview": { + "cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 6e-7, + "litellm_provider": "moonshot", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "moonshot/kimi-k2-0905-preview": { + "cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 6e-7, + "litellm_provider": "moonshot", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "moonshot/kimi-k2-turbo-preview": { + "cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 0.00000115, + "litellm_provider": "moonshot", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.000008, + "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "moonshot/kimi-latest": { + "cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 0.000002, + "litellm_provider": "moonshot", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "moonshot/kimi-latest-128k": { + 
"cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 0.000002, + "litellm_provider": "moonshot", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "moonshot/kimi-latest-32k": { + "cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 0.000001, + "litellm_provider": "moonshot", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000003, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "moonshot/kimi-latest-8k": { + "cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 2e-7, + "litellm_provider": "moonshot", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000002, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "moonshot/kimi-thinking-preview": { + "cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 6e-7, + "litellm_provider": "moonshot", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", + "supports_vision": true + }, + "moonshot/kimi-k2-thinking": { + "cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 6e-7, + "litellm_provider": "moonshot", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "source": 
"https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "moonshot/kimi-k2-thinking-turbo": { + "cache_read_input_token_cost": 1.5e-7, + "input_cost_per_token": 0.00000115, + "litellm_provider": "moonshot", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.000008, + "source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "moonshot/moonshot-v1-128k": { + "input_cost_per_token": 0.000002, + "litellm_provider": "moonshot", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "moonshot/moonshot-v1-128k-0430": { + "input_cost_per_token": 0.000002, + "litellm_provider": "moonshot", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "moonshot/moonshot-v1-128k-vision-preview": { + "input_cost_per_token": 0.000002, + "litellm_provider": "moonshot", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "moonshot/moonshot-v1-32k": { + "input_cost_per_token": 0.000001, + "litellm_provider": "moonshot", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + 
"max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000003, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "moonshot/moonshot-v1-32k-0430": { + "input_cost_per_token": 0.000001, + "litellm_provider": "moonshot", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000003, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "moonshot/moonshot-v1-32k-vision-preview": { + "input_cost_per_token": 0.000001, + "litellm_provider": "moonshot", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000003, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "moonshot/moonshot-v1-8k": { + "input_cost_per_token": 2e-7, + "litellm_provider": "moonshot", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000002, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "moonshot/moonshot-v1-8k-0430": { + "input_cost_per_token": 2e-7, + "litellm_provider": "moonshot", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000002, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "moonshot/moonshot-v1-8k-vision-preview": { + "input_cost_per_token": 2e-7, + "litellm_provider": "moonshot", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000002, + "source": 
"https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "moonshot/moonshot-v1-auto": { + "input_cost_per_token": 0.000002, + "litellm_provider": "moonshot", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://platform.moonshot.ai/docs/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "morph/morph-v3-fast": { + "input_cost_per_token": 8e-7, + "litellm_provider": "morph", + "max_input_tokens": 16000, + "max_output_tokens": 16000, + "max_tokens": 16000, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_system_messages": true, + "supports_tool_choice": false, + "supports_vision": false + }, + "morph/morph-v3-large": { + "input_cost_per_token": 9e-7, + "litellm_provider": "morph", + "max_input_tokens": 16000, + "max_output_tokens": 16000, + "max_tokens": 16000, + "mode": "chat", + "output_cost_per_token": 0.0000019, + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_system_messages": true, + "supports_tool_choice": false, + "supports_vision": false + }, + "nscale/Qwen/QwQ-32B": { + "input_cost_per_token": 1.8e-7, + "litellm_provider": "nscale", + "mode": "chat", + "output_cost_per_token": 2e-7, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/Qwen/Qwen2.5-Coder-32B-Instruct": { + "input_cost_per_token": 6e-8, + "litellm_provider": "nscale", + "mode": "chat", + "output_cost_per_token": 2e-7, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/Qwen/Qwen2.5-Coder-3B-Instruct": { + "input_cost_per_token": 1e-8, + "litellm_provider": "nscale", + "mode": "chat", + "output_cost_per_token": 
3e-8, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/Qwen/Qwen2.5-Coder-7B-Instruct": { + "input_cost_per_token": 1e-8, + "litellm_provider": "nscale", + "mode": "chat", + "output_cost_per_token": 3e-8, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/deepseek-ai/DeepSeek-R1-Distill-Llama-70B": { + "input_cost_per_token": 3.75e-7, + "litellm_provider": "nscale", + "metadata": { + "notes": "Pricing listed as $0.75/1M tokens total. Assumed 50/50 split for input/output." + }, + "mode": "chat", + "output_cost_per_token": 3.75e-7, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/deepseek-ai/DeepSeek-R1-Distill-Llama-8B": { + "input_cost_per_token": 2.5e-8, + "litellm_provider": "nscale", + "metadata": { + "notes": "Pricing listed as $0.05/1M tokens total. Assumed 50/50 split for input/output." + }, + "mode": "chat", + "output_cost_per_token": 2.5e-8, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B": { + "input_cost_per_token": 9e-8, + "litellm_provider": "nscale", + "metadata": { + "notes": "Pricing listed as $0.18/1M tokens total. Assumed 50/50 split for input/output." + }, + "mode": "chat", + "output_cost_per_token": 9e-8, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-14B": { + "input_cost_per_token": 7e-8, + "litellm_provider": "nscale", + "metadata": { + "notes": "Pricing listed as $0.14/1M tokens total. Assumed 50/50 split for input/output." 
+ }, + "mode": "chat", + "output_cost_per_token": 7e-8, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "nscale", + "metadata": { + "notes": "Pricing listed as $0.30/1M tokens total. Assumed 50/50 split for input/output." + }, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B": { + "input_cost_per_token": 2e-7, + "litellm_provider": "nscale", + "metadata": { + "notes": "Pricing listed as $0.40/1M tokens total. Assumed 50/50 split for input/output." + }, + "mode": "chat", + "output_cost_per_token": 2e-7, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/meta-llama/Llama-3.1-8B-Instruct": { + "input_cost_per_token": 3e-8, + "litellm_provider": "nscale", + "metadata": { + "notes": "Pricing listed as $0.06/1M tokens total. Assumed 50/50 split for input/output." + }, + "mode": "chat", + "output_cost_per_token": 3e-8, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/meta-llama/Llama-3.3-70B-Instruct": { + "input_cost_per_token": 2e-7, + "litellm_provider": "nscale", + "metadata": { + "notes": "Pricing listed as $0.40/1M tokens total. Assumed 50/50 split for input/output." 
+ }, + "mode": "chat", + "output_cost_per_token": 2e-7, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/meta-llama/Llama-4-Scout-17B-16E-Instruct": { + "input_cost_per_token": 9e-8, + "litellm_provider": "nscale", + "mode": "chat", + "output_cost_per_token": 2.9e-7, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nscale/mistralai/mixtral-8x22b-instruct-v0.1": { + "input_cost_per_token": 6e-7, + "litellm_provider": "nscale", + "metadata": { + "notes": "Pricing listed as $1.20/1M tokens total. Assumed 50/50 split for input/output." + }, + "mode": "chat", + "output_cost_per_token": 6e-7, + "source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models" + }, + "nvidia.nemotron-nano-12b-v2": { + "input_cost_per_token": 2e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_system_messages": true, + "supports_vision": true + }, + "nvidia.nemotron-nano-9b-v2": { + "input_cost_per_token": 6e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2.3e-7, + "supports_system_messages": true + }, + "o1": { + "cache_read_input_token_cost": 0.0000075, + "input_cost_per_token": 0.000015, + "litellm_provider": "openai", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "o1-2024-12-17": { + 
"cache_read_input_token_cost": 0.0000075, + "input_cost_per_token": 0.000015, + "litellm_provider": "openai", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "o1-mini": { + "cache_read_input_token_cost": 5.5e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_vision": true + }, + "o1-mini-2024-09-12": { + "deprecation_date": "2025-10-27", + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000003, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.000012, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_vision": true + }, + "o1-preview": { + "cache_read_input_token_cost": 0.0000075, + "input_cost_per_token": 0.000015, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_vision": true + }, + "o1-preview-2024-09-12": { + "cache_read_input_token_cost": 0.0000075, + "input_cost_per_token": 0.000015, + "litellm_provider": "openai", + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + 
"output_cost_per_token": 0.00006, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_vision": true + }, + "o3": { + "cache_read_input_token_cost": 5e-7, + "cache_read_input_token_cost_flex": 2.5e-7, + "cache_read_input_token_cost_priority": 8.75e-7, + "input_cost_per_token": 0.000002, + "input_cost_per_token_flex": 0.000001, + "input_cost_per_token_priority": 0.0000035, + "litellm_provider": "openai", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "output_cost_per_token_flex": 0.000004, + "output_cost_per_token_priority": 0.000014, + "supported_endpoints": [ + "/v1/responses", + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "o3-2025-04-16": { + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000002, + "litellm_provider": "openai", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supported_endpoints": [ + "/v1/responses", + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": 
true + }, + "o3-mini": { + "cache_read_input_token_cost": 5.5e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "openai", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "o3-mini-2025-01-31": { + "cache_read_input_token_cost": 5.5e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "openai", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "o4-mini": { + "cache_read_input_token_cost": 2.75e-7, + "cache_read_input_token_cost_flex": 1.375e-7, + "cache_read_input_token_cost_priority": 5e-7, + "input_cost_per_token": 0.0000011, + "input_cost_per_token_flex": 5.5e-7, + "input_cost_per_token_priority": 0.000002, + "litellm_provider": "openai", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "output_cost_per_token_flex": 0.0000022, + "output_cost_per_token_priority": 0.000008, + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "o4-mini-2025-04-16": { + "cache_read_input_token_cost": 2.75e-7, + "input_cost_per_token": 
0.0000011, + "litellm_provider": "openai", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_service_tier": true, + "supports_vision": true + }, + "oci/meta.llama-3.1-405b-instruct": { + "input_cost_per_token": 0.00001068, + "litellm_provider": "oci", + "max_input_tokens": 128000, + "max_output_tokens": 4000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001068, + "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/meta.llama-3.2-90b-vision-instruct": { + "input_cost_per_token": 0.000002, + "litellm_provider": "oci", + "max_input_tokens": 128000, + "max_output_tokens": 4000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/meta.llama-3.3-70b-instruct": { + "input_cost_per_token": 7.2e-7, + "litellm_provider": "oci", + "max_input_tokens": 128000, + "max_output_tokens": 4000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 7.2e-7, + "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/meta.llama-4-maverick-17b-128e-instruct-fp8": { + "input_cost_per_token": 7.2e-7, + "litellm_provider": "oci", + "max_input_tokens": 512000, + "max_output_tokens": 4000, + "max_tokens": 512000, + "mode": "chat", 
+ "output_cost_per_token": 7.2e-7, + "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/meta.llama-4-scout-17b-16e-instruct": { + "input_cost_per_token": 7.2e-7, + "litellm_provider": "oci", + "max_input_tokens": 192000, + "max_output_tokens": 4000, + "max_tokens": 192000, + "mode": "chat", + "output_cost_per_token": 7.2e-7, + "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/xai.grok-3": { + "input_cost_per_token": 0.000003, + "litellm_provider": "oci", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/xai.grok-3-fast": { + "input_cost_per_token": 0.000005, + "litellm_provider": "oci", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000025, + "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/xai.grok-3-mini": { + "input_cost_per_token": 3e-7, + "litellm_provider": "oci", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 5e-7, + "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/xai.grok-3-mini-fast": { + "input_cost_per_token": 6e-7, + "litellm_provider": "oci", + "max_input_tokens": 
131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000004, + "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/xai.grok-4": { + "input_cost_per_token": 0.000003, + "litellm_provider": "oci", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/cohere.command-latest": { + "input_cost_per_token": 0.00000156, + "litellm_provider": "oci", + "max_input_tokens": 128000, + "max_output_tokens": 4000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00000156, + "source": "https://www.oracle.com/cloud/ai/generative-ai/pricing/", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/cohere.command-a-03-2025": { + "input_cost_per_token": 0.00000156, + "litellm_provider": "oci", + "max_input_tokens": 256000, + "max_output_tokens": 4000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.00000156, + "source": "https://www.oracle.com/cloud/ai/generative-ai/pricing/", + "supports_function_calling": true, + "supports_response_schema": false + }, + "oci/cohere.command-plus-latest": { + "input_cost_per_token": 0.00000156, + "litellm_provider": "oci", + "max_input_tokens": 128000, + "max_output_tokens": 4000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00000156, + "source": "https://www.oracle.com/cloud/ai/generative-ai/pricing/", + "supports_function_calling": true, + "supports_response_schema": false + }, + "ollama/codegeex4": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 32768, + 
"max_output_tokens": 8192, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": false + }, + "ollama/deepseek-coder-v2-instruct": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 32768, + "max_output_tokens": 8192, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/deepseek-coder-v2-lite-instruct": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 32768, + "max_output_tokens": 8192, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/deepseek-v3.1:671b-cloud": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "max_tokens": 163840, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/gpt-oss:120b-cloud": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/gpt-oss:20b-cloud": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/internlm2_5-20b-chat": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 32768, + "max_output_tokens": 8192, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/llama2": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0 + }, + "ollama/llama2:13b": 
{ + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0 + }, + "ollama/llama2:70b": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0 + }, + "ollama/llama2:7b": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0 + }, + "ollama/llama3": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0 + }, + "ollama/llama3.1": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/llama3:70b": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0 + }, + "ollama/llama3:8b": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0 + }, + "ollama/mistral-7B-Instruct-v0.1": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/mistral-7B-Instruct-v0.2": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + 
"output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/mistral-large-instruct-2407": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/mixtral-8x22B-Instruct-v0.1": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/mixtral-8x7B-Instruct-v0.1": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "ollama/qwen3-coder:480b-cloud": { + "input_cost_per_token": 0, + "litellm_provider": "ollama", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true + }, + "openai.gpt-oss-120b-1:0": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "openai.gpt-oss-20b-1:0": { + "input_cost_per_token": 7e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "openai.gpt-oss-safeguard-120b": { + "input_cost_per_token": 1.5e-7, + 
"litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_system_messages": true + }, + "openai.gpt-oss-safeguard-20b": { + "input_cost_per_token": 7e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_system_messages": true + }, + "openrouter/anthropic/claude-2": { + "input_cost_per_token": 0.00001102, + "litellm_provider": "openrouter", + "max_output_tokens": 8191, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.00003268, + "supports_tool_choice": true + }, + "openrouter/anthropic/claude-3-5-haiku": { + "input_cost_per_token": 0.000001, + "litellm_provider": "openrouter", + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "openrouter/anthropic/claude-3-5-haiku-20241022": { + "input_cost_per_token": 0.000001, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000005, + "supports_function_calling": true, + "supports_tool_choice": true, + "tool_use_system_prompt_tokens": 264 + }, + "openrouter/anthropic/claude-3-haiku": { + "input_cost_per_image": 0.0004, + "input_cost_per_token": 2.5e-7, + "litellm_provider": "openrouter", + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.00000125, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/anthropic/claude-3-haiku-20240307": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000125, + 
"supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 264 + }, + "openrouter/anthropic/claude-3-opus": { + "input_cost_per_token": 0.000015, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 395 + }, + "openrouter/anthropic/claude-3-sonnet": { + "input_cost_per_image": 0.0048, + "input_cost_per_token": 0.000003, + "litellm_provider": "openrouter", + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/anthropic/claude-3.5-sonnet": { + "input_cost_per_token": 0.000003, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "openrouter/anthropic/claude-3.5-sonnet:beta": { + "input_cost_per_token": 0.000003, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "openrouter/anthropic/claude-3.7-sonnet": { + "input_cost_per_image": 0.0048, + "input_cost_per_token": 0.000003, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 128000, + "max_tokens": 128000, + 
"mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "openrouter/anthropic/claude-3.7-sonnet:beta": { + "input_cost_per_image": 0.0048, + "input_cost_per_token": 0.000003, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "openrouter/anthropic/claude-instant-v1": { + "input_cost_per_token": 0.00000163, + "litellm_provider": "openrouter", + "max_output_tokens": 8191, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.00000551, + "supports_tool_choice": true + }, + "openrouter/anthropic/claude-opus-4": { + "input_cost_per_image": 0.0048, + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "openrouter/anthropic/claude-opus-4.1": { + "input_cost_per_image": 0.0048, + "cache_creation_input_token_cost": 0.00001875, + "cache_creation_input_token_cost_above_1hr": 0.00003, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": 
"openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "openrouter/anthropic/claude-sonnet-4": { + "input_cost_per_image": 0.0048, + "cache_creation_input_token_cost": 0.00000375, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost": 3e-7, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "litellm_provider": "openrouter", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "openrouter/anthropic/claude-opus-4.5": { + "cache_creation_input_token_cost": 0.00000625, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000005, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000025, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "openrouter/anthropic/claude-sonnet-4.5": { + 
"input_cost_per_image": 0.0048, + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 1000000, + "max_output_tokens": 1000000, + "max_tokens": 1000000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "openrouter/anthropic/claude-haiku-4.5": { + "cache_creation_input_token_cost": 0.00000125, + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 0.000001, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 200000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "openrouter/bytedance/ui-tars-1.5-7b": { + "input_cost_per_token": 1e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 131072, + "max_output_tokens": 2048, + "max_tokens": 2048, + "mode": "chat", + "output_cost_per_token": 2e-7, + "source": "https://openrouter.ai/api/v1/models/bytedance/ui-tars-1.5-7b", + "supports_tool_choice": true + }, + "openrouter/cognitivecomputations/dolphin-mixtral-8x7b": { + "input_cost_per_token": 5e-7, + "litellm_provider": "openrouter", + "max_tokens": 32769, + "mode": "chat", + 
"output_cost_per_token": 5e-7, + "supports_tool_choice": true + }, + "openrouter/cohere/command-r-plus": { + "input_cost_per_token": 0.000003, + "litellm_provider": "openrouter", + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_tool_choice": true + }, + "openrouter/databricks/dbrx-instruct": { + "input_cost_per_token": 6e-7, + "litellm_provider": "openrouter", + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_tool_choice": true + }, + "openrouter/deepseek/deepseek-chat": { + "input_cost_per_token": 1.4e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2.8e-7, + "supports_prompt_caching": true, + "supports_tool_choice": true + }, + "openrouter/deepseek/deepseek-chat-v3-0324": { + "input_cost_per_token": 1.4e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2.8e-7, + "supports_prompt_caching": true, + "supports_tool_choice": true + }, + "openrouter/deepseek/deepseek-chat-v3.1": { + "input_cost_per_token": 2e-7, + "input_cost_per_token_cache_hit": 2e-8, + "litellm_provider": "openrouter", + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 8e-7, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/deepseek/deepseek-v3.2": { + "input_cost_per_token": 2.8e-7, + "input_cost_per_token_cache_hit": 2.8e-8, + "litellm_provider": "openrouter", + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_assistant_prefill": true, + "supports_function_calling": true, + 
"supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/deepseek/deepseek-v3.2-exp": { + "input_cost_per_token": 2e-7, + "input_cost_per_token_cache_hit": 2e-8, + "litellm_provider": "openrouter", + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": false, + "supports_tool_choice": true + }, + "openrouter/deepseek/deepseek-coder": { + "input_cost_per_token": 1.4e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 66000, + "max_output_tokens": 4096, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2.8e-7, + "supports_prompt_caching": true, + "supports_tool_choice": true + }, + "openrouter/deepseek/deepseek-r1": { + "input_cost_per_token": 5.5e-7, + "input_cost_per_token_cache_hit": 1.4e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 65336, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000219, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/deepseek/deepseek-r1-0528": { + "input_cost_per_token": 5e-7, + "input_cost_per_token_cache_hit": 1.4e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 65336, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000215, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/fireworks/firellava-13b": { + "input_cost_per_token": 2e-7, + "litellm_provider": "openrouter", + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2e-7, 
+ "supports_tool_choice": true + }, + "openrouter/google/gemini-2.0-flash-001": { + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 1e-7, + "litellm_provider": "openrouter", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/google/gemini-2.5-flash": { + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 3e-7, + "litellm_provider": "openrouter", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/google/gemini-2.5-pro": { + "input_cost_per_audio_token": 7e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "openrouter", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_pdf_size_mb": 30, + "max_tokens": 8192, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_audio_output": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_system_messages": true, + 
"supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/google/gemini-3-pro-preview": { + "cache_read_input_token_cost": 2e-7, + "cache_read_input_token_cost_above_200k_tokens": 4e-7, + "cache_creation_input_token_cost_above_200k_tokens": 2.5e-7, + "input_cost_per_token": 0.000002, + "input_cost_per_token_above_200k_tokens": 0.000004, + "input_cost_per_token_batches": 0.000001, + "litellm_provider": "openrouter", + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_images_per_prompt": 3000, + "max_input_tokens": 1048576, + "max_output_tokens": 65535, + "max_pdf_size_mb": 30, + "max_tokens": 65535, + "max_video_length": 1, + "max_videos_per_prompt": 10, + "mode": "chat", + "output_cost_per_token": 0.000012, + "output_cost_per_token_above_200k_tokens": 0.000018, + "output_cost_per_token_batches": 0.000006, + "supported_endpoints": [ + "/v1/chat/completions", + "/v1/completions", + "/v1/batch" + ], + "supported_modalities": [ + "text", + "image", + "audio", + "video" + ], + "supported_output_modalities": [ + "text" + ], + "supports_audio_input": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_video_input": true, + "supports_vision": true, + "supports_web_search": true + }, + "openrouter/google/gemini-pro-1.5": { + "input_cost_per_image": 0.00265, + "input_cost_per_token": 0.0000025, + "litellm_provider": "openrouter", + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000075, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/google/gemini-pro-vision": { + "input_cost_per_image": 0.0025, + "input_cost_per_token": 1.25e-7, + "litellm_provider": "openrouter", + "max_tokens": 45875, + "mode": 
"chat", + "output_cost_per_token": 3.75e-7, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/google/palm-2-chat-bison": { + "input_cost_per_token": 5e-7, + "litellm_provider": "openrouter", + "max_tokens": 25804, + "mode": "chat", + "output_cost_per_token": 5e-7, + "supports_tool_choice": true + }, + "openrouter/google/palm-2-codechat-bison": { + "input_cost_per_token": 5e-7, + "litellm_provider": "openrouter", + "max_tokens": 20070, + "mode": "chat", + "output_cost_per_token": 5e-7, + "supports_tool_choice": true + }, + "openrouter/gryphe/mythomax-l2-13b": { + "input_cost_per_token": 0.000001875, + "litellm_provider": "openrouter", + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000001875, + "supports_tool_choice": true + }, + "openrouter/jondurbin/airoboros-l2-70b-2.1": { + "input_cost_per_token": 0.000013875, + "litellm_provider": "openrouter", + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000013875, + "supports_tool_choice": true + }, + "openrouter/mancer/weaver": { + "input_cost_per_token": 0.000005625, + "litellm_provider": "openrouter", + "max_tokens": 8000, + "mode": "chat", + "output_cost_per_token": 0.000005625, + "supports_tool_choice": true + }, + "openrouter/meta-llama/codellama-34b-instruct": { + "input_cost_per_token": 5e-7, + "litellm_provider": "openrouter", + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 5e-7, + "supports_tool_choice": true + }, + "openrouter/meta-llama/llama-2-13b-chat": { + "input_cost_per_token": 2e-7, + "litellm_provider": "openrouter", + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_tool_choice": true + }, + "openrouter/meta-llama/llama-2-70b-chat": { + "input_cost_per_token": 0.0000015, + "litellm_provider": "openrouter", + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_tool_choice": true + }, + 
"openrouter/meta-llama/llama-3-70b-instruct": { + "input_cost_per_token": 5.9e-7, + "litellm_provider": "openrouter", + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 7.9e-7, + "supports_tool_choice": true + }, + "openrouter/meta-llama/llama-3-70b-instruct:nitro": { + "input_cost_per_token": 9e-7, + "litellm_provider": "openrouter", + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 9e-7, + "supports_tool_choice": true + }, + "openrouter/meta-llama/llama-3-8b-instruct:extended": { + "input_cost_per_token": 2.25e-7, + "litellm_provider": "openrouter", + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00000225, + "supports_tool_choice": true + }, + "openrouter/meta-llama/llama-3-8b-instruct:free": { + "input_cost_per_token": 0, + "litellm_provider": "openrouter", + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0, + "supports_tool_choice": true + }, + "openrouter/microsoft/wizardlm-2-8x22b:nitro": { + "input_cost_per_token": 0.000001, + "litellm_provider": "openrouter", + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.000001, + "supports_tool_choice": true + }, + "openrouter/minimax/minimax-m2": { + "input_cost_per_token": 2.55e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 204800, + "max_output_tokens": 204800, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00000102, + "supports_function_calling": true, + "supports_prompt_caching": false, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/mistralai/devstral-2512:free": { + "input_cost_per_image": 0, + "input_cost_per_token": 0, + "litellm_provider": "openrouter", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true, + "supports_prompt_caching": false, + "supports_tool_choice": true, + "supports_vision": false + }, + 
"openrouter/mistralai/devstral-2512": { + "input_cost_per_image": 0, + "input_cost_per_token": 1.5e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 262144, + "max_output_tokens": 65536, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_prompt_caching": false, + "supports_tool_choice": true, + "supports_vision": false + }, + "openrouter/mistralai/ministral-3b-2512": { + "input_cost_per_image": 0, + "input_cost_per_token": 1e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 1e-7, + "supports_function_calling": true, + "supports_prompt_caching": false, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/mistralai/ministral-8b-2512": { + "input_cost_per_image": 0, + "input_cost_per_token": 1.5e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "supports_function_calling": true, + "supports_prompt_caching": false, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/mistralai/ministral-14b-2512": { + "input_cost_per_image": 0, + "input_cost_per_token": 2e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_function_calling": true, + "supports_prompt_caching": false, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/mistralai/mistral-large-2512": { + "input_cost_per_image": 0, + "input_cost_per_token": 5e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "supports_function_calling": true, + 
"supports_prompt_caching": false, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/mistralai/mistral-7b-instruct": { + "input_cost_per_token": 1.3e-7, + "litellm_provider": "openrouter", + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 1.3e-7, + "supports_tool_choice": true + }, + "openrouter/mistralai/mistral-7b-instruct:free": { + "input_cost_per_token": 0, + "litellm_provider": "openrouter", + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0, + "supports_tool_choice": true + }, + "openrouter/mistralai/mistral-large": { + "input_cost_per_token": 0.000008, + "litellm_provider": "openrouter", + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000024, + "supports_tool_choice": true + }, + "openrouter/mistralai/mistral-small-3.1-24b-instruct": { + "input_cost_per_token": 1e-7, + "litellm_provider": "openrouter", + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_tool_choice": true + }, + "openrouter/mistralai/mistral-small-3.2-24b-instruct": { + "input_cost_per_token": 1e-7, + "litellm_provider": "openrouter", + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 3e-7, + "supports_tool_choice": true + }, + "openrouter/mistralai/mixtral-8x22b-instruct": { + "input_cost_per_token": 6.5e-7, + "litellm_provider": "openrouter", + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 6.5e-7, + "supports_tool_choice": true + }, + "openrouter/nousresearch/nous-hermes-llama2-13b": { + "input_cost_per_token": 2e-7, + "litellm_provider": "openrouter", + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_tool_choice": true + }, + "openrouter/openai/gpt-3.5-turbo": { + "input_cost_per_token": 0.0000015, + "litellm_provider": "openrouter", + "max_tokens": 4095, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_tool_choice": true + }, + "openrouter/openai/gpt-3.5-turbo-16k": { + 
"input_cost_per_token": 0.000003, + "litellm_provider": "openrouter", + "max_tokens": 16383, + "mode": "chat", + "output_cost_per_token": 0.000004, + "supports_tool_choice": true + }, + "openrouter/openai/gpt-4": { + "input_cost_per_token": 0.00003, + "litellm_provider": "openrouter", + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_tool_choice": true + }, + "openrouter/openai/gpt-4-vision-preview": { + "input_cost_per_image": 0.01445, + "input_cost_per_token": 0.00001, + "litellm_provider": "openrouter", + "max_tokens": 130000, + "mode": "chat", + "output_cost_per_token": 0.00003, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4.1": { + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000002, + "litellm_provider": "openrouter", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4.1-2025-04-14": { + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000002, + "litellm_provider": "openrouter", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4.1-mini": { + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 4e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 1047576, + 
"max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000016, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4.1-mini-2025-04-14": { + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 4e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000016, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4.1-nano": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_token": 1e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4.1-nano-2025-04-14": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_token": 1e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + 
"openrouter/openai/gpt-4o": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "openrouter", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4o-2024-05-13": { + "input_cost_per_token": 0.000005, + "litellm_provider": "openrouter", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/gpt-5-chat": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "openrouter", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/openai/gpt-5-codex": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "openrouter", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/openai/gpt-5": { + "cache_read_input_token_cost": 1.25e-7, + "input_cost_per_token": 0.00000125, + "litellm_provider": "openrouter", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001, + 
"supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/openai/gpt-5-mini": { + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_token": 2.5e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/openai/gpt-5-nano": { + "cache_read_input_token_cost": 5e-9, + "input_cost_per_token": 5e-8, + "litellm_provider": "openrouter", + "max_input_tokens": 272000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text" + ], + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/openai/gpt-5.2": { + "input_cost_per_image": 0, + "cache_read_input_token_cost": 1.75e-7, + "input_cost_per_token": 0.00000175, + "litellm_provider": "openrouter", + "max_input_tokens": 400000, + "max_output_tokens": 128000, + "max_tokens": 400000, + "mode": "chat", + "output_cost_per_token": 0.000014, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/gpt-5.2-chat": { + "input_cost_per_image": 0, + "cache_read_input_token_cost": 1.75e-7, + "input_cost_per_token": 0.00000175, + "litellm_provider": "openrouter", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000014, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_tool_choice": true, + "supports_vision": 
true + }, + "openrouter/openai/gpt-5.2-pro": { + "input_cost_per_image": 0, + "input_cost_per_token": 0.000021, + "litellm_provider": "openrouter", + "max_input_tokens": 400000, + "max_output_tokens": 128000, + "max_tokens": 400000, + "mode": "chat", + "output_cost_per_token": 0.000168, + "supports_function_calling": true, + "supports_prompt_caching": false, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/gpt-oss-120b": { + "input_cost_per_token": 1.8e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 131072, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 8e-7, + "source": "https://openrouter.ai/openai/gpt-oss-120b", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "openrouter/openai/gpt-oss-20b": { + "input_cost_per_token": 1.8e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 131072, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 8e-7, + "source": "https://openrouter.ai/openai/gpt-oss-20b", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "openrouter/openai/o1": { + "cache_read_input_token_cost": 0.0000075, + "input_cost_per_token": 0.000015, + "litellm_provider": "openrouter", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 100000, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/openai/o1-mini": { + 
"input_cost_per_token": 0.000003, + "litellm_provider": "openrouter", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.000012, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "openrouter/openai/o1-mini-2024-09-12": { + "input_cost_per_token": 0.000003, + "litellm_provider": "openrouter", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.000012, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "openrouter/openai/o1-preview": { + "input_cost_per_token": 0.000015, + "litellm_provider": "openrouter", + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "openrouter/openai/o1-preview-2024-09-12": { + "input_cost_per_token": 0.000015, + "litellm_provider": "openrouter", + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00006, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "openrouter/openai/o3-mini": { + "input_cost_per_token": 0.0000011, + "litellm_provider": "openrouter", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": false + }, + 
"openrouter/openai/o3-mini-high": { + "input_cost_per_token": 0.0000011, + "litellm_provider": "openrouter", + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.0000044, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "openrouter/pygmalionai/mythalion-13b": { + "input_cost_per_token": 0.000001875, + "litellm_provider": "openrouter", + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000001875, + "supports_tool_choice": true + }, + "openrouter/qwen/qwen-2.5-coder-32b-instruct": { + "input_cost_per_token": 1.8e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 33792, + "max_output_tokens": 33792, + "max_tokens": 33792, + "mode": "chat", + "output_cost_per_token": 1.8e-7, + "supports_tool_choice": true + }, + "openrouter/qwen/qwen-vl-plus": { + "input_cost_per_token": 2.1e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 8192, + "max_output_tokens": 2048, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 6.3e-7, + "supports_tool_choice": true, + "supports_vision": true + }, + "openrouter/qwen/qwen3-coder": { + "input_cost_per_token": 2.2e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 262100, + "max_output_tokens": 262100, + "max_tokens": 262100, + "mode": "chat", + "output_cost_per_token": 9.5e-7, + "source": "https://openrouter.ai/qwen/qwen3-coder", + "supports_tool_choice": true, + "supports_function_calling": true + }, + "openrouter/switchpoint/router": { + "input_cost_per_token": 8.5e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000034, + "source": "https://openrouter.ai/switchpoint/router", + "supports_tool_choice": true + }, + 
"openrouter/undi95/remm-slerp-l2-13b": { + "input_cost_per_token": 0.000001875, + "litellm_provider": "openrouter", + "max_tokens": 6144, + "mode": "chat", + "output_cost_per_token": 0.000001875, + "supports_tool_choice": true + }, + "openrouter/x-ai/grok-4": { + "input_cost_per_token": 0.000003, + "litellm_provider": "openrouter", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "source": "https://openrouter.ai/x-ai/grok-4", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "openrouter/x-ai/grok-4-fast:free": { + "input_cost_per_token": 0, + "litellm_provider": "openrouter", + "max_input_tokens": 2000000, + "max_output_tokens": 30000, + "max_tokens": 2000000, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://openrouter.ai/x-ai/grok-4-fast:free", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true, + "supports_web_search": false + }, + "openrouter/z-ai/glm-4.6": { + "input_cost_per_token": 4e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 202800, + "max_output_tokens": 131000, + "max_tokens": 202800, + "mode": "chat", + "output_cost_per_token": 0.00000175, + "source": "https://openrouter.ai/z-ai/glm-4.6", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "openrouter/z-ai/glm-4.6:exacto": { + "input_cost_per_token": 4.5e-7, + "litellm_provider": "openrouter", + "max_input_tokens": 202800, + "max_output_tokens": 131000, + "max_tokens": 202800, + "mode": "chat", + "output_cost_per_token": 0.0000019, + "source": "https://openrouter.ai/z-ai/glm-4.6:exacto", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "ovhcloud/DeepSeek-R1-Distill-Llama-70B": { + "input_cost_per_token": 6.7e-7, + "litellm_provider": 
"ovhcloud", + "max_input_tokens": 131000, + "max_output_tokens": 131000, + "max_tokens": 131000, + "mode": "chat", + "output_cost_per_token": 6.7e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/deepseek-r1-distill-llama-70b", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "ovhcloud/Llama-3.1-8B-Instruct": { + "input_cost_per_token": 1e-7, + "litellm_provider": "ovhcloud", + "max_input_tokens": 131000, + "max_output_tokens": 131000, + "max_tokens": 131000, + "mode": "chat", + "output_cost_per_token": 1e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/llama-3-1-8b-instruct", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "ovhcloud/Meta-Llama-3_1-70B-Instruct": { + "input_cost_per_token": 6.7e-7, + "litellm_provider": "ovhcloud", + "max_input_tokens": 131000, + "max_output_tokens": 131000, + "max_tokens": 131000, + "mode": "chat", + "output_cost_per_token": 6.7e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/meta-llama-3-1-70b-instruct", + "supports_function_calling": false, + "supports_response_schema": false, + "supports_tool_choice": false + }, + "ovhcloud/Meta-Llama-3_3-70B-Instruct": { + "input_cost_per_token": 6.7e-7, + "litellm_provider": "ovhcloud", + "max_input_tokens": 131000, + "max_output_tokens": 131000, + "max_tokens": 131000, + "mode": "chat", + "output_cost_per_token": 6.7e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/meta-llama-3-3-70b-instruct", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "ovhcloud/Mistral-7B-Instruct-v0.3": { + "input_cost_per_token": 1e-7, + "litellm_provider": "ovhcloud", + "max_input_tokens": 127000, + "max_output_tokens": 127000, + "max_tokens": 127000, + "mode": "chat", + "output_cost_per_token": 1e-7, + "source": 
"https://endpoints.ai.cloud.ovh.net/models/mistral-7b-instruct-v0-3", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "ovhcloud/Mistral-Nemo-Instruct-2407": { + "input_cost_per_token": 1.3e-7, + "litellm_provider": "ovhcloud", + "max_input_tokens": 118000, + "max_output_tokens": 118000, + "max_tokens": 118000, + "mode": "chat", + "output_cost_per_token": 1.3e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/mistral-nemo-instruct-2407", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "ovhcloud/Mistral-Small-3.2-24B-Instruct-2506": { + "input_cost_per_token": 9e-8, + "litellm_provider": "ovhcloud", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 2.8e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/mistral-small-3-2-24b-instruct-2506", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "ovhcloud/Mixtral-8x7B-Instruct-v0.1": { + "input_cost_per_token": 6.3e-7, + "litellm_provider": "ovhcloud", + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 6.3e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/mixtral-8x7b-instruct-v0-1", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false + }, + "ovhcloud/Qwen2.5-Coder-32B-Instruct": { + "input_cost_per_token": 8.7e-7, + "litellm_provider": "ovhcloud", + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 8.7e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/qwen2-5-coder-32b-instruct", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false + }, + 
"ovhcloud/Qwen2.5-VL-72B-Instruct": { + "input_cost_per_token": 9.1e-7, + "litellm_provider": "ovhcloud", + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 9.1e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/qwen2-5-vl-72b-instruct", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "ovhcloud/Qwen3-32B": { + "input_cost_per_token": 8e-8, + "litellm_provider": "ovhcloud", + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 2.3e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/qwen3-32b", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "ovhcloud/gpt-oss-120b": { + "input_cost_per_token": 8e-8, + "litellm_provider": "ovhcloud", + "max_input_tokens": 131000, + "max_output_tokens": 131000, + "max_tokens": 131000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/gpt-oss-120b", + "supports_function_calling": false, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": false + }, + "ovhcloud/gpt-oss-20b": { + "input_cost_per_token": 4e-8, + "litellm_provider": "ovhcloud", + "max_input_tokens": 131000, + "max_output_tokens": 131000, + "max_tokens": 131000, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/gpt-oss-20b", + "supports_function_calling": false, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": false + }, + "ovhcloud/llava-v1.6-mistral-7b-hf": { + "input_cost_per_token": 2.9e-7, + "litellm_provider": "ovhcloud", + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + 
"output_cost_per_token": 2.9e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/llava-next-mistral-7b", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "ovhcloud/mamba-codestral-7B-v0.1": { + "input_cost_per_token": 1.9e-7, + "litellm_provider": "ovhcloud", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 1.9e-7, + "source": "https://endpoints.ai.cloud.ovh.net/models/mamba-codestral-7b-v0-1", + "supports_function_calling": false, + "supports_response_schema": true, + "supports_tool_choice": false + }, + "palm/chat-bison": { + "input_cost_per_token": 1.25e-7, + "litellm_provider": "palm", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "palm/chat-bison-001": { + "input_cost_per_token": 1.25e-7, + "litellm_provider": "palm", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 1.25e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "perplexity/codellama-34b-instruct": { + "input_cost_per_token": 3.5e-7, + "litellm_provider": "perplexity", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.0000014 + }, + "perplexity/codellama-70b-instruct": { + "input_cost_per_token": 7e-7, + "litellm_provider": "perplexity", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.0000028 + }, + "perplexity/llama-2-70b-chat": { + "input_cost_per_token": 7e-7, + "litellm_provider": "perplexity", + "max_input_tokens": 4096, + "max_output_tokens": 
4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000028 + }, + "perplexity/llama-3.1-70b-instruct": { + "input_cost_per_token": 0.000001, + "litellm_provider": "perplexity", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000001 + }, + "perplexity/llama-3.1-8b-instruct": { + "input_cost_per_token": 2e-7, + "litellm_provider": "perplexity", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 2e-7 + }, + "perplexity/llama-3.1-sonar-huge-128k-online": { + "deprecation_date": "2025-02-22", + "input_cost_per_token": 0.000005, + "litellm_provider": "perplexity", + "max_input_tokens": 127072, + "max_output_tokens": 127072, + "max_tokens": 127072, + "mode": "chat", + "output_cost_per_token": 0.000005 + }, + "perplexity/llama-3.1-sonar-large-128k-chat": { + "deprecation_date": "2025-02-22", + "input_cost_per_token": 0.000001, + "litellm_provider": "perplexity", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000001 + }, + "perplexity/llama-3.1-sonar-large-128k-online": { + "deprecation_date": "2025-02-22", + "input_cost_per_token": 0.000001, + "litellm_provider": "perplexity", + "max_input_tokens": 127072, + "max_output_tokens": 127072, + "max_tokens": 127072, + "mode": "chat", + "output_cost_per_token": 0.000001 + }, + "perplexity/llama-3.1-sonar-small-128k-chat": { + "deprecation_date": "2025-02-22", + "input_cost_per_token": 2e-7, + "litellm_provider": "perplexity", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 2e-7 + }, + "perplexity/llama-3.1-sonar-small-128k-online": { + "deprecation_date": "2025-02-22", + "input_cost_per_token": 2e-7, + "litellm_provider": "perplexity", + "max_input_tokens": 127072, + 
"max_output_tokens": 127072, + "max_tokens": 127072, + "mode": "chat", + "output_cost_per_token": 2e-7 + }, + "perplexity/mistral-7b-instruct": { + "input_cost_per_token": 7e-8, + "litellm_provider": "perplexity", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2.8e-7 + }, + "perplexity/mixtral-8x7b-instruct": { + "input_cost_per_token": 7e-8, + "litellm_provider": "perplexity", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2.8e-7 + }, + "perplexity/pplx-70b-chat": { + "input_cost_per_token": 7e-7, + "litellm_provider": "perplexity", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000028 + }, + "perplexity/pplx-70b-online": { + "input_cost_per_request": 0.005, + "input_cost_per_token": 0, + "litellm_provider": "perplexity", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000028 + }, + "perplexity/pplx-7b-chat": { + "input_cost_per_token": 7e-8, + "litellm_provider": "perplexity", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2.8e-7 + }, + "perplexity/pplx-7b-online": { + "input_cost_per_request": 0.005, + "input_cost_per_token": 0, + "litellm_provider": "perplexity", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2.8e-7 + }, + "perplexity/sonar": { + "input_cost_per_token": 0.000001, + "litellm_provider": "perplexity", + "max_input_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000001, + "search_context_cost_per_query": { + "search_context_size_high": 0.012, + "search_context_size_low": 0.005, + "search_context_size_medium": 0.008 + }, + "supports_web_search": true + 
}, + "perplexity/sonar-deep-research": { + "citation_cost_per_token": 0.000002, + "input_cost_per_token": 0.000002, + "litellm_provider": "perplexity", + "max_input_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_reasoning_token": 0.000003, + "output_cost_per_token": 0.000008, + "search_context_cost_per_query": { + "search_context_size_high": 0.005, + "search_context_size_low": 0.005, + "search_context_size_medium": 0.005 + }, + "supports_reasoning": true, + "supports_web_search": true + }, + "perplexity/sonar-medium-chat": { + "input_cost_per_token": 6e-7, + "litellm_provider": "perplexity", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.0000018 + }, + "perplexity/sonar-medium-online": { + "input_cost_per_request": 0.005, + "input_cost_per_token": 0, + "litellm_provider": "perplexity", + "max_input_tokens": 12000, + "max_output_tokens": 12000, + "max_tokens": 12000, + "mode": "chat", + "output_cost_per_token": 0.0000018 + }, + "perplexity/sonar-pro": { + "input_cost_per_token": 0.000003, + "litellm_provider": "perplexity", + "max_input_tokens": 200000, + "max_output_tokens": 8000, + "max_tokens": 8000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.014, + "search_context_size_low": 0.006, + "search_context_size_medium": 0.01 + }, + "supports_web_search": true + }, + "perplexity/sonar-reasoning": { + "input_cost_per_token": 0.000001, + "litellm_provider": "perplexity", + "max_input_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000005, + "search_context_cost_per_query": { + "search_context_size_high": 0.014, + "search_context_size_low": 0.005, + "search_context_size_medium": 0.008 + }, + "supports_reasoning": true, + "supports_web_search": true + }, + "perplexity/sonar-reasoning-pro": { + "input_cost_per_token": 0.000002, + 
"litellm_provider": "perplexity", + "max_input_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "search_context_cost_per_query": { + "search_context_size_high": 0.014, + "search_context_size_low": 0.006, + "search_context_size_medium": 0.01 + }, + "supports_reasoning": true, + "supports_web_search": true + }, + "perplexity/sonar-small-chat": { + "input_cost_per_token": 7e-8, + "litellm_provider": "perplexity", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 2.8e-7 + }, + "perplexity/sonar-small-online": { + "input_cost_per_request": 0.005, + "input_cost_per_token": 0, + "litellm_provider": "perplexity", + "max_input_tokens": 12000, + "max_output_tokens": 12000, + "max_tokens": 12000, + "mode": "chat", + "output_cost_per_token": 2.8e-7 + }, + "publicai/swiss-ai/apertus-8b-instruct": { + "input_cost_per_token": 0, + "litellm_provider": "publicai", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://platform.publicai.co/docs", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "publicai/swiss-ai/apertus-70b-instruct": { + "input_cost_per_token": 0, + "litellm_provider": "publicai", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://platform.publicai.co/docs", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "publicai/aisingapore/Gemma-SEA-LION-v4-27B-IT": { + "input_cost_per_token": 0, + "litellm_provider": "publicai", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://platform.publicai.co/docs", + "supports_function_calling": true, + "supports_tool_choice": true + }, + 
"publicai/BSC-LT/salamandra-7b-instruct-tools-16k": { + "input_cost_per_token": 0, + "litellm_provider": "publicai", + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://platform.publicai.co/docs", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "publicai/BSC-LT/ALIA-40b-instruct_Q8_0": { + "input_cost_per_token": 0, + "litellm_provider": "publicai", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://platform.publicai.co/docs", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "publicai/allenai/Olmo-3-7B-Instruct": { + "input_cost_per_token": 0, + "litellm_provider": "publicai", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://platform.publicai.co/docs", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "publicai/aisingapore/Qwen-SEA-LION-v4-32B-IT": { + "input_cost_per_token": 0, + "litellm_provider": "publicai", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://platform.publicai.co/docs", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "publicai/allenai/Olmo-3-7B-Think": { + "input_cost_per_token": 0, + "litellm_provider": "publicai", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://platform.publicai.co/docs", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_reasoning": true + }, + "publicai/allenai/Olmo-3-32B-Think": { + "input_cost_per_token": 0, + "litellm_provider": "publicai", + "max_input_tokens": 32768, + "max_output_tokens": 4096, + 
"max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://platform.publicai.co/docs", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_reasoning": true + }, + "qwen.qwen3-coder-480b-a35b-v1:0": { + "input_cost_per_token": 2.2e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 262000, + "max_output_tokens": 65536, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.0000018, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "qwen.qwen3-235b-a22b-2507-v1:0": { + "input_cost_per_token": 2.2e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 262144, + "max_output_tokens": 131072, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 8.8e-7, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "qwen.qwen3-coder-30b-a3b-v1:0": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 262144, + "max_output_tokens": 131072, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "qwen.qwen3-32b-v1:0": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 131072, + "max_output_tokens": 16384, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "qwen.qwen3-next-80b-a3b": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "supports_function_calling": true, + "supports_system_messages": true + }, + "qwen.qwen3-vl-235b-a22b": { + 
"input_cost_per_token": 5.3e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000266, + "supports_function_calling": true, + "supports_system_messages": true, + "supports_vision": true + }, + "replicate/meta/llama-2-13b": { + "input_cost_per_token": 1e-7, + "litellm_provider": "replicate", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 5e-7, + "supports_tool_choice": true + }, + "replicate/meta/llama-2-13b-chat": { + "input_cost_per_token": 1e-7, + "litellm_provider": "replicate", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 5e-7, + "supports_tool_choice": true + }, + "replicate/meta/llama-2-70b": { + "input_cost_per_token": 6.5e-7, + "litellm_provider": "replicate", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000275, + "supports_tool_choice": true + }, + "replicate/meta/llama-2-70b-chat": { + "input_cost_per_token": 6.5e-7, + "litellm_provider": "replicate", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000275, + "supports_tool_choice": true + }, + "replicate/meta/llama-2-7b": { + "input_cost_per_token": 5e-8, + "litellm_provider": "replicate", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "supports_tool_choice": true + }, + "replicate/meta/llama-2-7b-chat": { + "input_cost_per_token": 5e-8, + "litellm_provider": "replicate", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "supports_tool_choice": true + }, + "replicate/meta/llama-3-70b": { + 
"input_cost_per_token": 6.5e-7, + "litellm_provider": "replicate", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000275, + "supports_tool_choice": true + }, + "replicate/meta/llama-3-70b-instruct": { + "input_cost_per_token": 6.5e-7, + "litellm_provider": "replicate", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000275, + "supports_tool_choice": true + }, + "replicate/meta/llama-3-8b": { + "input_cost_per_token": 5e-8, + "litellm_provider": "replicate", + "max_input_tokens": 8086, + "max_output_tokens": 8086, + "max_tokens": 8086, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "supports_tool_choice": true + }, + "replicate/meta/llama-3-8b-instruct": { + "input_cost_per_token": 5e-8, + "litellm_provider": "replicate", + "max_input_tokens": 8086, + "max_output_tokens": 8086, + "max_tokens": 8086, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "supports_tool_choice": true + }, + "replicate/mistralai/mistral-7b-instruct-v0.2": { + "input_cost_per_token": 5e-8, + "litellm_provider": "replicate", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "supports_tool_choice": true + }, + "replicate/mistralai/mistral-7b-v0.1": { + "input_cost_per_token": 5e-8, + "litellm_provider": "replicate", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 2.5e-7, + "supports_tool_choice": true + }, + "replicate/mistralai/mixtral-8x7b-instruct-v0.1": { + "input_cost_per_token": 3e-7, + "litellm_provider": "replicate", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000001, + "supports_tool_choice": true + }, + "sagemaker/meta-textgeneration-llama-2-13b-f": { + 
"input_cost_per_token": 0, + "litellm_provider": "sagemaker", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0 + }, + "sagemaker/meta-textgeneration-llama-2-70b-b-f": { + "input_cost_per_token": 0, + "litellm_provider": "sagemaker", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0 + }, + "sagemaker/meta-textgeneration-llama-2-7b-f": { + "input_cost_per_token": 0, + "litellm_provider": "sagemaker", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0 + }, + "sambanova/DeepSeek-R1": { + "input_cost_per_token": 0.000005, + "litellm_provider": "sambanova", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000007, + "source": "https://cloud.sambanova.ai/plans/pricing" + }, + "sambanova/DeepSeek-R1-Distill-Llama-70B": { + "input_cost_per_token": 7e-7, + "litellm_provider": "sambanova", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000014, + "source": "https://cloud.sambanova.ai/plans/pricing" + }, + "sambanova/DeepSeek-V3-0324": { + "input_cost_per_token": 0.000003, + "litellm_provider": "sambanova", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000045, + "source": "https://cloud.sambanova.ai/plans/pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "sambanova/Llama-4-Maverick-17B-128E-Instruct": { + "input_cost_per_token": 6.3e-7, + "litellm_provider": "sambanova", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "metadata": { + "notes": "For vision models, images are converted to 6432 input tokens and 
are billed at that amount" + }, + "mode": "chat", + "output_cost_per_token": 0.0000018, + "source": "https://cloud.sambanova.ai/plans/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "sambanova/Llama-4-Scout-17B-16E-Instruct": { + "input_cost_per_token": 4e-7, + "litellm_provider": "sambanova", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "metadata": { + "notes": "For vision models, images are converted to 6432 input tokens and are billed at that amount" + }, + "mode": "chat", + "output_cost_per_token": 7e-7, + "source": "https://cloud.sambanova.ai/plans/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "sambanova/Meta-Llama-3.1-405B-Instruct": { + "input_cost_per_token": 0.000005, + "litellm_provider": "sambanova", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.00001, + "source": "https://cloud.sambanova.ai/plans/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "sambanova/Meta-Llama-3.1-8B-Instruct": { + "input_cost_per_token": 1e-7, + "litellm_provider": "sambanova", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 2e-7, + "source": "https://cloud.sambanova.ai/plans/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "sambanova/Meta-Llama-3.2-1B-Instruct": { + "input_cost_per_token": 4e-8, + "litellm_provider": "sambanova", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 8e-8, + "source": "https://cloud.sambanova.ai/plans/pricing" + }, + "sambanova/Meta-Llama-3.2-3B-Instruct": { + 
"input_cost_per_token": 8e-8, + "litellm_provider": "sambanova", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 1.6e-7, + "source": "https://cloud.sambanova.ai/plans/pricing" + }, + "sambanova/Meta-Llama-3.3-70B-Instruct": { + "input_cost_per_token": 6e-7, + "litellm_provider": "sambanova", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "source": "https://cloud.sambanova.ai/plans/pricing", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "sambanova/Meta-Llama-Guard-3-8B": { + "input_cost_per_token": 3e-7, + "litellm_provider": "sambanova", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 3e-7, + "source": "https://cloud.sambanova.ai/plans/pricing" + }, + "sambanova/QwQ-32B": { + "input_cost_per_token": 5e-7, + "litellm_provider": "sambanova", + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000001, + "source": "https://cloud.sambanova.ai/plans/pricing" + }, + "sambanova/Qwen2-Audio-7B-Instruct": { + "input_cost_per_token": 5e-7, + "litellm_provider": "sambanova", + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0001, + "source": "https://cloud.sambanova.ai/plans/pricing", + "supports_audio_input": true + }, + "sambanova/Qwen3-32B": { + "input_cost_per_token": 4e-7, + "litellm_provider": "sambanova", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 8e-7, + "source": "https://cloud.sambanova.ai/plans/pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + 
"sambanova/DeepSeek-V3.1": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.0000045, + "litellm_provider": "sambanova", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_reasoning": true, + "source": "https://cloud.sambanova.ai/plans/pricing" + }, + "sambanova/gpt-oss-120b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.0000045, + "litellm_provider": "sambanova", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_reasoning": true, + "source": "https://cloud.sambanova.ai/plans/pricing" + }, + "snowflake/claude-3-5-sonnet": { + "litellm_provider": "snowflake", + "max_input_tokens": 18000, + "max_output_tokens": 8192, + "max_tokens": 18000, + "mode": "chat", + "supports_computer_use": true + }, + "snowflake/deepseek-r1": { + "litellm_provider": "snowflake", + "max_input_tokens": 32768, + "max_output_tokens": 8192, + "max_tokens": 32768, + "mode": "chat", + "supports_reasoning": true + }, + "snowflake/gemma-7b": { + "litellm_provider": "snowflake", + "max_input_tokens": 8000, + "max_output_tokens": 8192, + "max_tokens": 8000, + "mode": "chat" + }, + "snowflake/jamba-1.5-large": { + "litellm_provider": "snowflake", + "max_input_tokens": 256000, + "max_output_tokens": 8192, + "max_tokens": 256000, + "mode": "chat" + }, + "snowflake/jamba-1.5-mini": { + "litellm_provider": "snowflake", + "max_input_tokens": 256000, + "max_output_tokens": 8192, + "max_tokens": 256000, + "mode": "chat" + }, + "snowflake/jamba-instruct": { + "litellm_provider": "snowflake", + "max_input_tokens": 256000, + "max_output_tokens": 8192, + "max_tokens": 256000, + "mode": "chat" + }, + "snowflake/llama2-70b-chat": { + "litellm_provider": "snowflake", + "max_input_tokens": 4096, + "max_output_tokens": 
8192, + "max_tokens": 4096, + "mode": "chat" + }, + "snowflake/llama3-70b": { + "litellm_provider": "snowflake", + "max_input_tokens": 8000, + "max_output_tokens": 8192, + "max_tokens": 8000, + "mode": "chat" + }, + "snowflake/llama3-8b": { + "litellm_provider": "snowflake", + "max_input_tokens": 8000, + "max_output_tokens": 8192, + "max_tokens": 8000, + "mode": "chat" + }, + "snowflake/llama3.1-405b": { + "litellm_provider": "snowflake", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat" + }, + "snowflake/llama3.1-70b": { + "litellm_provider": "snowflake", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat" + }, + "snowflake/llama3.1-8b": { + "litellm_provider": "snowflake", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat" + }, + "snowflake/llama3.2-1b": { + "litellm_provider": "snowflake", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat" + }, + "snowflake/llama3.2-3b": { + "litellm_provider": "snowflake", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat" + }, + "snowflake/llama3.3-70b": { + "litellm_provider": "snowflake", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat" + }, + "snowflake/mistral-7b": { + "litellm_provider": "snowflake", + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "max_tokens": 32000, + "mode": "chat" + }, + "snowflake/mistral-large": { + "litellm_provider": "snowflake", + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "max_tokens": 32000, + "mode": "chat" + }, + "snowflake/mistral-large2": { + "litellm_provider": "snowflake", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat" + }, + "snowflake/mixtral-8x7b": { + "litellm_provider": "snowflake", + "max_input_tokens": 32000, + 
"max_output_tokens": 8192, + "max_tokens": 32000, + "mode": "chat" + }, + "snowflake/reka-core": { + "litellm_provider": "snowflake", + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "max_tokens": 32000, + "mode": "chat" + }, + "snowflake/reka-flash": { + "litellm_provider": "snowflake", + "max_input_tokens": 100000, + "max_output_tokens": 8192, + "max_tokens": 100000, + "mode": "chat" + }, + "snowflake/snowflake-arctic": { + "litellm_provider": "snowflake", + "max_input_tokens": 4096, + "max_output_tokens": 8192, + "max_tokens": 4096, + "mode": "chat" + }, + "snowflake/snowflake-llama-3.1-405b": { + "litellm_provider": "snowflake", + "max_input_tokens": 8000, + "max_output_tokens": 8192, + "max_tokens": 8000, + "mode": "chat" + }, + "snowflake/snowflake-llama-3.3-70b": { + "litellm_provider": "snowflake", + "max_input_tokens": 8000, + "max_output_tokens": 8192, + "max_tokens": 8000, + "mode": "chat" + }, + "together-ai-21.1b-41b": { + "input_cost_per_token": 8e-7, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 8e-7 + }, + "together-ai-4.1b-8b": { + "input_cost_per_token": 2e-7, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 2e-7 + }, + "together-ai-41.1b-80b": { + "input_cost_per_token": 9e-7, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 9e-7 + }, + "together-ai-8.1b-21b": { + "input_cost_per_token": 3e-7, + "litellm_provider": "together_ai", + "max_tokens": 1000, + "mode": "chat", + "output_cost_per_token": 3e-7 + }, + "together-ai-81.1b-110b": { + "input_cost_per_token": 0.0000018, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 0.0000018 + }, + "together-ai-up-to-4b": { + "input_cost_per_token": 1e-7, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 1e-7 + }, + "together_ai/Qwen/Qwen2.5-72B-Instruct-Turbo": { + "litellm_provider": "together_ai", + "mode": "chat", + 
"supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/Qwen/Qwen2.5-7B-Instruct-Turbo": { + "litellm_provider": "together_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/Qwen/Qwen3-235B-A22B-Instruct-2507-tput": { + "input_cost_per_token": 2e-7, + "litellm_provider": "together_ai", + "max_input_tokens": 262000, + "mode": "chat", + "output_cost_per_token": 0.000006, + "source": "https://www.together.ai/models/qwen3-235b-a22b-instruct-2507-fp8", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/Qwen/Qwen3-235B-A22B-Thinking-2507": { + "input_cost_per_token": 6.5e-7, + "litellm_provider": "together_ai", + "max_input_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000003, + "source": "https://www.together.ai/models/qwen3-235b-a22b-thinking-2507", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/Qwen/Qwen3-235B-A22B-fp8-tput": { + "input_cost_per_token": 2e-7, + "litellm_provider": "together_ai", + "max_input_tokens": 40000, + "mode": "chat", + "output_cost_per_token": 6e-7, + "source": "https://www.together.ai/models/qwen3-235b-a22b-fp8-tput", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_tool_choice": false + }, + "together_ai/Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8": { + "input_cost_per_token": 0.000002, + "litellm_provider": "together_ai", + "max_input_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "source": "https://www.together.ai/models/qwen3-coder-480b-a35b-instruct", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + 
"together_ai/deepseek-ai/DeepSeek-R1": { + "input_cost_per_token": 0.000003, + "litellm_provider": "together_ai", + "max_input_tokens": 128000, + "max_output_tokens": 20480, + "max_tokens": 20480, + "mode": "chat", + "output_cost_per_token": 0.000007, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/deepseek-ai/DeepSeek-R1-0528-tput": { + "input_cost_per_token": 5.5e-7, + "litellm_provider": "together_ai", + "max_input_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00000219, + "source": "https://www.together.ai/models/deepseek-r1-0528-throughput", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/deepseek-ai/DeepSeek-V3": { + "input_cost_per_token": 0.00000125, + "litellm_provider": "together_ai", + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.00000125, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/deepseek-ai/DeepSeek-V3.1": { + "input_cost_per_token": 6e-7, + "litellm_provider": "together_ai", + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.0000017, + "source": "https://www.together.ai/models/deepseek-v3-1", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "together_ai/meta-llama/Llama-3.2-3B-Instruct-Turbo": { + "litellm_provider": "together_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/meta-llama/Llama-3.3-70B-Instruct-Turbo": { + "input_cost_per_token": 8.8e-7, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 8.8e-7, + "supports_function_calling": 
true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "together_ai/meta-llama/Llama-3.3-70B-Instruct-Turbo-Free": { + "input_cost_per_token": 0, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 0, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "together_ai/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": { + "input_cost_per_token": 2.7e-7, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 8.5e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/meta-llama/Llama-4-Scout-17B-16E-Instruct": { + "input_cost_per_token": 1.8e-7, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 5.9e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": { + "input_cost_per_token": 0.0000035, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 0.0000035, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": { + "input_cost_per_token": 8.8e-7, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 8.8e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "together_ai/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": { + "input_cost_per_token": 1.8e-7, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 1.8e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, 
+ "supports_response_schema": true, + "supports_tool_choice": true + }, + "together_ai/mistralai/Mistral-7B-Instruct-v0.1": { + "litellm_provider": "together_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "together_ai/mistralai/Mistral-Small-24B-Instruct-2501": { + "litellm_provider": "together_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1": { + "input_cost_per_token": 6e-7, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "together_ai/moonshotai/Kimi-K2-Instruct": { + "input_cost_per_token": 0.000001, + "litellm_provider": "together_ai", + "mode": "chat", + "output_cost_per_token": 0.000003, + "source": "https://www.together.ai/models/kimi-k2-instruct", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/openai/gpt-oss-120b": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "together_ai", + "max_input_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 6e-7, + "source": "https://www.together.ai/models/gpt-oss-120b", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/openai/gpt-oss-20b": { + "input_cost_per_token": 5e-8, + "litellm_provider": "together_ai", + "max_input_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 2e-7, + "source": "https://www.together.ai/models/gpt-oss-20b", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + 
"together_ai/togethercomputer/CodeLlama-34b-Instruct": { + "litellm_provider": "together_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/zai-org/GLM-4.5-Air-FP8": { + "input_cost_per_token": 2e-7, + "litellm_provider": "together_ai", + "max_input_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.0000011, + "source": "https://www.together.ai/models/glm-4-5-air", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/zai-org/GLM-4.6": { + "input_cost_per_token": 6e-7, + "litellm_provider": "together_ai", + "max_input_tokens": 200000, + "max_output_tokens": 200000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.0000022, + "source": "https://www.together.ai/models/glm-4-6", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "together_ai/moonshotai/Kimi-K2-Instruct-0905": { + "input_cost_per_token": 0.000001, + "litellm_provider": "together_ai", + "max_input_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.000003, + "source": "https://www.together.ai/models/kimi-k2-0905", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/Qwen/Qwen3-Next-80B-A3B-Instruct": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "together_ai", + "max_input_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://www.together.ai/models/qwen3-next-80b-a3b-instruct", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "together_ai/Qwen/Qwen3-Next-80B-A3B-Thinking": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "together_ai", + "max_input_tokens": 262144, + 
"mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://www.together.ai/models/qwen3-next-80b-a3b-thinking", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "us.amazon.nova-lite-v1:0": { + "input_cost_per_token": 6e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 2.4e-7, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "us.amazon.nova-micro-v1:0": { + "input_cost_per_token": 3.5e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 1.4e-7, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "us.amazon.nova-premier-v1:0": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 0.0000125, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": false, + "supports_response_schema": true, + "supports_vision": true + }, + "us.amazon.nova-pro-v1:0": { + "input_cost_per_token": 8e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 300000, + "max_output_tokens": 10000, + "max_tokens": 10000, + "mode": "chat", + "output_cost_per_token": 0.0000032, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_vision": true + }, + "us.anthropic.claude-3-5-haiku-20241022-v1:0": { + "cache_creation_input_token_cost": 0.000001, + "cache_read_input_token_cost": 8e-8, + 
"input_cost_per_token": 8e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000004, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "us.anthropic.claude-haiku-4-5-20251001-v1:0": { + "cache_creation_input_token_cost": 0.000001375, + "cache_read_input_token_cost": 1.1e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.0000055, + "source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock", + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "us.anthropic.claude-3-5-sonnet-20240620-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "us.anthropic.claude-3-5-sonnet-20241022-v2:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + 
"supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "us.anthropic.claude-3-7-sonnet-20250219-v1:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "us.anthropic.claude-3-haiku-20240307-v1:0": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000125, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "us.anthropic.claude-3-opus-20240229-v1:0": { + "input_cost_per_token": 0.000015, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "us.anthropic.claude-3-sonnet-20240229-v1:0": { + "input_cost_per_token": 0.000003, + "litellm_provider": "bedrock", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + 
"supports_function_calling": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "us.anthropic.claude-opus-4-1-20250805-v1:0": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "us.anthropic.claude-sonnet-4-5-20250929-v1:0": { + "cache_creation_input_token_cost": 0.000004125, + "cache_read_input_token_cost": 3.3e-7, + "input_cost_per_token": 0.0000033, + "input_cost_per_token_above_200k_tokens": 0.0000066, + "output_cost_per_token_above_200k_tokens": 0.00002475, + "cache_creation_input_token_cost_above_200k_tokens": 0.00000825, + "cache_read_input_token_cost_above_200k_tokens": 6.6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.0000165, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": 
true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "au.anthropic.claude-haiku-4-5-20251001-v1:0": { + "cache_creation_input_token_cost": 0.000001375, + "cache_read_input_token_cost": 1.1e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.0000055, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 346 + }, + "us.anthropic.claude-opus-4-20250514-v1:0": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "us.anthropic.claude-opus-4-5-20251101-v1:0": { + "cache_creation_input_token_cost": 0.00000625, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000005, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + 
"output_cost_per_token": 0.000025, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "global.anthropic.claude-opus-4-5-20251101-v1:0": { + "cache_creation_input_token_cost": 0.00000625, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000005, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000025, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "eu.anthropic.claude-opus-4-5-20251101-v1:0": { + "cache_creation_input_token_cost": 0.00000625, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000005, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000025, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + 
"supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "us.anthropic.claude-sonnet-4-20250514-v1:0": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "us.deepseek.r1-v1:0": { + "input_cost_per_token": 0.00000135, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.0000054, + "supports_function_calling": false, + "supports_reasoning": true, + "supports_tool_choice": false + }, + "us.meta.llama3-1-405b-instruct-v1:0": { + "input_cost_per_token": 0.00000532, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000016, + "supports_function_calling": true, + "supports_tool_choice": false + }, + 
"us.meta.llama3-1-70b-instruct-v1:0": { + "input_cost_per_token": 9.9e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 9.9e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama3-1-8b-instruct-v1:0": { + "input_cost_per_token": 2.2e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 2.2e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama3-2-11b-instruct-v1:0": { + "input_cost_per_token": 3.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 3.5e-7, + "supports_function_calling": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "us.meta.llama3-2-1b-instruct-v1:0": { + "input_cost_per_token": 1e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama3-2-3b-instruct-v1:0": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama3-2-90b-instruct-v1:0": { + "input_cost_per_token": 0.000002, + "litellm_provider": "bedrock", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_tool_choice": false, + "supports_vision": true + }, + 
"us.meta.llama3-3-70b-instruct-v1:0": { + "input_cost_per_token": 7.2e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 7.2e-7, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama4-maverick-17b-instruct-v1:0": { + "input_cost_per_token": 2.4e-7, + "input_cost_per_token_batches": 1.2e-7, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 9.7e-7, + "output_cost_per_token_batches": 4.85e-7, + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "code" + ], + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama4-scout-17b-instruct-v1:0": { + "input_cost_per_token": 1.7e-7, + "input_cost_per_token_batches": 8.5e-8, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 6.6e-7, + "output_cost_per_token_batches": 3.3e-7, + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "code" + ], + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.mistral.pixtral-large-2502-v1:0": { + "input_cost_per_token": 0.000002, + "litellm_provider": "bedrock_converse", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_function_calling": true, + "supports_tool_choice": false + }, + "v0/v0-1.0-md": { + "input_cost_per_token": 0.000003, + "litellm_provider": "v0", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + 
"supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "v0/v0-1.5-lg": { + "input_cost_per_token": 0.000015, + "litellm_provider": "v0", + "max_input_tokens": 512000, + "max_output_tokens": 512000, + "max_tokens": 512000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "v0/v0-1.5-md": { + "input_cost_per_token": 0.000003, + "litellm_provider": "v0", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vercel_ai_gateway/alibaba/qwen-3-14b": { + "input_cost_per_token": 8e-8, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 40960, + "max_output_tokens": 16384, + "max_tokens": 40960, + "mode": "chat", + "output_cost_per_token": 2.4e-7 + }, + "vercel_ai_gateway/alibaba/qwen-3-235b": { + "input_cost_per_token": 2e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 40960, + "max_output_tokens": 16384, + "max_tokens": 40960, + "mode": "chat", + "output_cost_per_token": 6e-7 + }, + "vercel_ai_gateway/alibaba/qwen-3-30b": { + "input_cost_per_token": 1e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 40960, + "max_output_tokens": 16384, + "max_tokens": 40960, + "mode": "chat", + "output_cost_per_token": 3e-7 + }, + "vercel_ai_gateway/alibaba/qwen-3-32b": { + "input_cost_per_token": 1e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 40960, + "max_output_tokens": 16384, + "max_tokens": 40960, + "mode": "chat", + "output_cost_per_token": 3e-7 + }, + 
"vercel_ai_gateway/alibaba/qwen3-coder": { + "input_cost_per_token": 4e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 262144, + "max_output_tokens": 66536, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.0000016 + }, + "vercel_ai_gateway/amazon/nova-lite": { + "input_cost_per_token": 6e-8, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 300000, + "max_output_tokens": 8192, + "max_tokens": 300000, + "mode": "chat", + "output_cost_per_token": 2.4e-7 + }, + "vercel_ai_gateway/amazon/nova-micro": { + "input_cost_per_token": 3.5e-8, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.4e-7 + }, + "vercel_ai_gateway/amazon/nova-pro": { + "input_cost_per_token": 8e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 300000, + "max_output_tokens": 8192, + "max_tokens": 300000, + "mode": "chat", + "output_cost_per_token": 0.0000032 + }, + "vercel_ai_gateway/amazon/titan-embed-text-v2": { + "input_cost_per_token": 2e-8, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 0, + "max_output_tokens": 0, + "max_tokens": 0, + "mode": "chat", + "output_cost_per_token": 0 + }, + "vercel_ai_gateway/anthropic/claude-3-haiku": { + "cache_creation_input_token_cost": 3e-7, + "cache_read_input_token_cost": 3e-8, + "input_cost_per_token": 2.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.00000125 + }, + "vercel_ai_gateway/anthropic/claude-3-opus": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000075 + 
}, + "vercel_ai_gateway/anthropic/claude-3.5-haiku": { + "cache_creation_input_token_cost": 0.000001, + "cache_read_input_token_cost": 8e-8, + "input_cost_per_token": 8e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000004 + }, + "vercel_ai_gateway/anthropic/claude-3.5-sonnet": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000015 + }, + "vercel_ai_gateway/anthropic/claude-3.7-sonnet": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000015 + }, + "vercel_ai_gateway/anthropic/claude-4-opus": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000075 + }, + "vercel_ai_gateway/anthropic/claude-4-sonnet": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000015 + }, + "vercel_ai_gateway/cohere/command-a": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 256000, + "max_output_tokens": 8000, + 
"max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.00001 + }, + "vercel_ai_gateway/cohere/command-r": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 6e-7 + }, + "vercel_ai_gateway/cohere/command-r-plus": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001 + }, + "vercel_ai_gateway/cohere/embed-v4.0": { + "input_cost_per_token": 1.2e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 0, + "max_output_tokens": 0, + "max_tokens": 0, + "mode": "chat", + "output_cost_per_token": 0 + }, + "vercel_ai_gateway/deepseek/deepseek-r1": { + "input_cost_per_token": 5.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00000219 + }, + "vercel_ai_gateway/deepseek/deepseek-r1-distill-llama-70b": { + "input_cost_per_token": 7.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 9.9e-7 + }, + "vercel_ai_gateway/deepseek/deepseek-v3": { + "input_cost_per_token": 9e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 9e-7 + }, + "vercel_ai_gateway/google/gemini-2.0-flash": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_tokens": 1048576, + "mode": "chat", + "output_cost_per_token": 6e-7 + }, + "vercel_ai_gateway/google/gemini-2.0-flash-lite": { + 
"input_cost_per_token": 7.5e-8, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_tokens": 1048576, + "mode": "chat", + "output_cost_per_token": 3e-7 + }, + "vercel_ai_gateway/google/gemini-2.5-flash": { + "input_cost_per_token": 3e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 1000000, + "max_output_tokens": 65536, + "max_tokens": 1000000, + "mode": "chat", + "output_cost_per_token": 0.0000025 + }, + "vercel_ai_gateway/google/gemini-2.5-pro": { + "input_cost_per_token": 0.0000025, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 1048576, + "max_output_tokens": 65536, + "max_tokens": 1048576, + "mode": "chat", + "output_cost_per_token": 0.00001 + }, + "vercel_ai_gateway/google/gemma-2-9b": { + "input_cost_per_token": 2e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2e-7 + }, + "vercel_ai_gateway/inception/mercury-coder-small": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 32000, + "max_output_tokens": 16384, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000001 + }, + "vercel_ai_gateway/meta/llama-3-70b": { + "input_cost_per_token": 5.9e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 7.9e-7 + }, + "vercel_ai_gateway/meta/llama-3-8b": { + "input_cost_per_token": 5e-8, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 8e-8 + }, + "vercel_ai_gateway/meta/llama-3.1-70b": { + "input_cost_per_token": 7.2e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": 
"chat", + "output_cost_per_token": 7.2e-7 + }, + "vercel_ai_gateway/meta/llama-3.1-8b": { + "input_cost_per_token": 5e-8, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 131000, + "max_output_tokens": 131072, + "max_tokens": 131000, + "mode": "chat", + "output_cost_per_token": 8e-8 + }, + "vercel_ai_gateway/meta/llama-3.2-11b": { + "input_cost_per_token": 1.6e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.6e-7 + }, + "vercel_ai_gateway/meta/llama-3.2-1b": { + "input_cost_per_token": 1e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1e-7 + }, + "vercel_ai_gateway/meta/llama-3.2-3b": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.5e-7 + }, + "vercel_ai_gateway/meta/llama-3.2-90b": { + "input_cost_per_token": 7.2e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 7.2e-7 + }, + "vercel_ai_gateway/meta/llama-3.3-70b": { + "input_cost_per_token": 7.2e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 7.2e-7 + }, + "vercel_ai_gateway/meta/llama-4-maverick": { + "input_cost_per_token": 2e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 131072, + "max_output_tokens": 8192, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 6e-7 + }, + "vercel_ai_gateway/meta/llama-4-scout": { + "input_cost_per_token": 1e-7, + "litellm_provider": "vercel_ai_gateway", + 
"max_input_tokens": 131072, + "max_output_tokens": 8192, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 3e-7 + }, + "vercel_ai_gateway/mistral/codestral": { + "input_cost_per_token": 3e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 256000, + "max_output_tokens": 4000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 9e-7 + }, + "vercel_ai_gateway/mistral/codestral-embed": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 0, + "max_output_tokens": 0, + "max_tokens": 0, + "mode": "chat", + "output_cost_per_token": 0 + }, + "vercel_ai_gateway/mistral/devstral-small": { + "input_cost_per_token": 7e-8, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 2.8e-7 + }, + "vercel_ai_gateway/mistral/magistral-medium": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 64000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000005 + }, + "vercel_ai_gateway/mistral/magistral-small": { + "input_cost_per_token": 5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 64000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.0000015 + }, + "vercel_ai_gateway/mistral/ministral-3b": { + "input_cost_per_token": 4e-8, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 4000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 4e-8 + }, + "vercel_ai_gateway/mistral/ministral-8b": { + "input_cost_per_token": 1e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 4000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1e-7 + }, + 
"vercel_ai_gateway/mistral/mistral-embed": { + "input_cost_per_token": 1e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 0, + "max_output_tokens": 0, + "max_tokens": 0, + "mode": "chat", + "output_cost_per_token": 0 + }, + "vercel_ai_gateway/mistral/mistral-large": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 32000, + "max_output_tokens": 4000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000006 + }, + "vercel_ai_gateway/mistral/mistral-saba-24b": { + "input_cost_per_token": 7.9e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 7.9e-7 + }, + "vercel_ai_gateway/mistral/mistral-small": { + "input_cost_per_token": 1e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 32000, + "max_output_tokens": 4000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 3e-7 + }, + "vercel_ai_gateway/mistral/mixtral-8x22b-instruct": { + "input_cost_per_token": 0.0000012, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 65536, + "max_output_tokens": 2048, + "max_tokens": 65536, + "mode": "chat", + "output_cost_per_token": 0.0000012 + }, + "vercel_ai_gateway/mistral/pixtral-12b": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 4000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.5e-7 + }, + "vercel_ai_gateway/mistral/pixtral-large": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 4000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000006 + }, + "vercel_ai_gateway/moonshotai/kimi-k2": { + "input_cost_per_token": 5.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 131072, + 
"max_output_tokens": 16384, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000022 + }, + "vercel_ai_gateway/morph/morph-v3-fast": { + "input_cost_per_token": 8e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 32768, + "max_output_tokens": 16384, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000012 + }, + "vercel_ai_gateway/morph/morph-v3-large": { + "input_cost_per_token": 9e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 32768, + "max_output_tokens": 16384, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.0000019 + }, + "vercel_ai_gateway/openai/gpt-3.5-turbo": { + "input_cost_per_token": 5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "max_tokens": 16385, + "mode": "chat", + "output_cost_per_token": 0.0000015 + }, + "vercel_ai_gateway/openai/gpt-3.5-turbo-instruct": { + "input_cost_per_token": 0.0000015, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000002 + }, + "vercel_ai_gateway/openai/gpt-4-turbo": { + "input_cost_per_token": 0.00001, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00003 + }, + "vercel_ai_gateway/openai/gpt-4.1": { + "cache_creation_input_token_cost": 0, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000002, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 1047576, + "mode": "chat", + "output_cost_per_token": 0.000008 + }, + "vercel_ai_gateway/openai/gpt-4.1-mini": { + "cache_creation_input_token_cost": 0, + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 4e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 
1047576, + "max_output_tokens": 32768, + "max_tokens": 1047576, + "mode": "chat", + "output_cost_per_token": 0.0000016 + }, + "vercel_ai_gateway/openai/gpt-4.1-nano": { + "cache_creation_input_token_cost": 0, + "cache_read_input_token_cost": 2.5e-8, + "input_cost_per_token": 1e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 1047576, + "max_output_tokens": 32768, + "max_tokens": 1047576, + "mode": "chat", + "output_cost_per_token": 4e-7 + }, + "vercel_ai_gateway/openai/gpt-4o": { + "cache_creation_input_token_cost": 0, + "cache_read_input_token_cost": 0.00000125, + "input_cost_per_token": 0.0000025, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.00001 + }, + "vercel_ai_gateway/openai/gpt-4o-mini": { + "cache_creation_input_token_cost": 0, + "cache_read_input_token_cost": 7.5e-8, + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 6e-7 + }, + "vercel_ai_gateway/openai/o1": { + "cache_creation_input_token_cost": 0, + "cache_read_input_token_cost": 0.0000075, + "input_cost_per_token": 0.000015, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.00006 + }, + "vercel_ai_gateway/openai/o3": { + "cache_creation_input_token_cost": 0, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000002, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000008 + }, + "vercel_ai_gateway/openai/o3-mini": { + "cache_creation_input_token_cost": 0, + "cache_read_input_token_cost": 5.5e-7, + "input_cost_per_token": 0.0000011, + 
"litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.0000044 + }, + "vercel_ai_gateway/openai/o4-mini": { + "cache_creation_input_token_cost": 0, + "cache_read_input_token_cost": 2.75e-7, + "input_cost_per_token": 0.0000011, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.0000044 + }, + "vercel_ai_gateway/perplexity/sonar": { + "input_cost_per_token": 0.000001, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 127000, + "max_output_tokens": 8000, + "max_tokens": 127000, + "mode": "chat", + "output_cost_per_token": 0.000001 + }, + "vercel_ai_gateway/perplexity/sonar-pro": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 200000, + "max_output_tokens": 8000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.000015 + }, + "vercel_ai_gateway/perplexity/sonar-reasoning": { + "input_cost_per_token": 0.000001, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 127000, + "max_output_tokens": 8000, + "max_tokens": 127000, + "mode": "chat", + "output_cost_per_token": 0.000005 + }, + "vercel_ai_gateway/perplexity/sonar-reasoning-pro": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 127000, + "max_output_tokens": 8000, + "max_tokens": 127000, + "mode": "chat", + "output_cost_per_token": 0.000008 + }, + "vercel_ai_gateway/vercel/v0-1.0-md": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 32000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000015 + }, + "vercel_ai_gateway/vercel/v0-1.5-md": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vercel_ai_gateway", + 
"max_input_tokens": 128000, + "max_output_tokens": 32768, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000015 + }, + "vercel_ai_gateway/xai/grok-2": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 131072, + "max_output_tokens": 4000, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.00001 + }, + "vercel_ai_gateway/xai/grok-2-vision": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00001 + }, + "vercel_ai_gateway/xai/grok-3": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000015 + }, + "vercel_ai_gateway/xai/grok-3-fast": { + "input_cost_per_token": 0.000005, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000025 + }, + "vercel_ai_gateway/xai/grok-3-mini": { + "input_cost_per_token": 3e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 5e-7 + }, + "vercel_ai_gateway/xai/grok-3-mini-fast": { + "input_cost_per_token": 6e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000004 + }, + "vercel_ai_gateway/xai/grok-4": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000015 + }, + "vercel_ai_gateway/zai/glm-4.5": { 
+ "input_cost_per_token": 6e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.0000022 + }, + "vercel_ai_gateway/zai/glm-4.5-air": { + "input_cost_per_token": 2e-7, + "litellm_provider": "vercel_ai_gateway", + "max_input_tokens": 128000, + "max_output_tokens": 96000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.0000011 + }, + "vercel_ai_gateway/zai/glm-4.6": { + "litellm_provider": "vercel_ai_gateway", + "cache_read_input_token_cost": 1.1e-7, + "input_cost_per_token": 4.5e-7, + "max_input_tokens": 200000, + "max_output_tokens": 200000, + "max_tokens": 200000, + "mode": "chat", + "output_cost_per_token": 0.0000018, + "source": "https://vercel.com/ai-gateway/models/glm-4.6", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/claude-3-5-haiku": { + "input_cost_per_token": 0.000001, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000005, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_tool_choice": true + }, + "vertex_ai/claude-3-5-haiku@20241022": { + "input_cost_per_token": 0.000001, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000005, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_tool_choice": true + }, + "vertex_ai/claude-haiku-4-5@20251001": { + "cache_creation_input_token_cost": 0.00000125, + "cache_read_input_token_cost": 1e-7, + "input_cost_per_token": 0.000001, + "litellm_provider": "vertex_ai-anthropic_models", + 
"max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000005, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/partner-models/claude/haiku-4-5", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "vertex_ai/claude-3-5-sonnet": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-3-5-sonnet-v2": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-3-5-sonnet-v2@20241022": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-3-5-sonnet@20240620": { + "input_cost_per_token": 0.000003, + "litellm_provider": 
"vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-3-7-sonnet@20250219": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "deprecation_date": "2025-06-01", + "input_cost_per_token": 0.000003, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "vertex_ai/claude-3-haiku": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000125, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-3-haiku@20240307": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.00000125, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-3-opus": { + "input_cost_per_token": 0.000015, + "litellm_provider": "vertex_ai-anthropic_models", + 
"max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-3-opus@20240229": { + "input_cost_per_token": 0.000015, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000075, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-3-sonnet": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-3-sonnet@20240229": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "max_tokens": 4096, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-opus-4": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + 
"supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "vertex_ai/claude-opus-4-1": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "input_cost_per_token_batches": 0.0000075, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "output_cost_per_token_batches": 0.0000375, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-opus-4-1@20250805": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "input_cost_per_token_batches": 0.0000075, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "output_cost_per_token_batches": 0.0000375, + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-opus-4-5": { + "cache_creation_input_token_cost": 0.00000625, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000005, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000025, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 
0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "vertex_ai/claude-opus-4-5@20251101": { + "cache_creation_input_token_cost": 0.00000625, + "cache_read_input_token_cost": 5e-7, + "input_cost_per_token": 0.000005, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000025, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "vertex_ai/claude-sonnet-4-5": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "input_cost_per_token_batches": 0.0000015, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "output_cost_per_token_batches": 0.0000075, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + 
"supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-sonnet-4-5@20250929": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "input_cost_per_token_batches": 0.0000015, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "output_cost_per_token_batches": 0.0000075, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/claude-opus-4@20250514": { + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.000015, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 200000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0.000075, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + 
"tool_use_system_prompt_tokens": 159 + }, + "vertex_ai/claude-sonnet-4": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "vertex_ai/claude-sonnet-4@20250514": { + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 3e-7, + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_200k_tokens": 0.000006, + "output_cost_per_token_above_200k_tokens": 0.0000225, + "cache_creation_input_token_cost_above_200k_tokens": 0.0000075, + "cache_read_input_token_cost_above_200k_tokens": 6e-7, + "litellm_provider": "vertex_ai-anthropic_models", + "max_input_tokens": 1000000, + "max_output_tokens": 64000, + "max_tokens": 64000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "search_context_cost_per_query": { + "search_context_size_high": 0.01, + "search_context_size_low": 0.01, + "search_context_size_medium": 0.01 + }, + "supports_assistant_prefill": true, + "supports_computer_use": true, + "supports_function_calling": true, + "supports_pdf_input": true, + 
"supports_prompt_caching": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "vertex_ai/mistralai/codestral-2@001": { + "input_cost_per_token": 3e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 9e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/codestral-2": { + "input_cost_per_token": 3e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 9e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/codestral-2@001": { + "input_cost_per_token": 3e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 9e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistralai/codestral-2": { + "input_cost_per_token": 3e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 9e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/codestral-2501": { + "input_cost_per_token": 2e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/codestral@2405": { + "input_cost_per_token": 2e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 
128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/codestral@latest": { + "input_cost_per_token": 2e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 6e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/deepseek-ai/deepseek-v3.1-maas": { + "input_cost_per_token": 0.00000135, + "litellm_provider": "vertex_ai-deepseek_models", + "max_input_tokens": 163840, + "max_output_tokens": 32768, + "max_tokens": 163840, + "mode": "chat", + "output_cost_per_token": 0.0000054, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supported_regions": [ + "us-west2" + ], + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "vertex_ai/deepseek-ai/deepseek-v3.2-maas": { + "input_cost_per_token": 5.6e-7, + "input_cost_per_token_batches": 2.8e-7, + "litellm_provider": "vertex_ai-deepseek_models", + "max_input_tokens": 163840, + "max_output_tokens": 32768, + "max_tokens": 163840, + "mode": "chat", + "output_cost_per_token": 0.00000168, + "output_cost_per_token_batches": 8.4e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supported_regions": [ + "us-west2" + ], + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "vertex_ai/deepseek-ai/deepseek-r1-0528-maas": { + "input_cost_per_token": 0.00000135, + "litellm_provider": "vertex_ai-deepseek_models", + "max_input_tokens": 65336, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 
0.0000054, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "vertex_ai/jamba-1.5": { + "input_cost_per_token": 2e-7, + "litellm_provider": "vertex_ai-ai21_models", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_tool_choice": true + }, + "vertex_ai/jamba-1.5-large": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vertex_ai-ai21_models", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supports_tool_choice": true + }, + "vertex_ai/jamba-1.5-large@001": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vertex_ai-ai21_models", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000008, + "supports_tool_choice": true + }, + "vertex_ai/jamba-1.5-mini": { + "input_cost_per_token": 2e-7, + "litellm_provider": "vertex_ai-ai21_models", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_tool_choice": true + }, + "vertex_ai/jamba-1.5-mini@001": { + "input_cost_per_token": 2e-7, + "litellm_provider": "vertex_ai-ai21_models", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 4e-7, + "supports_tool_choice": true + }, + "vertex_ai/meta/llama-3.1-405b-instruct-maas": { + "input_cost_per_token": 0.000005, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000016, + "source": 
"https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas", + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/meta/llama-3.1-70b-instruct-maas": { + "input_cost_per_token": 0, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas", + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/meta/llama-3.1-8b-instruct-maas": { + "input_cost_per_token": 0, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 128000, + "metadata": { + "notes": "VertexAI states that The Llama 3.1 API service for llama-3.1-70b-instruct-maas and llama-3.1-8b-instruct-maas are in public preview and at no cost." + }, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas", + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/meta/llama-3.2-90b-vision-instruct-maas": { + "input_cost_per_token": 0, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "max_tokens": 128000, + "metadata": { + "notes": "VertexAI states that The Llama 3.2 API service is at no cost during public preview, and will be priced as per dollar-per-1M-tokens at GA." 
+ }, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas", + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/meta/llama-4-maverick-17b-128e-instruct-maas": { + "input_cost_per_token": 3.5e-7, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 1000000, + "max_output_tokens": 1000000, + "max_tokens": 1000000, + "mode": "chat", + "output_cost_per_token": 0.00000115, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "code" + ], + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/meta/llama-4-maverick-17b-16e-instruct-maas": { + "input_cost_per_token": 3.5e-7, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 1000000, + "max_output_tokens": 1000000, + "max_tokens": 1000000, + "mode": "chat", + "output_cost_per_token": 0.00000115, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "code" + ], + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/meta/llama-4-scout-17b-128e-instruct-maas": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 10000000, + "max_output_tokens": 10000000, + "max_tokens": 10000000, + "mode": "chat", + "output_cost_per_token": 7e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "code" + ], + "supports_function_calling": true, + "supports_tool_choice": true + }, + 
"vertex_ai/meta/llama-4-scout-17b-16e-instruct-maas": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 10000000, + "max_output_tokens": 10000000, + "max_tokens": 10000000, + "mode": "chat", + "output_cost_per_token": 7e-7, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supported_modalities": [ + "text", + "image" + ], + "supported_output_modalities": [ + "text", + "code" + ], + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/meta/llama3-405b-instruct-maas": { + "input_cost_per_token": 0, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supports_tool_choice": true + }, + "vertex_ai/meta/llama3-70b-instruct-maas": { + "input_cost_per_token": 0, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supports_tool_choice": true + }, + "vertex_ai/meta/llama3-8b-instruct-maas": { + "input_cost_per_token": 0, + "litellm_provider": "vertex_ai-llama_models", + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_tokens": 32000, + "mode": "chat", + "output_cost_per_token": 0, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supports_tool_choice": true + }, + "vertex_ai/minimaxai/minimax-m2-maas": { + "input_cost_per_token": 3e-7, + "litellm_provider": "vertex_ai-minimax_models", + "max_input_tokens": 196608, + "max_output_tokens": 196608, + "max_tokens": 196608, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "source": 
"https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/moonshotai/kimi-k2-thinking-maas": { + "input_cost_per_token": 6e-7, + "litellm_provider": "vertex_ai-moonshot_models", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.0000025, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "vertex_ai/mistral-medium-3": { + "input_cost_per_token": 4e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistral-medium-3@001": { + "input_cost_per_token": 4e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistralai/mistral-medium-3": { + "input_cost_per_token": 4e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistralai/mistral-medium-3@001": { + "input_cost_per_token": 4e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000002, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistral-large-2411": { + 
"input_cost_per_token": 0.000002, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistral-large@2407": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistral-large@2411-001": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistral-large@latest": { + "input_cost_per_token": 0.000002, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000006, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistral-nemo@2407": { + "input_cost_per_token": 0.000003, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000003, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistral-nemo@latest": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 1.5e-7, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/mistral-small-2503": { + 
"input_cost_per_token": 0.000001, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "max_tokens": 128000, + "mode": "chat", + "output_cost_per_token": 0.000003, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true + }, + "vertex_ai/mistral-small-2503@001": { + "input_cost_per_token": 0.000001, + "litellm_provider": "vertex_ai-mistral_models", + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "max_tokens": 8191, + "mode": "chat", + "output_cost_per_token": 0.000003, + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/openai/gpt-oss-120b-maas": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vertex_ai-openai_models", + "max_input_tokens": 131072, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 6e-7, + "source": "https://console.cloud.google.com/vertex-ai/publishers/openai/model-garden/gpt-oss-120b-maas", + "supports_reasoning": true + }, + "vertex_ai/openai/gpt-oss-20b-maas": { + "input_cost_per_token": 7.5e-8, + "litellm_provider": "vertex_ai-openai_models", + "max_input_tokens": 131072, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 3e-7, + "source": "https://console.cloud.google.com/vertex-ai/publishers/openai/model-garden/gpt-oss-120b-maas", + "supports_reasoning": true + }, + "vertex_ai/qwen/qwen3-235b-a22b-instruct-2507-maas": { + "input_cost_per_token": 2.5e-7, + "litellm_provider": "vertex_ai-qwen_models", + "max_input_tokens": 262144, + "max_output_tokens": 16384, + "max_tokens": 16384, + "mode": "chat", + "output_cost_per_token": 0.000001, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/qwen/qwen3-coder-480b-a35b-instruct-maas": { + "input_cost_per_token": 0.000001, + "litellm_provider": 
"vertex_ai-qwen_models", + "max_input_tokens": 262144, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.000004, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/qwen/qwen3-next-80b-a3b-instruct-maas": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vertex_ai-qwen_models", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/qwen/qwen3-next-80b-a3b-thinking-maas": { + "input_cost_per_token": 1.5e-7, + "litellm_provider": "vertex_ai-qwen_models", + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "max_tokens": 262144, + "mode": "chat", + "output_cost_per_token": 0.0000012, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "wandb/openai/gpt-oss-120b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.015, + "output_cost_per_token": 0.06, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/openai/gpt-oss-20b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.005, + "output_cost_per_token": 0.02, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/zai-org/GLM-4.5": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.055, + "output_cost_per_token": 0.2, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/Qwen/Qwen3-235B-A22B-Instruct-2507": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + 
"input_cost_per_token": 0.01, + "output_cost_per_token": 0.01, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/Qwen/Qwen3-Coder-480B-A35B-Instruct": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 0.1, + "output_cost_per_token": 0.15, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/Qwen/Qwen3-235B-A22B-Thinking-2507": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 0.01, + "output_cost_per_token": 0.01, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/moonshotai/Kimi-K2-Instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 6e-7, + "output_cost_per_token": 0.0000025, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/meta-llama/Llama-3.1-8B-Instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 0.022, + "output_cost_per_token": 0.022, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/deepseek-ai/DeepSeek-V3.1": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 0.055, + "output_cost_per_token": 0.165, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/deepseek-ai/DeepSeek-R1-0528": { + "max_tokens": 161000, + "max_input_tokens": 161000, + "max_output_tokens": 161000, + "input_cost_per_token": 0.135, + "output_cost_per_token": 0.54, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/deepseek-ai/DeepSeek-V3-0324": { + "max_tokens": 161000, + "max_input_tokens": 161000, + "max_output_tokens": 161000, + "input_cost_per_token": 0.114, + "output_cost_per_token": 0.275, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/meta-llama/Llama-3.3-70B-Instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + 
"input_cost_per_token": 0.071, + "output_cost_per_token": 0.071, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/meta-llama/Llama-4-Scout-17B-16E-Instruct": { + "max_tokens": 64000, + "max_input_tokens": 64000, + "max_output_tokens": 64000, + "input_cost_per_token": 0.017, + "output_cost_per_token": 0.066, + "litellm_provider": "wandb", + "mode": "chat" + }, + "wandb/microsoft/Phi-4-mini-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 0.008, + "output_cost_per_token": 0.035, + "litellm_provider": "wandb", + "mode": "chat" + }, + "watsonx/ibm/granite-3-8b-instruct": { + "input_cost_per_token": 2e-7, + "litellm_provider": "watsonx", + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 2e-7, + "supports_audio_input": false, + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "watsonx/mistralai/mistral-large": { + "input_cost_per_token": 0.000003, + "litellm_provider": "watsonx", + "max_input_tokens": 131072, + "max_output_tokens": 16384, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_audio_input": false, + "supports_audio_output": false, + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_system_messages": true, + "supports_tool_choice": true, + "supports_vision": false + }, + "watsonx/bigscience/mt0-xxl-13b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0005, + "output_cost_per_token": 0.002, + "litellm_provider": "watsonx", + "mode": "chat", + 
"supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "watsonx/core42/jais-13b-chat": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0005, + "output_cost_per_token": 0.002, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "watsonx/google/flan-t5-xl-3b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 6e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "watsonx/ibm/granite-13b-chat-v2": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 6e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "watsonx/ibm/granite-13b-instruct-v2": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 6e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "watsonx/ibm/granite-3-3-8b-instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "watsonx/ibm/granite-4-h-small": { + "max_tokens": 20480, + "max_input_tokens": 20480, + "max_output_tokens": 20480, + 
"input_cost_per_token": 6e-8, + "output_cost_per_token": 2.5e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "watsonx/ibm/granite-guardian-3-2-2b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "watsonx/ibm/granite-guardian-3-3-8b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "watsonx/ibm/granite-ttm-1024-96-r2": { + "max_tokens": 512, + "max_input_tokens": 512, + "max_output_tokens": 512, + "input_cost_per_token": 3.8e-7, + "output_cost_per_token": 3.8e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "watsonx/ibm/granite-ttm-1536-96-r2": { + "max_tokens": 512, + "max_input_tokens": 512, + "max_output_tokens": 512, + "input_cost_per_token": 3.8e-7, + "output_cost_per_token": 3.8e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "watsonx/ibm/granite-ttm-512-96-r2": { + "max_tokens": 512, + "max_input_tokens": 512, + "max_output_tokens": 512, + "input_cost_per_token": 3.8e-7, + "output_cost_per_token": 3.8e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false 
+ }, + "watsonx/ibm/granite-vision-3-2-2b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": true + }, + "watsonx/meta-llama/llama-3-2-11b-vision-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 3.5e-7, + "output_cost_per_token": 3.5e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "watsonx/meta-llama/llama-3-2-1b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "watsonx/meta-llama/llama-3-2-3b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 1.5e-7, + "output_cost_per_token": 1.5e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "watsonx/meta-llama/llama-3-2-90b-vision-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 0.000002, + "output_cost_per_token": 0.000002, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "watsonx/meta-llama/llama-3-3-70b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + 
"input_cost_per_token": 7.1e-7, + "output_cost_per_token": 7.1e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "watsonx/meta-llama/llama-4-maverick-17b": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 3.5e-7, + "output_cost_per_token": 0.0000014, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "watsonx/meta-llama/llama-guard-3-11b-vision": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 3.5e-7, + "output_cost_per_token": 3.5e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": true + }, + "watsonx/mistralai/mistral-medium-2505": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.00001, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "watsonx/mistralai/mistral-small-2503": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 3e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "watsonx/mistralai/mistral-small-3-1-24b-instruct-2503": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 3e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": true, + 
"supports_parallel_function_calling": true, + "supports_vision": false + }, + "watsonx/mistralai/pixtral-12b-2409": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 3.5e-7, + "output_cost_per_token": 3.5e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": true + }, + "watsonx/openai/gpt-oss-120b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1.5e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "watsonx/sdaia/allam-1-13b-instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0000018, + "output_cost_per_token": 0.0000018, + "litellm_provider": "watsonx", + "mode": "chat", + "supports_function_calling": false, + "supports_parallel_function_calling": false, + "supports_vision": false + }, + "xai/grok-2": { + "input_cost_per_token": 0.000002, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-2-1212": { + "input_cost_per_token": 0.000002, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-2-latest": { + "input_cost_per_token": 0.000002, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": 
"chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-2-vision": { + "input_cost_per_image": 0.000002, + "input_cost_per_token": 0.000002, + "litellm_provider": "xai", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "xai/grok-2-vision-1212": { + "input_cost_per_image": 0.000002, + "input_cost_per_token": 0.000002, + "litellm_provider": "xai", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "xai/grok-2-vision-latest": { + "input_cost_per_image": 0.000002, + "input_cost_per_token": 0.000002, + "litellm_provider": "xai", + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "max_tokens": 32768, + "mode": "chat", + "output_cost_per_token": 0.00001, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "xai/grok-3": { + "input_cost_per_token": 0.000003, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000015, + "source": "https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-3-beta": { + "input_cost_per_token": 0.000003, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000015, + "source": 
"https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-3-fast-beta": { + "input_cost_per_token": 0.000005, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000025, + "source": "https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-3-fast-latest": { + "input_cost_per_token": 0.000005, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000025, + "source": "https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-3-latest": { + "input_cost_per_token": 0.000003, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000015, + "source": "https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-3-mini": { + "input_cost_per_token": 3e-7, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 5e-7, + "source": "https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-3-mini-beta": { + "input_cost_per_token": 3e-7, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + 
"max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 5e-7, + "source": "https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-3-mini-fast": { + "input_cost_per_token": 6e-7, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000004, + "source": "https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-3-mini-fast-beta": { + "input_cost_per_token": 6e-7, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000004, + "source": "https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-3-mini-fast-latest": { + "input_cost_per_token": 6e-7, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 0.000004, + "source": "https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": false, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-3-mini-latest": { + "input_cost_per_token": 3e-7, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 131072, + "mode": "chat", + "output_cost_per_token": 5e-7, + "source": "https://x.ai/api#pricing", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": false, + 
"supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-4": { + "input_cost_per_token": 0.000003, + "litellm_provider": "xai", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "source": "https://docs.x.ai/docs/models", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-4-fast-reasoning": { + "litellm_provider": "xai", + "max_input_tokens": 2000000, + "max_output_tokens": 2000000, + "max_tokens": 2000000, + "mode": "chat", + "input_cost_per_token": 2e-7, + "input_cost_per_token_above_128k_tokens": 4e-7, + "output_cost_per_token": 5e-7, + "output_cost_per_token_above_128k_tokens": 0.000001, + "cache_read_input_token_cost": 5e-8, + "source": "https://docs.x.ai/docs/models", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-4-fast-non-reasoning": { + "litellm_provider": "xai", + "max_input_tokens": 2000000, + "max_output_tokens": 2000000, + "cache_read_input_token_cost": 5e-8, + "max_tokens": 2000000, + "mode": "chat", + "input_cost_per_token": 2e-7, + "input_cost_per_token_above_128k_tokens": 4e-7, + "output_cost_per_token": 5e-7, + "output_cost_per_token_above_128k_tokens": 0.000001, + "source": "https://docs.x.ai/docs/models", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-4-0709": { + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_128k_tokens": 0.000006, + "litellm_provider": "xai", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "output_cost_per_token_above_128k_tokens": 0.00003, + "source": "https://docs.x.ai/docs/models", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + 
"xai/grok-4-latest": { + "input_cost_per_token": 0.000003, + "input_cost_per_token_above_128k_tokens": 0.000006, + "litellm_provider": "xai", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.000015, + "output_cost_per_token_above_128k_tokens": 0.00003, + "source": "https://docs.x.ai/docs/models", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_web_search": true + }, + "xai/grok-4-1-fast": { + "cache_read_input_token_cost": 5e-8, + "input_cost_per_token": 2e-7, + "input_cost_per_token_above_128k_tokens": 4e-7, + "litellm_provider": "xai", + "max_input_tokens": 2000000, + "max_output_tokens": 2000000, + "max_tokens": 2000000, + "mode": "chat", + "output_cost_per_token": 5e-7, + "output_cost_per_token_above_128k_tokens": 0.000001, + "source": "https://docs.x.ai/docs/models/grok-4-1-fast-reasoning", + "supports_audio_input": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "xai/grok-4-1-fast-reasoning": { + "cache_read_input_token_cost": 5e-8, + "input_cost_per_token": 2e-7, + "input_cost_per_token_above_128k_tokens": 4e-7, + "litellm_provider": "xai", + "max_input_tokens": 2000000, + "max_output_tokens": 2000000, + "max_tokens": 2000000, + "mode": "chat", + "output_cost_per_token": 5e-7, + "output_cost_per_token_above_128k_tokens": 0.000001, + "source": "https://docs.x.ai/docs/models/grok-4-1-fast-reasoning", + "supports_audio_input": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "xai/grok-4-1-fast-reasoning-latest": { + "cache_read_input_token_cost": 5e-8, + "input_cost_per_token": 2e-7, + "input_cost_per_token_above_128k_tokens": 4e-7, + 
"litellm_provider": "xai", + "max_input_tokens": 2000000, + "max_output_tokens": 2000000, + "max_tokens": 2000000, + "mode": "chat", + "output_cost_per_token": 5e-7, + "output_cost_per_token_above_128k_tokens": 0.000001, + "source": "https://docs.x.ai/docs/models/grok-4-1-fast-reasoning", + "supports_audio_input": true, + "supports_function_calling": true, + "supports_reasoning": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "xai/grok-4-1-fast-non-reasoning": { + "cache_read_input_token_cost": 5e-8, + "input_cost_per_token": 2e-7, + "input_cost_per_token_above_128k_tokens": 4e-7, + "litellm_provider": "xai", + "max_input_tokens": 2000000, + "max_output_tokens": 2000000, + "max_tokens": 2000000, + "mode": "chat", + "output_cost_per_token": 5e-7, + "output_cost_per_token_above_128k_tokens": 0.000001, + "source": "https://docs.x.ai/docs/models/grok-4-1-fast-non-reasoning", + "supports_audio_input": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "xai/grok-4-1-fast-non-reasoning-latest": { + "cache_read_input_token_cost": 5e-8, + "input_cost_per_token": 2e-7, + "input_cost_per_token_above_128k_tokens": 4e-7, + "litellm_provider": "xai", + "max_input_tokens": 2000000, + "max_output_tokens": 2000000, + "max_tokens": 2000000, + "mode": "chat", + "output_cost_per_token": 5e-7, + "output_cost_per_token_above_128k_tokens": 0.000001, + "source": "https://docs.x.ai/docs/models/grok-4-1-fast-non-reasoning", + "supports_audio_input": true, + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "xai/grok-beta": { + "input_cost_per_token": 0.000005, + "litellm_provider": "xai", + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "max_tokens": 
131072, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "xai/grok-code-fast": { + "cache_read_input_token_cost": 2e-8, + "input_cost_per_token": 2e-7, + "litellm_provider": "xai", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://docs.x.ai/docs/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "xai/grok-code-fast-1": { + "cache_read_input_token_cost": 2e-8, + "input_cost_per_token": 2e-7, + "litellm_provider": "xai", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://docs.x.ai/docs/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "xai/grok-code-fast-1-0825": { + "cache_read_input_token_cost": 2e-8, + "input_cost_per_token": 2e-7, + "litellm_provider": "xai", + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "max_tokens": 256000, + "mode": "chat", + "output_cost_per_token": 0.0000015, + "source": "https://docs.x.ai/docs/models", + "supports_function_calling": true, + "supports_reasoning": true, + "supports_tool_choice": true + }, + "xai/grok-vision-beta": { + "input_cost_per_image": 0.000005, + "input_cost_per_token": 0.000005, + "litellm_provider": "xai", + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "max_tokens": 8192, + "mode": "chat", + "output_cost_per_token": 0.000015, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "supports_web_search": true + }, + "zai/glm-4.6": { + "input_cost_per_token": 6e-7, + "output_cost_per_token": 0.0000022, + "litellm_provider": "zai", + "max_input_tokens": 200000, + 
"max_output_tokens": 128000, + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "source": "https://docs.z.ai/guides/overview/pricing" + }, + "zai/glm-4.5": { + "input_cost_per_token": 6e-7, + "output_cost_per_token": 0.0000022, + "litellm_provider": "zai", + "max_input_tokens": 128000, + "max_output_tokens": 32000, + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "source": "https://docs.z.ai/guides/overview/pricing" + }, + "zai/glm-4.5v": { + "input_cost_per_token": 6e-7, + "output_cost_per_token": 0.0000018, + "litellm_provider": "zai", + "max_input_tokens": 128000, + "max_output_tokens": 32000, + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_vision": true, + "source": "https://docs.z.ai/guides/overview/pricing" + }, + "zai/glm-4.5-x": { + "input_cost_per_token": 0.0000022, + "output_cost_per_token": 0.0000089, + "litellm_provider": "zai", + "max_input_tokens": 128000, + "max_output_tokens": 32000, + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "source": "https://docs.z.ai/guides/overview/pricing" + }, + "zai/glm-4.5-air": { + "input_cost_per_token": 2e-7, + "output_cost_per_token": 0.0000011, + "litellm_provider": "zai", + "max_input_tokens": 128000, + "max_output_tokens": 32000, + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "source": "https://docs.z.ai/guides/overview/pricing" + }, + "zai/glm-4.5-airx": { + "input_cost_per_token": 0.0000011, + "output_cost_per_token": 0.0000045, + "litellm_provider": "zai", + "max_input_tokens": 128000, + "max_output_tokens": 32000, + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "source": "https://docs.z.ai/guides/overview/pricing" + }, + "zai/glm-4-32b-0414-128k": { + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "zai", + 
"max_input_tokens": 128000, + "max_output_tokens": 32000, + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "source": "https://docs.z.ai/guides/overview/pricing" + }, + "zai/glm-4.5-flash": { + "input_cost_per_token": 0, + "output_cost_per_token": 0, + "litellm_provider": "zai", + "max_input_tokens": 128000, + "max_output_tokens": 32000, + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "source": "https://docs.z.ai/guides/overview/pricing" + }, + "openai/container": { + "code_interpreter_cost_per_session": 0.03, + "litellm_provider": "openai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-coder-480b-a35b-instruct": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 4.5e-7, + "output_cost_per_token": 0.0000018, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_reasoning": true + }, + "fireworks_ai/accounts/fireworks/models/chronos-hermes-13b-v2": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-13b": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-13b-instruct": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-13b-python": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 
2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-34b": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-34b-instruct": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-34b-python": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-70b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-70b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-70b-python": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-7b": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + 
"fireworks_ai/accounts/fireworks/models/code-llama-7b-instruct": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-llama-7b-python": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/code-qwen-1p5-7b": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/codegemma-2b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/codegemma-7b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/cogito-671b-v2-p1": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 0.0000012, + "output_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/cogito-v1-preview-llama-3b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/cogito-v1-preview-llama-70b": { + 
"max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/cogito-v1-preview-llama-8b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/cogito-v1-preview-qwen-14b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/cogito-v1-preview-qwen-32b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/dbrx-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 0.0000012, + "output_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-coder-1b-base": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-coder-33b-instruct": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-coder-7b-base": { + "max_tokens": 4096, + 
"max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-coder-7b-base-v1p5": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-coder-7b-instruct-v1p5": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-lite-base": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-lite-instruct": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-prover-v2": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 0.0000012, + "output_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-r1-0528-distill-qwen3-8b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-llama-70b": { + "max_tokens": 131072, + 
"max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-llama-8b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-14b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-1p5b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-32b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-r1-distill-qwen-7b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-v2-lite-chat": { + "max_tokens": 163840, + "max_input_tokens": 163840, + "max_output_tokens": 163840, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-v2p5": { + "max_tokens": 32768, + "max_input_tokens": 32768, 
+ "max_output_tokens": 32768, + "input_cost_per_token": 0.0000012, + "output_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/devstral-small-2505": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/dobby-mini-unhinged-plus-llama-3-1-8b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/dobby-unhinged-llama-3-3-70b-new": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/dolphin-2-9-2-qwen2-72b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/dolphin-2p6-mixtral-8x7b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/ernie-4p5-21b-a3b-pt": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/ernie-4p5-300b-a47b-pt": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 
4096, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/fare-20b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/firefunction-v1": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/firellava-13b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/firesearch-ocr-v6": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/flux-1-dev": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/flux-1-dev-controlnet-union": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-9, + "output_cost_per_token": 1e-9, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/flux-1-schnell": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + 
}, + "fireworks_ai/accounts/fireworks/models/gemma-2b-it": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/gemma-3-27b-it": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/gemma-7b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/gemma-7b-it": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/gemma2-9b-it": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/glm-4p5v": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.0000012, + "output_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_reasoning": true + }, + "fireworks_ai/accounts/fireworks/models/gpt-oss-safeguard-120b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.0000012, + "output_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/gpt-oss-safeguard-20b": { + "max_tokens": 
131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/hermes-2-pro-mistral-7b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/internvl3-38b": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/internvl3-78b": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/internvl3-8b": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/kat-coder": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/kat-dev-32b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/kat-dev-72b-exp": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + 
"output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-guard-2-8b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-guard-3-1b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-guard-3-8b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v2-13b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v2-13b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v2-70b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v2-70b-chat": { + "max_tokens": 2048, + "max_input_tokens": 2048, + "max_output_tokens": 2048, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + 
"fireworks_ai/accounts/fireworks/models/llama-v2-7b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v2-7b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3-70b-instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3-70b-instruct-hf": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3-8b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3-8b-instruct-hf": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p1-405b-instruct-long": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p1-70b-instruct": { + "max_tokens": 131072, + 
"max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p1-70b-instruct-1b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p1-nemotron-70b-instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-1b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-3b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p3-70b-instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llamaguard-7b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llava-yi-34b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 
9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/minimax-m1-80k": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/minimax-m2": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-7, + "output_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/ministral-3-14b-instruct-2512": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/ministral-3-3b-instruct-2512": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/ministral-3-8b-instruct-2512": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mistral-7b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mistral-7b-instruct-4k": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": 
"fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mistral-7b-instruct-v0p2": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mistral-7b-instruct-v3": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mistral-7b-v0p2": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mistral-large-3-fp8": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 0.0000012, + "output_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mistral-nemo-base-2407": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mistral-nemo-instruct-2407": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mistral-small-24b-instruct-2501": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + 
"fireworks_ai/accounts/fireworks/models/mixtral-8x22b": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "input_cost_per_token": 0.0000012, + "output_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "input_cost_per_token": 0.0000012, + "output_cost_per_token": 0.0000012, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mixtral-8x7b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mixtral-8x7b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mixtral-8x7b-instruct-hf": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/mythomax-l2-13b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/nemotron-nano-v2-12b-vl": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/nous-capybara-7b-v1p9": { + 
"max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/nous-hermes-2-mixtral-8x7b-dpo": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/nous-hermes-2-yi-34b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/nous-hermes-llama2-13b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/nous-hermes-llama2-70b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/nous-hermes-llama2-7b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/nvidia-nemotron-nano-12b-v2": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/nvidia-nemotron-nano-9b-v2": { + "max_tokens": 131072, + "max_input_tokens": 131072, + 
"max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/openchat-3p5-0106-7b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/openhermes-2-mistral-7b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/openhermes-2p5-mistral-7b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/openorca-7b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/phi-2-3b": { + "max_tokens": 2048, + "max_input_tokens": 2048, + "max_output_tokens": 2048, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/phi-3-mini-128k-instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/phi-3-vision-128k-instruct": { + "max_tokens": 32064, + "max_input_tokens": 32064, + "max_output_tokens": 32064, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 
2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/phind-code-llama-34b-python-v1": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/phind-code-llama-34b-v1": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/phind-code-llama-34b-v2": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/pythia-12b": { + "max_tokens": 2048, + "max_input_tokens": 2048, + "max_output_tokens": 2048, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen-qwq-32b-preview": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen-v2p5-14b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen-v2p5-7b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + 
"fireworks_ai/accounts/fireworks/models/qwen1p5-72b-chat": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2-7b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2-vl-2b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2-vl-72b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2-vl-7b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-0p5b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-14b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-1p5b-instruct": { + "max_tokens": 32768, + 
"max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-32b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-32b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-72b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-72b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-7b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-0p5b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-0p5b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 1e-7, 
+ "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-14b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-14b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-1p5b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-1p5b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct-128k": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct-32k-rope": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + 
"litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct-64k": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-3b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-3b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-7b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-7b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-math-72b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-vl-32b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + 
"fireworks_ai/accounts/fireworks/models/qwen2p5-vl-3b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-vl-72b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-vl-7b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-0p6b": { + "max_tokens": 40960, + "max_input_tokens": 40960, + "max_output_tokens": 40960, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-14b": { + "max_tokens": 40960, + "max_input_tokens": 40960, + "max_output_tokens": 40960, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-1p7b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-1p7b-fp8-draft": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-1p7b-fp8-draft-131072": { + "max_tokens": 
131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-1p7b-fp8-draft-40960": { + "max_tokens": 40960, + "max_input_tokens": 40960, + "max_output_tokens": 40960, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-235b-a22b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2.2e-7, + "output_cost_per_token": 8.8e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-235b-a22b-instruct-2507": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 2.2e-7, + "output_cost_per_token": 8.8e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-235b-a22b-thinking-2507": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 2.2e-7, + "output_cost_per_token": 8.8e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-30b-a3b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1.5e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-30b-a3b-instruct-2507": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 5e-7, + "output_cost_per_token": 5e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-30b-a3b-thinking-2507": { + "max_tokens": 262144, + 
"max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-32b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_reasoning": true + }, + "fireworks_ai/accounts/fireworks/models/qwen3-4b": { + "max_tokens": 40960, + "max_input_tokens": 40960, + "max_output_tokens": 40960, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-4b-instruct-2507": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-8b": { + "max_tokens": 40960, + "max_input_tokens": 40960, + "max_output_tokens": 40960, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_reasoning": true + }, + "fireworks_ai/accounts/fireworks/models/qwen3-coder-30b-a3b-instruct": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 1.5e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-coder-480b-instruct-bf16": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-next-80b-a3b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 
4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-next-80b-a3b-thinking": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-vl-235b-a22b-instruct": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 2.2e-7, + "output_cost_per_token": 8.8e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-vl-235b-a22b-thinking": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 2.2e-7, + "output_cost_per_token": 8.8e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-vl-30b-a3b-instruct": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 1.5e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-vl-30b-a3b-thinking": { + "max_tokens": 262144, + "max_input_tokens": 262144, + "max_output_tokens": 262144, + "input_cost_per_token": 1.5e-7, + "output_cost_per_token": 6e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-vl-32b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwen3-vl-8b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, 
+ "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/qwq-32b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/rolm-ocr": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/snorkel-mistral-7b-pairrm-dpo": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/stablecode-3b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/starcoder-16b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/starcoder-7b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/starcoder2-15b": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + 
"fireworks_ai/accounts/fireworks/models/starcoder2-3b": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 1e-7, + "output_cost_per_token": 1e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/starcoder2-7b": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/toppy-m-7b": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/yi-34b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/yi-34b-200k-capybara": { + "max_tokens": 200000, + "max_input_tokens": 200000, + "max_output_tokens": 200000, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/yi-34b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-7, + "output_cost_per_token": 9e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/yi-6b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/zephyr-7b-beta": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + 
"input_cost_per_token": 2e-7, + "output_cost_per_token": 2e-7, + "litellm_provider": "fireworks_ai", + "mode": "chat" + } } \ No newline at end of file diff --git a/scripts/filter-chat-mode.js b/scripts/filter-chat-mode.js new file mode 100644 index 00000000000..919765235ff --- /dev/null +++ b/scripts/filter-chat-mode.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node + +/** + * filter-chat-mode.js - Filter JSON entries to keep only those with "mode": "chat" + * + * Usage: + * node filter-chat-mode.js [output-file] + * cat input.json | node filter-chat-mode.js > output.json + * + * The script expects JSON input that is either: + * - An array of objects, each potentially having a "mode" field + * - An object where values are objects with a "mode" field + * + * It will filter to keep only entries where "mode" === "chat" + */ + +const fs = require('fs'); +const path = require('path'); + +function filterChatMode(data) { + if (Array.isArray(data)) { + // If input is an array, filter objects with mode: "chat" + return data.filter(item => + item && typeof item === 'object' && item.mode === 'chat' + ); + } else if (data && typeof data === 'object') { + // If input is an object, filter properties with mode: "chat" + const result = {}; + for (const [key, value] of Object.entries(data)) { + if (value && typeof value === 'object' && value.mode === 'chat') { + result[key] = value; + } + } + return result; + } else { + throw new Error('Input must be a JSON array or object'); + } +} + +function main() { + let inputData; + let inputPath = null; + let outputPath = null; + + // Parse command line arguments + const args = process.argv.slice(2); + + if (args.length > 0) { + // First argument is input file + inputPath = args[0]; + try { + const inputContent = fs.readFileSync(inputPath, 'utf8'); + inputData = JSON.parse(inputContent); + } catch (error) { + console.error(`Error reading or parsing ${inputPath}:`, error.message); + process.exit(1); + } + + // Second argument (optional) is output 
file + if (args.length > 1) { + outputPath = args[1]; + } else { + // If only input file is provided, overwrite it in place + outputPath = inputPath; + } + } else { + // Read from stdin + try { + const stdinContent = fs.readFileSync(0, 'utf8'); // 0 = stdin + inputData = JSON.parse(stdinContent); + } catch (error) { + console.error('Error reading or parsing stdin:', error.message); + console.error('\nUsage:'); + console.error(' node filter-chat-mode.js [output-file]'); + console.error(' cat input.json | node filter-chat-mode.js > output.json'); + process.exit(1); + } + } + + try { + const filteredData = filterChatMode(inputData); + + const outputJson = JSON.stringify(filteredData, null, 2); + + if (outputPath) { + fs.writeFileSync(outputPath, outputJson, 'utf8'); + if (outputPath === inputPath) { + console.error(`Filtered data written back to ${inputPath}`); + } else { + console.error(`Filtered data written to ${outputPath}`); + } + } else { + console.log(outputJson); + } + } catch (error) { + console.error('Error filtering data:', error.message); + process.exit(1); + } +} + +if (require.main === module) { + main(); +} + +module.exports = { filterChatMode }; From f3b2e38ccb00d3d42afc8c90cb8760a9bbdd8ac5 Mon Sep 17 00:00:00 2001 From: Dustin Washington Date: Fri, 19 Dec 2025 00:53:57 -0500 Subject: [PATCH 10/10] Add oneOf blocks to tools that require it in the schema --- aider/tools/insert_block.py | 14 ++++++++++++++ aider/tools/show_numbered_context.py | 4 ++++ 2 files changed, 18 insertions(+) diff --git a/aider/tools/insert_block.py b/aider/tools/insert_block.py index 96f13262814..68acc7738c3 100644 --- a/aider/tools/insert_block.py +++ b/aider/tools/insert_block.py @@ -38,6 +38,20 @@ class Tool(BaseTool): "use_regex": {"type": "boolean", "default": False}, }, "required": ["file_path", "content"], + "oneOf": [ + { + "required": ["after_pattern"], + "not": {"required": ["before_pattern", "position"]}, + }, + { + "required": ["before_pattern"], + "not": 
{"required": ["after_pattern", "position"]}, + }, + { + "required": ["position"], + "not": {"required": ["after_pattern", "before_pattern"]}, + }, + ], }, }, } diff --git a/aider/tools/show_numbered_context.py b/aider/tools/show_numbered_context.py index 45aff33b446..7bfd0d43b5b 100644 --- a/aider/tools/show_numbered_context.py +++ b/aider/tools/show_numbered_context.py @@ -25,6 +25,10 @@ class Tool(BaseTool): "context_lines": {"type": "integer", "default": 3}, }, "required": ["file_path"], + "oneOf": [ + {"required": ["pattern"], "not": {"required": ["line_number"]}}, + {"required": ["line_number"], "not": {"required": ["pattern"]}}, + ], }, }, }