Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions docs/my-website/docs/providers/openai.md
Original file line number Diff line number Diff line change
Expand Up @@ -163,6 +163,14 @@ os.environ["OPENAI_BASE_URL"] = "https://your_host/v1" # OPTIONAL

| Model Name | Function Call |
|-----------------------|-----------------------------------------------------------------|
| gpt-5 | `response = completion(model="gpt-5", messages=messages)` |
| gpt-5-mini | `response = completion(model="gpt-5-mini", messages=messages)` |
| gpt-5-nano | `response = completion(model="gpt-5-nano", messages=messages)` |
| gpt-5-chat | `response = completion(model="gpt-5-chat", messages=messages)` |
| gpt-5-chat-latest | `response = completion(model="gpt-5-chat-latest", messages=messages)` |
| gpt-5-2025-08-07 | `response = completion(model="gpt-5-2025-08-07", messages=messages)` |
| gpt-5-mini-2025-08-07 | `response = completion(model="gpt-5-mini-2025-08-07", messages=messages)` |
| gpt-5-nano-2025-08-07 | `response = completion(model="gpt-5-nano-2025-08-07", messages=messages)` |
| gpt-4.1 | `response = completion(model="gpt-4.1", messages=messages)` |
| gpt-4.1-mini | `response = completion(model="gpt-4.1-mini", messages=messages)` |
| gpt-4.1-nano | `response = completion(model="gpt-4.1-nano", messages=messages)` |
Expand Down
4 changes: 4 additions & 0 deletions litellm/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1145,6 +1145,9 @@ def add_known_models():
from .llms.openai.chat.gpt_transformation import (
OpenAIGPTConfig,
)
from .llms.openai.chat.gpt_5_transformation import (
OpenAIGPT5Config,
)
from .llms.openai.transcriptions.whisper_transformation import (
OpenAIWhisperAudioTranscriptionConfig,
)
Expand All @@ -1158,6 +1161,7 @@ def add_known_models():
)

openAIGPTAudioConfig = OpenAIGPTAudioConfig()
openAIGPT5Config = OpenAIGPT5Config()

from .llms.nvidia_nim.chat.transformation import NvidiaNimConfig
from .llms.nvidia_nim.embed import NvidiaNimEmbeddingConfig
Expand Down
58 changes: 58 additions & 0 deletions litellm/llms/openai/chat/gpt_5_transformation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
"""Support for OpenAI gpt-5 model family."""

from typing import Optional

import litellm

from .gpt_transformation import OpenAIGPTConfig


class OpenAIGPT5Config(OpenAIGPTConfig):
    """Configuration for the OpenAI gpt-5 model family.

    Encapsulates gpt-5-specific parameter handling on top of the generic
    GPT config:

    - ``max_tokens`` is rewritten to ``max_completion_tokens``.
    - ``temperature`` values other than ``1`` are rejected, or silently
      dropped when param-dropping is enabled.
    """

    @classmethod
    def is_model_gpt_5_model(cls, model: str) -> bool:
        """Return True when ``model`` names a gpt-5 family model."""
        return "gpt-5" in model

    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> dict:
        # gpt-5 models reject `max_tokens` on the OpenAI API; the accepted
        # spelling is `max_completion_tokens`.
        # Relevant issue: https://github.com/BerriAI/litellm/issues/13381
        try:
            optional_params["max_completion_tokens"] = non_default_params.pop(
                "max_tokens"
            )
        except KeyError:
            pass

        if "temperature" in non_default_params:
            temp: Optional[float] = non_default_params.pop("temperature")
            if temp is None:
                # A None temperature is simply discarded.
                pass
            elif temp == 1:
                # The only temperature gpt-5 accepts.
                optional_params["temperature"] = temp
            elif not (litellm.drop_params or drop_params):
                # Unsupported value and dropping is disabled: surface an error.
                raise litellm.utils.UnsupportedParamsError(
                    message=(
                        "gpt-5 models don't support temperature={}. Only temperature=1 is supported. To drop unsupported params set `litellm.drop_params = True`"
                    ).format(temp),
                    status_code=400,
                )

        # Delegate all remaining params to the generic GPT mapping.
        return super()._map_openai_params(
            non_default_params=non_default_params,
            optional_params=optional_params,
            model=model,
            drop_params=drop_params,
        )

11 changes: 11 additions & 0 deletions litellm/llms/openai/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@

from ...types.llms.openai import *
from ..base import BaseLLM
from .chat.gpt_5_transformation import OpenAIGPT5Config
from .chat.o_series_transformation import OpenAIOSeriesConfig
from .common_utils import (
BaseOpenAILLM,
Expand All @@ -55,6 +56,7 @@
)

openaiOSeriesConfig = OpenAIOSeriesConfig()
openAIGPT5Config = OpenAIGPT5Config()


class MistralEmbeddingConfig:
Expand Down Expand Up @@ -183,6 +185,8 @@ def get_supported_openai_params(self, model: str) -> list:
"""
if openaiOSeriesConfig.is_model_o_series_model(model=model):
return openaiOSeriesConfig.get_supported_openai_params(model=model)
elif openAIGPT5Config.is_model_gpt_5_model(model=model):
return openAIGPT5Config.get_supported_openai_params(model=model)
elif litellm.openAIGPTAudioConfig.is_model_gpt_audio_model(model=model):
return litellm.openAIGPTAudioConfig.get_supported_openai_params(model=model)
else:
Expand Down Expand Up @@ -217,6 +221,13 @@ def map_openai_params(
model=model,
drop_params=drop_params,
)
elif openAIGPT5Config.is_model_gpt_5_model(model=model):
return openAIGPT5Config.map_openai_params(
non_default_params=non_default_params,
optional_params=optional_params,
model=model,
drop_params=drop_params,
)
elif litellm.openAIGPTAudioConfig.is_model_gpt_audio_model(model=model):
return litellm.openAIGPTAudioConfig.map_openai_params(
non_default_params=non_default_params,
Expand Down
40 changes: 40 additions & 0 deletions tests/test_litellm/llms/openai/test_gpt5_transformation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import pytest

import litellm
from litellm.llms.openai.openai import OpenAIConfig


@pytest.fixture()
def config() -> OpenAIConfig:
    """Provide a fresh OpenAIConfig instance for each test."""
    cfg = OpenAIConfig()
    return cfg


def test_gpt5_maps_max_tokens(config: OpenAIConfig):
    """`max_tokens` is rewritten to `max_completion_tokens` for gpt-5."""
    mapped = config.map_openai_params(
        non_default_params={"max_tokens": 10},
        optional_params={},
        model="gpt-5",
        drop_params=False,
    )
    assert mapped.get("max_completion_tokens") == 10
    assert "max_tokens" not in mapped


def test_gpt5_temperature_drop(config: OpenAIConfig):
    """An unsupported temperature is dropped when drop_params=True."""
    mapped = config.map_openai_params(
        non_default_params={"temperature": 0.2},
        optional_params={},
        model="gpt-5",
        drop_params=True,
    )
    assert "temperature" not in mapped


def test_gpt5_temperature_error(config: OpenAIConfig):
    """An unsupported temperature raises when drop_params=False."""
    with pytest.raises(litellm.utils.UnsupportedParamsError):
        config.map_openai_params(
            non_default_params={"temperature": 0.2},
            optional_params={},
            model="gpt-5",
            drop_params=False,
        )
Loading