From bf60efcbc53d215316d61e51f4865df2b4bcef11 Mon Sep 17 00:00:00 2001
From: stainless-bot
Date: Wed, 26 Jun 2024 17:26:24 +0000
Subject: [PATCH] feat(api): OpenAPI spec update via Stainless API

---
 .stats.yml                                    |  2 +-
 README.md                                     |  2 +-
 bin/publish-pypi                              |  3 ++
 src/together/resources/chat/completions.py    | 10 ++++++
 src/together/types/__init__.py                |  1 +
 src/together/types/chat/chat_completion.py    | 33 ++++++++++++++-----
 .../types/chat/chat_completion_chunk.py       | 32 +++++++++++-------
 .../types/chat/completion_create_params.py    | 11 +++++++
 src/together/types/completion.py              |  2 +-
 src/together/types/fine_tune_event.py         |  3 +-
 src/together/types/log_probs.py               | 10 +++++-
 src/together/types/tool_choice.py             | 23 +++++++++++++
 src/together/types/tool_choice_param.py       | 14 +++++---
 tests/api_resources/chat/test_completions.py  |  4 +++
 14 files changed, 121 insertions(+), 29 deletions(-)
 create mode 100644 src/together/types/tool_choice.py

diff --git a/.stats.yml b/.stats.yml
index d6da9ca3..cf0d3ce7 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,2 @@
 configured_endpoints: 15
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai%2FTogetherAI-33661dd8fd4c26ecd595dee22e2c9274e6c4699ad8de5ece233e0d37376c6b7c.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai%2FTogetherAI-e8f4e11a2e3927c75dce42c913ef5c9adcf2aef3d3b1312b4825d9f135413c39.yml
diff --git a/README.md b/README.md
index e52c2070..70154594 100644
--- a/README.md
+++ b/README.md
@@ -362,7 +362,7 @@ You can directly override the [httpx client](https://www.python-httpx.org/api/#c
 
 - Support for proxies
 - Custom transports
-- Additional [advanced](https://www.python-httpx.org/advanced/#client-instances) functionality
+- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality
 
 ```python
 from together import Together, DefaultHttpxClient
diff --git a/bin/publish-pypi b/bin/publish-pypi
index 826054e9..05bfccbb 100644
--- a/bin/publish-pypi
+++ b/bin/publish-pypi
@@ -3,4 +3,7 @@ set -eux
 
 mkdir -p dist
 rye build --clean
+# Patching importlib-metadata version until upstream library version is updated
+# https://github.com/pypa/twine/issues/977#issuecomment-2189800841
+"$HOME/.rye/self/bin/python3" -m pip install 'importlib-metadata==7.2.1'
 rye publish --yes --token=$PYPI_TOKEN
diff --git a/src/together/resources/chat/completions.py b/src/together/resources/chat/completions.py
index 0d69a7c8..5d698125 100644
--- a/src/together/resources/chat/completions.py
+++ b/src/together/resources/chat/completions.py
@@ -50,6 +50,7 @@ def create(
         model: str,
         echo: bool | NotGiven = NOT_GIVEN,
         frequency_penalty: float | NotGiven = NOT_GIVEN,
+        function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
         logprobs: int | NotGiven = NOT_GIVEN,
         max_tokens: int | NotGiven = NOT_GIVEN,
@@ -161,6 +162,7 @@ def create(
         stream: Literal[True],
         echo: bool | NotGiven = NOT_GIVEN,
         frequency_penalty: float | NotGiven = NOT_GIVEN,
+        function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
         logprobs: int | NotGiven = NOT_GIVEN,
         max_tokens: int | NotGiven = NOT_GIVEN,
@@ -271,6 +273,7 @@ def create(
         stream: bool,
         echo: bool | NotGiven = NOT_GIVEN,
         frequency_penalty: float | NotGiven = NOT_GIVEN,
+        function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
         logprobs: int | NotGiven = NOT_GIVEN,
         max_tokens: int | NotGiven = NOT_GIVEN,
@@ -380,6 +383,7 @@ def create(
         model: str,
         echo: bool | NotGiven = NOT_GIVEN,
         frequency_penalty: float | NotGiven = NOT_GIVEN,
+        function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
         logprobs: int | NotGiven = NOT_GIVEN,
         max_tokens: int | NotGiven = NOT_GIVEN,
@@ -411,6 +415,7 @@ def create(
                     "model": model,
                     "echo": echo,
                     "frequency_penalty": frequency_penalty,
+                    "function_call": function_call,
                     "logit_bias": logit_bias,
                     "logprobs": logprobs,
                     "max_tokens": max_tokens,
@@ -456,6 +461,7 @@ async def create(
         model: str,
         echo: bool | NotGiven = NOT_GIVEN,
         frequency_penalty: float | NotGiven = NOT_GIVEN,
+        function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
         logprobs: int | NotGiven = NOT_GIVEN,
         max_tokens: int | NotGiven = NOT_GIVEN,
@@ -567,6 +573,7 @@ async def create(
         stream: Literal[True],
         echo: bool | NotGiven = NOT_GIVEN,
         frequency_penalty: float | NotGiven = NOT_GIVEN,
+        function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
         logprobs: int | NotGiven = NOT_GIVEN,
         max_tokens: int | NotGiven = NOT_GIVEN,
@@ -677,6 +684,7 @@ async def create(
         stream: bool,
         echo: bool | NotGiven = NOT_GIVEN,
         frequency_penalty: float | NotGiven = NOT_GIVEN,
+        function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
         logprobs: int | NotGiven = NOT_GIVEN,
         max_tokens: int | NotGiven = NOT_GIVEN,
@@ -786,6 +794,7 @@ async def create(
         model: str,
         echo: bool | NotGiven = NOT_GIVEN,
         frequency_penalty: float | NotGiven = NOT_GIVEN,
+        function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
         logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
         logprobs: int | NotGiven = NOT_GIVEN,
         max_tokens: int | NotGiven = NOT_GIVEN,
@@ -817,6 +826,7 @@ async def create(
                     "model": model,
                     "echo": echo,
                     "frequency_penalty": frequency_penalty,
+                    "function_call": function_call,
                     "logit_bias": logit_bias,
                     "logprobs": logprobs,
                     "max_tokens": max_tokens,
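Reviewer note: the new `function_call` parameter is threaded through all eight `create` overloads above and forwarded in both request bodies. A minimal usage sketch, assuming the client picks up `TOGETHER_API_KEY` from the environment as Stainless clients normally do; the `get_current_weather` name is purely illustrative:

```python
from together import Together

client = Together()

# "none" and "auto" are the literal modes; a {"name": ...} dict forces a specific call.
completion = client.chat.completions.create(
    model="mistralai/Mixtral-8x7B-Instruct-v0.1",
    messages=[{"role": "user", "content": "What is the weather in Tokyo?"}],
    function_call={"name": "get_current_weather"},  # hypothetical function name
)
```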
diff --git a/src/together/types/__init__.py b/src/together/types/__init__.py
index ab0e2f56..cc25b8a0 100644
--- a/src/together/types/__init__.py
+++ b/src/together/types/__init__.py
@@ -7,6 +7,7 @@
 from .log_probs import LogProbs as LogProbs
 from .completion import Completion as Completion
 from .image_file import ImageFile as ImageFile
+from .tool_choice import ToolChoice as ToolChoice
 from .tools_param import ToolsParam as ToolsParam
 from .fine_tune_event import FineTuneEvent as FineTuneEvent
 from .tool_choice_param import ToolChoiceParam as ToolChoiceParam
diff --git a/src/together/types/chat/chat_completion.py b/src/together/types/chat/chat_completion.py
index 19fe4a27..ebe6722a 100644
--- a/src/together/types/chat/chat_completion.py
+++ b/src/together/types/chat/chat_completion.py
@@ -5,34 +5,51 @@
 from ..._models import BaseModel
 from ..log_probs import LogProbs
+from ..tool_choice import ToolChoice
 from .chat_completion_usage import ChatCompletionUsage
 
-__all__ = ["ChatCompletion", "Choice", "ChoiceMessage"]
+__all__ = ["ChatCompletion", "Choice", "ChoiceMessage", "ChoiceMessageFunctionCall"]
+
+
+class ChoiceMessageFunctionCall(BaseModel):
+    arguments: str
+
+    name: str
 
 
 class ChoiceMessage(BaseModel):
     content: Optional[str] = None
 
-    role: Optional[str] = None
+    role: Literal["assistant"]
+
+    function_call: Optional[ChoiceMessageFunctionCall] = None
+
+    tool_calls: Optional[List[ToolChoice]] = None
 
 
 class Choice(BaseModel):
-    finish_reason: Optional[Literal["stop", "eos", "length", "tool_calls"]] = None
+    finish_reason: Optional[Literal["stop", "eos", "length", "tool_calls", "function_call"]] = None
+
+    index: Optional[int] = None
 
     logprobs: Optional[LogProbs] = None
 
     message: Optional[ChoiceMessage] = None
 
+    seed: Optional[int] = None
+
+    text: Optional[str] = None
+
 
 class ChatCompletion(BaseModel):
-    id: Optional[str] = None
+    id: str
 
-    choices: Optional[List[Choice]] = None
+    choices: List[Choice]
 
-    created: Optional[int] = None
+    created: int
 
-    model: Optional[str] = None
+    model: str
 
-    object: Optional[Literal["chat.completion"]] = None
+    object: Literal["chat.completion"]
 
     usage: Optional[ChatCompletionUsage] = None
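With `id`, `choices`, `created`, `model`, and `object` now required on `ChatCompletion`, callers can drop several `None` checks, while `message`, `function_call`, and `tool_calls` stay optional. A hedged sketch of consuming the new fields, continuing the hypothetical request above:

```python
import json

choice = completion.choices[0]
message = choice.message

if message is not None and message.function_call is not None:
    # Per ChoiceMessageFunctionCall, `arguments` is a JSON-encoded string.
    args = json.loads(message.function_call.arguments)
    print(message.function_call.name, args)
elif message is not None and message.tool_calls:
    for call in message.tool_calls:
        print(call.id, call.function.name, call.function.arguments)
elif message is not None:
    print(message.content)
```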
diff --git a/src/together/types/chat/chat_completion_chunk.py b/src/together/types/chat/chat_completion_chunk.py
index df42bc2c..5107833d 100644
--- a/src/together/types/chat/chat_completion_chunk.py
+++ b/src/together/types/chat/chat_completion_chunk.py
@@ -4,42 +4,52 @@
 from typing_extensions import Literal
 
 from ..._models import BaseModel
+from ..log_probs import LogProbs
+from ..tool_choice import ToolChoice
 from .chat_completion_usage import ChatCompletionUsage
 
-__all__ = ["ChatCompletionChunk", "Token", "Choice", "ChoiceDelta"]
+__all__ = ["ChatCompletionChunk", "Choice", "ChoiceDelta", "ChoiceDeltaFunctionCall"]
 
 
-class Token(BaseModel):
-    id: int
+class ChoiceDeltaFunctionCall(BaseModel):
+    arguments: str
 
-    logprob: float
+    name: str
 
-    special: bool
 
-    text: str
+class ChoiceDelta(BaseModel):
+    content: Optional[str] = None
 
+    function_call: Optional[ChoiceDeltaFunctionCall] = None
 
-class ChoiceDelta(BaseModel):
-    content: str
+    role: Optional[Literal["system", "user", "assistant", "function", "tool"]] = None
+
+    token_id: Optional[int] = None
+
+    tool_calls: Optional[List[ToolChoice]] = None
 
 
 class Choice(BaseModel):
     delta: ChoiceDelta
 
+    finish_reason: Literal["stop", "eos", "length", "tool_calls", "function_call"]
+
     index: int
 
+    logprobs: Optional[LogProbs] = None
+
 
 class ChatCompletionChunk(BaseModel):
     id: str
 
-    token: Token
-
     choices: List[Choice]
 
     created: int
+
     model: str
+
     object: Literal["chat.completion.chunk"]
 
-    finish_reason: Optional[Literal["stop", "eos", "length", "tool_calls"]] = None
+    system_fingerprint: Optional[str] = None
 
     usage: Optional[ChatCompletionUsage] = None
diff --git a/src/together/types/chat/completion_create_params.py b/src/together/types/chat/completion_create_params.py
index 8b57ef5b..99963d95 100644
--- a/src/together/types/chat/completion_create_params.py
+++ b/src/together/types/chat/completion_create_params.py
@@ -11,6 +11,8 @@
 __all__ = [
     "CompletionCreateParamsBase",
     "Message",
+    "FunctionCall",
+    "FunctionCallName",
     "ResponseFormat",
     "ToolChoice",
     "CompletionCreateParamsNonStreaming",
@@ -37,6 +39,8 @@ class CompletionCreateParamsBase(TypedDict, total=False):
     repeating tokens that have already been mentioned.
     """
 
+    function_call: FunctionCall
+
     logit_bias: Dict[str, float]
     """Adjusts the likelihood of specific tokens appearing in the generated output."""
 
@@ -133,6 +137,13 @@ class Message(TypedDict, total=False):
     """The role of the messages author. Choice between: system, user, or assistant."""
 
 
+class FunctionCallName(TypedDict, total=False):
+    name: Required[str]
+
+
+FunctionCall = Union[Literal["none", "auto"], FunctionCallName]
+
+
 class ResponseFormat(TypedDict, total=False):
     schema: Dict[str, str]
     """The schema of the response format."""
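`FunctionCall` mirrors the OpenAI-style union: the literals `"none"` and `"auto"`, or a `{"name": ...}` object. In streaming mode, the reshaped `ChatCompletionChunk` now carries `finish_reason` and `logprobs` per choice and exposes function/tool deltas on `ChoiceDelta`. A sketch of accumulating a streamed function call, assuming (as is typical for these deltas) that the name repeats while the arguments arrive as fragments of one JSON string:

```python
fn_name = ""
arg_parts = []

stream = client.chat.completions.create(
    model="mistralai/Mixtral-8x7B-Instruct-v0.1",
    messages=[{"role": "user", "content": "What is the weather in Tokyo?"}],
    function_call="auto",
    stream=True,
)

for chunk in stream:
    delta = chunk.choices[0].delta
    if delta.function_call is not None:
        # Assumed: concatenating fragments yields the full JSON arguments.
        fn_name = delta.function_call.name
        arg_parts.append(delta.function_call.arguments)
    elif delta.content:
        print(delta.content, end="")

if fn_name:
    print(fn_name, "".join(arg_parts))
```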
diff --git a/src/together/types/completion.py b/src/together/types/completion.py
index b1ea84db..98ce814f 100644
--- a/src/together/types/completion.py
+++ b/src/together/types/completion.py
@@ -11,7 +11,7 @@
 
 
 class Choice(BaseModel):
-    finish_reason: Optional[Literal["stop", "eos", "length", "tool_calls"]] = None
+    finish_reason: Optional[Literal["stop", "eos", "length", "tool_calls", "function_call"]] = None
 
     logprobs: Optional[LogProbs] = None
 
diff --git a/src/together/types/fine_tune_event.py b/src/together/types/fine_tune_event.py
index 09d6e795..bc98905f 100644
--- a/src/together/types/fine_tune_event.py
+++ b/src/together/types/fine_tune_event.py
@@ -1,6 +1,5 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
-import builtins
 from typing import List, Optional
 from typing_extensions import Literal
 
@@ -64,7 +63,7 @@ class Data(BaseModel):
 
     wandb_url: str
 
-    level: Optional[builtins.object] = None
+    level: Optional[Literal["info", "warning", "error", "legacy_info", "legacy_iwarning", "legacy_ierror"]] = None
 
 
 class FineTuneEvent(BaseModel):
diff --git a/src/together/types/log_probs.py b/src/together/types/log_probs.py
index a67d3607..a9c408a0 100644
--- a/src/together/types/log_probs.py
+++ b/src/together/types/log_probs.py
@@ -4,10 +4,18 @@
 
 from .._models import BaseModel
 
-__all__ = ["LogProbs"]
+__all__ = ["LogProbs", "Content"]
+
+
+class Content(BaseModel):
+    token: str
+
+    logprob: float
 
 
 class LogProbs(BaseModel):
+    content: Optional[List[Content]] = None
+
     token_logprobs: Optional[List[float]] = None
     """List of token log probabilities"""
 
diff --git a/src/together/types/tool_choice.py b/src/together/types/tool_choice.py
new file mode 100644
index 00000000..d48c79c6
--- /dev/null
+++ b/src/together/types/tool_choice.py
@@ -0,0 +1,23 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
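The new `LogProbs.content` pairs each token with its log probability, complementing the existing `token_logprobs` array. One way a consumer might read it from a non-streaming choice, using the field names defined in the diff above:

```python
if choice.logprobs is not None and choice.logprobs.content is not None:
    for entry in choice.logprobs.content:
        print(f"{entry.token!r}: {entry.logprob:.3f}")
```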
+
+from typing_extensions import Literal
+
+from .._models import BaseModel
+
+__all__ = ["ToolChoice", "Function"]
+
+
+class Function(BaseModel):
+    arguments: str
+
+    name: str
+
+
+class ToolChoice(BaseModel):
+    id: str
+
+    function: Function
+
+    index: float
+
+    type: Literal["function"]
diff --git a/src/together/types/tool_choice_param.py b/src/together/types/tool_choice_param.py
index 241b8471..98b759fe 100644
--- a/src/together/types/tool_choice_param.py
+++ b/src/together/types/tool_choice_param.py
@@ -2,16 +2,22 @@
 
 from __future__ import annotations
 
-from typing_extensions import TypedDict
+from typing_extensions import Literal, Required, TypedDict
 
 __all__ = ["ToolChoiceParam", "Function"]
 
 
 class Function(TypedDict, total=False):
-    name: str
+    arguments: Required[str]
+
+    name: Required[str]
 
 
 class ToolChoiceParam(TypedDict, total=False):
-    function: Function
+    id: Required[str]
+
+    function: Required[Function]
+
+    index: Required[float]
 
-    type: str
+    type: Required[Literal["function"]]
diff --git a/tests/api_resources/chat/test_completions.py b/tests/api_resources/chat/test_completions.py
index f35a3e90..28a2cc9b 100644
--- a/tests/api_resources/chat/test_completions.py
+++ b/tests/api_resources/chat/test_completions.py
@@ -58,6 +58,7 @@ def test_method_create_with_all_params_overload_1(self, client: Together) -> Non
             model="mistralai/Mixtral-8x7B-Instruct-v0.1",
             echo=True,
             frequency_penalty=0,
+            function_call="none",
             logit_bias={
                 "105": 21.4,
                 "1024": -10.5,
@@ -203,6 +204,7 @@ def test_method_create_with_all_params_overload_2(self, client: Together) -> Non
             stream=True,
             echo=True,
             frequency_penalty=0,
+            function_call="none",
             logit_bias={
                 "105": 21.4,
                 "1024": -10.5,
@@ -350,6 +352,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
             model="mistralai/Mixtral-8x7B-Instruct-v0.1",
             echo=True,
             frequency_penalty=0,
+            function_call="none",
             logit_bias={
                 "105": 21.4,
                 "1024": -10.5,
@@ -495,6 +498,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
             stream=True,
             echo=True,
             frequency_penalty=0,
+            function_call="none",
             logit_bias={
                 "105": 21.4,
                 "1024": -10.5,
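`ToolChoiceParam` is now strict: every key is `Required`, matching the shape of the new `ToolChoice` response model. A hypothetical value that would satisfy the TypedDict; the identifier, function name, and arguments are illustrative only:

```python
from together.types import ToolChoiceParam

tool_choice: ToolChoiceParam = {
    "id": "call_0",  # illustrative identifier
    "index": 0,  # note: the spec types this as float
    "type": "function",
    "function": {
        "name": "get_current_weather",  # hypothetical function
        "arguments": '{"location": "Tokyo"}',
    },
}
```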