Merged
2 changes: 1 addition & 1 deletion .stats.yml
@@ -1,2 +1,2 @@
configured_endpoints: 15
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai%2FTogetherAI-33661dd8fd4c26ecd595dee22e2c9274e6c4699ad8de5ece233e0d37376c6b7c.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai%2FTogetherAI-e8f4e11a2e3927c75dce42c913ef5c9adcf2aef3d3b1312b4825d9f135413c39.yml
2 changes: 1 addition & 1 deletion README.md
@@ -362,7 +362,7 @@ You can directly override the [httpx client](https://www.python-httpx.org/api/#c

- Support for proxies
- Custom transports
-- Additional [advanced](https://www.python-httpx.org/advanced/#client-instances) functionality
+- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality

```python
from together import Together, DefaultHttpxClient
3 changes: 3 additions & 0 deletions bin/publish-pypi
@@ -3,4 +3,7 @@
set -eux
mkdir -p dist
rye build --clean
+# Patching importlib-metadata version until upstream library version is updated
+# https://github.com/pypa/twine/issues/977#issuecomment-2189800841
+"$HOME/.rye/self/bin/python3" -m pip install 'importlib-metadata==7.2.1'
rye publish --yes --token=$PYPI_TOKEN
10 changes: 10 additions & 0 deletions src/together/resources/chat/completions.py
@@ -50,6 +50,7 @@ def create(
model: str,
echo: bool | NotGiven = NOT_GIVEN,
frequency_penalty: float | NotGiven = NOT_GIVEN,
+function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
logprobs: int | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
@@ -161,6 +162,7 @@ def create(
stream: Literal[True],
echo: bool | NotGiven = NOT_GIVEN,
frequency_penalty: float | NotGiven = NOT_GIVEN,
+function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
logprobs: int | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
@@ -271,6 +273,7 @@ def create(
stream: bool,
echo: bool | NotGiven = NOT_GIVEN,
frequency_penalty: float | NotGiven = NOT_GIVEN,
+function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
logprobs: int | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
@@ -380,6 +383,7 @@ def create(
model: str,
echo: bool | NotGiven = NOT_GIVEN,
frequency_penalty: float | NotGiven = NOT_GIVEN,
+function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
logprobs: int | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
@@ -411,6 +415,7 @@ def create(
"model": model,
"echo": echo,
"frequency_penalty": frequency_penalty,
"function_call": function_call,
"logit_bias": logit_bias,
"logprobs": logprobs,
"max_tokens": max_tokens,
@@ -456,6 +461,7 @@ async def create(
model: str,
echo: bool | NotGiven = NOT_GIVEN,
frequency_penalty: float | NotGiven = NOT_GIVEN,
+function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
logprobs: int | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
@@ -567,6 +573,7 @@ async def create(
stream: Literal[True],
echo: bool | NotGiven = NOT_GIVEN,
frequency_penalty: float | NotGiven = NOT_GIVEN,
+function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
logprobs: int | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
@@ -677,6 +684,7 @@ async def create(
stream: bool,
echo: bool | NotGiven = NOT_GIVEN,
frequency_penalty: float | NotGiven = NOT_GIVEN,
+function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
logprobs: int | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
@@ -786,6 +794,7 @@ async def create(
model: str,
echo: bool | NotGiven = NOT_GIVEN,
frequency_penalty: float | NotGiven = NOT_GIVEN,
+function_call: completion_create_params.FunctionCall | NotGiven = NOT_GIVEN,
logit_bias: Dict[str, float] | NotGiven = NOT_GIVEN,
logprobs: int | NotGiven = NOT_GIVEN,
max_tokens: int | NotGiven = NOT_GIVEN,
@@ -817,6 +826,7 @@ async def create(
"model": model,
"echo": echo,
"frequency_penalty": frequency_penalty,
"function_call": function_call,
"logit_bias": logit_bias,
"logprobs": logprobs,
"max_tokens": max_tokens,
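The ten additions above thread the new `function_call` parameter through every `create` overload (sync and async, streaming and non-streaming) and into the serialized request body. A minimal sketch of how a caller would use it — the model name is borrowed from the tests at the bottom of this diff; the message content and the reliance on the `TOGETHER_API_KEY` environment variable are illustrative assumptions, not part of the change:

```python
from together import Together

client = Together()  # assumes TOGETHER_API_KEY is set in the environment

completion = client.chat.completions.create(
    model="mistralai/Mixtral-8x7B-Instruct-v0.1",
    messages=[{"role": "user", "content": "What's the weather in Tokyo?"}],
    # New in this PR: accepts "none", "auto", or {"name": "..."}
    # (see FunctionCall in completion_create_params.py below).
    function_call="auto",
)
print(completion.choices[0].message)
```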
1 change: 1 addition & 0 deletions src/together/types/__init__.py
@@ -7,6 +7,7 @@
from .log_probs import LogProbs as LogProbs
from .completion import Completion as Completion
from .image_file import ImageFile as ImageFile
+from .tool_choice import ToolChoice as ToolChoice
from .tools_param import ToolsParam as ToolsParam
from .fine_tune_event import FineTuneEvent as FineTuneEvent
from .tool_choice_param import ToolChoiceParam as ToolChoiceParam
33 changes: 25 additions & 8 deletions src/together/types/chat/chat_completion.py
@@ -5,34 +5,51 @@

from ..._models import BaseModel
from ..log_probs import LogProbs
+from ..tool_choice import ToolChoice
from .chat_completion_usage import ChatCompletionUsage

__all__ = ["ChatCompletion", "Choice", "ChoiceMessage"]
__all__ = ["ChatCompletion", "Choice", "ChoiceMessage", "ChoiceMessageFunctionCall"]


+class ChoiceMessageFunctionCall(BaseModel):
+arguments: str
+
+name: str


class ChoiceMessage(BaseModel):
content: Optional[str] = None

-role: Optional[str] = None
+role: Literal["assistant"]

+function_call: Optional[ChoiceMessageFunctionCall] = None

+tool_calls: Optional[List[ToolChoice]] = None


class Choice(BaseModel):
finish_reason: Optional[Literal["stop", "eos", "length", "tool_calls"]] = None
finish_reason: Optional[Literal["stop", "eos", "length", "tool_calls", "function_call"]] = None

index: Optional[int] = None

logprobs: Optional[LogProbs] = None

message: Optional[ChoiceMessage] = None

seed: Optional[int] = None

text: Optional[str] = None


class ChatCompletion(BaseModel):
-id: Optional[str] = None
+id: str

-choices: Optional[List[Choice]] = None
+choices: List[Choice]

-created: Optional[int] = None
+created: int

-model: Optional[str] = None
+model: str

-object: Optional[Literal["chat.completion"]] = None
+object: Literal["chat.completion"]

usage: Optional[ChatCompletionUsage] = None
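With `ChoiceMessageFunctionCall` on the message and `"function_call"` added to `finish_reason`, a parsed response can be inspected as below. This is a sketch: `completion` stands for a `ChatCompletion` returned by the client, and treating `arguments` as a JSON-encoded string is an assumption consistent with OpenAI-style function calling, not something this diff states:

```python
import json

choice = completion.choices[0]
if choice.finish_reason == "function_call" and choice.message and choice.message.function_call:
    call = choice.message.function_call
    args = json.loads(call.arguments)  # assumed JSON-encoded argument payload
    print(f"Model requested {call.name}({args})")
```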
32 changes: 21 additions & 11 deletions src/together/types/chat/chat_completion_chunk.py
@@ -4,42 +4,52 @@
from typing_extensions import Literal

from ..._models import BaseModel
+from ..log_probs import LogProbs
+from ..tool_choice import ToolChoice
from .chat_completion_usage import ChatCompletionUsage

__all__ = ["ChatCompletionChunk", "Token", "Choice", "ChoiceDelta"]
__all__ = ["ChatCompletionChunk", "Choice", "ChoiceDelta", "ChoiceDeltaFunctionCall"]


-class Token(BaseModel):
-id: int
-
-logprob: float
-
-special: bool
-
-text: str
+class ChoiceDeltaFunctionCall(BaseModel):
+arguments: str
+
+name: str


-class ChoiceDelta(BaseModel):
-content: str
+class ChoiceDelta(BaseModel):
+content: Optional[str] = None
+
+function_call: Optional[ChoiceDeltaFunctionCall] = None
+
+role: Optional[Literal["system", "user", "assistant", "function", "tool"]] = None
+
+token_id: Optional[int] = None
+
+tool_calls: Optional[List[ToolChoice]] = None


class Choice(BaseModel):
delta: ChoiceDelta

+finish_reason: Literal["stop", "eos", "length", "tool_calls", "function_call"]

index: int

+logprobs: Optional[LogProbs] = None


class ChatCompletionChunk(BaseModel):
id: str

-token: Token

choices: List[Choice]

created: int

model: str

object: Literal["chat.completion.chunk"]

-finish_reason: Optional[Literal["stop", "eos", "length", "tool_calls"]] = None
+system_fingerprint: Optional[str] = None

usage: Optional[ChatCompletionUsage] = None
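The chunk model now mirrors the non-streaming shape: the old top-level `Token` and `finish_reason` fields are gone, and deltas can carry partial function calls. A hedged sketch of consuming a stream — `stream` stands for the iterator returned by `create(..., stream=True)`, and the assumption that `arguments` fragments concatenate into one JSON string comes from OpenAI-style streaming, not this diff:

```python
name = ""
arg_fragments = []
for chunk in stream:
    delta = chunk.choices[0].delta
    if delta.function_call is not None:
        name = name or delta.function_call.name
        arg_fragments.append(delta.function_call.arguments)
    elif delta.content:
        print(delta.content, end="", flush=True)

if name:
    print(f"\nStreamed function call: {name}({''.join(arg_fragments)})")
```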
11 changes: 11 additions & 0 deletions src/together/types/chat/completion_create_params.py
@@ -11,6 +11,8 @@
__all__ = [
"CompletionCreateParamsBase",
"Message",
"FunctionCall",
"FunctionCallName",
"ResponseFormat",
"ToolChoice",
"CompletionCreateParamsNonStreaming",
@@ -37,6 +39,8 @@ class CompletionCreateParamsBase(TypedDict, total=False):
repeating tokens that have already been mentioned.
"""

+function_call: FunctionCall

logit_bias: Dict[str, float]
"""Adjusts the likelihood of specific tokens appearing in the generated output."""

@@ -133,6 +137,13 @@ class Message(TypedDict, total=False):
"""The role of the messages author. Choice between: system, user, or assistant."""


+class FunctionCallName(TypedDict, total=False):
+name: Required[str]
+
+
+FunctionCall = Union[Literal["none", "auto"], FunctionCallName]


class ResponseFormat(TypedDict, total=False):
schema: Dict[str, str]
"""The schema of the response format."""
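`FunctionCall` is a union of two literal modes and a TypedDict naming a single function. The three accepted shapes, for reference (the function name is a placeholder):

```python
function_call = "none"                    # never emit a function call
function_call = "auto"                    # let the model decide
function_call = {"name": "get_weather"}   # force this function (FunctionCallName)
```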
2 changes: 1 addition & 1 deletion src/together/types/completion.py
@@ -11,7 +11,7 @@


class Choice(BaseModel):
-finish_reason: Optional[Literal["stop", "eos", "length", "tool_calls"]] = None
+finish_reason: Optional[Literal["stop", "eos", "length", "tool_calls", "function_call"]] = None

logprobs: Optional[LogProbs] = None

3 changes: 1 addition & 2 deletions src/together/types/fine_tune_event.py
@@ -1,6 +1,5 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

-import builtins
from typing import List, Optional
from typing_extensions import Literal

@@ -64,7 +63,7 @@ class Data(BaseModel):

wandb_url: str

-level: Optional[builtins.object] = None
+level: Optional[Literal["info", "warning", "error", "legacy_info", "legacy_iwarning", "legacy_ierror"]] = None


class FineTuneEvent(BaseModel):
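`level` goes from an untyped `builtins.object` to a closed set of literals, so callers can branch on it safely. A small sketch, where `event_data` stands for one of the SDK's fine-tune event `Data` objects (the variable name is illustrative):

```python
if event_data.level in ("warning", "error", "legacy_iwarning", "legacy_ierror"):
    print(f"fine-tune event at level {event_data.level!r} needs attention")
```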
10 changes: 9 additions & 1 deletion src/together/types/log_probs.py
@@ -4,10 +4,18 @@

from .._models import BaseModel

__all__ = ["LogProbs"]
__all__ = ["LogProbs", "Content"]


+class Content(BaseModel):
+token: str
+
+logprob: float


class LogProbs(BaseModel):
+content: Optional[List[Content]] = None

token_logprobs: Optional[List[float]] = None
"""List of token log probabilities"""

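The new `Content` model pairs each emitted token with its log probability, alongside the existing flat `token_logprobs` list. A sketch of reading it, where `logprobs` stands for a populated `LogProbs` instance taken from a choice:

```python
if logprobs.content is not None:
    for item in logprobs.content:
        print(f"{item.token!r}: {item.logprob:.4f}")
```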
23 changes: 23 additions & 0 deletions src/together/types/tool_choice.py
@@ -0,0 +1,23 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing_extensions import Literal

from .._models import BaseModel

__all__ = ["ToolChoice", "Function"]


class Function(BaseModel):
arguments: str

name: str


class ToolChoice(BaseModel):
id: str

function: Function

index: float

type: Literal["function"]
14 changes: 10 additions & 4 deletions src/together/types/tool_choice_param.py
@@ -2,16 +2,22 @@

from __future__ import annotations

-from typing_extensions import TypedDict
+from typing_extensions import Literal, Required, TypedDict

__all__ = ["ToolChoiceParam", "Function"]


class Function(TypedDict, total=False):
-name: str
+arguments: Required[str]
+
+name: Required[str]


class ToolChoiceParam(TypedDict, total=False):
-function: Function
-type: str
+id: Required[str]
+
+function: Required[Function]
+
+index: Required[float]
+
+type: Required[Literal["function"]]
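Every field of `ToolChoiceParam` is now `Required`, so a conforming dict must spell out all four keys. A sketch of a literal that type-checks against the stricter definition (the id, index, and function values are placeholders):

```python
from together.types import ToolChoiceParam

tool_choice: ToolChoiceParam = {
    "id": "call_0",
    "index": 0,
    "type": "function",
    "function": {"name": "get_weather", "arguments": "{}"},
}
```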
4 changes: 4 additions & 0 deletions tests/api_resources/chat/test_completions.py
@@ -58,6 +58,7 @@ def test_method_create_with_all_params_overload_1(self, client: Together) -> Non
model="mistralai/Mixtral-8x7B-Instruct-v0.1",
echo=True,
frequency_penalty=0,
function_call="none",
logit_bias={
"105": 21.4,
"1024": -10.5,
@@ -203,6 +204,7 @@ def test_method_create_with_all_params_overload_2(self, client: Together) -> Non
stream=True,
echo=True,
frequency_penalty=0,
function_call="none",
logit_bias={
"105": 21.4,
"1024": -10.5,
@@ -350,6 +352,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
model="mistralai/Mixtral-8x7B-Instruct-v0.1",
echo=True,
frequency_penalty=0,
function_call="none",
logit_bias={
"105": 21.4,
"1024": -10.5,
@@ -495,6 +498,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
stream=True,
echo=True,
frequency_penalty=0,
function_call="none",
logit_bias={
"105": 21.4,
"1024": -10.5,