2 changes: 1 addition & 1 deletion assemblyai/__version__.py
@@ -1 +1 @@
-__version__ = "0.43.1"
+__version__ = "0.44.2"
2 changes: 2 additions & 0 deletions assemblyai/streaming/v3/models.py
@@ -60,6 +60,8 @@ class StreamingSessionParameters(BaseModel):
     min_end_of_turn_silence_when_confident: Optional[int] = None
     max_turn_silence: Optional[int] = None
     format_turns: Optional[bool] = None
+    keyterms_prompt: Optional[List[str]] = None
+    filter_profanity: Optional[bool] = None


 class Encoding(str, Enum):
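The two new session parameters above are plain optional pydantic fields, so existing callers are unaffected. A minimal sketch of setting them, importing straight from the module this hunk touches (only model construction is shown; the surrounding streaming client calls are omitted):

```python
# Minimal sketch: construct session parameters with the two new fields
# from this PR. The import path follows the file edited above.
from assemblyai.streaming.v3.models import StreamingSessionParameters

params = StreamingSessionParameters(
    format_turns=True,
    keyterms_prompt=["AssemblyAI", "LeMUR"],  # new: key terms to bias recognition toward
    filter_profanity=True,                    # new: mask profanity in transcripts
)

# Unset fields stay None and can be excluded from the outgoing payload.
# .dict() is the pydantic v1 spelling; use model_dump() on pydantic v2.
print(params.dict(exclude_none=True))
```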
19 changes: 19 additions & 0 deletions assemblyai/types.py
@@ -525,6 +525,7 @@ def validate_max_speakers(cls, v, info):
                 "max_speakers_expected must be greater than or equal to min_speakers_expected"
             )
         return v
+
     else:

         @validator("max_speakers_expected")
@@ -1607,6 +1608,7 @@ class Word(BaseModel):
         @field_validator("start", mode="before")
         def set_start_default(cls, v):
             return 0 if v is None else v
+
     else:

         @validator("start", pre=True)
@@ -2317,13 +2319,30 @@ class LemurUsage(BaseModel):
     "The number of output tokens generated by the model"


+class LemurRequestDetails(BaseModel):
+    request_endpoint: str
+    temperature: float
+    final_model: str
+    max_output_size: int
+    created_at: datetime
+    transcript_ids: Optional[List[str]] = None
+    input_text: Optional[str] = None
+    questions: Optional[List[LemurQuestion]] = None
+    prompt: Optional[str] = None
+    context: Optional[Union[dict, str]] = None
+    answer_format: Optional[str] = None
+
+
 class BaseLemurResponse(BaseModel):
     request_id: str
     "The unique identifier of your LeMUR request"

     usage: LemurUsage
     "The usage numbers for the LeMUR request"

+    request: Optional[LemurRequestDetails] = None
+    "The request details the user passed into the POST request. Optional since this only exists on the GET request."
+

 class LemurStringResponse(BaseLemurResponse):
     """
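The net effect in types.py: a retrieved LeMUR result can now carry the details of the original request. A hedged sketch of consuming it; the `get_response` spelling is taken from the "get_response endpoint tests" wording in the test factories below, so treat the exact method name as an assumption:

```python
# Hedged sketch: read back the new `request` details on a retrieved LeMUR
# result. The retrieval method name is assumed from the test factories below.
import assemblyai as aai

aai.settings.api_key = "YOUR_API_KEY"

lemur_response = aai.Lemur().get_response("your-lemur-request-id")

# `request` is Optional[LemurRequestDetails]: populated on GET retrievals,
# None on responses returned directly from a POST.
if lemur_response.request is not None:
    details = lemur_response.request
    print(details.request_endpoint)   # e.g. "/lemur/v3/task"
    print(details.final_model, details.temperature, details.max_output_size)
    print(details.created_at)         # parsed into a datetime by pydantic
```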
80 changes: 80 additions & 0 deletions tests/unit/factories.py
@@ -246,6 +246,49 @@ class Meta:
     )


+class LemurRequestDetails(factory.Factory):
+    class Meta:
+        model = types.LemurRequestDetails
+
+    request_endpoint = factory.Faker("text")
+    temperature = factory.Faker("pyfloat")
+    final_model = factory.Faker("text")
+    max_output_size = factory.Faker("pyint")
+    created_at = factory.Faker("iso8601")
+
+
+class LemurTaskRequestDetails(LemurRequestDetails):
+    """Request details specific to LeMUR task operations"""
+
+    request_endpoint = "/lemur/v3/task"
+    prompt = factory.Faker("text")
+
+
+class LemurSummaryRequestDetails(LemurRequestDetails):
+    """Request details specific to LeMUR summary operations"""
+
+    request_endpoint = "/lemur/v3/summary"
+    context = factory.LazyFunction(lambda: {"key": "value"})
+    answer_format = factory.Faker("sentence")
+
+
+class LemurQuestionRequestDetails(LemurRequestDetails):
+    """Request details specific to LeMUR question-answer operations"""
+
+    request_endpoint = "/lemur/v3/question-answer"
+    questions = [
+        {
+            "question": "What is the main topic?",
+            "answer_format": "short sentence",
+            "context": "Meeting context",
+        },
+        {
+            "question": "What is the sentiment?",
+            "answer_options": ["positive", "negative", "neutral"],
+        },
+    ]
+
+
 class LemurUsage(factory.Factory):
     class Meta:
         model = types.LemurUsage
@@ -310,6 +353,43 @@ class Meta:
     request_id = factory.Faker("uuid4")
     usage = factory.SubFactory(LemurUsage)
     response = factory.Faker("text")
+    request = factory.SubFactory(LemurRequestDetails)
+
+
+# Factories specifically for get_response endpoint tests (include request field)
+class LemurTaskResponseWithRequest(factory.Factory):
+    class Meta:
+        model = types.LemurTaskResponse
+
+    request_id = factory.Faker("uuid4")
+    usage = factory.SubFactory(LemurUsage)
+    response = factory.Faker("text")
+    request = factory.SubFactory(LemurTaskRequestDetails)
+
+
+class LemurSummaryResponseWithRequest(factory.Factory):
+    class Meta:
+        model = types.LemurSummaryResponse
+
+    request_id = factory.Faker("uuid4")
+    usage = factory.SubFactory(LemurUsage)
+    response = factory.Faker("text")
+    request = factory.SubFactory(LemurSummaryRequestDetails)
+
+
+class LemurQuestionResponseWithRequest(factory.Factory):
+    class Meta:
+        model = types.LemurQuestionResponse
+
+    request_id = factory.Faker("uuid4")
+    usage = factory.SubFactory(LemurUsage)
+    response = factory.List(
+        [
+            factory.SubFactory(LemurQuestionAnswer),
+            factory.SubFactory(LemurQuestionAnswer),
+        ]
+    )
+    request = factory.SubFactory(LemurQuestionRequestDetails)


 class LemurPurgeResponse(factory.Factory):
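A sketch of how the new `*WithRequest` factories get exercised in a test: `build()` instantiates the pydantic model directly, and the `SubFactory` chain fills in the typed `request` details (the import path is assumed from the repo layout):

```python
# Sketch of using the new factories; import path assumed from the repo layout.
from tests.unit import factories

fake = factories.LemurTaskResponseWithRequest.build()

# The SubFactory populates the new `request` field with task-shaped details.
assert fake.request is not None
assert fake.request.request_endpoint == "/lemur/v3/task"
assert fake.request.prompt  # task requests carry the original prompt
```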