Merged
99 changes: 83 additions & 16 deletions async-openai/src/chat.rs
@@ -1,15 +1,19 @@
use serde::Serialize;

use crate::{
config::Config,
error::OpenAIError,
types::{
types::chat::{
ChatCompletionDeleted, ChatCompletionList, ChatCompletionMessageList,
ChatCompletionResponseStream, CreateChatCompletionRequest, CreateChatCompletionResponse,
UpdateChatCompletionRequest,
},
Client,
};

/// Given a list of messages comprising a conversation, the model will return a response.
///
/// Related guide: [Chat completions](https://platform.openai.com//docs/guides/text-generation)
/// Related guide: [Chat Completions](https://platform.openai.com/docs/guides/text-generation)
pub struct Chat<'c, C: Config> {
client: &'c Client<C>,
}
@@ -19,21 +23,13 @@ impl<'c, C: Config> Chat<'c, C> {
Self { client }
}

/// Creates a model response for the given chat conversation. Learn more in
/// the
///
/// [text generation](https://platform.openai.com/docs/guides/text-generation),
/// [vision](https://platform.openai.com/docs/guides/vision),
///
/// and [audio](https://platform.openai.com/docs/guides/audio) guides.
/// Creates a model response for the given chat conversation.
///
/// Returns a [chat completion](https://platform.openai.com/docs/api-reference/chat/object) object, or a streamed sequence of [chat completion chunk](https://platform.openai.com/docs/api-reference/chat/streaming) objects if the request is streamed.
///
/// Parameter support can differ depending on the model used to generate the
/// response, particularly for newer reasoning models. Parameters that are
/// only supported for reasoning models are noted below. For the current state
/// of unsupported parameters in reasoning models,
/// Learn more in the [text generation](https://platform.openai.com/docs/guides/text-generation), [vision](https://platform.openai.com/docs/guides/vision), and [audio](https://platform.openai.com/docs/guides/audio) guides.
///
/// [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning).
/// Parameter support can differ depending on the model used to generate the response, particularly for newer reasoning models. Parameters that are only supported for reasoning models are noted below. For the current state of unsupported parameters in reasoning models, [refer to the reasoning guide](https://platform.openai.com/docs/guides/reasoning).
///
/// byot: You must ensure "stream: false" in serialized `request`
#[crate::byot(
@@ -55,9 +51,11 @@ impl<'c, C: Config> Chat<'c, C> {
self.client.post("/chat/completions", request).await
}
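For reference, a minimal non-streaming sketch of `create`, assuming the public re-exports follow this PR's `types::chat` layout and using a placeholder model name:

```rust
use async_openai::{
    types::chat::{ChatCompletionRequestUserMessageArgs, CreateChatCompletionRequestArgs},
    Client,
};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Client::new() picks up OPENAI_API_KEY from the environment.
    let client = Client::new();

    let request = CreateChatCompletionRequestArgs::default()
        .model("gpt-4o-mini") // placeholder model name
        .messages([ChatCompletionRequestUserMessageArgs::default()
            .content("Say hello in one sentence.")
            .build()?
            .into()])
        .build()?;

    // stream stays false here, so this returns a single CreateChatCompletionResponse.
    let response = client.chat().create(request).await?;
    if let Some(choice) = response.choices.first() {
        println!("{:?}", choice.message.content);
    }
    Ok(())
}
```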

/// Creates a completion for the chat message
/// Creates a completion for the chat message.
///
/// partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) as they become available, with the stream terminated by a `data: [DONE]` message.
/// If set to true, the model response data will be streamed to the client as it is generated using [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format).
///
/// See the [Streaming section](https://platform.openai.com/docs/api-reference/chat/streaming) for more information, along with the [streaming responses](https://platform.openai.com/docs/guides/streaming-responses) guide for more information on how to handle the streaming events.
///
/// [ChatCompletionResponseStream] is a parsed SSE stream until a \[DONE\] is received from server.
///
@@ -85,4 +83,73 @@ impl<'c, C: Config> Chat<'c, C> {
}
Ok(self.client.post_stream("/chat/completions", request).await)
}

/// List stored Chat Completions. Only Chat Completions that have been stored
/// with the `store` parameter set to `true` will be returned.
#[crate::byot(T0 = serde::Serialize, R = serde::de::DeserializeOwned)]
pub async fn list<Q>(&self, query: &Q) -> Result<ChatCompletionList, OpenAIError>
where
Q: Serialize + ?Sized,
{
self.client
.get_with_query("/chat/completions", &query)
.await
}
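A sketch of listing stored completions, assuming the query serializes as URL parameters (e.g. `limit`) and that `ChatCompletionList` exposes the usual `data` page of completion objects:

```rust
use async_openai::{config::OpenAIConfig, error::OpenAIError, Client};

async fn list_stored(client: &Client<OpenAIConfig>) -> Result<(), OpenAIError> {
    // Only completions created with `store: true` show up here.
    let page = client.chat().list(&[("limit", "5")]).await?;
    for completion in page.data {
        println!("{}", completion.id);
    }
    Ok(())
}
```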

/// Get a stored chat completion. Only Chat Completions that have been created
/// with the `store` parameter set to `true` will be returned.
#[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
pub async fn retrieve(
&self,
completion_id: &str,
) -> Result<CreateChatCompletionResponse, OpenAIError> {
self.client
.get(&format!("/chat/completions/{completion_id}"))
.await
}
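Retrieving a single stored completion by id, sketched as a helper that takes the id as a parameter:

```rust
use async_openai::{config::OpenAIConfig, error::OpenAIError, Client};

async fn show_stored(client: &Client<OpenAIConfig>, completion_id: &str) -> Result<(), OpenAIError> {
    // Fails for completions that were not created with `store: true`.
    let completion = client.chat().retrieve(completion_id).await?;
    println!("{}: {} choice(s)", completion.id, completion.choices.len());
    Ok(())
}
```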

/// Modify a stored chat completion. Only Chat Completions that have been
/// created with the `store` parameter set to `true` can be modified. Currently,
/// the only supported modification is to update the `metadata` field.
#[crate::byot(
T0 = std::fmt::Display,
T1 = serde::Serialize,
R = serde::de::DeserializeOwned
)]
pub async fn update(
&self,
completion_id: &str,
request: UpdateChatCompletionRequest,
) -> Result<CreateChatCompletionResponse, OpenAIError> {
self.client
.post(&format!("/chat/completions/{completion_id}"), request)
.await
}
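Updating metadata on a stored completion. The shape of `UpdateChatCompletionRequest` (a single `metadata` map field) is an assumption here, since `metadata` is the only modification the API currently supports:

```rust
use std::collections::HashMap;

use async_openai::{
    config::OpenAIConfig, error::OpenAIError, types::chat::UpdateChatCompletionRequest, Client,
};

async fn tag_stored(client: &Client<OpenAIConfig>, completion_id: &str) -> Result<(), OpenAIError> {
    // Assumed field layout: a key/value metadata map.
    let request = UpdateChatCompletionRequest {
        metadata: HashMap::from([("project".to_string(), "docs-demo".to_string())]),
    };
    let updated = client.chat().update(completion_id, request).await?;
    println!("updated {}", updated.id);
    Ok(())
}
```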

/// Delete a stored chat completion. Only Chat Completions that have been
/// created with the `store` parameter set to `true` can be deleted.
#[crate::byot(T0 = std::fmt::Display, R = serde::de::DeserializeOwned)]
pub async fn delete(&self, completion_id: &str) -> Result<ChatCompletionDeleted, OpenAIError> {
self.client
.delete(&format!("/chat/completions/{completion_id}"))
.await
}
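Deleting a stored completion; this sketch assumes `ChatCompletionDeleted` mirrors the API's deletion object (`id`, `deleted`):

```rust
use async_openai::{config::OpenAIConfig, error::OpenAIError, Client};

async fn remove_stored(client: &Client<OpenAIConfig>, completion_id: &str) -> Result<(), OpenAIError> {
    // Only stored completions (created with `store: true`) can be deleted.
    let gone = client.chat().delete(completion_id).await?;
    println!("{} deleted: {}", gone.id, gone.deleted);
    Ok(())
}
```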

/// Get a list of messages for the specified chat completion.
#[crate::byot(T0 = std::fmt::Display, T1 = serde::Serialize, R = serde::de::DeserializeOwned)]
pub async fn messages<Q>(
&self,
completion_id: &str,
query: &Q,
) -> Result<ChatCompletionMessageList, OpenAIError>
where
Q: Serialize + ?Sized,
{
self.client
.get_with_query(
&format!("/chat/completions/{completion_id}/messages"),
&query,
)
.await
}
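Paging through the messages of a stored completion; the `limit` query parameter and the `data`/`id` fields on `ChatCompletionMessageList` are assumptions based on the API's list shape:

```rust
use async_openai::{config::OpenAIConfig, error::OpenAIError, Client};

async fn stored_messages(client: &Client<OpenAIConfig>, completion_id: &str) -> Result<(), OpenAIError> {
    let page = client.chat().messages(completion_id, &[("limit", "20")]).await?;
    for message in page.data {
        println!("{}", message.id);
    }
    Ok(())
}
```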
}
2 changes: 1 addition & 1 deletion async-openai/src/config.rs
@@ -241,7 +241,7 @@ impl Config for AzureConfig {
#[cfg(test)]
mod test {
use super::*;
use crate::types::{
use crate::types::chat::{
ChatCompletionRequestMessage, ChatCompletionRequestUserMessage, CreateChatCompletionRequest,
};
use crate::Client;
2 changes: 1 addition & 1 deletion async-openai/src/container_files.rs
@@ -4,7 +4,7 @@ use serde::Serialize;
use crate::{
config::Config,
error::OpenAIError,
types::{
types::containers::{
ContainerFileListResource, ContainerFileResource, CreateContainerFileRequest,
DeleteContainerFileResponse,
},
2 changes: 1 addition & 1 deletion async-openai/src/containers.rs
@@ -4,7 +4,7 @@ use crate::{
config::Config,
container_files::ContainerFiles,
error::OpenAIError,
types::{
types::containers::{
ContainerListResource, ContainerResource, CreateContainerRequest, DeleteContainerResponse,
},
Client,
2 changes: 1 addition & 1 deletion async-openai/src/types/assistant.rs
@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};

use crate::error::OpenAIError;

use super::{FunctionName, FunctionObject, ResponseFormat};
use crate::types::chat::{FunctionName, FunctionObject, ResponseFormat};

#[derive(Clone, Serialize, Debug, Deserialize, PartialEq, Default)]
pub struct AssistantToolCodeInterpreterResources {
6 changes: 3 additions & 3 deletions async-openai/src/types/assistant_impls.rs
@@ -1,7 +1,7 @@
use super::{
AssistantToolCodeInterpreterResources, AssistantToolFileSearchResources,
use crate::types::{
chat::FunctionObject, AssistantToolCodeInterpreterResources, AssistantToolFileSearchResources,
AssistantToolResources, AssistantTools, AssistantToolsFileSearch, AssistantToolsFunction,
CreateAssistantToolFileSearchResources, CreateAssistantToolResources, FunctionObject,
CreateAssistantToolFileSearchResources, CreateAssistantToolResources,
};

impl From<AssistantToolsFileSearch> for AssistantTools {