From 97700ed18cfb315005219b2f09ec53d36933ebc2 Mon Sep 17 00:00:00 2001
From: Dongri Jin
Date: Thu, 6 Nov 2025 13:35:40 +0900
Subject: [PATCH] Add response api

---
 README.md             |   1 +
 examples/responses.rs |  21 +++
 src/v1/api.rs         |  67 +++++++++
 src/v1/mod.rs         |   1 +
 src/v1/responses.rs   | 312 ++++++++++++++++++++++++++++++++++++++++++
 5 files changed, 402 insertions(+)
 create mode 100644 examples/responses.rs
 create mode 100644 src/v1/responses.rs

diff --git a/README.md b/README.md
index 16c3172f..e3af4895 100644
--- a/README.md
+++ b/README.md
@@ -166,6 +166,7 @@ Check out the [full API documentation](https://platform.openai.com/docs/api-refe
 - [x] [Assistants](https://platform.openai.com/docs/assistants/overview)
 - [x] [Batch](https://platform.openai.com/docs/api-reference/batch)
 - [x] [Realtime](https://platform.openai.com/docs/api-reference/realtime)
+- [x] [Responses](https://platform.openai.com/docs/api-reference/responses)
 
 ## License
 
diff --git a/examples/responses.rs b/examples/responses.rs
new file mode 100644
index 00000000..33734277
--- /dev/null
+++ b/examples/responses.rs
@@ -0,0 +1,21 @@
+use openai_api_rs::v1::api::OpenAIClient;
+use openai_api_rs::v1::common::GPT4_1_MINI;
+use openai_api_rs::v1::responses::CreateResponseRequest;
+use serde_json::json;
+use std::env;
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
+    let api_key = env::var("OPENAI_API_KEY").unwrap();
+    let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;
+
+    let mut req = CreateResponseRequest::new();
+    req.model = Some(GPT4_1_MINI.to_string());
+    req.input = Some(json!("Write a haiku about Rust."));
+    req.extra.insert("temperature".to_string(), json!(0.7));
+
+    let resp = client.create_response(req).await?;
+    println!("response id: {} status: {:?}", resp.id, resp.status);
+    println!("response output: {:?}", resp.output);
+    Ok(())
+}
diff --git a/src/v1/api.rs b/src/v1/api.rs
index 8ff11652..5d8a13d4 100644
--- a/src/v1/api.rs
+++ b/src/v1/api.rs
@@ -35,6 +35,9 @@ use crate::v1::message::{
 };
 use crate::v1::model::{ModelResponse, ModelsResponse};
 use crate::v1::moderation::{CreateModerationRequest, CreateModerationResponse};
+use crate::v1::responses::{
+    CountTokensRequest, CountTokensResponse, CreateResponseRequest, ListResponses, ResponseObject,
+};
 use crate::v1::run::{
     CreateRunRequest, CreateThreadAndRunRequest, ListRun, ListRunStep, ModifyRunRequest, RunObject,
     RunStepObject,
@@ -819,6 +822,70 @@ impl OpenAIClient {
         self.get(&url).await
     }
 
+    // Responses API
+    pub async fn create_response(
+        &mut self,
+        req: CreateResponseRequest,
+    ) -> Result<ResponseObject, APIError> {
+        self.post("responses", &req).await
+    }
+
+    pub async fn retrieve_response(
+        &mut self,
+        response_id: String,
+    ) -> Result<ResponseObject, APIError> {
+        self.get(&format!("responses/{response_id}")).await
+    }
+
+    pub async fn delete_response(
+        &mut self,
+        response_id: String,
+    ) -> Result<ResponseObject, APIError> {
+        self.delete(&format!("responses/{response_id}")).await
+    }
+
+    pub async fn cancel_response(
+        &mut self,
+        response_id: String,
+    ) -> Result<ResponseObject, APIError> {
+        self.post(
+            &format!("responses/{response_id}/cancel"),
+            &common::EmptyRequestBody {},
+        )
+        .await
+    }
+
+    pub async fn list_response_input_items(
+        &mut self,
+        response_id: String,
+        after: Option<String>,
+        limit: Option<i64>,
+        order: Option<String>,
+    ) -> Result<ListResponses, APIError> {
+        let mut url = format!("responses/{}/input_items", response_id);
+        let mut params = vec![];
+        if let Some(after) = after {
+            params.push(format!("after={}", after));
+        }
+        if let Some(limit) = limit {
+            params.push(format!("limit={}", limit));
+        }
+        if let Some(order) = order {
+ params.push(format!("order={}", order)); + } + if !params.is_empty() { + url = format!("{}?{}", url, params.join("&")); + } + self.get(&url).await + } + + pub async fn count_response_input_tokens( + &mut self, + req: CountTokensRequest, + ) -> Result { + self.post("responses/input_tokens", &req).await + } + pub async fn list_models(&mut self) -> Result { self.get("models").await } diff --git a/src/v1/mod.rs b/src/v1/mod.rs index d44ed319..0dcbcbb6 100644 --- a/src/v1/mod.rs +++ b/src/v1/mod.rs @@ -13,6 +13,7 @@ pub mod fine_tuning; pub mod image; pub mod model; pub mod moderation; +pub mod responses; // beta pub mod assistant; diff --git a/src/v1/responses.rs b/src/v1/responses.rs new file mode 100644 index 00000000..348b1fea --- /dev/null +++ b/src/v1/responses.rs @@ -0,0 +1,312 @@ +use crate::v1::types::Tools; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::BTreeMap; + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct CreateResponseRequest { + // background + #[serde(skip_serializing_if = "Option::is_none")] + pub background: Option, + + // conversation + #[serde(skip_serializing_if = "Option::is_none")] + pub conversation: Option, + + // include + #[serde(skip_serializing_if = "Option::is_none")] + pub include: Option>, + + // input + #[serde(skip_serializing_if = "Option::is_none")] + pub input: Option, + + // instructions + #[serde(skip_serializing_if = "Option::is_none")] + pub instructions: Option, + + // max_output_tokens + #[serde(skip_serializing_if = "Option::is_none")] + pub max_output_tokens: Option, + + // max_tool_calls + #[serde(skip_serializing_if = "Option::is_none")] + pub max_tool_calls: Option, + + // metadata + #[serde(skip_serializing_if = "Option::is_none")] + pub metadata: Option>, + + // model + #[serde(skip_serializing_if = "Option::is_none")] + pub model: Option, + + // parallel_tool_calls + #[serde(skip_serializing_if = "Option::is_none")] + pub parallel_tool_calls: Option, + + // previous_response_id + #[serde(skip_serializing_if = "Option::is_none")] + pub previous_response_id: Option, + + // prompt + #[serde(skip_serializing_if = "Option::is_none")] + pub prompt: Option, + + // prompt_cache_key + #[serde(skip_serializing_if = "Option::is_none")] + pub prompt_cache_key: Option, + + // reasoning + #[serde(skip_serializing_if = "Option::is_none")] + pub reasoning: Option, + + // safety_identifier + #[serde(skip_serializing_if = "Option::is_none")] + pub safety_identifier: Option, + + // service_tier + #[serde(skip_serializing_if = "Option::is_none")] + pub service_tier: Option, + + // store + #[serde(skip_serializing_if = "Option::is_none")] + pub store: Option, + + // stream + #[serde(skip_serializing_if = "Option::is_none")] + pub stream: Option, + + // stream_options + #[serde(skip_serializing_if = "Option::is_none")] + pub stream_options: Option, + + // temperature + #[serde(skip_serializing_if = "Option::is_none")] + pub temperature: Option, + + // text + #[serde(skip_serializing_if = "Option::is_none")] + pub text: Option, + + // tool_choice + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_choice: Option, + + // tools + #[serde(skip_serializing_if = "Option::is_none")] + pub tools: Option>, + + // top_logprobs + #[serde(skip_serializing_if = "Option::is_none")] + pub top_logprobs: Option, + + // top_p + #[serde(skip_serializing_if = "Option::is_none")] + pub top_p: Option, + + // truncation + #[serde(skip_serializing_if = "Option::is_none")] + pub truncation: Option, + + // user (deprecated) 
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub user: Option<String>,
+
+    // Future-proof
+    #[serde(flatten)]
+    pub extra: BTreeMap<String, Value>,
+}
+
+impl CreateResponseRequest {
+    pub fn new() -> Self {
+        Self {
+            background: None,
+            conversation: None,
+            include: None,
+            input: None,
+            instructions: None,
+            max_output_tokens: None,
+            max_tool_calls: None,
+            metadata: None,
+            model: None,
+            parallel_tool_calls: None,
+            previous_response_id: None,
+            prompt: None,
+            prompt_cache_key: None,
+            reasoning: None,
+            safety_identifier: None,
+            service_tier: None,
+            store: None,
+            stream: None,
+            stream_options: None,
+            temperature: None,
+            text: None,
+            tool_choice: None,
+            tools: None,
+            top_logprobs: None,
+            top_p: None,
+            truncation: None,
+            user: None,
+            extra: BTreeMap::new(),
+        }
+    }
+}
+
+impl Default for CreateResponseRequest {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct ResponseObject {
+    pub id: String,
+    pub object: String,
+
+    // Core
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub created_at: Option<i64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub model: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub status: Option<String>,
+
+    // Output
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub output: Option<Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub output_text: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub output_audio: Option<Value>,
+
+    // Control / reasons
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub stop_reason: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub refusal: Option<String>,
+
+    // Tools
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub tool_calls: Option<Value>,
+
+    // Misc
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub metadata: Option<Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub usage: Option<Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub system_fingerprint: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub service_tier: Option<String>,
+
+    // Errors / details
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub status_details: Option<Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub incomplete_details: Option<Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub error: Option<Value>,
+
+    // Future-proof
+    #[serde(flatten)]
+    pub extra: BTreeMap<String, Value>,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct ListResponses {
+    pub object: String,
+    pub data: Vec<ResponseObject>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub first_id: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub last_id: Option<String>,
+    pub has_more: bool,
+}
+
+// Get input token counts (POST /v1/responses/input_tokens)
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct CountTokensRequest {
+    // conversation
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub conversation: Option<Value>,
+
+    // input
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub input: Option<Value>,
+
+    // instructions
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub instructions: Option<String>,
+
+    // model
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub model: Option<String>,
+
+    // parallel_tool_calls
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub parallel_tool_calls: Option<bool>,
+
+    // previous_response_id
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub previous_response_id: Option<String>,
+
+    // reasoning
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub reasoning: Option<Value>,
+
+    // text
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub text: Option<Value>,
+
+    // tool_choice
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub tool_choice: Option<Value>,
+
+    // tools
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub tools: Option<Vec<Tools>>,
+
+    // truncation
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub truncation: Option<String>,
+
+    // Future-proof
+    #[serde(flatten)]
+    pub extra: BTreeMap<String, Value>,
+}
+
+impl CountTokensRequest {
+    pub fn new() -> Self {
+        Self {
+            conversation: None,
+            input: None,
+            instructions: None,
+            model: None,
+            parallel_tool_calls: None,
+            previous_response_id: None,
+            reasoning: None,
+            text: None,
+            tool_choice: None,
+            tools: None,
+            truncation: None,
+            extra: BTreeMap::new(),
+        }
+    }
+}
+
+impl Default for CountTokensRequest {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct CountTokensResponse {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub object: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub input_tokens: Option<i64>,
+    #[serde(flatten)]
+    pub extra: BTreeMap<String, Value>,
+}
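Reviewer note (not part of the patch): beyond examples/responses.rs, the sketch below shows how the remaining new client methods (retrieve, list input items, count tokens, delete) might be exercised together. It is a minimal, hypothetical example that assumes the signatures shown in src/v1/api.rs above, including the Option-typed pagination arguments on list_response_input_items; it is illustrative only.

use openai_api_rs::v1::api::OpenAIClient;
use openai_api_rs::v1::common::GPT4_1_MINI;
use openai_api_rs::v1::responses::{CountTokensRequest, CreateResponseRequest};
use serde_json::json;
use std::env;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let api_key = env::var("OPENAI_API_KEY")?;
    let mut client = OpenAIClient::builder().with_api_key(api_key).build()?;

    // Create a response, then exercise the other new endpoints with its id.
    let mut req = CreateResponseRequest::new();
    req.model = Some(GPT4_1_MINI.to_string());
    req.input = Some(json!("Write a haiku about Rust."));
    let created = client.create_response(req).await?;

    // Retrieve the stored response by id.
    let fetched = client.retrieve_response(created.id.clone()).await?;
    println!("status: {:?}", fetched.status);

    // List the input items behind it (after / limit / order are optional;
    // Some(10) assumes the Option<i64> limit parameter shown above).
    let items = client
        .list_response_input_items(created.id.clone(), None, Some(10), None)
        .await?;
    println!("input items: {}", items.data.len());

    // Count input tokens for a prospective request without storing a response.
    let mut count_req = CountTokensRequest::new();
    count_req.model = Some(GPT4_1_MINI.to_string());
    count_req.input = Some(json!("Write a haiku about Rust."));
    let counted = client.count_response_input_tokens(count_req).await?;
    println!("input tokens: {:?}", counted.input_tokens);

    // Clean up the stored response.
    client.delete_response(created.id).await?;
    Ok(())
}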