5 changes: 4 additions & 1 deletion codex-rs/core/src/config.rs
@@ -42,7 +42,10 @@ use toml_edit::DocumentMut;
use toml_edit::Item as TomlItem;
use toml_edit::Table as TomlTable;

-const OPENAI_DEFAULT_MODEL: &str = "gpt-5-codex";
+#[cfg(target_os = "windows")]
+pub const OPENAI_DEFAULT_MODEL: &str = "gpt-5";
+#[cfg(not(target_os = "windows"))]
+pub const OPENAI_DEFAULT_MODEL: &str = "gpt-5-codex";
const OPENAI_DEFAULT_REVIEW_MODEL: &str = "gpt-5-codex";
pub const GPT_5_CODEX_MEDIUM_MODEL: &str = "gpt-5-codex";

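Note on the config.rs change above: the whole PR hangs on this cfg-gated default. A minimal, self-contained sketch of the mechanism, using a hypothetical `DEFAULT_MODEL` constant rather than the crate's real code:

```rust
// Sketch: cfg-gated constants. Exactly one definition survives compilation,
// so there is no runtime branch and no duplicate-symbol conflict.
#[cfg(target_os = "windows")]
pub const DEFAULT_MODEL: &str = "gpt-5";
#[cfg(not(target_os = "windows"))]
pub const DEFAULT_MODEL: &str = "gpt-5-codex";

fn main() {
    // Prints "gpt-5" on Windows builds and "gpt-5-codex" everywhere else.
    println!("default model: {DEFAULT_MODEL}");
}
```

Because the choice happens at compile time, every test that hard-codes "gpt-5-codex" becomes platform-dependent, which is what the test changes below address.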
12 changes: 7 additions & 5 deletions codex-rs/core/tests/suite/compact_resume_fork.rs
@@ -17,6 +17,7 @@ use codex_core::NewConversation;
use codex_core::built_in_model_providers;
use codex_core::codex::compact::SUMMARIZATION_PROMPT;
use codex_core::config::Config;
+use codex_core::config::OPENAI_DEFAULT_MODEL;
use codex_core::protocol::ConversationPathResponseEvent;
use codex_core::protocol::EventMsg;
use codex_core::protocol::InputItem;
@@ -131,9 +132,10 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
.as_str()
.unwrap_or_default()
.to_string();
+let expected_model = OPENAI_DEFAULT_MODEL;
let user_turn_1 = json!(
{
"model": "gpt-5-codex",
"model": expected_model,
"instructions": prompt,
"input": [
{
@@ -182,7 +184,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
});
let compact_1 = json!(
{
"model": "gpt-5-codex",
"model": expected_model,
"instructions": prompt,
"input": [
{
@@ -251,7 +253,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
});
let user_turn_2_after_compact = json!(
{
"model": "gpt-5-codex",
"model": expected_model,
"instructions": prompt,
"input": [
{
@@ -316,7 +318,7 @@ SUMMARY_ONLY_CONTEXT"
});
let usert_turn_3_after_resume = json!(
{
"model": "gpt-5-codex",
"model": expected_model,
"instructions": prompt,
"input": [
{
@@ -401,7 +403,7 @@ SUMMARY_ONLY_CONTEXT"
});
let user_turn_3_after_fork = json!(
{
"model": "gpt-5-codex",
"model": expected_model,
"instructions": prompt,
"input": [
{
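Note on the fixture changes above: they rely on `serde_json::json!` accepting arbitrary Rust expressions as values, so the expected request bodies track `OPENAI_DEFAULT_MODEL` instead of hard-coding a model string. A tiny sketch under that assumption (stand-in constant, not the test's real setup):

```rust
use serde_json::json;

fn main() {
    let expected_model = "gpt-5-codex"; // stand-in for OPENAI_DEFAULT_MODEL
    let user_turn = json!({
        "model": expected_model,
        "instructions": "prompt goes here",
    });
    // The fixture now follows whatever default the platform compiled in.
    assert_eq!(user_turn["model"], expected_model);
}
```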
29 changes: 22 additions & 7 deletions codex-rs/core/tests/suite/prompt_caching.rs
@@ -4,6 +4,7 @@ use codex_core::CodexAuth;
use codex_core::ConversationManager;
use codex_core::ModelProviderInfo;
use codex_core::built_in_model_providers;
+use codex_core::config::OPENAI_DEFAULT_MODEL;
use codex_core::model_family::find_family_for_model;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::EventMsg;
@@ -18,6 +19,7 @@ use core_test_support::load_default_config_for_test;
use core_test_support::load_sse_fixture_with_id;
use core_test_support::skip_if_no_network;
use core_test_support::wait_for_event;
+use std::collections::HashMap;
use tempfile::TempDir;
use wiremock::Mock;
use wiremock::MockServer;
@@ -219,13 +221,26 @@ async fn prompt_tools_are_consistent_across_requests() {

// our internal implementation is responsible for keeping tools in sync
// with the OpenAI schema, so we just verify the tool presence here
-let expected_tools_names: &[&str] = &[
-    "shell",
-    "update_plan",
-    "apply_patch",
-    "read_file",
-    "view_image",
-];
+let tools_by_model: HashMap<&'static str, Vec<&'static str>> = HashMap::from([
+    (
+        "gpt-5",
+        vec!["shell", "update_plan", "apply_patch", "view_image"],
+    ),
+    (
+        "gpt-5-codex",
+        vec![
+            "shell",
+            "update_plan",
+            "apply_patch",
+            "read_file",
+            "view_image",
+        ],
+    ),
+]);
+let expected_tools_names = tools_by_model
+    .get(OPENAI_DEFAULT_MODEL)
+    .unwrap_or_else(|| panic!("expected tools to be defined for model {OPENAI_DEFAULT_MODEL}"))
+    .as_slice();
let body0 = requests[0].body_json::<serde_json::Value>().unwrap();
assert_eq!(
body0["instructions"],
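Note on the prompt_caching.rs change above: keying expected tool lists by model and panicking on a missing entry keeps the test self-diagnosing when a new default model lands without an expectation. A condensed, runnable sketch of the same table-driven pattern (hypothetical `expected_tools` helper, not the test's code):

```rust
use std::collections::HashMap;

// Look up the tool list expected for `model`, failing loudly when the
// table has no entry rather than silently asserting against nothing.
fn expected_tools(model: &str) -> Vec<&'static str> {
    let table: HashMap<&str, Vec<&'static str>> = HashMap::from([
        ("gpt-5", vec!["shell", "update_plan", "apply_patch", "view_image"]),
        (
            "gpt-5-codex",
            vec!["shell", "update_plan", "apply_patch", "read_file", "view_image"],
        ),
    ]);
    table
        .get(model)
        .cloned()
        .unwrap_or_else(|| panic!("expected tools to be defined for model {model}"))
}

fn main() {
    assert!(expected_tools("gpt-5-codex").contains(&"read_file"));
    assert!(!expected_tools("gpt-5").contains(&"read_file"));
}
```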
6 changes: 6 additions & 0 deletions codex-rs/tui/src/chatwidget/tests.rs
@@ -8,6 +8,7 @@ use codex_core::CodexAuth;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
+use codex_core::config::OPENAI_DEFAULT_MODEL;
use codex_core::protocol::AgentMessageDeltaEvent;
use codex_core::protocol::AgentMessageEvent;
use codex_core::protocol::AgentReasoningDeltaEvent;
@@ -1101,6 +1102,11 @@ fn disabled_slash_command_while_task_running_snapshot() {

#[tokio::test(flavor = "current_thread")]
async fn binary_size_transcript_snapshot() {
+// the snapshot in this test depends on gpt-5-codex. Skip for now. We will consider
+// creating snapshots for other models in the future.
+if OPENAI_DEFAULT_MODEL != "gpt-5-codex" {
+    return;
+}
let (mut chat, mut rx, _op_rx) = make_chatwidget_manual();

// Set up a VT100 test terminal to capture ANSI visual output
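Note on the guard above: the early `return` makes the snapshot test pass vacuously whenever the default model is not gpt-5-codex. A possible alternative, sketched under the assumption that the divergence only occurs on Windows (where config.rs switches the default), is to mark the test ignored so the skip is visible in `cargo test` output:

```rust
#[tokio::test(flavor = "current_thread")]
#[cfg_attr(target_os = "windows", ignore = "snapshot assumes gpt-5-codex")]
async fn binary_size_transcript_snapshot() {
    // ...test body unchanged...
}
```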