From 69fcdedd6a4e2545b65103d6074d151e8cd4eeaf Mon Sep 17 00:00:00 2001
From: David Wiesen
Date: Fri, 3 Oct 2025 10:40:46 -0700
Subject: [PATCH 1/5] set gpt-5 as default model for Windows users.

---
 codex-rs/core/src/config.rs | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/codex-rs/core/src/config.rs b/codex-rs/core/src/config.rs
index afc9ff0f2c..6469eee033 100644
--- a/codex-rs/core/src/config.rs
+++ b/codex-rs/core/src/config.rs
@@ -42,6 +42,9 @@ use toml_edit::DocumentMut;
 use toml_edit::Item as TomlItem;
 use toml_edit::Table as TomlTable;
 
+#[cfg(target_os = "windows")]
+const OPENAI_DEFAULT_MODEL: &str = "gpt-5";
+#[cfg(not(target_os = "windows"))]
 const OPENAI_DEFAULT_MODEL: &str = "gpt-5-codex";
 const OPENAI_DEFAULT_REVIEW_MODEL: &str = "gpt-5-codex";
 pub const GPT_5_CODEX_MEDIUM_MODEL: &str = "gpt-5-codex";

From cbfd8db97cd903abad61f692256163cdc6ccd3ba Mon Sep 17 00:00:00 2001
From: David Wiesen
Date: Fri, 3 Oct 2025 11:17:02 -0700
Subject: [PATCH 2/5] update tests that make assertions on model name.

---
 codex-rs/core/src/config.rs                      |  4 ++--
 codex-rs/core/tests/suite/compact_resume_fork.rs | 12 +++++++-----
 2 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/codex-rs/core/src/config.rs b/codex-rs/core/src/config.rs
index 6469eee033..e40ed5b1a3 100644
--- a/codex-rs/core/src/config.rs
+++ b/codex-rs/core/src/config.rs
@@ -43,9 +43,9 @@ use toml_edit::Item as TomlItem;
 use toml_edit::Table as TomlTable;
 
 #[cfg(target_os = "windows")]
-const OPENAI_DEFAULT_MODEL: &str = "gpt-5";
+pub const OPENAI_DEFAULT_MODEL: &str = "gpt-5";
 #[cfg(not(target_os = "windows"))]
-const OPENAI_DEFAULT_MODEL: &str = "gpt-5-codex";
+pub const OPENAI_DEFAULT_MODEL: &str = "gpt-5-codex";
 const OPENAI_DEFAULT_REVIEW_MODEL: &str = "gpt-5-codex";
 pub const GPT_5_CODEX_MEDIUM_MODEL: &str = "gpt-5-codex";
 
diff --git a/codex-rs/core/tests/suite/compact_resume_fork.rs b/codex-rs/core/tests/suite/compact_resume_fork.rs
index 690e1aab4d..8197bed10f 100644
--- a/codex-rs/core/tests/suite/compact_resume_fork.rs
+++ b/codex-rs/core/tests/suite/compact_resume_fork.rs
@@ -17,6 +17,7 @@ use codex_core::NewConversation;
 use codex_core::built_in_model_providers;
 use codex_core::codex::compact::SUMMARIZATION_PROMPT;
 use codex_core::config::Config;
+use codex_core::config::OPENAI_DEFAULT_MODEL;
 use codex_core::protocol::ConversationPathResponseEvent;
 use codex_core::protocol::EventMsg;
 use codex_core::protocol::InputItem;
@@ -131,9 +132,10 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
         .as_str()
         .unwrap_or_default()
         .to_string();
+    let expected_model = OPENAI_DEFAULT_MODEL;
 
     let user_turn_1 = json!( {
-        "model": "gpt-5-codex",
+        "model": expected_model,
         "instructions": prompt,
         "input": [
             {
@@ -182,7 +184,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
     });
 
     let compact_1 = json!( {
-        "model": "gpt-5-codex",
+        "model": expected_model,
         "instructions": prompt,
         "input": [
             {
@@ -251,7 +253,7 @@ async fn compact_resume_and_fork_preserve_model_history_view() {
     });
 
     let user_turn_2_after_compact = json!( {
-        "model": "gpt-5-codex",
+        "model": expected_model,
         "instructions": prompt,
         "input": [
             {
@@ -316,7 +318,7 @@ SUMMARY_ONLY_CONTEXT"
     });
 
     let usert_turn_3_after_resume = json!( {
-        "model": "gpt-5-codex",
+        "model": expected_model,
         "instructions": prompt,
         "input": [
             {
@@ -401,7 +403,7 @@ SUMMARY_ONLY_CONTEXT"
     });
 
     let user_turn_3_after_fork = json!( {
-        "model": "gpt-5-codex",
+        "model": expected_model,
         "instructions": prompt,
         "input": [
             {
From 1179add1e5e45632392d04f29f88f22c57ce1735 Mon Sep 17 00:00:00 2001
From: David Wiesen
Date: Fri, 3 Oct 2025 12:14:08 -0700
Subject: [PATCH 3/5] specify correct list of tools for default model.

---
 codex-rs/core/tests/suite/prompt_caching.rs | 29 ++++++++++++++++-----
 1 file changed, 22 insertions(+), 7 deletions(-)

diff --git a/codex-rs/core/tests/suite/prompt_caching.rs b/codex-rs/core/tests/suite/prompt_caching.rs
index bc66be18f6..21df40b72d 100644
--- a/codex-rs/core/tests/suite/prompt_caching.rs
+++ b/codex-rs/core/tests/suite/prompt_caching.rs
@@ -4,6 +4,7 @@ use codex_core::CodexAuth;
 use codex_core::ConversationManager;
 use codex_core::ModelProviderInfo;
 use codex_core::built_in_model_providers;
+use codex_core::config::OPENAI_DEFAULT_MODEL;
 use codex_core::model_family::find_family_for_model;
 use codex_core::protocol::AskForApproval;
 use codex_core::protocol::EventMsg;
@@ -18,6 +19,7 @@ use core_test_support::load_default_config_for_test;
 use core_test_support::load_sse_fixture_with_id;
 use core_test_support::skip_if_no_network;
 use core_test_support::wait_for_event;
+use std::collections::HashMap;
 use tempfile::TempDir;
 use wiremock::Mock;
 use wiremock::MockServer;
@@ -219,13 +221,26 @@ async fn prompt_tools_are_consistent_across_requests() {
 
     // our internal implementation is responsible for keeping tools in sync
     // with the OpenAI schema, so we just verify the tool presence here
-    let expected_tools_names: &[&str] = &[
-        "shell",
-        "update_plan",
-        "apply_patch",
-        "read_file",
-        "view_image",
-    ];
+    let tools_by_model: HashMap<&'static str, Vec<&'static str>> = HashMap::from([
+        (
+            "gpt-5",
+            vec!["shell", "update_plan", "apply_patch", "view_image"],
+        ),
+        (
+            "gpt-5-codex",
+            vec![
+                "shell",
+                "update_plan",
+                "apply_patch",
+                "read_file",
+                "view_image",
+            ],
+        ),
+    ]);
+    let expected_tools_names = tools_by_model
+        .get(OPENAI_DEFAULT_MODEL)
+        .unwrap_or_else(|| panic!("expected tools to be defined for model {OPENAI_DEFAULT_MODEL}"))
+        .as_slice();
     let body0 = requests[0].body_json::<serde_json::Value>().unwrap();
     assert_eq!(
         body0["instructions"],

From 7719738384881d548361eb2f67a9233a9f0a691f Mon Sep 17 00:00:00 2001
From: David Wiesen
Date: Fri, 3 Oct 2025 12:49:21 -0700
Subject: [PATCH 4/5] skip gpt-5-codex specific snapshot if the default model is something else.

---
 codex-rs/tui/src/chatwidget/tests.rs | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/codex-rs/tui/src/chatwidget/tests.rs b/codex-rs/tui/src/chatwidget/tests.rs
index 7a8015b185..6f2a5d9ebe 100644
--- a/codex-rs/tui/src/chatwidget/tests.rs
+++ b/codex-rs/tui/src/chatwidget/tests.rs
@@ -8,6 +8,7 @@ use codex_core::CodexAuth;
 use codex_core::config::Config;
 use codex_core::config::ConfigOverrides;
 use codex_core::config::ConfigToml;
+use codex_core::config::OPENAI_DEFAULT_MODEL;
 use codex_core::protocol::AgentMessageDeltaEvent;
 use codex_core::protocol::AgentMessageEvent;
 use codex_core::protocol::AgentReasoningDeltaEvent;
@@ -1101,6 +1102,12 @@ fn disabled_slash_command_while_task_running_snapshot() {
 
 #[tokio::test(flavor = "current_thread")]
 async fn binary_size_transcript_snapshot() {
+    if OPENAI_DEFAULT_MODEL != "gpt-5-codex" {
+        eprintln!(
+            "skipping binary_size_transcript_snapshot: unsupported default model {OPENAI_DEFAULT_MODEL}"
+        );
+        return;
+    }
     let (mut chat, mut rx, _op_rx) = make_chatwidget_manual();
 
     // Set up a VT100 test terminal to capture ANSI visual output

From 67e25a96949bbedca217c18c576f572fd277753c Mon Sep 17 00:00:00 2001
From: David Wiesen
Date: Fri, 3 Oct 2025 12:52:37 -0700
Subject: [PATCH 5/5] don't eprintln in test.

---
 codex-rs/tui/src/chatwidget/tests.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/codex-rs/tui/src/chatwidget/tests.rs b/codex-rs/tui/src/chatwidget/tests.rs
index 6f2a5d9ebe..4c2c4399fe 100644
--- a/codex-rs/tui/src/chatwidget/tests.rs
+++ b/codex-rs/tui/src/chatwidget/tests.rs
@@ -1102,10 +1102,9 @@ fn disabled_slash_command_while_task_running_snapshot() {
 
 #[tokio::test(flavor = "current_thread")]
 async fn binary_size_transcript_snapshot() {
+    // the snapshot in this test depends on gpt-5-codex. Skip for now. We will consider
+    // creating snapshots for other models in the future.
     if OPENAI_DEFAULT_MODEL != "gpt-5-codex" {
-        eprintln!(
-            "skipping binary_size_transcript_snapshot: unsupported default model {OPENAI_DEFAULT_MODEL}"
-        );
         return;
     }
     let (mut chat, mut rx, _op_rx) = make_chatwidget_manual();