Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion codex-rs/codex-mcp/src/mcp/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,8 @@ pub struct McpConfig {
pub chatgpt_base_url: String,
/// Optional path override for the host-owned apps MCP server.
pub apps_mcp_path_override: Option<String>,
/// Optional product SKU forwarded to the host-owned apps MCP server.
pub apps_mcp_product_sku: Option<String>,
/// Codex home directory used for MCP OAuth state and app-tool cache files.
pub codex_home: PathBuf,
/// Preferred credential store for MCP OAuth tokens.
Expand Down Expand Up @@ -427,12 +429,15 @@ fn codex_apps_mcp_url_for_base_url(base_url: &str, apps_mcp_path_override: Optio

fn codex_apps_mcp_server_config(config: &McpConfig) -> McpServerConfig {
let url = codex_apps_mcp_url(config);
let http_headers = config.apps_mcp_product_sku.as_ref().map(|product_sku| {
HashMap::from([("X-OpenAI-Product-Sku".to_string(), product_sku.clone())])
});

McpServerConfig {
transport: McpServerTransportConfig::StreamableHttp {
url,
bearer_token_env_var: codex_apps_mcp_bearer_token_env_var(),
http_headers: None,
http_headers,
env_http_headers: None,
},
experimental_environment: None,
Expand Down
35 changes: 35 additions & 0 deletions codex-rs/codex-mcp/src/mcp/mod_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ fn test_mcp_config(codex_home: PathBuf) -> McpConfig {
McpConfig {
chatgpt_base_url: "https://chatgpt.com".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
codex_home,
mcp_oauth_credentials_store_mode: OAuthCredentialsStoreMode::default(),
mcp_oauth_callback_port: None,
Expand Down Expand Up @@ -251,6 +252,40 @@ fn codex_apps_server_config_uses_configured_apps_mcp_path_override() {
assert_eq!(url, "https://chatgpt.com/backend-api/custom/mcp");
}

/// A configured `apps_mcp_product_sku` must surface as an
/// `X-OpenAI-Product-Sku` HTTP header on the Codex Apps streamable-HTTP
/// transport, and must not touch the env-sourced headers.
#[test]
fn codex_apps_server_config_forwards_configured_product_sku_header() {
    let mut mcp_config = test_mcp_config(PathBuf::from("/tmp"));
    mcp_config.apps_mcp_product_sku = Some("tpp".to_string());
    mcp_config.apps_enabled = true;
    let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();

    let servers = with_codex_apps_mcp(HashMap::new(), Some(&auth), &mcp_config);
    // Chain the lookups: the apps server must exist and must carry a
    // configured (not dynamically discovered) transport.
    let server_config = servers
        .get(CODEX_APPS_MCP_SERVER_NAME)
        .expect("codex apps should be present when apps is enabled")
        .configured_config()
        .expect("codex apps should use configured transport");

    let McpServerTransportConfig::StreamableHttp {
        http_headers,
        env_http_headers,
        ..
    } = &server_config.transport
    else {
        panic!(
            "expected streamable http transport, got {:?}",
            server_config.transport
        );
    };

    let expected_headers =
        HashMap::from([("X-OpenAI-Product-Sku".to_string(), "tpp".to_string())]);
    assert_eq!(http_headers.as_ref(), Some(&expected_headers));
    assert!(env_http_headers.is_none());
}

#[tokio::test]
async fn effective_mcp_servers_preserve_user_servers_and_add_codex_apps() {
let codex_home = tempfile::tempdir().expect("tempdir");
Expand Down
3 changes: 3 additions & 0 deletions codex-rs/config/src/config_toml.rs
Original file line number Diff line number Diff line change
Expand Up @@ -359,6 +359,9 @@ pub struct ConfigToml {
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
pub chatgpt_base_url: Option<String>,

/// Optional product SKU forwarded on host-owned Codex Apps MCP requests.
pub apps_mcp_product_sku: Option<String>,

/// Base URL override for the built-in `openai` model provider.
pub openai_base_url: Option<String>,

Expand Down
1 change: 1 addition & 0 deletions codex-rs/config/src/loader/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ const DEFAULT_PROGRAM_DATA_DIR_WINDOWS: &str = r"C:\ProgramData";
const PROJECT_LOCAL_CONFIG_DENYLIST: &[&str] = &[
"openai_base_url",
"chatgpt_base_url",
"apps_mcp_product_sku",
"model_provider",
"model_providers",
"notify",
Expand Down
4 changes: 4 additions & 0 deletions codex-rs/core/config.schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -4016,6 +4016,10 @@
"default": null,
"description": "Settings for app-specific controls."
},
"apps_mcp_product_sku": {
"description": "Optional product SKU forwarded on host-owned Codex Apps MCP requests.",
"type": "string"
},
"audio": {
"allOf": [
{
Expand Down
2 changes: 2 additions & 0 deletions codex-rs/core/src/config/config_loader_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2353,6 +2353,7 @@ model = "project-model"
model_instructions_file = "instructions.md"
openai_base_url = "https://attacker.example/v1"
chatgpt_base_url = "https://attacker.example/backend-api"
apps_mcp_product_sku = "attacker"
model_provider = "attacker"
notify = ["sh", "-c", "echo attacker"]
profile = "attacker"
Expand Down Expand Up @@ -2404,6 +2405,7 @@ wire_api = "responses"
let ignored_project_config_keys = vec![
"openai_base_url",
"chatgpt_base_url",
"apps_mcp_product_sku",
"model_provider",
"model_providers",
"notify",
Expand Down
27 changes: 27 additions & 0 deletions codex-rs/core/src/config/config_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4790,12 +4790,14 @@ async fn to_mcp_config_preserves_apps_feature_from_config() -> std::io::Result<(
let plugins_manager = PluginsManager::new(codex_home.path().to_path_buf());

config.apps_mcp_path_override = Some("/custom/mcp".to_string());
config.apps_mcp_product_sku = Some("tpp".to_string());
let mcp_config = config.to_mcp_config(&plugins_manager).await;
assert!(mcp_config.apps_enabled);
assert_eq!(
mcp_config.apps_mcp_path_override.as_deref(),
Some("/custom/mcp")
);
assert_eq!(mcp_config.apps_mcp_product_sku.as_deref(), Some("tpp"));

let _ = config.features.disable(Feature::Apps);
let mcp_config = config.to_mcp_config(&plugins_manager).await;
Expand Down Expand Up @@ -7607,6 +7609,7 @@ async fn test_precedence_fixture_with_o3_profile() -> std::io::Result<()> {
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
realtime_audio: RealtimeAudioConfig::default(),
experimental_realtime_start_instructions: None,
experimental_realtime_ws_base_url: None,
Expand Down Expand Up @@ -8057,6 +8060,7 @@ async fn test_precedence_fixture_with_gpt3_profile() -> std::io::Result<()> {
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
realtime_audio: RealtimeAudioConfig::default(),
experimental_realtime_start_instructions: None,
experimental_realtime_ws_base_url: None,
Expand Down Expand Up @@ -8221,6 +8225,7 @@ async fn test_precedence_fixture_with_zdr_profile() -> std::io::Result<()> {
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
realtime_audio: RealtimeAudioConfig::default(),
experimental_realtime_start_instructions: None,
experimental_realtime_ws_base_url: None,
Expand Down Expand Up @@ -8370,6 +8375,7 @@ async fn test_precedence_fixture_with_gpt5_profile() -> std::io::Result<()> {
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
realtime_audio: RealtimeAudioConfig::default(),
experimental_realtime_start_instructions: None,
experimental_realtime_ws_base_url: None,
Expand Down Expand Up @@ -9092,6 +9098,27 @@ path = "/custom/mcp"
Ok(())
}

/// Loading a config whose TOML sets `apps_mcp_product_sku` must carry the
/// value through deserialization and override resolution unchanged.
#[tokio::test]
async fn config_loads_apps_mcp_product_sku_from_toml() -> std::io::Result<()> {
    let codex_home = TempDir::new()?;

    // Raw TOML exercising the new key alongside an unrelated setting.
    let raw = r#"
model = "gpt-5.4"
apps_mcp_product_sku = "tpp"
"#;
    let parsed: ConfigToml =
        toml::from_str(raw).expect("TOML deserialization should succeed for apps MCP SKU");

    let config = Config::load_from_base_config_with_overrides(
        parsed,
        ConfigOverrides::default(),
        codex_home.abs(),
    )
    .await?;

    assert_eq!(config.apps_mcp_product_sku.as_deref(), Some("tpp"));
    Ok(())
}

#[tokio::test]
async fn config_loads_mcp_oauth_callback_url_from_toml() -> std::io::Result<()> {
let codex_home = TempDir::new()?;
Expand Down
5 changes: 5 additions & 0 deletions codex-rs/core/src/config/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -820,6 +820,9 @@ pub struct Config {
/// Optional path override for the host-owned apps MCP server.
pub apps_mcp_path_override: Option<String>,

/// Optional product SKU forwarded to the host-owned apps MCP server.
pub apps_mcp_product_sku: Option<String>,

/// Machine-local realtime audio device preferences used by realtime voice.
pub realtime_audio: RealtimeAudioConfig,

Expand Down Expand Up @@ -1236,6 +1239,7 @@ impl Config {
McpConfig {
chatgpt_base_url: self.chatgpt_base_url.clone(),
apps_mcp_path_override: self.apps_mcp_path_override.clone(),
apps_mcp_product_sku: self.apps_mcp_product_sku.clone(),
codex_home: self.codex_home.to_path_buf(),
mcp_oauth_credentials_store_mode: self.mcp_oauth_credentials_store_mode,
mcp_oauth_callback_port: self.mcp_oauth_callback_port,
Expand Down Expand Up @@ -3353,6 +3357,7 @@ impl Config {
.or(cfg.chatgpt_base_url)
.unwrap_or("https://chatgpt.com/backend-api/".to_string()),
apps_mcp_path_override,
apps_mcp_product_sku: cfg.apps_mcp_product_sku.clone(),
realtime_audio: cfg
.audio
.map_or_else(RealtimeAudioConfig::default, |audio| RealtimeAudioConfig {
Expand Down
1 change: 1 addition & 0 deletions codex-rs/thread-manager-sample/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -246,6 +246,7 @@ fn new_config(model: Option<String>, arg0_paths: Arg0DispatchPaths) -> anyhow::R
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
realtime_audio: RealtimeAudioConfig::default(),
experimental_realtime_ws_base_url: None,
experimental_realtime_ws_model: None,
Expand Down
Loading