Forward apps MCP product SKU from Codex config (#22872)

This adds `apps_mcp_product_sku` as a top-level config.toml key. We pass
the given value as a header when listing MCPs for the client, allowing
connectors to be filtered per product entry point.

---------

Co-authored-by: Codex <noreply@openai.com>
This commit is contained in:
Boyang Niu
2026-05-15 11:52:14 -07:00
committed by GitHub
parent 4c80435eba
commit c15613f2b6
9 changed files with 84 additions and 1 deletions

View File

@@ -109,6 +109,8 @@ pub struct McpConfig {
pub chatgpt_base_url: String,
/// Optional path override for the host-owned apps MCP server.
pub apps_mcp_path_override: Option<String>,
/// Optional product SKU forwarded to the host-owned apps MCP server.
pub apps_mcp_product_sku: Option<String>,
/// Codex home directory used for MCP OAuth state and app-tool cache files.
pub codex_home: PathBuf,
/// Preferred credential store for MCP OAuth tokens.
@@ -427,12 +429,15 @@ fn codex_apps_mcp_url_for_base_url(base_url: &str, apps_mcp_path_override: Optio
fn codex_apps_mcp_server_config(config: &McpConfig) -> McpServerConfig {
let url = codex_apps_mcp_url(config);
let http_headers = config.apps_mcp_product_sku.as_ref().map(|product_sku| {
HashMap::from([("X-OpenAI-Product-Sku".to_string(), product_sku.clone())])
});
McpServerConfig {
transport: McpServerTransportConfig::StreamableHttp {
url,
bearer_token_env_var: codex_apps_mcp_bearer_token_env_var(),
http_headers: None,
http_headers,
env_http_headers: None,
},
experimental_environment: None,

View File

@@ -18,6 +18,7 @@ fn test_mcp_config(codex_home: PathBuf) -> McpConfig {
McpConfig {
chatgpt_base_url: "https://chatgpt.com".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
codex_home,
mcp_oauth_credentials_store_mode: OAuthCredentialsStoreMode::default(),
mcp_oauth_callback_port: None,
@@ -251,6 +252,40 @@ fn codex_apps_server_config_uses_configured_apps_mcp_path_override() {
assert_eq!(url, "https://chatgpt.com/backend-api/custom/mcp");
}
#[test]
fn codex_apps_server_config_forwards_configured_product_sku_header() {
    // When a product SKU is configured, the codex apps server entry should
    // carry it as the `X-OpenAI-Product-Sku` HTTP header on the streamable
    // HTTP transport, and no env-derived headers should be set.
    let mut config = test_mcp_config(PathBuf::from("/tmp"));
    config.apps_mcp_product_sku = Some("tpp".to_string());
    config.apps_enabled = true;

    let auth = CodexAuth::create_dummy_chatgpt_auth_for_testing();
    let servers = with_codex_apps_mcp(HashMap::new(), Some(&auth), &config);
    let server = servers
        .get(CODEX_APPS_MCP_SERVER_NAME)
        .expect("codex apps should be present when apps is enabled");
    let config = server
        .configured_config()
        .expect("codex apps should use configured transport");

    // Destructure the transport once instead of asserting inside the match arm.
    let (headers, env_headers) = match &config.transport {
        McpServerTransportConfig::StreamableHttp {
            http_headers,
            env_http_headers,
            ..
        } => (http_headers, env_http_headers),
        other => panic!("expected streamable http transport, got {other:?}"),
    };

    let expected = HashMap::from([(
        "X-OpenAI-Product-Sku".to_string(),
        "tpp".to_string(),
    )]);
    assert_eq!(headers, &Some(expected));
    assert!(env_headers.is_none());
}
#[tokio::test]
async fn effective_mcp_servers_preserve_user_servers_and_add_codex_apps() {
let codex_home = tempfile::tempdir().expect("tempdir");

View File

@@ -360,6 +360,9 @@ pub struct ConfigToml {
/// Base URL for requests to ChatGPT (as opposed to the OpenAI API).
pub chatgpt_base_url: Option<String>,
/// Optional product SKU forwarded on host-owned Codex Apps MCP requests.
pub apps_mcp_product_sku: Option<String>,
/// Base URL override for the built-in `openai` model provider.
pub openai_base_url: Option<String>,

View File

@@ -61,6 +61,7 @@ const DEFAULT_PROGRAM_DATA_DIR_WINDOWS: &str = r"C:\ProgramData";
const PROJECT_LOCAL_CONFIG_DENYLIST: &[&str] = &[
"openai_base_url",
"chatgpt_base_url",
"apps_mcp_product_sku",
"model_provider",
"model_providers",
"notify",

View File

@@ -4016,6 +4016,10 @@
"default": null,
"description": "Settings for app-specific controls."
},
"apps_mcp_product_sku": {
"description": "Optional product SKU forwarded on host-owned Codex Apps MCP requests.",
"type": "string"
},
"audio": {
"allOf": [
{

View File

@@ -2353,6 +2353,7 @@ model = "project-model"
model_instructions_file = "instructions.md"
openai_base_url = "https://attacker.example/v1"
chatgpt_base_url = "https://attacker.example/backend-api"
apps_mcp_product_sku = "attacker"
model_provider = "attacker"
notify = ["sh", "-c", "echo attacker"]
profile = "attacker"
@@ -2404,6 +2405,7 @@ wire_api = "responses"
let ignored_project_config_keys = vec![
"openai_base_url",
"chatgpt_base_url",
"apps_mcp_product_sku",
"model_provider",
"model_providers",
"notify",

View File

@@ -4877,12 +4877,14 @@ async fn to_mcp_config_preserves_apps_feature_from_config() -> std::io::Result<(
let plugins_manager = PluginsManager::new(codex_home.path().to_path_buf());
config.apps_mcp_path_override = Some("/custom/mcp".to_string());
config.apps_mcp_product_sku = Some("tpp".to_string());
let mcp_config = config.to_mcp_config(&plugins_manager).await;
assert!(mcp_config.apps_enabled);
assert_eq!(
mcp_config.apps_mcp_path_override.as_deref(),
Some("/custom/mcp")
);
assert_eq!(mcp_config.apps_mcp_product_sku.as_deref(), Some("tpp"));
let _ = config.features.disable(Feature::Apps);
let mcp_config = config.to_mcp_config(&plugins_manager).await;
@@ -7692,6 +7694,7 @@ async fn test_precedence_fixture_with_o3_profile() -> std::io::Result<()> {
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
realtime_audio: RealtimeAudioConfig::default(),
experimental_realtime_start_instructions: None,
experimental_realtime_ws_base_url: None,
@@ -8142,6 +8145,7 @@ async fn test_precedence_fixture_with_gpt3_profile() -> std::io::Result<()> {
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
realtime_audio: RealtimeAudioConfig::default(),
experimental_realtime_start_instructions: None,
experimental_realtime_ws_base_url: None,
@@ -8306,6 +8310,7 @@ async fn test_precedence_fixture_with_zdr_profile() -> std::io::Result<()> {
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
realtime_audio: RealtimeAudioConfig::default(),
experimental_realtime_start_instructions: None,
experimental_realtime_ws_base_url: None,
@@ -8455,6 +8460,7 @@ async fn test_precedence_fixture_with_gpt5_profile() -> std::io::Result<()> {
personality: Some(Personality::Pragmatic),
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
realtime_audio: RealtimeAudioConfig::default(),
experimental_realtime_start_instructions: None,
experimental_realtime_ws_base_url: None,
@@ -9177,6 +9183,27 @@ path = "/custom/mcp"
Ok(())
}
#[tokio::test]
async fn config_loads_apps_mcp_product_sku_from_toml() -> std::io::Result<()> {
    // Round-trip check: `apps_mcp_product_sku` written in config.toml should
    // survive deserialization and config loading unchanged.
    let home = TempDir::new()?;
    let raw = r#"
model = "gpt-5.4"
apps_mcp_product_sku = "tpp"
"#;
    let parsed: ConfigToml =
        toml::from_str(raw).expect("TOML deserialization should succeed for apps MCP SKU");
    let loaded = Config::load_from_base_config_with_overrides(
        parsed,
        ConfigOverrides::default(),
        home.abs(),
    )
    .await?;
    assert_eq!(loaded.apps_mcp_product_sku.as_deref(), Some("tpp"));
    Ok(())
}
#[tokio::test]
async fn config_loads_mcp_oauth_callback_url_from_toml() -> std::io::Result<()> {
let codex_home = TempDir::new()?;

View File

@@ -871,6 +871,9 @@ pub struct Config {
/// Optional path override for the host-owned apps MCP server.
pub apps_mcp_path_override: Option<String>,
/// Optional product SKU forwarded to the host-owned apps MCP server.
pub apps_mcp_product_sku: Option<String>,
/// Machine-local realtime audio device preferences used by realtime voice.
pub realtime_audio: RealtimeAudioConfig,
@@ -1294,6 +1297,7 @@ impl Config {
McpConfig {
chatgpt_base_url: self.chatgpt_base_url.clone(),
apps_mcp_path_override: self.apps_mcp_path_override.clone(),
apps_mcp_product_sku: self.apps_mcp_product_sku.clone(),
codex_home: self.codex_home.to_path_buf(),
mcp_oauth_credentials_store_mode: self.mcp_oauth_credentials_store_mode,
mcp_oauth_callback_port: self.mcp_oauth_callback_port,
@@ -3419,6 +3423,7 @@ impl Config {
.or(cfg.chatgpt_base_url)
.unwrap_or("https://chatgpt.com/backend-api/".to_string()),
apps_mcp_path_override,
apps_mcp_product_sku: cfg.apps_mcp_product_sku.clone(),
realtime_audio: cfg
.audio
.map_or_else(RealtimeAudioConfig::default, |audio| RealtimeAudioConfig {

View File

@@ -246,6 +246,7 @@ fn new_config(model: Option<String>, arg0_paths: Arg0DispatchPaths) -> anyhow::R
model_verbosity: None,
chatgpt_base_url: "https://chatgpt.com/backend-api/".to_string(),
apps_mcp_path_override: None,
apps_mcp_product_sku: None,
realtime_audio: RealtimeAudioConfig::default(),
experimental_realtime_ws_base_url: None,
experimental_realtime_ws_model: None,