Normalize forced workspace config reads

Normalize legacy single-string forced_chatgpt_workspace_id values before converting ConfigToml into the app-server v2 Config response.

Add config/read coverage for existing configs that still use the legacy single-string workspace shape.
This commit is contained in:
rreichel3-oai
2026-05-12 12:24:31 -04:00
parent 1a7eeaaea8
commit 9200634ee6
2 changed files with 43 additions and 1 deletion

View File

@@ -17,6 +17,7 @@ use codex_config::ConfigLayerStack;
use codex_config::ConfigLayerStackOrdering;
use codex_config::ConfigRequirementsToml;
use codex_config::config_toml::ConfigToml;
use codex_config::config_toml::ForcedChatgptWorkspaceIds;
use codex_config::merge_toml_values;
use codex_core::config::deserialize_config_toml_with_base;
use codex_core::config::edit::ConfigEdit;
@@ -126,9 +127,16 @@ impl ConfigManager {
let effective = layers.effective_config();
let effective_config_toml: ConfigToml = effective
let mut effective_config_toml: ConfigToml = effective
.try_into()
.map_err(|err| ConfigManagerError::toml("invalid configuration", err))?;
// The API always returns the normalized list shape, even though config.toml
// still accepts the legacy single-string workspace form.
if let Some(workspace_ids) = effective_config_toml.forced_chatgpt_workspace_id.take() {
effective_config_toml.forced_chatgpt_workspace_id = Some(
ForcedChatgptWorkspaceIds::Multiple(workspace_ids.into_vec()),
);
}
let json_value = serde_json::to_value(&effective_config_toml)
.map_err(|err| ConfigManagerError::json("failed to serialize configuration", err))?;

View File

@@ -161,6 +161,40 @@ allowed_domains = ["example.com"]
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
// Regression test: a legacy single-string `forced_chatgpt_workspace_id` in
// config.toml must be surfaced by config/read in the normalized list shape.
async fn config_read_normalizes_legacy_forced_chatgpt_workspace_id() -> Result<()> {
// Fresh CODEX_HOME seeded with the legacy scalar (non-list) workspace form.
let codex_home = TempDir::new()?;
write_config(
&codex_home,
r#"
forced_chatgpt_workspace_id = "ws_123"
"#,
)?;
// Start the app-server process against that home and complete the
// initialize handshake before issuing requests.
let mut mcp = McpProcess::new(codex_home.path()).await?;
timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()).await??;
// Issue config/read without layer details; cwd: None lets the server
// pick its default working directory.
let request_id = mcp
.send_config_read_request(ConfigReadParams {
include_layers: false,
cwd: None,
})
.await?;
// Wait (bounded by DEFAULT_READ_TIMEOUT) for the response matching our
// request id; double `?` unwraps the timeout and the read result.
let resp: JSONRPCResponse = timeout(
DEFAULT_READ_TIMEOUT,
mcp.read_stream_until_response_message(RequestId::Integer(request_id)),
)
.await??;
let ConfigReadResponse { config, .. } = to_response(resp)?;
// The scalar "ws_123" must come back as a one-element list.
assert_eq!(
config.forced_chatgpt_workspace_id,
Some(vec!["ws_123".to_string()])
);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn config_read_includes_nested_web_search_tool_config() -> Result<()> {
let codex_home = TempDir::new()?;