# PR #2799: Persist model & reasoning changes - URL: https://github.com/openai/codex/pull/2799 - Author: dedrisian-oai - Created: 2025-08-27 22:43:27 UTC - Updated: 2025-09-02 02:28:52 UTC - Changes: +380/-7, Files changed: 4, Commits: 9 ## Description Persists `/model` changes across both general and profile-specific sessions. ## Full Diff ```diff diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs index d7c39c14a5..6de04b3052 100644 --- a/codex-rs/core/src/codex.rs +++ b/codex-rs/core/src/codex.rs @@ -41,6 +41,7 @@ use crate::client::ModelClient; use crate::client_common::Prompt; use crate::client_common::ResponseEvent; use crate::config::Config; +use crate::config::set_default_model_and_effort_for_profile; use crate::config_types::ShellEnvironmentPolicy; use crate::conversation_history::ConversationHistory; use crate::environment_context::EnvironmentContext; @@ -1076,10 +1077,10 @@ async fn submission_loop( let provider = prev.client.get_provider(); // Effective model + family - let (effective_model, effective_family) = if let Some(m) = model { + let (effective_model, effective_family) = if let Some(ref m) = model { let fam = - find_family_for_model(&m).unwrap_or_else(|| config.model_family.clone()); - (m, fam) + find_family_for_model(m).unwrap_or_else(|| config.model_family.clone()); + (m.clone(), fam) } else { (prev.client.get_model(), prev.client.get_model_family()) }; @@ -1138,6 +1139,17 @@ async fn submission_loop( // Install the new persistent context for subsequent tasks/turns. turn_context = Arc::new(new_turn_context); + + // Persist model and reasoning effort across sessions. 
+ if model.is_some() || effort.is_some() { + let _ = set_default_model_and_effort_for_profile( + &config.codex_home, + config.active_profile.as_deref(), + &effective_model, + effective_effort, + ); + } + if cwd.is_some() || approval_policy.is_some() || sandbox_policy.is_some() { sess.record_conversation_items(&[ResponseItem::from(EnvironmentContext::new( cwd, diff --git a/codex-rs/core/src/config.rs b/codex-rs/core/src/config.rs index 7845f5d4ce..bcc8ccfcb4 100644 --- a/codex-rs/core/src/config.rs +++ b/codex-rs/core/src/config.rs @@ -1,3 +1,4 @@ +pub use crate::config_edit::set_default_model_and_effort_for_profile; use crate::config_profile::ConfigProfile; use crate::config_types::History; use crate::config_types::McpServerConfig; @@ -35,7 +36,7 @@ const OPENAI_DEFAULT_MODEL: &str = "gpt-5"; /// the context window. pub(crate) const PROJECT_DOC_MAX_BYTES: usize = 32 * 1024; // 32 KiB -const CONFIG_TOML_FILE: &str = "config.toml"; +pub(crate) const CONFIG_TOML_FILE: &str = "config.toml"; const DEFAULT_RESPONSES_ORIGINATOR_HEADER: &str = "codex_cli_rs"; @@ -181,6 +182,10 @@ pub struct Config { /// Include the `view_image` tool that lets the agent attach a local image path to context. pub include_view_image_tool: bool, + + /// The active profile name used to derive this `Config` (if any). + pub active_profile: Option<String>, + /// When true, disables burst-paste detection for typed input entirely. /// All characters are inserted as they are received, and no buffering /// or placeholder replacement will occur for fast keypress bursts. 
@@ -635,7 +640,11 @@ impl Config { tools_web_search_request: override_tools_web_search_request, } = overrides; - let config_profile = match config_profile_key.as_ref().or(cfg.profile.as_ref()) { + let active_profile_name = config_profile_key + .as_ref() + .or(cfg.profile.as_ref()) + .cloned(); + let config_profile = match active_profile_name.as_ref() { Some(key) => cfg .profiles .get(key) @@ -806,6 +815,7 @@ impl Config { .experimental_use_exec_command_tool .unwrap_or(false), include_view_image_tool, + active_profile: active_profile_name, disable_paste_burst: cfg.disable_paste_burst.unwrap_or(false), }; Ok(config) @@ -1176,6 +1186,7 @@ disable_response_storage = true preferred_auth_method: AuthMode::ChatGPT, use_experimental_streamable_shell_tool: false, include_view_image_tool: true, + active_profile: Some("o3".to_string()), disable_paste_burst: false, }, o3_profile_config @@ -1234,6 +1245,7 @@ disable_response_storage = true preferred_auth_method: AuthMode::ChatGPT, use_experimental_streamable_shell_tool: false, include_view_image_tool: true, + active_profile: Some("gpt3".to_string()), disable_paste_burst: false, }; @@ -1307,6 +1319,7 @@ disable_response_storage = true preferred_auth_method: AuthMode::ChatGPT, use_experimental_streamable_shell_tool: false, include_view_image_tool: true, + active_profile: Some("zdr".to_string()), disable_paste_burst: false, }; @@ -1382,6 +1395,4 @@ trust_level = "trusted" Ok(()) } - - // No test enforcing the presence of a standalone [projects] header. 
} diff --git a/codex-rs/core/src/config_edit.rs b/codex-rs/core/src/config_edit.rs new file mode 100644 index 0000000000..1697865157 --- /dev/null +++ b/codex-rs/core/src/config_edit.rs @@ -0,0 +1,349 @@ +use crate::config::CONFIG_TOML_FILE; +use crate::config::load_config_as_toml; +use codex_protocol::config_types::ReasoningEffort; +use std::path::Path; +use tempfile::NamedTempFile; +use toml_edit::DocumentMut; + +/// Persist the default `model` and `model_reasoning_effort` to +/// `CODEX_HOME/config.toml` so the selection is used across sessions. +/// +/// If a `profile` is set in `config.toml`, this updates the corresponding +/// `[profiles.<name>]` table; otherwise it updates the top-level keys. +pub fn set_default_model_and_effort( + codex_home: &Path, + model: &str, + effort: ReasoningEffort, +) -> anyhow::Result<()> { + set_default_model_and_effort_for_profile(codex_home, None, model, effort) +} + +/// Persist defaults under the specified profile if provided; otherwise, if a +/// `profile` is set in `config.toml`, use it; if neither is present, update +/// the top-level keys. +pub fn set_default_model_and_effort_for_profile( + codex_home: &Path, + profile_override: Option<&str>, + model: &str, + effort: ReasoningEffort, +) -> anyhow::Result<()> { + let effort_str = effort.to_string(); + let overrides: [(&[&str], &str); 2] = [ + (&["model"], model), + (&["model_reasoning_effort"], effort_str.as_str()), + ]; + persist_overrides(codex_home, profile_override, &overrides) +} + +/// Persist overrides into `config.toml` using explicit key segments per +/// override. This avoids ambiguity with keys that contain dots or spaces. 
+fn persist_overrides( + codex_home: &Path, + profile: Option<&str>, + overrides: &[(&[&str], &str)], +) -> anyhow::Result<()> { + let config_path = codex_home.join(CONFIG_TOML_FILE); + + let mut doc = match std::fs::read_to_string(&config_path) { + Ok(s) => s.parse::<DocumentMut>()?, + Err(e) if e.kind() == std::io::ErrorKind::NotFound => DocumentMut::new(), + Err(e) => return Err(e.into()), + }; + + let effective_profile = profile.map(str::to_owned).or_else(|| { + load_config_as_toml(codex_home).ok().and_then(|v| { + v.get("profile") + .and_then(|i| i.as_str()) + .map(|s| s.to_string()) + }) + }); + + for (segments, val) in overrides.iter().copied() { + let value = toml_edit::value(val); + if let Some(ref name) = effective_profile { + if segments.first().copied() == Some("profiles") { + apply_toml_edit_override_segments(&mut doc, segments, value); + } else { + let mut seg_buf: Vec<&str> = Vec::with_capacity(2 + segments.len()); + seg_buf.push("profiles"); + seg_buf.push(name.as_str()); + seg_buf.extend_from_slice(segments); + apply_toml_edit_override_segments(&mut doc, &seg_buf, value); + } + } else { + apply_toml_edit_override_segments(&mut doc, segments, value); + } + } + + std::fs::create_dir_all(codex_home)?; + let tmp_file = NamedTempFile::new_in(codex_home)?; + std::fs::write(tmp_file.path(), doc.to_string())?; + tmp_file.persist(config_path)?; + + Ok(()) +} + +/// Apply a single override onto a `toml_edit` document while preserving +/// existing formatting/comments. +/// The key is expressed as explicit segments to correctly handle keys that +/// contain dots or spaces. 
+fn apply_toml_edit_override_segments( + doc: &mut DocumentMut, + segments: &[&str], + value: toml_edit::Item, +) { + use toml_edit::Item; + + if segments.is_empty() { + return; + } + + let mut current = doc.as_table_mut(); + for seg in &segments[..segments.len() - 1] { + if !current.contains_key(seg) { + current[*seg] = Item::Table(toml_edit::Table::new()); + if let Some(t) = current[*seg].as_table_mut() { + t.set_implicit(true); + } + } + + let maybe_item = current.get_mut(seg); + let Some(item) = maybe_item else { return }; + + if !item.is_table() { + *item = Item::Table(toml_edit::Table::new()); + if let Some(t) = item.as_table_mut() { + t.set_implicit(true); + } + } + + let Some(tbl) = item.as_table_mut() else { + return; + }; + current = tbl; + } + + let last = segments[segments.len() - 1]; + current[last] = value; +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + use std::fs; + use tempfile::tempdir; + + fn read_config(codex_home: &Path) -> String { + let p = codex_home.join(CONFIG_TOML_FILE); + fs::read_to_string(p).unwrap_or_default() + } + + #[test] + fn set_default_model_top_level_when_no_profile() { + let tmpdir = tempdir().expect("tmp"); + let codex_home = tmpdir.path(); + + set_default_model_and_effort(codex_home, "gpt-5", ReasoningEffort::High).expect("persist"); + + let contents = read_config(codex_home); + let expected = r#"model = "gpt-5" +model_reasoning_effort = "high" +"#; + assert_eq!(contents, expected); + } + + #[test] + fn set_default_model_updates_profile_when_profile_set() { + let tmpdir = tempdir().expect("tmp"); + let codex_home = tmpdir.path(); + + // Seed config with a profile selection but without profiles table + let seed = "profile = \"o3\"\n"; + fs::write(codex_home.join(CONFIG_TOML_FILE), seed).expect("seed write"); + + set_default_model_and_effort(codex_home, "o3", ReasoningEffort::Minimal).expect("persist"); + + let contents = read_config(codex_home); + let expected = r#"profile = "o3" + 
+[profiles.o3] +model = "o3" +model_reasoning_effort = "minimal" +"#; + assert_eq!(contents, expected); + } + + #[test] + fn set_default_model_updates_profile_with_dot_and_space() { + let tmpdir = tempdir().expect("tmp"); + let codex_home = tmpdir.path(); + + // Seed config with a profile name that contains a dot and a space + let seed = "profile = \"my.team name\"\n"; + fs::write(codex_home.join(CONFIG_TOML_FILE), seed).expect("seed write"); + + set_default_model_and_effort(codex_home, "o3", ReasoningEffort::Minimal).expect("persist"); + + let contents = read_config(codex_home); + let expected = r#"profile = "my.team name" + +[profiles."my.team name"] +model = "o3" +model_reasoning_effort = "minimal" +"#; + assert_eq!(contents, expected); + } + + #[test] + fn set_default_model_updates_when_profile_override_supplied() { + let tmpdir = tempdir().expect("tmp"); + let codex_home = tmpdir.path(); + + // No profile key in config.toml + fs::write(codex_home.join(CONFIG_TOML_FILE), "").expect("seed write"); + + // Persist with an explicit profile override + set_default_model_and_effort_for_profile( + codex_home, + Some("o3"), + "o3", + ReasoningEffort::High, + ) + .expect("persist"); + + let contents = read_config(codex_home); + let expected = r#"[profiles.o3] +model = "o3" +model_reasoning_effort = "high" +"#; + assert_eq!(contents, expected); + } + + #[test] + fn persist_overrides_creates_nested_tables() { + let tmpdir = tempdir().expect("tmp"); + let codex_home = tmpdir.path(); + + persist_overrides( + codex_home, + None, + &[ + (&["a", "b", "c"], "v"), + (&["x"], "y"), + (&["profiles", "p1", "model"], "gpt-5"), + ], + ) + .expect("persist"); + + let contents = read_config(codex_home); + let expected = r#"x = "y" + +[a.b] +c = "v" + +[profiles.p1] +model = "gpt-5" +"#; + assert_eq!(contents, expected); + } + + #[test] + fn persist_overrides_replaces_scalar_with_table() { + let tmpdir = tempdir().expect("tmp"); + let codex_home = tmpdir.path(); + let seed = "foo = 
\"bar\"\n"; + fs::write(codex_home.join(CONFIG_TOML_FILE), seed).expect("seed write"); + + persist_overrides(codex_home, None, &[(&["foo", "bar", "baz"], "ok")]).expect("persist"); + + let contents = read_config(codex_home); + let expected = r#"[foo.bar] +baz = "ok" +"#; + assert_eq!(contents, expected); + } + + #[test] + fn set_default_model_preserves_comments() { + let tmpdir = tempdir().expect("tmp"); + let codex_home = tmpdir.path(); + + // Seed a config with comments and spacing we expect to preserve + let seed = r#"# Global comment +# Another line + +profile = "o3" + +# Profile settings +[profiles.o3] +# keep me +existing = "keep" +"#; + fs::write(codex_home.join(CONFIG_TOML_FILE), seed).expect("seed write"); + + // Apply defaults; since profile is set, it should write under [profiles.o3] + set_default_model_and_effort(codex_home, "o3", ReasoningEffort::High).expect("persist"); + + let contents = read_config(codex_home); + let expected = r#"# Global comment +# Another line + +profile = "o3" + +# Profile settings +[profiles.o3] +# keep me +existing = "keep" +model = "o3" +model_reasoning_effort = "high" +"#; + assert_eq!(contents, expected); + } + + #[test] + fn set_default_model_preserves_global_comments() { + let tmpdir = tempdir().expect("tmp"); + let codex_home = tmpdir.path(); + + // Seed a config WITHOUT a profile, containing comments and spacing + let seed = r#"# Top-level comments +# should be preserved + +existing = "keep" +"#; + fs::write(codex_home.join(CONFIG_TOML_FILE), seed).expect("seed write"); + + // Since there is no profile, the defaults should be written at top-level + set_default_model_and_effort(codex_home, "gpt-5", ReasoningEffort::Minimal) + .expect("persist"); + + let contents = read_config(codex_home); + let expected = r#"# Top-level comments +# should be preserved + +existing = "keep" +model = "gpt-5" +model_reasoning_effort = "minimal" +"#; + assert_eq!(contents, expected); + } + + #[test] + fn 
persist_overrides_errors_on_parse_failure() { + let tmpdir = tempdir().expect("tmp"); + let codex_home = tmpdir.path(); + + // Write an intentionally invalid TOML file + let invalid = "invalid = [unclosed"; + fs::write(codex_home.join(CONFIG_TOML_FILE), invalid).expect("seed write"); + + // Attempting to persist should return an error and must not clobber the file. + let res = persist_overrides(codex_home, None, &[(&["x"], "y")]); + assert!(res.is_err(), "expected parse error to propagate"); + + // File should be unchanged + let contents = read_config(codex_home); + assert_eq!(contents, invalid); + } +} diff --git a/codex-rs/core/src/lib.rs b/codex-rs/core/src/lib.rs index 9f23420c7e..ba26f8922e 100644 --- a/codex-rs/core/src/lib.rs +++ b/codex-rs/core/src/lib.rs @@ -14,6 +14,7 @@ pub mod codex; mod codex_conversation; pub use codex_conversation::CodexConversation; pub mod config; +pub mod config_edit; pub mod config_profile; pub mod config_types; mod conversation_history; ``` ## Review Comments ### codex-rs/core/src/codex.rs - Created: 2025-08-28 16:48:51 UTC | Link: https://github.com/openai/codex/pull/2799#discussion_r2307985406 ```diff @@ -1126,6 +1126,17 @@ async fn submission_loop( // Install the new persistent context for subsequent tasks/turns. turn_context = Arc::new(new_turn_context); + + // Persist model and reasoning effort across sessions. + if model.is_some() || effort.is_some() { ``` > @dylan-hurd-oai @gabriel-openai I believe this codepath is not the one used by the extension, which is probably what we want for now, right? That is, changing the model on a conversation in VS Code should not update `config.toml`? 
### codex-rs/core/src/config.rs - Created: 2025-08-28 16:22:29 UTC | Link: https://github.com/openai/codex/pull/2799#discussion_r2307923437 ```diff @@ -340,6 +343,129 @@ pub fn set_project_trusted(codex_home: &Path, project_path: &Path) -> anyhow::Re Ok(()) } +/// Persist the default `model` and `model_reasoning_effort` to ``` > Since this file is already quite large, can we consolidate logic related to config editing in another file? Maybe `config_edit.rs`? (I expect we'll be adding more code like this.) - Created: 2025-08-28 16:43:51 UTC | Link: https://github.com/openai/codex/pull/2799#discussion_r2307976033 ```diff @@ -340,6 +343,129 @@ pub fn set_project_trusted(codex_home: &Path, project_path: &Path) -> anyhow::Re Ok(()) } +/// Persist the default `model` and `model_reasoning_effort` to +/// `CODEX_HOME/config.toml` so the selection is used across sessions. +/// +/// If a `profile` is set in `config.toml`, this updates the corresponding +/// `[profiles.<name>]` table; otherwise it updates the top-level keys. +pub fn set_default_model_and_effort( + codex_home: &Path, + model: &str, + effort: ReasoningEffort, +) -> anyhow::Result<()> { + set_default_model_and_effort_for_profile(codex_home, None, model, effort) +} + +/// Persist defaults under the specified profile if provided; otherwise, if a +/// `profile` is set in `config.toml`, use it; if neither is present, update +/// the top-level keys. +pub fn set_default_model_and_effort_for_profile( + codex_home: &Path, + profile_override: Option<&str>, + model: &str, + effort: ReasoningEffort, +) -> anyhow::Result<()> { + let effort_str = effort.to_string(); + let overrides: [(&[&str], &str); 2] = [ + (&["model"], model), + (&["model_reasoning_effort"], effort_str.as_str()), + ]; + persist_overrides(codex_home, profile_override, &overrides) +} + +/// Persist overrides into `config.toml` using explicit key segments per +/// override. This avoids ambiguity with keys that contain dots or spaces. 
+fn persist_overrides( + codex_home: &Path, + profile: Option<&str>, + overrides: &[(&[&str], &str)], +) -> anyhow::Result<()> { + let config_path = codex_home.join(CONFIG_TOML_FILE); + + let mut doc = match std::fs::read_to_string(&config_path) { + Ok(s) => s.parse::<DocumentMut>()?, + Err(e) if e.kind() == std::io::ErrorKind::NotFound => DocumentMut::new(), + Err(e) => return Err(e.into()), + }; + + let effective_profile: Option<String> = match profile { + Some(name) => Some(name.to_string()), + None => load_config_as_toml(codex_home).ok().and_then(|v| { + v.get("profile") + .and_then(|i| i.as_str()) + .map(|s| s.to_string()) + }), + }; ``` > I asked chat to turn this into a one-liner, but I haven't verified this compiles: > > ```suggestion > let effective_profile = profile > .as_deref() > .or_else(|| { > load_config_as_toml(codex_home) > .ok() > .and_then(|v| v.get("profile").and_then(|i| i.as_str())) > }) > .map(ToString::to_string); > ``` - Created: 2025-08-28 16:46:30 UTC | Link: https://github.com/openai/codex/pull/2799#discussion_r2307981066 ```diff @@ -1371,5 +1512,198 @@ trust_level = "trusted" Ok(()) } - // No test enforcing the presence of a standalone [projects] header. + #[test] ``` > For these tests, what I really want to see is the full value of the `config.toml` that is produced as a result so we can be sure we are using toml-edit correctly and that is preserving things as comments, writing proper tables, etc. > > I would probably use a separate set of fixtures just for your new tests where both the input and expected output are strings of toml. > > See also: https://github.com/openai/codex/blob/4e9ad238649c71690cbb0402e110943223c16fcd/.github/codex/labels/codex-rust-review.md#L23-L27