Compare commits

...

1 Commit

easong-openai · 6177d55d6b · experimental model selector config · 2025-09-25 01:48:52 -07:00
6 changed files with 355 additions and 28 deletions

View File

@@ -1,4 +1,5 @@
use crate::config_profile::ConfigProfile;
use crate::config_types::CustomSelectorModel;
use crate::config_types::History;
use crate::config_types::McpServerConfig;
use crate::config_types::Notifications;
@@ -122,6 +123,9 @@ pub struct Config {
/// and turn completions when not focused.
pub tui_notifications: Notifications,
/// Experimental custom model presets exposed in the TUI model selector.
pub experimental_custom_selector_models: Vec<CustomSelectorModel>,
/// The directory that should be treated as the current working directory
/// for the session. All relative paths inside the business-logic layer are
/// resolved against this path.
@@ -666,6 +670,9 @@ pub struct ConfigToml {
/// Collection of settings that are specific to the TUI.
pub tui: Option<Tui>,
/// Experimental custom model presets surfaced in the TUI model selector.
pub experimental_custom_selector_models: Option<Vec<CustomSelectorModel>>,
/// When set to `true`, `AgentReasoning` events will be hidden from the
/// UI/output. Defaults to `false`.
pub hide_agent_reasoning: Option<bool>,
@@ -935,6 +942,8 @@ impl Config {
};
let history = cfg.history.unwrap_or_default();
let experimental_custom_selector_models =
cfg.experimental_custom_selector_models.unwrap_or_default();
let tools_web_search_request = override_tools_web_search_request
.or(cfg.tools.as_ref().and_then(|t| t.web_search))
@@ -1014,6 +1023,7 @@ impl Config {
project_doc_max_bytes: cfg.project_doc_max_bytes.unwrap_or(PROJECT_DOC_MAX_BYTES),
codex_home,
history,
experimental_custom_selector_models,
file_opener: cfg.file_opener.unwrap_or(UriBasedFileOpener::VsCode),
codex_linux_sandbox_exe,
@@ -1201,6 +1211,59 @@ persistence = "none"
);
}
#[test]
fn experimental_custom_selector_models_round_trip_from_toml() {
let cfg = r#"
[[experimental_custom_selector_models]]
label = "o4-mini medium"
model = "o4-mini"
effort = "medium"
description = "balanced latency"
[[experimental_custom_selector_models]]
label = "local sandbox"
model = "local-model"
"#;
let parsed =
toml::from_str::<ConfigToml>(cfg).expect("custom selector models should deserialize");
let models = parsed
.experimental_custom_selector_models
.as_ref()
.expect("custom presets present");
assert_eq!(models.len(), 2);
assert_eq!(models[0].label, "o4-mini medium");
assert_eq!(models[0].model, "o4-mini");
assert_eq!(models[0].effort, Some(ReasoningEffort::Medium));
assert_eq!(models[0].description.as_deref(), Some("balanced latency"));
assert_eq!(models[1].label, "local sandbox");
assert_eq!(models[1].model, "local-model");
assert_eq!(models[1].effort, None);
assert_eq!(models[1].description, None);
let config = Config::load_from_base_config_with_overrides(
parsed.clone(),
ConfigOverrides::default(),
std::env::temp_dir(),
)
.expect("config should load");
assert_eq!(config.experimental_custom_selector_models.len(), 2);
assert_eq!(
config.experimental_custom_selector_models[0].label,
"o4-mini medium"
);
assert_eq!(
config.experimental_custom_selector_models[1].model,
"local-model"
);
assert_eq!(
config.experimental_custom_selector_models[0].effort,
Some(ReasoningEffort::Medium)
);
}
#[test]
fn tui_config_missing_notifications_field_defaults_to_disabled() {
let cfg = r#"
@@ -1654,6 +1717,7 @@ model_verbosity = "high"
include_view_image_tool: true,
active_profile: Some("o3".to_string()),
disable_paste_burst: false,
experimental_custom_selector_models: Vec::new(),
tui_notifications: Default::default(),
},
o3_profile_config
@@ -1712,6 +1776,7 @@ model_verbosity = "high"
include_view_image_tool: true,
active_profile: Some("gpt3".to_string()),
disable_paste_burst: false,
experimental_custom_selector_models: Vec::new(),
tui_notifications: Default::default(),
};
@@ -1785,6 +1850,7 @@ model_verbosity = "high"
include_view_image_tool: true,
active_profile: Some("zdr".to_string()),
disable_paste_burst: false,
experimental_custom_selector_models: Vec::new(),
tui_notifications: Default::default(),
};
@@ -1844,6 +1910,7 @@ model_verbosity = "high"
include_view_image_tool: true,
active_profile: Some("gpt5".to_string()),
disable_paste_burst: false,
experimental_custom_selector_models: Vec::new(),
tui_notifications: Default::default(),
};

View File

@@ -13,6 +13,8 @@ use serde::Deserializer;
use serde::Serialize;
use serde::de::Error as SerdeError;
use codex_protocol::config_types::ReasoningEffort;
#[derive(Serialize, Debug, Clone, PartialEq)]
pub struct McpServerConfig {
pub command: String,
@@ -177,6 +179,16 @@ pub struct Tui {
pub notifications: Notifications,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Default)]
pub struct CustomSelectorModel {
pub label: String,
pub model: String,
#[serde(default)]
pub effort: Option<ReasoningEffort>,
#[serde(default)]
pub description: Option<String>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Default)]
pub struct SandboxWorkspaceWrite {
#[serde(default)]

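The new `CustomSelectorModel` above is plain serde data. As a point of reference, a minimal standalone sketch of how one such table deserializes might look like the following — note the local `ReasoningEffort` stand-in with assumed lowercase variant names, not the real `codex_protocol` type:

```rust
use serde::Deserialize;

// Stand-in for codex_protocol::config_types::ReasoningEffort; the
// lowercase variant names are an assumption for this sketch.
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "lowercase")]
enum ReasoningEffort { Minimal, Low, Medium, High }

#[derive(Deserialize, Debug)]
struct CustomSelectorModel {
    label: String,
    model: String,
    #[serde(default)]
    effort: Option<ReasoningEffort>,
    #[serde(default)]
    description: Option<String>,
}

fn main() {
    // A single [[experimental_custom_selector_models]] table body.
    let entry: CustomSelectorModel = toml::from_str(
        r#"
label = "o4-mini medium"
model = "o4-mini"
effort = "medium"
"#,
    )
    .expect("table should deserialize");
    assert_eq!(entry.effort, Some(ReasoningEffort::Medium));
    assert_eq!(entry.description, None);
}
```

The `#[serde(default)]` attributes are what let an entry omit `effort` and `description` entirely, as the second entry in the round-trip test above does.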
View File

@@ -67,6 +67,9 @@ pub(crate) struct BottomPane {
status: Option<StatusIndicatorWidget>,
/// Queued user messages to show under the status indicator.
queued_user_messages: Vec<String>,
#[cfg(test)]
pub(crate) debug_last_selection_items: Option<Vec<String>>,
}
pub(crate) struct BottomPaneParams {
@@ -99,6 +102,8 @@ impl BottomPane {
status: None,
queued_user_messages: Vec::new(),
esc_backtrack_hint: false,
#[cfg(test)]
debug_last_selection_items: None,
}
}
@@ -337,8 +342,18 @@ impl BottomPane {
/// Show a generic list selection view with the provided items.
pub(crate) fn show_selection_view(&mut self, params: list_selection_view::SelectionViewParams) {
#[cfg(test)]
let debug_names = params
.items
.iter()
.map(|item| item.name.clone())
.collect::<Vec<_>>();
let view = list_selection_view::ListSelectionView::new(params, self.app_event_tx.clone());
self.push_view(Box::new(view));
#[cfg(test)]
{
self.debug_last_selection_items = Some(debug_names);
}
}
/// Update the queued messages shown under the status header.
@@ -356,6 +371,11 @@ impl BottomPane {
self.request_redraw();
}
#[cfg(test)]
pub(crate) fn last_selection_item_names(&self) -> Option<&[String]> {
self.debug_last_selection_items.as_deref()
}
pub(crate) fn composer_is_empty(&self) -> bool {
self.composer.is_empty()
}

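The `debug_last_selection_items` additions above follow a small test-instrumentation pattern: clone a cheap projection of the selection before the items are consumed, and stash it behind `#[cfg(test)]` so release builds pay nothing. A stripped-down sketch of the same shape, with a hypothetical `Pane` type rather than the real widget:

```rust
pub struct Pane {
    views: Vec<Box<dyn std::fmt::Debug>>,
    #[cfg(test)]
    pub(crate) debug_last_selection_items: Option<Vec<String>>,
}

impl Pane {
    pub fn show_selection(&mut self, item_names: Vec<String>) {
        // Capture the projection before the items move into the view...
        #[cfg(test)]
        let debug_names = item_names.clone();
        self.views.push(Box::new(item_names));
        // ...then stash it where unit tests can read it back.
        #[cfg(test)]
        {
            self.debug_last_selection_items = Some(debug_names);
        }
    }

    #[cfg(test)]
    pub(crate) fn last_selection_item_names(&self) -> Option<&[String]> {
        self.debug_last_selection_items.as_deref()
    }
}
```

Because both the field and the accessor are `#[cfg(test)]`, the runtime layout of the pane is unchanged outside the test profile.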
View File

@@ -1,4 +1,5 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::path::PathBuf;
use std::sync::Arc;
@@ -1513,40 +1514,52 @@ impl ChatWidget {
let presets: Vec<ModelPreset> = builtin_model_presets(auth_mode);
let mut items: Vec<SelectionItem> = Vec::new();
let mut seen_labels: HashSet<String> = HashSet::new();
let mut seen_model_effort: HashSet<(String, Option<String>)> = HashSet::new();
for preset in presets.iter() {
let name = preset.label.to_string();
let description = Some(preset.description.to_string());
let is_current = preset.model == current_model && preset.effort == current_effort;
let model_slug = preset.model.to_string();
let effort = preset.effort;
let current_model = current_model.clone();
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
tx.send(AppEvent::CodexOp(Op::OverrideTurnContext {
cwd: None,
approval_policy: None,
sandbox_policy: None,
model: Some(model_slug.clone()),
effort: Some(effort),
summary: None,
}));
tx.send(AppEvent::UpdateModel(model_slug.clone()));
tx.send(AppEvent::UpdateReasoningEffort(effort));
tx.send(AppEvent::PersistModelSelection {
model: model_slug.clone(),
effort,
});
tracing::info!(
"New model: {}, New effort: {}, Current model: {}, Current effort: {}",
model_slug.clone(),
effort
.map(|effort| effort.to_string())
.unwrap_or_else(|| "none".to_string()),
current_model,
current_effort
.map(|effort| effort.to_string())
.unwrap_or_else(|| "none".to_string())
);
})];
let effort_key = effort.map(|value| value.to_string());
seen_labels.insert(name.clone());
seen_model_effort.insert((model_slug.clone(), effort_key.clone()));
let actions = build_model_selection_actions(
current_model.clone(),
current_effort,
model_slug.clone(),
effort,
);
items.push(SelectionItem {
name,
description,
is_current,
actions,
dismiss_on_select: true,
search_value: None,
});
}
for custom in self.config.experimental_custom_selector_models.iter() {
let name = custom.label.clone();
let description = custom.description.clone();
let is_current = custom.model == current_model && custom.effort == current_effort;
let model_slug = custom.model.clone();
let effort = custom.effort;
let effort_key = effort.map(|value| value.to_string());
let model_effort_key = (model_slug.clone(), effort_key);
if seen_labels.contains(&name) || seen_model_effort.contains(&model_effort_key) {
continue;
}
seen_labels.insert(name.clone());
seen_model_effort.insert(model_effort_key);
let actions = build_model_selection_actions(
current_model.clone(),
current_effort,
model_slug.clone(),
effort,
);
items.push(SelectionItem {
name,
description,
@@ -1925,6 +1938,41 @@ impl ChatWidget {
}
}
fn build_model_selection_actions(
current_model: String,
current_effort: Option<ReasoningEffortConfig>,
model_slug: String,
effort: Option<ReasoningEffortConfig>,
) -> Vec<SelectionAction> {
vec![Box::new(move |tx| {
tx.send(AppEvent::CodexOp(Op::OverrideTurnContext {
cwd: None,
approval_policy: None,
sandbox_policy: None,
model: Some(model_slug.clone()),
effort: Some(effort),
summary: None,
}));
tx.send(AppEvent::UpdateModel(model_slug.clone()));
tx.send(AppEvent::UpdateReasoningEffort(effort));
tx.send(AppEvent::PersistModelSelection {
model: model_slug.clone(),
effort,
});
tracing::info!(
"New model: {}, New effort: {}, Current model: {}, Current effort: {}",
model_slug.clone(),
effort
.map(|effort| effort.to_string())
.unwrap_or_else(|| "none".to_string()),
current_model,
current_effort
.map(|effort| effort.to_string())
.unwrap_or_else(|| "none".to_string())
);
})]
}
impl WidgetRef for &ChatWidget {
fn render_ref(&self, area: Rect, buf: &mut Buffer) {
let [_, active_cell_area, bottom_pane_area] = self.layout_areas(area);

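The extracted `build_model_selection_actions` helper returns a single boxed callback that fans one selection out into several app events. Shorn of the real widget and protocol types, the shape is roughly the sketch below — `AppEvent` here is a hypothetical, simplified enum, and an mpsc sender stands in for `AppEventSender`:

```rust
use std::sync::mpsc::Sender;

// Hypothetical, simplified events standing in for the real AppEvent.
#[derive(Debug)]
enum AppEvent {
    UpdateModel(String),
    UpdateReasoningEffort(Option<String>),
    PersistModelSelection { model: String, effort: Option<String> },
}

type SelectionAction = Box<dyn Fn(&Sender<AppEvent>)>;

// One closure captures the chosen preset and emits every event on selection.
fn build_actions(model_slug: String, effort: Option<String>) -> Vec<SelectionAction> {
    vec![Box::new(move |tx| {
        let _ = tx.send(AppEvent::UpdateModel(model_slug.clone()));
        let _ = tx.send(AppEvent::UpdateReasoningEffort(effort.clone()));
        let _ = tx.send(AppEvent::PersistModelSelection {
            model: model_slug.clone(),
            effort: effort.clone(),
        });
    })]
}

fn main() {
    let (tx, rx) = std::sync::mpsc::channel();
    let actions = build_actions("custom-high".into(), Some("high".into()));
    (actions[0])(&tx);
    drop(tx);
    for event in rx {
        println!("{event:?}"); // three events for one selection
    }
}
```

Boxing the closure lets built-in and custom presets share the exact same action-building path, which is what makes the dedup-then-append merge above possible.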
View File

@@ -1,11 +1,13 @@
use super::*;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use codex_common::model_presets::builtin_model_presets;
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::config::Config;
use codex_core::config::ConfigOverrides;
use codex_core::config::ConfigToml;
use codex_core::config_types::CustomSelectorModel;
use codex_core::plan_tool::PlanItemArg;
use codex_core::plan_tool::StepStatus;
use codex_core::plan_tool::UpdatePlanArgs;
@@ -411,6 +413,155 @@ fn rate_limit_warnings_emit_thresholds() {
);
}
#[test]
fn custom_model_presets_append_after_builtins() {
let (mut chat, mut _rx, _op_rx) = make_chatwidget_manual();
chat.config.experimental_custom_selector_models = vec![
CustomSelectorModel {
label: "gpt-5 medium".to_string(),
model: "gpt-5".to_string(),
effort: Some(ReasoningEffortConfig::Medium),
description: Some("duplicate label".to_string()),
},
CustomSelectorModel {
label: "Duplicate combo".to_string(),
model: "gpt-5".to_string(),
effort: Some(ReasoningEffortConfig::Medium),
description: Some("duplicate combination".to_string()),
},
CustomSelectorModel {
label: "Custom minimal".to_string(),
model: "custom-minimal".to_string(),
effort: None,
description: Some("fast local run".to_string()),
},
CustomSelectorModel {
label: "Custom high".to_string(),
model: "custom-high".to_string(),
effort: Some(ReasoningEffortConfig::High),
description: None,
},
];
chat.open_model_popup();
let names = chat
.bottom_pane
.last_selection_item_names()
.expect("selection items");
let builtin_len = builtin_model_presets(None).len();
assert!(
names.len() >= builtin_len + 2,
"expected custom presets appended"
);
let expected = vec!["Custom minimal".to_string(), "Custom high".to_string()];
assert_eq!(&names[builtin_len..], expected.as_slice());
}
#[test]
fn selecting_custom_model_emits_expected_events() {
let (mut chat, mut rx, _op_rx) = make_chatwidget_manual();
chat.config.experimental_custom_selector_models = vec![CustomSelectorModel {
label: "Custom high".to_string(),
model: "custom-high".to_string(),
effort: Some(ReasoningEffortConfig::High),
description: Some("locally tuned".to_string()),
}];
chat.config.model = "custom-high".to_string();
chat.config.model_reasoning_effort = Some(ReasoningEffortConfig::High);
chat.open_model_popup();
chat.bottom_pane
.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
let mut seen_override = None;
let mut seen_update_model = None;
let mut seen_update_effort = None;
let mut seen_persist = None;
while let Ok(event) = rx.try_recv() {
match event {
AppEvent::CodexOp(Op::OverrideTurnContext { model, effort, .. }) => {
seen_override = Some((model, effort));
}
AppEvent::UpdateModel(model) => {
seen_update_model = Some(model);
}
AppEvent::UpdateReasoningEffort(effort) => {
seen_update_effort = Some(effort);
}
AppEvent::PersistModelSelection { model, effort } => {
seen_persist = Some((model, effort));
}
_ => {}
}
}
assert_eq!(
seen_override,
Some((
Some("custom-high".to_string()),
Some(Some(ReasoningEffortConfig::High))
))
);
assert_eq!(seen_update_model, Some("custom-high".to_string()));
assert_eq!(seen_update_effort, Some(Some(ReasoningEffortConfig::High)));
assert_eq!(
seen_persist,
Some(("custom-high".to_string(), Some(ReasoningEffortConfig::High)))
);
}
#[test]
fn custom_model_conflicts_are_skipped() {
let (mut chat, mut _rx, _op_rx) = make_chatwidget_manual();
chat.config.experimental_custom_selector_models = vec![
CustomSelectorModel {
label: "gpt-5 medium".to_string(),
model: "gpt-5".to_string(),
effort: Some(ReasoningEffortConfig::Medium),
description: None,
},
CustomSelectorModel {
label: "Duplicate combo".to_string(),
model: "gpt-5".to_string(),
effort: Some(ReasoningEffortConfig::Medium),
description: None,
},
CustomSelectorModel {
label: "Unique custom".to_string(),
model: "custom-unique".to_string(),
effort: None,
description: None,
},
];
chat.open_model_popup();
let names = chat
.bottom_pane
.last_selection_item_names()
.expect("selection items");
let builtin_len = builtin_model_presets(None).len();
assert!(names.len() > builtin_len);
assert!(names[builtin_len..].contains(&"Unique custom".to_string()));
assert!(
!names[builtin_len..]
.iter()
.any(|name| name == "gpt-5 medium")
);
assert!(
!names[builtin_len..]
.iter()
.any(|name| name == "Duplicate combo")
);
}
// (removed experimental resize snapshot test)
#[test]

View File

@@ -24,6 +24,34 @@ The model that Codex should use.
model = "o3" # overrides the default of "gpt-5-codex"
```
## experimental_custom_selector_models
Define additional presets that appear in the TUI `/model` selector. Each entry
requires a `label` and a `model` slug, and may optionally set a reasoning
`effort` and a `description`. When you pick a preset from the popup, Codex
persists only the model and effort; the label and description stay in your
config.
```toml
[[experimental_custom_selector_models]]
label = "o4-mini medium"
model = "o4-mini"
effort = "medium"
description = "balanced latency"
[[experimental_custom_selector_models]]
label = "Local llama high"
model = "llama-3.1-70b-instruct"
effort = "high"
```
Custom presets are appended after the built-in entries. If an entry references
an unavailable model or effort, selecting it will surface the same error you
would see after changing `model` manually.
When a custom preset shares a label or `(model, effort)` pair with a built-in
preset or an earlier custom entry, the earlier entry wins and the conflicting
custom entry is ignored.
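This skip rule comes from the `seen_labels` / `seen_model_effort` sets in the `ChatWidget` diff above. Distilled into a standalone function — with a hypothetical `Preset` type and the effort keyed as a plain string — the merge behaves roughly like this sketch:

```rust
use std::collections::HashSet;

// Hypothetical preset shape for illustration only.
#[derive(Clone)]
struct Preset {
    label: String,
    model: String,
    effort: Option<String>,
}

// Built-ins are seeded first; customs that reuse a label or a
// (model, effort) pair are skipped, so earlier entries always win.
fn merge_presets(builtins: &[Preset], customs: &[Preset]) -> Vec<Preset> {
    let mut seen_labels: HashSet<String> = HashSet::new();
    let mut seen_model_effort: HashSet<(String, Option<String>)> = HashSet::new();
    let mut merged: Vec<Preset> = Vec::new();
    for preset in builtins {
        seen_labels.insert(preset.label.clone());
        seen_model_effort.insert((preset.model.clone(), preset.effort.clone()));
        merged.push(preset.clone());
    }
    for preset in customs {
        let key = (preset.model.clone(), preset.effort.clone());
        if seen_labels.contains(&preset.label) || seen_model_effort.contains(&key) {
            continue; // conflicting custom entry is ignored
        }
        seen_labels.insert(preset.label.clone());
        seen_model_effort.insert(key);
        merged.push(preset.clone());
    }
    merged
}

fn main() {
    let builtins = vec![Preset {
        label: "gpt-5 medium".into(),
        model: "gpt-5".into(),
        effort: Some("medium".into()),
    }];
    let customs = vec![
        Preset {
            label: "Duplicate combo".into(),
            model: "gpt-5".into(),
            effort: Some("medium".into()),
        },
        Preset {
            label: "Unique custom".into(),
            model: "custom-unique".into(),
            effort: None,
        },
    ];
    // Only the non-conflicting custom survives the merge.
    assert_eq!(merge_presets(&builtins, &customs).len(), 2);
}
```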
## model_providers
This option lets you override and amend the default set of model providers bundled with Codex. This value is a map where the key is the value to use with `model_provider` to select the corresponding provider.
@@ -607,6 +635,7 @@ notifications = [ "agent-turn-complete", "approval-requested" ]
| Key | Type / Values | Notes |
| --- | --- | --- |
| `model` | string | Model to use (e.g., `gpt-5-codex`). |
| `experimental_custom_selector_models` | array<table> | Extra presets for the `/model` TUI selector. |
| `model_provider` | string | Provider id from `model_providers` (default: `openai`). |
| `model_context_window` | number | Context window tokens. |
| `model_max_output_tokens` | number | Max output tokens. |