Migrate tui to use models manager (#7555)

- This PR treats the `ModelsManager` like `AuthManager` and propagates it
into the TUI, replacing direct calls to `builtin_model_presets`
- We are also decreasing the visibility of `builtin_model_presets`

based on https://github.com/openai/codex/pull/7552
This commit is contained in:
Ahmed Ibrahim
2025-12-03 18:00:47 -08:00
committed by GitHub
parent 00cc00ead8
commit 8da91d1c89
7 changed files with 107 additions and 45 deletions

View File

@@ -201,6 +201,10 @@ impl ConversationManager {
/// Returns a snapshot of the models currently available to this manager.
///
/// Takes a read lock on the shared `available_models` list and clones it
/// out, so callers never hold the lock across their own work.
pub async fn list_models(&self) -> Vec<ModelPreset> {
self.models_manager.available_models.read().await.clone()
}
/// Returns a shared handle to the underlying `ModelsManager`.
///
/// Cloning the `Arc` is cheap (refcount bump); this lets other components
/// (e.g. the TUI) share the same model state as this conversation manager.
pub fn get_models_manager(&self) -> Arc<ModelsManager> {
self.models_manager.clone()
}
}
/// Return a prefix of `items` obtained by cutting strictly before the nth user message

View File

@@ -209,7 +209,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
]
});
pub fn builtin_model_presets(auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
pub(crate) fn builtin_model_presets(auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
PRESETS
.iter()
.filter(|preset| match auth_mode {
@@ -220,6 +220,7 @@ pub fn builtin_model_presets(auth_mode: Option<AuthMode>) -> Vec<ModelPreset> {
.collect()
}
// todo(aibrahim): remove this once we migrate tests
/// Returns every built-in model preset, unfiltered by auth mode.
///
/// Kept public only as a stopgap for tests; production code should obtain
/// models via `ModelsManager` instead.
// NOTE(review): returning `&'static Vec<ModelPreset>` is unidiomatic
// (`&'static [ModelPreset]` would be preferred), but the visible test
// caller relies on `.clone()` producing a `Vec`, so the type is kept.
pub fn all_model_presets() -> &'static Vec<ModelPreset> {
&PRESETS
}

View File

@@ -78,6 +78,8 @@ tokio = { workspace = true, features = [
"process",
"rt-multi-thread",
"signal",
"test-util",
"time",
] }
tokio-stream = { workspace = true }
toml = { workspace = true }

View File

@@ -29,7 +29,7 @@ use codex_core::features::Feature;
use codex_core::model_family::find_family_for_model;
use codex_core::openai_models::model_presets::HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG;
use codex_core::openai_models::model_presets::HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG;
use codex_core::openai_models::model_presets::all_model_presets;
use codex_core::openai_models::models_manager::ModelsManager;
use codex_core::protocol::EventMsg;
use codex_core::protocol::FinalOutput;
use codex_core::protocol::Op;
@@ -38,6 +38,7 @@ use codex_core::protocol::TokenUsage;
use codex_core::skills::load_skills;
use codex_core::skills::model::SkillMetadata;
use codex_protocol::ConversationId;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelUpgrade;
use codex_protocol::openai_models::ReasoningEffort as ReasoningEffortConfig;
use color_eyre::eyre::Result;
@@ -98,12 +99,13 @@ fn should_show_model_migration_prompt(
current_model: &str,
target_model: &str,
hide_prompt_flag: Option<bool>,
available_models: Vec<ModelPreset>,
) -> bool {
if target_model == current_model || hide_prompt_flag.unwrap_or(false) {
return false;
}
all_model_presets()
available_models
.iter()
.filter(|preset| preset.upgrade.is_some())
.any(|preset| preset.model == current_model)
@@ -124,8 +126,10 @@ async fn handle_model_migration_prompt_if_needed(
config: &mut Config,
app_event_tx: &AppEventSender,
auth_mode: Option<AuthMode>,
models_manager: Arc<ModelsManager>,
) -> Option<AppExitInfo> {
let upgrade = all_model_presets()
let available_models = models_manager.available_models.read().await.clone();
let upgrade = available_models
.iter()
.find(|preset| preset.model == config.model)
.and_then(|preset| preset.upgrade.as_ref());
@@ -142,7 +146,12 @@ async fn handle_model_migration_prompt_if_needed(
let target_model = target_model.to_string();
let hide_prompt_flag = migration_prompt_hidden(config, migration_config_key);
if !should_show_model_migration_prompt(&config.model, &target_model, hide_prompt_flag) {
if !should_show_model_migration_prompt(
&config.model,
&target_model,
hide_prompt_flag,
available_models.clone(),
) {
return None;
}
@@ -200,7 +209,6 @@ pub(crate) struct App {
pub(crate) app_event_tx: AppEventSender,
pub(crate) chat_widget: ChatWidget,
pub(crate) auth_manager: Arc<AuthManager>,
/// Config is stored here so we can recreate ChatWidgets as needed.
pub(crate) config: Config,
pub(crate) active_profile: Option<String>,
@@ -261,17 +269,21 @@ impl App {
let app_event_tx = AppEventSender::new(app_event_tx);
let auth_mode = auth_manager.auth().map(|auth| auth.mode);
let exit_info =
handle_model_migration_prompt_if_needed(tui, &mut config, &app_event_tx, auth_mode)
.await;
if let Some(exit_info) = exit_info {
return Ok(exit_info);
}
let conversation_manager = Arc::new(ConversationManager::new(
auth_manager.clone(),
SessionSource::Cli,
));
let exit_info = handle_model_migration_prompt_if_needed(
tui,
&mut config,
&app_event_tx,
auth_mode,
conversation_manager.get_models_manager(),
)
.await;
if let Some(exit_info) = exit_info {
return Ok(exit_info);
}
let skills_outcome = load_skills(&config);
if !skills_outcome.errors.is_empty() {
@@ -305,6 +317,7 @@ impl App {
initial_images: initial_images.clone(),
enhanced_keys_supported,
auth_manager: auth_manager.clone(),
models_manager: conversation_manager.get_models_manager(),
feedback: feedback.clone(),
skills: skills.clone(),
is_first_run,
@@ -330,6 +343,7 @@ impl App {
initial_images: initial_images.clone(),
enhanced_keys_supported,
auth_manager: auth_manager.clone(),
models_manager: conversation_manager.get_models_manager(),
feedback: feedback.clone(),
skills: skills.clone(),
is_first_run,
@@ -349,7 +363,7 @@ impl App {
let upgrade_version = crate::updates::get_upgrade_version(&config);
let mut app = Self {
server: conversation_manager,
server: conversation_manager.clone(),
app_event_tx,
chat_widget,
auth_manager: auth_manager.clone(),
@@ -486,6 +500,7 @@ impl App {
initial_images: Vec::new(),
enhanced_keys_supported: self.enhanced_keys_supported,
auth_manager: self.auth_manager.clone(),
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
skills: self.skills.clone(),
is_first_run: false,
@@ -534,6 +549,7 @@ impl App {
initial_images: Vec::new(),
enhanced_keys_supported: self.enhanced_keys_supported,
auth_manager: self.auth_manager.clone(),
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
skills: self.skills.clone(),
is_first_run: false,
@@ -1205,28 +1221,41 @@ mod tests {
)
}
/// Test helper: owned copy of the full built-in preset list, passed as the
/// `available_models` argument to `should_show_model_migration_prompt`.
fn all_model_presets() -> Vec<ModelPreset> {
codex_core::openai_models::model_presets::all_model_presets().clone()
}
#[test]
fn model_migration_prompt_only_shows_for_deprecated_models() {
assert!(should_show_model_migration_prompt("gpt-5", "gpt-5.1", None));
assert!(should_show_model_migration_prompt(
"gpt-5",
"gpt-5.1",
None,
all_model_presets()
));
assert!(should_show_model_migration_prompt(
"gpt-5-codex",
"gpt-5.1-codex",
None
None,
all_model_presets()
));
assert!(should_show_model_migration_prompt(
"gpt-5-codex-mini",
"gpt-5.1-codex-mini",
None
None,
all_model_presets()
));
assert!(should_show_model_migration_prompt(
"gpt-5.1-codex",
"gpt-5.1-codex-max",
None
None,
all_model_presets()
));
assert!(!should_show_model_migration_prompt(
"gpt-5.1-codex",
"gpt-5.1-codex",
None
None,
all_model_presets()
));
}
@@ -1235,10 +1264,14 @@ mod tests {
assert!(!should_show_model_migration_prompt(
"gpt-5",
"gpt-5.1",
Some(true)
Some(true),
all_model_presets()
));
assert!(!should_show_model_migration_prompt(
"gpt-5.1", "gpt-5.1", None
"gpt-5.1",
"gpt-5.1",
None,
all_model_presets()
));
}

View File

@@ -346,6 +346,7 @@ impl App {
initial_images: Vec::new(),
enhanced_keys_supported: self.enhanced_keys_supported,
auth_manager: self.auth_manager.clone(),
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
skills: self.skills.clone(),
is_first_run: false,

View File

@@ -11,6 +11,7 @@ use codex_core::config::Config;
use codex_core::config::types::Notifications;
use codex_core::git_info::current_branch_name;
use codex_core::git_info::local_git_branches;
use codex_core::openai_models::models_manager::ModelsManager;
use codex_core::project_doc::DEFAULT_PROJECT_DOC_FILENAME;
use codex_core::protocol::AgentMessageDeltaEvent;
use codex_core::protocol::AgentMessageEvent;
@@ -126,7 +127,6 @@ use codex_common::approval_presets::builtin_approval_presets;
use codex_core::AuthManager;
use codex_core::CodexAuth;
use codex_core::ConversationManager;
use codex_core::openai_models::model_presets::builtin_model_presets;
use codex_core::protocol::AskForApproval;
use codex_core::protocol::SandboxPolicy;
use codex_file_search::FileMatch;
@@ -256,6 +256,7 @@ pub(crate) struct ChatWidgetInit {
pub(crate) initial_images: Vec<PathBuf>,
pub(crate) enhanced_keys_supported: bool,
pub(crate) auth_manager: Arc<AuthManager>,
pub(crate) models_manager: Arc<ModelsManager>,
pub(crate) feedback: codex_feedback::CodexFeedback,
pub(crate) skills: Option<Vec<SkillMetadata>>,
pub(crate) is_first_run: bool,
@@ -276,6 +277,7 @@ pub(crate) struct ChatWidget {
active_cell: Option<Box<dyn HistoryCell>>,
config: Config,
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
session_header: SessionHeader,
initial_user_message: Option<UserMessage>,
token_info: Option<TokenUsageInfo>,
@@ -1232,6 +1234,7 @@ impl ChatWidget {
initial_images,
enhanced_keys_supported,
auth_manager,
models_manager,
feedback,
skills,
is_first_run,
@@ -1257,6 +1260,7 @@ impl ChatWidget {
active_cell: None,
config: config.clone(),
auth_manager,
models_manager,
session_header: SessionHeader::new(config.model),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
@@ -1310,6 +1314,7 @@ impl ChatWidget {
initial_images,
enhanced_keys_supported,
auth_manager,
models_manager,
feedback,
skills,
..
@@ -1337,6 +1342,7 @@ impl ChatWidget {
active_cell: None,
config: config.clone(),
auth_manager,
models_manager,
session_header: SessionHeader::new(config.model),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
@@ -2025,10 +2031,11 @@ impl ChatWidget {
}
fn lower_cost_preset(&self) -> Option<ModelPreset> {
let auth_mode = self.auth_manager.auth().map(|auth| auth.mode);
builtin_model_presets(auth_mode)
.into_iter()
let models = self.models_manager.available_models.blocking_read();
models
.iter()
.find(|preset| preset.model == NUDGE_MODEL_SLUG)
.cloned()
}
fn rate_limit_switch_prompt_hidden(&self) -> bool {
@@ -2131,8 +2138,13 @@ impl ChatWidget {
/// a second popup is shown to choose the reasoning effort.
pub(crate) fn open_model_popup(&mut self) {
let current_model = self.config.model.clone();
let auth_mode = self.auth_manager.auth().map(|auth| auth.mode);
let presets: Vec<ModelPreset> = builtin_model_presets(auth_mode);
let presets: Vec<ModelPreset> = self
.models_manager
.available_models
.blocking_read()
.iter()
.cloned()
.collect();
let mut items: Vec<SelectionItem> = Vec::new();
for preset in presets.into_iter() {

View File

@@ -354,6 +354,7 @@ async fn helpers_are_available_and_do_not_panic() {
initial_images: Vec::new(),
enhanced_keys_supported: false,
auth_manager,
models_manager: conversation_manager.get_models_manager(),
feedback: codex_feedback::CodexFeedback::new(),
skills: None,
is_first_run: true,
@@ -390,7 +391,8 @@ fn make_chatwidget_manual() -> (
bottom_pane: bottom,
active_cell: None,
config: cfg.clone(),
auth_manager,
auth_manager: auth_manager.clone(),
models_manager: Arc::new(ModelsManager::new(auth_manager.get_auth_mode())),
session_header: SessionHeader::new(cfg.model),
initial_user_message: None,
token_info: None,
@@ -425,6 +427,12 @@ fn make_chatwidget_manual() -> (
(widget, rx, op_rx)
}
/// Test helper: swaps the widget's auth for a dummy ChatGPT auth and then
/// rebuilds the models manager so its available models reflect the new
/// auth mode (the two fields must be kept in sync).
fn set_chatgpt_auth(chat: &mut ChatWidget) {
chat.auth_manager =
AuthManager::from_auth_for_testing(CodexAuth::create_dummy_chatgpt_auth_for_testing());
chat.models_manager = Arc::new(ModelsManager::new(chat.auth_manager.get_auth_mode()));
}
pub(crate) fn make_chatwidget_manual_with_sender() -> (
ChatWidget,
AppEventSender,
@@ -881,6 +889,16 @@ fn active_blob(chat: &ChatWidget) -> String {
lines_to_single_string(&lines)
}
/// Test helper: looks up the preset named `model` among the widget's
/// currently available models, panicking with a descriptive message when
/// no such preset exists.
fn get_available_model(chat: &ChatWidget, model: &str) -> ModelPreset {
// Hold the read guard only for the duration of the lookup.
let models = chat.models_manager.available_models.blocking_read();
match models.iter().find(|preset| preset.model == model) {
Some(preset) => preset.clone(),
None => panic!("{model} preset not found"),
}
}
#[test]
fn empty_enter_during_task_does_not_queue() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual();
@@ -1750,13 +1768,11 @@ fn startup_prompts_for_windows_sandbox_when_agent_requested() {
fn model_reasoning_selection_popup_snapshot() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual();
set_chatgpt_auth(&mut chat);
chat.config.model = "gpt-5.1-codex-max".to_string();
chat.config.model_reasoning_effort = Some(ReasoningEffortConfig::High);
let preset = builtin_model_presets(None)
.into_iter()
.find(|preset| preset.model == "gpt-5.1-codex-max")
.expect("gpt-5.1-codex-max preset");
let preset = get_available_model(&chat, "gpt-5.1-codex-max");
chat.open_reasoning_popup(preset);
let popup = render_bottom_popup(&chat, 80);
@@ -1767,13 +1783,11 @@ fn model_reasoning_selection_popup_snapshot() {
fn model_reasoning_selection_popup_extra_high_warning_snapshot() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual();
set_chatgpt_auth(&mut chat);
chat.config.model = "gpt-5.1-codex-max".to_string();
chat.config.model_reasoning_effort = Some(ReasoningEffortConfig::XHigh);
let preset = builtin_model_presets(None)
.into_iter()
.find(|preset| preset.model == "gpt-5.1-codex-max")
.expect("gpt-5.1-codex-max preset");
let preset = get_available_model(&chat, "gpt-5.1-codex-max");
chat.open_reasoning_popup(preset);
let popup = render_bottom_popup(&chat, 80);
@@ -1784,12 +1798,10 @@ fn model_reasoning_selection_popup_extra_high_warning_snapshot() {
fn reasoning_popup_shows_extra_high_with_space() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual();
set_chatgpt_auth(&mut chat);
chat.config.model = "gpt-5.1-codex-max".to_string();
let preset = builtin_model_presets(None)
.into_iter()
.find(|preset| preset.model == "gpt-5.1-codex-max")
.expect("gpt-5.1-codex-max preset");
let preset = get_available_model(&chat, "gpt-5.1-codex-max");
chat.open_reasoning_popup(preset);
let popup = render_bottom_popup(&chat, 120);
@@ -1872,11 +1884,8 @@ fn reasoning_popup_escape_returns_to_model_popup() {
chat.config.model = "gpt-5.1".to_string();
chat.open_model_popup();
let presets = builtin_model_presets(None)
.into_iter()
.find(|preset| preset.model == "gpt-5.1-codex")
.expect("gpt-5.1-codex preset");
chat.open_reasoning_popup(presets);
let preset = get_available_model(&chat, "gpt-5.1-codex");
chat.open_reasoning_popup(preset);
let before_escape = render_bottom_popup(&chat, 80);
assert!(before_escape.contains("Select Reasoning Level"));