This commit is contained in:
Ahmed Ibrahim
2026-01-05 10:44:23 -08:00
parent d7b0997d32
commit 5c79085dae
8 changed files with 237 additions and 102 deletions

View File

@@ -190,6 +190,41 @@ impl App {
SessionSource::Cli,
));
if let Some(notice) = &pending_model_migration_notice {
let outcome = crate::model_migration::run_startup_model_migration_prompt(
tui,
&config,
conversation_manager.get_models_manager().as_ref(),
notice,
)
.await?;
let mut edits = ConfigEditsBuilder::new(&config.codex_home)
.record_model_migration_seen(notice.from_model.as_str(), notice.to_model.as_str());
if matches!(
outcome,
crate::model_migration::ModelMigrationOutcome::Accepted
) {
config.model = Some(notice.to_model.clone());
edits = edits.set_model(config.model.as_deref(), config.model_reasoning_effort);
}
if let Err(err) = edits.apply().await {
tracing::error!(
error = %err,
"failed to persist model migration prompt outcome"
);
}
if matches!(outcome, crate::model_migration::ModelMigrationOutcome::Exit) {
return Ok(AppExitInfo {
token_usage: TokenUsage::default(),
conversation_id: None,
update_action: None,
});
}
}
let enhanced_keys_supported = tui.enhanced_keys_supported();
let mut chat_widget = match resume_selection {
ResumeSelection::StartFresh | ResumeSelection::Exit => {
@@ -242,10 +277,6 @@ impl App {
}
};
if let Some(notice) = pending_model_migration_notice {
chat_widget.show_model_migration_notice(notice);
}
chat_widget.maybe_prompt_windows_sandbox_enable();
let file_search = FileSearchManager::new(config.cwd.clone(), app_event_tx.clone());
@@ -875,24 +906,6 @@ impl App {
));
}
}
AppEvent::PersistModelMigrationPromptAcknowledged {
from_model,
to_model,
} => {
if let Err(err) = ConfigEditsBuilder::new(&self.config.codex_home)
.record_model_migration_seen(from_model.as_str(), to_model.as_str())
.apply()
.await
{
tracing::error!(
error = %err,
"failed to persist model migration prompt acknowledgement"
);
self.chat_widget.add_error_message(format!(
"Failed to save model migration prompt preference: {err}"
));
}
}
AppEvent::OpenApprovalsPopup => {
self.chat_widget.open_approvals_popup();
}

View File

@@ -143,12 +143,6 @@ pub(crate) enum AppEvent {
/// Persist the acknowledgement flag for the rate limit switch prompt.
PersistRateLimitSwitchPromptHidden,
/// Persist the acknowledgement flag for the model migration prompt.
PersistModelMigrationPromptAcknowledged {
from_model: String,
to_model: String,
},
/// Skip the next world-writable scan (one-shot) after a user-confirmed continue.
#[cfg_attr(not(target_os = "windows"), allow(dead_code))]
SkipNextWorldWritableScan,

View File

@@ -807,24 +807,6 @@ impl ChatWidget {
self.request_redraw();
}
/// Surface a one-shot "model upgrade available" notice in the chat history,
/// then ask the app event loop to persist that the user has seen this
/// from->to migration prompt (so it is recorded as acknowledged).
pub(crate) fn show_model_migration_notice(
    &mut self,
    notice: model_migration::PendingModelMigrationNotice,
) {
    self.add_to_history(history_cell::new_info_event(
        format!(
            "Model upgrade available: {} -> {}. Use /model to switch.",
            notice.from_model, notice.to_model
        ),
        None,
    ));
    // Persistence happens asynchronously in the app's event handler
    // (record_model_migration_seen); this only enqueues the request.
    self.app_event_tx
        .send(AppEvent::PersistModelMigrationPromptAcknowledged {
            from_model: notice.from_model,
            to_model: notice.to_model,
        });
}
fn refresh_pending_model_migration_notice(&self) {
let available_models = match self.models_manager.try_list_models(&self.config) {
Ok(models) => models,

View File

@@ -3,6 +3,7 @@ use codex_core::models_manager::model_presets::HIDE_GPT_5_1_CODEX_MAX_MIGRATION_
use codex_core::models_manager::model_presets::HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelUpgrade;
use color_eyre::eyre::Result;
use serde::Deserialize;
use serde::Serialize;
use std::io;
@@ -20,6 +21,11 @@ pub(crate) struct PendingModelMigrationNotice {
pub(crate) scheduled_at_unix_seconds: Option<u64>,
}
pub(crate) use prompt_ui::ModelMigrationCopy;
pub(crate) use prompt_ui::ModelMigrationOutcome;
pub(crate) use prompt_ui::ModelMigrationScreen;
pub(crate) use prompt_ui::migration_copy_for_models;
/// Read and clear the one-shot migration notice file, returning the notice if it should be shown.
///
/// If the notice is returned, this also updates `config.notices.model_migrations` to prevent
@@ -175,6 +181,83 @@ pub(crate) fn refresh_pending_model_migration_notice(
}
}
/// Run the blocking model-migration prompt at startup and return the user's choice.
///
/// Drives a minimal TUI event loop: key events are forwarded to the
/// [`ModelMigrationScreen`], draw events re-render it full-height, and paste
/// events are ignored. Returns as soon as the screen reports it is done.
pub(crate) async fn run_startup_model_migration_prompt(
    tui: &mut crate::tui::Tui,
    config: &Config,
    models_manager: &codex_core::models_manager::manager::ModelsManager,
    notice: &PendingModelMigrationNotice,
) -> Result<ModelMigrationOutcome> {
    use tokio_stream::StreamExt as _;
    // Model list is best-effort: on failure the copy falls back to raw model ids.
    let available_models = models_manager.try_list_models(config).ok();
    let copy = migration_copy_for_notice(notice, available_models.as_deref());
    let mut screen = ModelMigrationScreen::new(tui.frame_requester(), copy);
    // Schedule an initial frame so the prompt renders before any input arrives.
    tui.frame_requester().schedule_frame();
    let tui_events = tui.event_stream();
    tokio::pin!(tui_events);
    while let Some(event) = tui_events.next().await {
        match event {
            crate::tui::TuiEvent::Key(key_event) => {
                screen.handle_key(key_event);
                if screen.is_done() {
                    return Ok(screen.outcome());
                }
            }
            crate::tui::TuiEvent::Draw => {
                let height = tui.terminal.size()?.height;
                tui.draw(height, |frame| {
                    frame.render_widget_ref(&screen, frame.area());
                })?;
            }
            crate::tui::TuiEvent::Paste(_) => {}
        }
    }
    // Event stream ended without an explicit decision (e.g. terminal closed):
    // treat the migration as accepted.
    Ok(ModelMigrationOutcome::Accepted)
}
/// Build the user-facing copy for a migration prompt from `notice`.
///
/// `available_models` supplies display names, descriptions, and upgrade
/// metadata; when it is `None` (model list unavailable) the raw model ids
/// are used and opting out remains allowed.
pub(crate) fn migration_copy_for_notice(
    notice: &PendingModelMigrationNotice,
    available_models: Option<&[ModelPreset]>,
) -> ModelMigrationCopy {
    let from_model = notice.from_model.as_str();
    let to_model = notice.to_model.as_str();
    let presets = available_models.unwrap_or_default();
    let from_preset = presets.iter().find(|preset| preset.model == from_model);
    let to_preset = presets.iter().find(|preset| preset.model == to_model);
    // Only honor upgrade metadata that actually targets `to_model`.
    let upgrade = from_preset
        .and_then(|preset| preset.upgrade.as_ref())
        .filter(|upgrade| upgrade.id == to_model);
    // Opt-out is offered only while the source model is still shown in the
    // picker — presumably hidden models force the migration (TODO confirm).
    let can_opt_out = from_preset.map_or(true, |preset| preset.show_in_picker);
    migration_copy_for_models(
        from_model,
        to_model,
        upgrade.and_then(|u| u.model_link.clone()),
        upgrade.and_then(|u| u.upgrade_copy.clone()),
        to_preset
            .map(|preset| preset.display_name.clone())
            .unwrap_or_else(|| to_model.to_string()),
        // `.map(..)` replaces the original `.map(|p| Some(..)).unwrap_or(None)`
        // identity-wrap; same value, idiomatic form (matches the sibling file).
        to_preset.map(|preset| preset.description.clone()),
        can_opt_out,
    )
}
const PENDING_MODEL_MIGRATION_NOTICE_FILENAME: &str = "pending_model_migration_notice.json";
fn pending_model_migration_notice_path(config: &Config) -> PathBuf {
@@ -247,7 +330,6 @@ fn should_show_model_migration_notice(
.any(|preset| preset.upgrade.as_ref().map(|u| u.id.as_str()) == Some(target_model))
}
#[cfg(test)]
mod prompt_ui {
use crate::key_hint;
use crate::render::Insets;
@@ -373,7 +455,7 @@ mod prompt_ui {
}
impl ModelMigrationScreen {
pub(super) fn new(request_frame: FrameRequester, copy: ModelMigrationCopy) -> Self {
pub(crate) fn new(request_frame: FrameRequester, copy: ModelMigrationCopy) -> Self {
Self {
request_frame,
copy,
@@ -419,7 +501,7 @@ mod prompt_ui {
}
}
pub(super) fn handle_key(&mut self, key_event: KeyEvent) {
pub(crate) fn handle_key(&mut self, key_event: KeyEvent) {
if key_event.kind == KeyEventKind::Release {
return;
}
@@ -436,11 +518,11 @@ mod prompt_ui {
}
}
pub(super) fn is_done(&self) -> bool {
pub(crate) fn is_done(&self) -> bool {
self.done
}
pub(super) fn outcome(&self) -> ModelMigrationOutcome {
pub(crate) fn outcome(&self) -> ModelMigrationOutcome {
self.outcome
}
}

View File

@@ -231,6 +231,42 @@ impl App {
SessionSource::Cli,
));
if let Some(notice) = &pending_model_migration_notice {
let outcome = crate::model_migration::run_startup_model_migration_prompt(
tui,
&config,
conversation_manager.get_models_manager().as_ref(),
notice,
)
.await?;
let mut edits = ConfigEditsBuilder::new(&config.codex_home)
.record_model_migration_seen(notice.from_model.as_str(), notice.to_model.as_str());
if matches!(
outcome,
crate::model_migration::ModelMigrationOutcome::Accepted
) {
config.model = Some(notice.to_model.clone());
edits = edits.set_model(config.model.as_deref(), config.model_reasoning_effort);
}
if let Err(err) = edits.apply().await {
tracing::error!(
error = %err,
"failed to persist model migration prompt outcome"
);
}
if matches!(outcome, crate::model_migration::ModelMigrationOutcome::Exit) {
return Ok(AppExitInfo {
token_usage: TokenUsage::default(),
conversation_id: None,
update_action: None,
session_lines: Vec::new(),
});
}
}
let enhanced_keys_supported = tui.enhanced_keys_supported();
let mut chat_widget = match resume_selection {
ResumeSelection::StartFresh | ResumeSelection::Exit => {
@@ -283,10 +319,6 @@ impl App {
}
};
if let Some(notice) = pending_model_migration_notice {
chat_widget.show_model_migration_notice(notice);
}
chat_widget.maybe_prompt_windows_sandbox_enable();
let file_search = FileSearchManager::new(config.cwd.clone(), app_event_tx.clone());
@@ -1604,24 +1636,6 @@ impl App {
));
}
}
AppEvent::PersistModelMigrationPromptAcknowledged {
from_model,
to_model,
} => {
if let Err(err) = ConfigEditsBuilder::new(&self.config.codex_home)
.record_model_migration_seen(from_model.as_str(), to_model.as_str())
.apply()
.await
{
tracing::error!(
error = %err,
"failed to persist model migration prompt acknowledgement"
);
self.chat_widget.add_error_message(format!(
"Failed to save model migration prompt preference: {err}"
));
}
}
AppEvent::OpenApprovalsPopup => {
self.chat_widget.open_approvals_popup();
}

View File

@@ -137,12 +137,6 @@ pub(crate) enum AppEvent {
/// Persist the acknowledgement flag for the rate limit switch prompt.
PersistRateLimitSwitchPromptHidden,
/// Persist the acknowledgement flag for the model migration prompt.
PersistModelMigrationPromptAcknowledged {
from_model: String,
to_model: String,
},
/// Skip the next world-writable scan (one-shot) after a user-confirmed continue.
#[cfg_attr(not(target_os = "windows"), allow(dead_code))]
SkipNextWorldWritableScan,

View File

@@ -749,24 +749,6 @@ impl ChatWidget {
self.request_redraw();
}
/// Surface a one-shot "model upgrade available" notice in the chat history,
/// then ask the app event loop to persist that the user has seen this
/// from->to migration prompt (so it is recorded as acknowledged).
pub(crate) fn show_model_migration_notice(
    &mut self,
    notice: model_migration::PendingModelMigrationNotice,
) {
    self.add_to_history(history_cell::new_info_event(
        format!(
            "Model upgrade available: {} -> {}. Use /model to switch.",
            notice.from_model, notice.to_model
        ),
        None,
    ));
    // Persistence happens asynchronously in the app's event handler
    // (record_model_migration_seen); this only enqueues the request.
    self.app_event_tx
        .send(AppEvent::PersistModelMigrationPromptAcknowledged {
            from_model: notice.from_model,
            to_model: notice.to_model,
        });
}
fn refresh_pending_model_migration_notice(&self) {
let available_models = match self.models_manager.try_list_models(&self.config) {
Ok(models) => models,

View File

@@ -3,6 +3,7 @@ use codex_core::models_manager::model_presets::HIDE_GPT_5_1_CODEX_MAX_MIGRATION_
use codex_core::models_manager::model_presets::HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG;
use codex_protocol::openai_models::ModelPreset;
use codex_protocol::openai_models::ModelUpgrade;
use color_eyre::eyre::Result;
use serde::Deserialize;
use serde::Serialize;
use std::io;
@@ -20,6 +21,11 @@ pub(crate) struct PendingModelMigrationNotice {
pub(crate) scheduled_at_unix_seconds: Option<u64>,
}
pub(crate) use prompt_ui::ModelMigrationCopy;
pub(crate) use prompt_ui::ModelMigrationOutcome;
pub(crate) use prompt_ui::ModelMigrationScreen;
pub(crate) use prompt_ui::migration_copy_for_models;
/// Read and clear the one-shot migration notice file, returning the notice if it should be shown.
///
/// If the notice is returned, this also updates `config.notices.model_migrations` to prevent
@@ -175,6 +181,75 @@ pub(crate) fn refresh_pending_model_migration_notice(
}
}
/// Run the blocking model-migration prompt at startup and return the user's choice.
///
/// Drives a minimal TUI event loop: key events are forwarded to the
/// [`ModelMigrationScreen`], draw events re-render it full-height, and mouse
/// and paste events are ignored. Returns as soon as the screen reports done.
pub(crate) async fn run_startup_model_migration_prompt(
    tui: &mut crate::tui::Tui,
    config: &Config,
    models_manager: &codex_core::models_manager::manager::ModelsManager,
    notice: &PendingModelMigrationNotice,
) -> Result<ModelMigrationOutcome> {
    use tokio_stream::StreamExt as _;
    // Model list is best-effort: on failure the copy falls back to raw model ids.
    let available_models = models_manager.try_list_models(config).ok();
    let copy = migration_copy_for_notice(notice, available_models.as_deref());
    let mut screen = ModelMigrationScreen::new(tui.frame_requester(), copy);
    // Schedule an initial frame so the prompt renders before any input arrives.
    tui.frame_requester().schedule_frame();
    let tui_events = tui.event_stream();
    tokio::pin!(tui_events);
    while let Some(event) = tui_events.next().await {
        match event {
            crate::tui::TuiEvent::Key(key_event) => {
                screen.handle_key(key_event);
                if screen.is_done() {
                    return Ok(screen.outcome());
                }
            }
            crate::tui::TuiEvent::Draw => {
                let height = tui.terminal.size()?.height;
                tui.draw(height, |frame| {
                    frame.render_widget_ref(&screen, frame.area());
                })?;
            }
            crate::tui::TuiEvent::Mouse(_) | crate::tui::TuiEvent::Paste(_) => {}
        }
    }
    // Event stream ended without an explicit decision (e.g. terminal closed):
    // treat the migration as accepted.
    Ok(ModelMigrationOutcome::Accepted)
}
/// Build the user-facing copy for a migration prompt from `notice`.
///
/// `available_models` supplies display names and descriptions; when it is
/// `None` (model list unavailable) the raw model ids are used and opting
/// out remains allowed.
pub(crate) fn migration_copy_for_notice(
    notice: &PendingModelMigrationNotice,
    available_models: Option<&[ModelPreset]>,
) -> ModelMigrationCopy {
    let from_model = notice.from_model.as_str();
    let to_model = notice.to_model.as_str();
    // Hoist the slice once instead of unwrapping per lookup.
    let presets = available_models.unwrap_or_default();
    let lookup = |id: &str| presets.iter().find(|preset| preset.model == id);
    let from_preset = lookup(from_model);
    let to_preset = lookup(to_model);
    // Opt-out stays available when the source model is unknown or still
    // visible in the picker.
    let can_opt_out = from_preset.map_or(true, |preset| preset.show_in_picker);
    let display_name = match to_preset {
        Some(preset) => preset.display_name.clone(),
        None => to_model.to_string(),
    };
    let description = to_preset.map(|preset| preset.description.clone());
    migration_copy_for_models(from_model, to_model, display_name, description, can_opt_out)
}
const PENDING_MODEL_MIGRATION_NOTICE_FILENAME: &str = "pending_model_migration_notice.json";
fn pending_model_migration_notice_path(config: &Config) -> PathBuf {
@@ -247,7 +322,6 @@ fn should_show_model_migration_notice(
.any(|preset| preset.upgrade.as_ref().map(|u| u.id.as_str()) == Some(target_model))
}
#[cfg(test)]
mod prompt_ui {
use crate::key_hint;
use crate::render::Insets;
@@ -353,7 +427,7 @@ mod prompt_ui {
}
impl ModelMigrationScreen {
pub(super) fn new(request_frame: FrameRequester, copy: ModelMigrationCopy) -> Self {
pub(crate) fn new(request_frame: FrameRequester, copy: ModelMigrationCopy) -> Self {
Self {
request_frame,
copy,
@@ -399,7 +473,7 @@ mod prompt_ui {
}
}
pub(super) fn handle_key(&mut self, key_event: KeyEvent) {
pub(crate) fn handle_key(&mut self, key_event: KeyEvent) {
if key_event.kind == KeyEventKind::Release {
return;
}
@@ -416,11 +490,11 @@ mod prompt_ui {
}
}
pub(super) fn is_done(&self) -> bool {
pub(crate) fn is_done(&self) -> bool {
self.done
}
pub(super) fn outcome(&self) -> ModelMigrationOutcome {
pub(crate) fn outcome(&self) -> ModelMigrationOutcome {
self.outcome
}
}