Make migration screen dynamic (#7896)

# External (non-OpenAI) Pull Request Requirements

Before opening this Pull Request, please read the dedicated
"Contributing" markdown file or your PR may be closed:
https://github.com/openai/codex/blob/main/docs/contributing.md

If your PR conforms to our contribution guidelines, replace this text
with a detailed, high-quality description of your changes.

Include a link to a bug report or enhancement request.
This commit is contained in:
Ahmed Ibrahim
2025-12-11 16:41:04 -08:00
committed by GitHub
parent 95f7d37ec6
commit c787e9d0c0
9 changed files with 359 additions and 276 deletions

View File

@@ -32,6 +32,8 @@ pub enum ConfigEdit {
SetWindowsWslSetupAcknowledged(bool),
/// Toggle the model migration prompt acknowledgement flag.
SetNoticeHideModelMigrationPrompt(String, bool),
/// Record that a migration prompt was shown for an old->new model mapping.
RecordModelMigrationSeen { from: String, to: String },
/// Replace the entire `[mcp_servers]` table.
ReplaceMcpServers(BTreeMap<String, McpServerConfig>),
/// Set trust_level under `[projects."<path>"]`,
@@ -263,6 +265,11 @@ impl ConfigDocument {
value(*acknowledged),
))
}
ConfigEdit::RecordModelMigrationSeen { from, to } => Ok(self.write_value(
Scope::Global,
&[Notice::TABLE_KEY, "model_migrations", from.as_str()],
value(to.clone()),
)),
ConfigEdit::SetWindowsWslSetupAcknowledged(acknowledged) => Ok(self.write_value(
Scope::Global,
&["windows_wsl_setup_acknowledged"],
@@ -522,6 +529,14 @@ impl ConfigEditsBuilder {
self
}
pub fn record_model_migration_seen(mut self, from: &str, to: &str) -> Self {
self.edits.push(ConfigEdit::RecordModelMigrationSeen {
from: from.to_string(),
to: to.to_string(),
});
self
}
pub fn set_windows_wsl_setup_acknowledged(mut self, acknowledged: bool) -> Self {
self.edits
.push(ConfigEdit::SetWindowsWslSetupAcknowledged(acknowledged));
@@ -897,6 +912,38 @@ existing = "value"
assert_eq!(contents, expected);
}
#[test]
fn blocking_record_model_migration_seen_preserves_table() {
let tmp = tempdir().expect("tmpdir");
let codex_home = tmp.path();
std::fs::write(
codex_home.join(CONFIG_TOML_FILE),
r#"[notice]
existing = "value"
"#,
)
.expect("seed");
apply_blocking(
codex_home,
None,
&[ConfigEdit::RecordModelMigrationSeen {
from: "gpt-5".to_string(),
to: "gpt-5.1".to_string(),
}],
)
.expect("persist");
let contents =
std::fs::read_to_string(codex_home.join(CONFIG_TOML_FILE)).expect("read config");
let expected = r#"[notice]
existing = "value"
[notice.model_migrations]
gpt-5 = "gpt-5.1"
"#;
assert_eq!(contents, expected);
}
#[test]
fn blocking_replace_mcp_servers_round_trips() {
let tmp = tempdir().expect("tmpdir");

View File

@@ -4,6 +4,7 @@
// definitions that do not contain business logic.
use codex_utils_absolute_path::AbsolutePathBuf;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::path::PathBuf;
use std::time::Duration;
@@ -396,6 +397,9 @@ pub struct Notice {
/// Tracks whether the user has seen the gpt-5.1-codex-max migration prompt.
#[serde(rename = "hide_gpt-5.1-codex-max_migration_prompt")]
pub hide_gpt_5_1_codex_max_migration_prompt: Option<bool>,
/// Tracks acknowledged model migrations as old->new model slug mappings.
#[serde(default)]
pub model_migrations: BTreeMap<String, String>,
}
impl Notice {

View File

@@ -8,7 +8,7 @@ use crate::exec_command::strip_bash_lc_and_escape;
use crate::file_search::FileSearchManager;
use crate::history_cell::HistoryCell;
use crate::model_migration::ModelMigrationOutcome;
use crate::model_migration::migration_copy_for_config;
use crate::model_migration::migration_copy_for_models;
use crate::model_migration::run_model_migration_prompt;
use crate::pager_overlay::Overlay;
use crate::render::highlight::highlight_bash_to_lines;
@@ -20,7 +20,6 @@ use crate::tui;
use crate::tui::TuiEvent;
use crate::update_action::UpdateAction;
use codex_ansi_escape::ansi_escape_line;
use codex_app_server_protocol::AuthMode;
use codex_core::AuthManager;
use codex_core::ConversationManager;
use codex_core::config::Config;
@@ -50,6 +49,7 @@ use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::widgets::Paragraph;
use ratatui::widgets::Wrap;
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
@@ -62,9 +62,6 @@ use tokio::sync::mpsc::unbounded_channel;
#[cfg(not(debug_assertions))]
use crate::history_cell::UpdateAvailableHistoryCell;
const GPT_5_1_MIGRATION_AUTH_MODES: [AuthMode; 2] = [AuthMode::ChatGPT, AuthMode::ApiKey];
const GPT_5_1_CODEX_MIGRATION_AUTH_MODES: [AuthMode; 2] = [AuthMode::ChatGPT, AuthMode::ApiKey];
#[derive(Debug, Clone)]
pub struct AppExitInfo {
pub token_usage: TokenUsage,
@@ -109,26 +106,46 @@ struct SessionSummary {
fn should_show_model_migration_prompt(
current_model: &str,
target_model: &str,
hide_prompt_flag: Option<bool>,
available_models: Vec<ModelPreset>,
seen_migrations: &BTreeMap<String, String>,
available_models: &[ModelPreset],
) -> bool {
if target_model == current_model || hide_prompt_flag.unwrap_or(false) {
if target_model == current_model {
return false;
}
available_models
if let Some(seen_target) = seen_migrations.get(current_model)
&& seen_target == target_model
{
return false;
}
if available_models
.iter()
.filter(|preset| preset.upgrade.is_some())
.any(|preset| preset.model == current_model)
.any(|preset| preset.model == current_model && preset.upgrade.is_some())
{
return true;
}
if available_models
.iter()
.any(|preset| preset.upgrade.as_ref().map(|u| u.id.as_str()) == Some(target_model))
{
return true;
}
false
}
fn migration_prompt_hidden(config: &Config, migration_config_key: &str) -> Option<bool> {
fn migration_prompt_hidden(config: &Config, migration_config_key: &str) -> bool {
match migration_config_key {
HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG => {
config.notices.hide_gpt_5_1_codex_max_migration_prompt
HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG => config
.notices
.hide_gpt_5_1_codex_max_migration_prompt
.unwrap_or(false),
HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG => {
config.notices.hide_gpt5_1_migration_prompt.unwrap_or(false)
}
HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG => config.notices.hide_gpt5_1_migration_prompt,
_ => None,
_ => false,
}
}
@@ -137,7 +154,6 @@ async fn handle_model_migration_prompt_if_needed(
config: &mut Config,
model: &str,
app_event_tx: &AppEventSender,
auth_mode: Option<AuthMode>,
models_manager: Arc<ModelsManager>,
) -> Option<AppExitInfo> {
let available_models = models_manager.list_models(config).await;
@@ -152,26 +168,52 @@ async fn handle_model_migration_prompt_if_needed(
migration_config_key,
}) = upgrade
{
if !migration_prompt_allows_auth_mode(auth_mode, migration_config_key.as_str()) {
if migration_prompt_hidden(config, migration_config_key.as_str()) {
return None;
}
let target_model = target_model.to_string();
let hide_prompt_flag = migration_prompt_hidden(config, migration_config_key.as_str());
if !should_show_model_migration_prompt(
model,
&target_model,
hide_prompt_flag,
available_models.clone(),
&config.notices.model_migrations,
&available_models,
) {
return None;
}
let prompt_copy = migration_copy_for_config(migration_config_key.as_str());
let current_preset = available_models.iter().find(|preset| preset.model == model);
let target_preset = available_models
.iter()
.find(|preset| preset.model == target_model);
let target_display_name = target_preset
.map(|preset| preset.display_name.clone())
.unwrap_or_else(|| target_model.clone());
let heading_label = if target_display_name == model {
target_model.clone()
} else {
target_display_name.clone()
};
let target_description = target_preset.and_then(|preset| {
if preset.description.is_empty() {
None
} else {
Some(preset.description.clone())
}
});
let can_opt_out = current_preset.is_some();
let prompt_copy = migration_copy_for_models(
model,
&target_model,
heading_label,
target_description,
can_opt_out,
);
match run_model_migration_prompt(tui, prompt_copy).await {
ModelMigrationOutcome::Accepted => {
app_event_tx.send(AppEvent::PersistModelMigrationPromptAcknowledged {
migration_config: migration_config_key.to_string(),
from_model: model.to_string(),
to_model: target_model.clone(),
});
config.model = Some(target_model.clone());
@@ -197,7 +239,8 @@ async fn handle_model_migration_prompt_if_needed(
}
ModelMigrationOutcome::Rejected => {
app_event_tx.send(AppEvent::PersistModelMigrationPromptAcknowledged {
migration_config: migration_config_key.to_string(),
from_model: model.to_string(),
to_model: target_model.clone(),
});
}
ModelMigrationOutcome::Exit => {
@@ -276,7 +319,6 @@ impl App {
let (app_event_tx, mut app_event_rx) = unbounded_channel();
let app_event_tx = AppEventSender::new(app_event_tx);
let auth_mode = auth_manager.auth().map(|auth| auth.mode);
let conversation_manager = Arc::new(ConversationManager::new(
auth_manager.clone(),
SessionSource::Cli,
@@ -290,7 +332,6 @@ impl App {
&mut config,
model.as_str(),
&app_event_tx,
auth_mode,
conversation_manager.get_models_manager(),
)
.await;
@@ -948,13 +989,19 @@ impl App {
));
}
}
AppEvent::PersistModelMigrationPromptAcknowledged { migration_config } => {
AppEvent::PersistModelMigrationPromptAcknowledged {
from_model,
to_model,
} => {
if let Err(err) = ConfigEditsBuilder::new(&self.config.codex_home)
.set_hide_model_migration_prompt(&migration_config, true)
.record_model_migration_seen(from_model.as_str(), to_model.as_str())
.apply()
.await
{
tracing::error!(error = %err, "failed to persist model migration prompt acknowledgement");
tracing::error!(
error = %err,
"failed to persist model migration prompt acknowledgement"
);
self.chat_widget.add_error_message(format!(
"Failed to save model migration prompt preference: {err}"
));
@@ -1128,28 +1175,6 @@ impl App {
}
}
fn migration_prompt_allowed_auth_modes(migration_config_key: &str) -> Option<&'static [AuthMode]> {
match migration_config_key {
HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG => Some(&GPT_5_1_MIGRATION_AUTH_MODES),
HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG => Some(&GPT_5_1_CODEX_MIGRATION_AUTH_MODES),
_ => None,
}
}
fn migration_prompt_allows_auth_mode(
auth_mode: Option<AuthMode>,
migration_config_key: &str,
) -> bool {
if let Some(allowed_modes) = migration_prompt_allowed_auth_modes(migration_config_key) {
match auth_mode {
None => true,
Some(mode) => allowed_modes.contains(&mode),
}
} else {
auth_mode != Some(AuthMode::ApiKey)
}
}
#[cfg(test)]
mod tests {
use super::*;
@@ -1259,51 +1284,54 @@ mod tests {
#[test]
fn model_migration_prompt_only_shows_for_deprecated_models() {
let seen = BTreeMap::new();
assert!(should_show_model_migration_prompt(
"gpt-5",
"gpt-5.1",
None,
all_model_presets()
&seen,
&all_model_presets()
));
assert!(should_show_model_migration_prompt(
"gpt-5-codex",
"gpt-5.1-codex",
None,
all_model_presets()
&seen,
&all_model_presets()
));
assert!(should_show_model_migration_prompt(
"gpt-5-codex-mini",
"gpt-5.1-codex-mini",
None,
all_model_presets()
&seen,
&all_model_presets()
));
assert!(should_show_model_migration_prompt(
"gpt-5.1-codex",
"gpt-5.1-codex-max",
None,
all_model_presets()
&seen,
&all_model_presets()
));
assert!(!should_show_model_migration_prompt(
"gpt-5.1-codex",
"gpt-5.1-codex",
None,
all_model_presets()
&seen,
&all_model_presets()
));
}
#[test]
fn model_migration_prompt_respects_hide_flag_and_self_target() {
let mut seen = BTreeMap::new();
seen.insert("gpt-5".to_string(), "gpt-5.1".to_string());
assert!(!should_show_model_migration_prompt(
"gpt-5",
"gpt-5.1",
Some(true),
all_model_presets()
&seen,
&all_model_presets()
));
assert!(!should_show_model_migration_prompt(
"gpt-5.1",
"gpt-5.1",
None,
all_model_presets()
&seen,
&all_model_presets()
));
}
@@ -1457,40 +1485,4 @@ mod tests {
Some("codex resume 123e4567-e89b-12d3-a456-426614174000".to_string())
);
}
#[test]
fn gpt5_migration_allows_api_key_and_chatgpt() {
assert!(migration_prompt_allows_auth_mode(
Some(AuthMode::ApiKey),
HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG,
));
assert!(migration_prompt_allows_auth_mode(
Some(AuthMode::ChatGPT),
HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG,
));
}
#[test]
fn gpt_5_1_codex_max_migration_limits_to_chatgpt() {
assert!(migration_prompt_allows_auth_mode(
Some(AuthMode::ChatGPT),
HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG,
));
assert!(migration_prompt_allows_auth_mode(
Some(AuthMode::ApiKey),
HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG,
));
}
#[test]
fn other_migrations_block_api_key() {
assert!(!migration_prompt_allows_auth_mode(
Some(AuthMode::ApiKey),
"unknown"
));
assert!(migration_prompt_allows_auth_mode(
Some(AuthMode::ChatGPT),
"unknown"
));
}
}

View File

@@ -139,7 +139,8 @@ pub(crate) enum AppEvent {
/// Persist the acknowledgement flag for the model migration prompt.
PersistModelMigrationPromptAcknowledged {
migration_config: String,
from_model: String,
to_model: String,
},
/// Skip the next world-writable scan (one-shot) after a user-confirmed continue.

View File

@@ -7,8 +7,6 @@ use crate::selection_list::selection_option_row;
use crate::tui::FrameRequester;
use crate::tui::Tui;
use crate::tui::TuiEvent;
use codex_core::openai_models::model_presets::HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG;
use codex_core::openai_models::model_presets::HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyEventKind;
@@ -57,11 +55,44 @@ impl MigrationMenuOption {
}
}
pub(crate) fn migration_copy_for_config(migration_config_key: &str) -> ModelMigrationCopy {
match migration_config_key {
HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG => gpt5_migration_copy(),
HIDE_GPT_5_1_CODEX_MAX_MIGRATION_PROMPT_CONFIG => gpt_5_1_codex_max_migration_copy(),
_ => gpt_5_1_codex_max_migration_copy(),
pub(crate) fn migration_copy_for_models(
current_model: &str,
target_model: &str,
target_display_name: String,
target_description: Option<String>,
can_opt_out: bool,
) -> ModelMigrationCopy {
let heading_text = Span::from(format!("Try {target_display_name}")).bold();
let description_line = target_description
.filter(|desc| !desc.is_empty())
.map(Line::from)
.unwrap_or_else(|| {
Line::from(format!(
"{target_display_name} is recommended for better performance and reliability."
))
});
let mut content = vec![
Line::from(format!(
"We recommend switching from {current_model} to {target_model}."
)),
Line::from(""),
description_line,
Line::from(""),
];
if can_opt_out {
content.push(Line::from(format!(
"You can continue using {current_model} if you prefer."
)));
} else {
content.push(Line::from("Press enter to continue".dim()));
}
ModelMigrationCopy {
heading: vec![heading_text],
content,
can_opt_out,
}
}
@@ -69,26 +100,7 @@ pub(crate) async fn run_model_migration_prompt(
tui: &mut Tui,
copy: ModelMigrationCopy,
) -> ModelMigrationOutcome {
// Render the prompt on the terminal's alternate screen so exiting or cancelling
// does not leave a large blank region in the normal scrollback. This does not
// change the prompt's appearance, only where it is drawn.
struct AltScreenGuard<'a> {
tui: &'a mut Tui,
}
impl<'a> AltScreenGuard<'a> {
fn enter(tui: &'a mut Tui) -> Self {
let _ = tui.enter_alt_screen();
Self { tui }
}
}
impl Drop for AltScreenGuard<'_> {
fn drop(&mut self) {
let _ = self.tui.leave_alt_screen();
}
}
let alt = AltScreenGuard::enter(tui);
let mut screen = ModelMigrationScreen::new(alt.tui.frame_requester(), copy);
let _ = alt.tui.draw(u16::MAX, |frame| {
@@ -178,39 +190,15 @@ impl ModelMigrationScreen {
return;
}
if key_event.modifiers.contains(KeyModifiers::CONTROL)
&& matches!(key_event.code, KeyCode::Char('c') | KeyCode::Char('d'))
{
if is_ctrl_exit_combo(key_event) {
self.exit();
return;
}
if !self.copy.can_opt_out {
if matches!(key_event.code, KeyCode::Esc | KeyCode::Enter) {
self.accept();
}
return;
}
match key_event.code {
KeyCode::Up | KeyCode::Char('k') => {
self.highlight_option(MigrationMenuOption::TryNewModel);
}
KeyCode::Down | KeyCode::Char('j') => {
self.highlight_option(MigrationMenuOption::UseExistingModel);
}
KeyCode::Char('1') => {
self.highlight_option(MigrationMenuOption::TryNewModel);
self.accept();
}
KeyCode::Char('2') => {
self.highlight_option(MigrationMenuOption::UseExistingModel);
self.reject();
}
KeyCode::Enter | KeyCode::Esc => {
self.confirm_selection();
}
_ => {}
if self.copy.can_opt_out {
self.handle_menu_key(key_event.code);
} else if matches!(key_event.code, KeyCode::Esc | KeyCode::Enter) {
self.accept();
}
}
@@ -228,110 +216,125 @@ impl WidgetRef for &ModelMigrationScreen {
Clear.render(area, buf);
let mut column = ColumnRenderable::new();
column.push("");
let mut heading = vec![Span::raw("> ")];
heading.extend(self.copy.heading.clone());
column.push(Line::from(heading));
column.push(self.heading_line());
column.push(Line::from(""));
for (idx, line) in self.copy.content.iter().enumerate() {
if idx != 0 {
column.push(Line::from(""));
}
column.push(
Paragraph::new(line.clone())
.wrap(Wrap { trim: false })
.inset(Insets::tlbr(0, 2, 0, 0)),
);
}
self.render_content(&mut column);
if self.copy.can_opt_out {
column.push(Line::from(""));
column.push(
Paragraph::new("Choose how you'd like Codex to proceed.")
.wrap(Wrap { trim: false })
.inset(Insets::tlbr(0, 2, 0, 0)),
);
column.push(Line::from(""));
for (idx, option) in MigrationMenuOption::all().into_iter().enumerate() {
column.push(selection_option_row(
idx,
option.label().to_string(),
self.highlighted_option == option,
));
}
column.push(Line::from(""));
column.push(
Line::from(vec![
"Use ".dim(),
key_hint::plain(KeyCode::Up).into(),
"/".dim(),
key_hint::plain(KeyCode::Down).into(),
" to move, press ".dim(),
key_hint::plain(KeyCode::Enter).into(),
" to confirm".dim(),
])
.inset(Insets::tlbr(0, 2, 0, 0)),
);
self.render_menu(&mut column);
}
column.render(area, buf);
}
}
fn gpt_5_1_codex_max_migration_copy() -> ModelMigrationCopy {
ModelMigrationCopy {
heading: vec!["Codex just got an upgrade. Introducing gpt-5.1-codex-max".bold()],
content: vec![
Line::from(
"Codex is now powered by gpt-5.1-codex-max, our latest frontier agentic coding model. It is smarter and faster than its predecessors and capable of long-running project-scale work.",
),
impl ModelMigrationScreen {
fn handle_menu_key(&mut self, code: KeyCode) {
match code {
KeyCode::Up | KeyCode::Char('k') => {
self.highlight_option(MigrationMenuOption::TryNewModel);
}
KeyCode::Down | KeyCode::Char('j') => {
self.highlight_option(MigrationMenuOption::UseExistingModel);
}
KeyCode::Char('1') => {
self.highlight_option(MigrationMenuOption::TryNewModel);
self.accept();
}
KeyCode::Char('2') => {
self.highlight_option(MigrationMenuOption::UseExistingModel);
self.reject();
}
KeyCode::Enter | KeyCode::Esc => self.confirm_selection(),
_ => {}
}
}
fn heading_line(&self) -> Line<'static> {
let mut heading = vec![Span::raw("> ")];
heading.extend(self.copy.heading.iter().cloned());
Line::from(heading)
}
fn render_content(&self, column: &mut ColumnRenderable) {
self.render_lines(&self.copy.content, column);
}
fn render_lines(&self, lines: &[Line<'static>], column: &mut ColumnRenderable) {
for line in lines {
column.push(
Paragraph::new(line.clone())
.wrap(Wrap { trim: false })
.inset(Insets::tlbr(0, 2, 0, 0)),
);
}
}
fn render_menu(&self, column: &mut ColumnRenderable) {
column.push(Line::from(""));
column.push(
Paragraph::new("Choose how you'd like Codex to proceed.")
.wrap(Wrap { trim: false })
.inset(Insets::tlbr(0, 2, 0, 0)),
);
column.push(Line::from(""));
for (idx, option) in MigrationMenuOption::all().into_iter().enumerate() {
column.push(selection_option_row(
idx,
option.label().to_string(),
self.highlighted_option == option,
));
}
column.push(Line::from(""));
column.push(
Line::from(vec![
"Learn more at ".into(),
"https://openai.com/index/gpt-5-1-codex-max/"
.cyan()
.underlined(),
".".into(),
]),
],
can_opt_out: true,
"Use ".dim(),
key_hint::plain(KeyCode::Up).into(),
"/".dim(),
key_hint::plain(KeyCode::Down).into(),
" to move, press ".dim(),
key_hint::plain(KeyCode::Enter).into(),
" to confirm".dim(),
])
.inset(Insets::tlbr(0, 2, 0, 0)),
);
}
}
fn gpt5_migration_copy() -> ModelMigrationCopy {
ModelMigrationCopy {
heading: vec!["Introducing our gpt-5.1 models".bold()],
content: vec![
Line::from(
"We've upgraded our family of models supported in Codex to gpt-5.1, gpt-5.1-codex and gpt-5.1-codex-mini.",
),
Line::from(
"You can continue using legacy models by specifying them directly with the -m option or in your config.toml.",
),
Line::from(vec![
"Learn more at ".into(),
"https://openai.com/index/gpt-5-1/".cyan().underlined(),
".".into(),
]),
Line::from(vec!["Press enter to continue".dim()]),
],
can_opt_out: false,
// Render the prompt on the terminal's alternate screen so exiting or cancelling
// does not leave a large blank region in the normal scrollback. This does not
// change the prompt's appearance, only where it is drawn.
struct AltScreenGuard<'a> {
tui: &'a mut Tui,
}
impl<'a> AltScreenGuard<'a> {
fn enter(tui: &'a mut Tui) -> Self {
let _ = tui.enter_alt_screen();
Self { tui }
}
}
impl Drop for AltScreenGuard<'_> {
fn drop(&mut self) {
let _ = self.tui.leave_alt_screen();
}
}
fn is_ctrl_exit_combo(key_event: KeyEvent) -> bool {
key_event.modifiers.contains(KeyModifiers::CONTROL)
&& matches!(key_event.code, KeyCode::Char('c') | KeyCode::Char('d'))
}
#[cfg(test)]
mod tests {
use super::ModelMigrationScreen;
use super::gpt_5_1_codex_max_migration_copy;
use super::migration_copy_for_config;
use super::migration_copy_for_models;
use crate::custom_terminal::Terminal;
use crate::test_backend::VT100Backend;
use crate::tui::FrameRequester;
use codex_core::openai_models::model_presets::HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use insta::assert_snapshot;
@@ -340,14 +343,20 @@ mod tests {
#[test]
fn prompt_snapshot() {
let width: u16 = 60;
let height: u16 = 20;
let height: u16 = 28;
let backend = VT100Backend::new(width, height);
let mut terminal = Terminal::with_options(backend).expect("terminal");
terminal.set_viewport_area(Rect::new(0, 0, width, height));
let screen = ModelMigrationScreen::new(
FrameRequester::test_dummy(),
gpt_5_1_codex_max_migration_copy(),
migration_copy_for_models(
"gpt-5.1-codex-mini",
"gpt-5.1-codex-max",
"gpt-5.1-codex-max".to_string(),
Some("Latest Codex-optimized flagship for deep and fast reasoning.".to_string()),
true,
),
);
{
@@ -361,13 +370,19 @@ mod tests {
#[test]
fn prompt_snapshot_gpt5_family() {
let backend = VT100Backend::new(65, 12);
let backend = VT100Backend::new(65, 22);
let mut terminal = Terminal::with_options(backend).expect("terminal");
terminal.set_viewport_area(Rect::new(0, 0, 65, 12));
terminal.set_viewport_area(Rect::new(0, 0, 65, 22));
let screen = ModelMigrationScreen::new(
FrameRequester::test_dummy(),
migration_copy_for_config(HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG),
migration_copy_for_models(
"gpt-5",
"gpt-5.1",
"gpt-5.1".to_string(),
Some("Broad world knowledge with strong general reasoning.".to_string()),
false,
),
);
{
let mut frame = terminal.get_frame();
@@ -379,13 +394,19 @@ mod tests {
#[test]
fn prompt_snapshot_gpt5_codex() {
let backend = VT100Backend::new(60, 12);
let backend = VT100Backend::new(60, 22);
let mut terminal = Terminal::with_options(backend).expect("terminal");
terminal.set_viewport_area(Rect::new(0, 0, 60, 12));
terminal.set_viewport_area(Rect::new(0, 0, 60, 22));
let screen = ModelMigrationScreen::new(
FrameRequester::test_dummy(),
migration_copy_for_config(HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG),
migration_copy_for_models(
"gpt-5-codex",
"gpt-5.1-codex-max",
"gpt-5.1-codex-max".to_string(),
Some("Latest Codex-optimized flagship for deep and fast reasoning.".to_string()),
false,
),
);
{
let mut frame = terminal.get_frame();
@@ -397,13 +418,19 @@ mod tests {
#[test]
fn prompt_snapshot_gpt5_codex_mini() {
let backend = VT100Backend::new(60, 12);
let backend = VT100Backend::new(60, 22);
let mut terminal = Terminal::with_options(backend).expect("terminal");
terminal.set_viewport_area(Rect::new(0, 0, 60, 12));
terminal.set_viewport_area(Rect::new(0, 0, 60, 22));
let screen = ModelMigrationScreen::new(
FrameRequester::test_dummy(),
migration_copy_for_config(HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG),
migration_copy_for_models(
"gpt-5-codex-mini",
"gpt-5.1-codex-mini",
"gpt-5.1-codex-mini".to_string(),
Some("Optimized for codex. Cheaper, faster, but less capable.".to_string()),
false,
),
);
{
let mut frame = terminal.get_frame();
@@ -417,7 +444,13 @@ mod tests {
fn escape_key_accepts_prompt() {
let mut screen = ModelMigrationScreen::new(
FrameRequester::test_dummy(),
gpt_5_1_codex_max_migration_copy(),
migration_copy_for_models(
"gpt-old",
"gpt-new",
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
true,
),
);
// Simulate pressing Escape
@@ -437,7 +470,13 @@ mod tests {
fn selecting_use_existing_model_rejects_upgrade() {
let mut screen = ModelMigrationScreen::new(
FrameRequester::test_dummy(),
gpt_5_1_codex_max_migration_copy(),
migration_copy_for_models(
"gpt-old",
"gpt-new",
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
true,
),
);
screen.handle_key(KeyEvent::new(

View File

@@ -1,15 +1,18 @@
---
source: tui/src/model_migration.rs
assertion_line: 368
expression: terminal.backend()
---
> Codex just got an upgrade. Introducing gpt-5.1-codex-max
Codex is now powered by gpt-5.1-codex-max, our latest
frontier agentic coding model. It is smarter and faster
than its predecessors and capable of long-running
project-scale work.
> Try gpt-5.1-codex-max
Learn more at https://openai.com/index/gpt-5-1-codex-max/.
We recommend switching from gpt-5.1-codex-mini to
gpt-5.1-codex-max.
Latest Codex-optimized flagship for deep and fast
reasoning.
You can continue using gpt-5.1-codex-mini if you prefer.
Choose how you'd like Codex to proceed.

View File

@@ -1,15 +1,15 @@
---
source: tui/src/model_migration.rs
assertion_line: 416
expression: terminal.backend()
---
> Introducing our gpt-5.1 models
We've upgraded our family of models supported in Codex to
gpt-5.1, gpt-5.1-codex and gpt-5.1-codex-mini.
> Try gpt-5.1-codex-max
You can continue using legacy models by specifying them
directly with the -m option or in your config.toml.
We recommend switching from gpt-5-codex to
gpt-5.1-codex-max.
Learn more at https://openai.com/index/gpt-5-1/.
Latest Codex-optimized flagship for deep and fast
reasoning.
Press enter to continue

View File

@@ -1,15 +1,14 @@
---
source: tui/src/model_migration.rs
assertion_line: 440
expression: terminal.backend()
---
> Introducing our gpt-5.1 models
We've upgraded our family of models supported in Codex to
gpt-5.1, gpt-5.1-codex and gpt-5.1-codex-mini.
> Try gpt-5.1-codex-mini
You can continue using legacy models by specifying them
directly with the -m option or in your config.toml.
We recommend switching from gpt-5-codex-mini to
gpt-5.1-codex-mini.
Learn more at https://openai.com/index/gpt-5-1/.
Optimized for codex. Cheaper, faster, but less capable.
Press enter to continue

View File

@@ -1,15 +1,13 @@
---
source: tui/src/model_migration.rs
assertion_line: 392
expression: terminal.backend()
---
> Introducing our gpt-5.1 models
We've upgraded our family of models supported in Codex to
gpt-5.1, gpt-5.1-codex and gpt-5.1-codex-mini.
> Try gpt-5.1
You can continue using legacy models by specifying them
directly with the -m option or in your config.toml.
We recommend switching from gpt-5 to gpt-5.1.
Learn more at https://openai.com/index/gpt-5-1/.
Broad world knowledge with strong general reasoning.
Press enter to continue