Show migration link (#8228)

# External (non-OpenAI) Pull Request Requirements

Before opening this Pull Request, please read the project's
"Contributing" guide, or your PR may be closed:
https://github.com/openai/codex/blob/main/docs/contributing.md

If your PR conforms to our contribution guidelines, replace this text
with a detailed, high-quality description of your changes.

Include a link to a bug report or enhancement request.
This commit is contained in:
Ahmed Ibrahim
2025-12-17 18:03:40 -08:00
committed by GitHub
parent a8797019a1
commit 6f102e18c4
10 changed files with 46 additions and 10 deletions

View File

@@ -69,6 +69,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
id: "caribou".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "caribou".to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}),
show_in_picker: true,
supported_in_api: true,
@@ -99,6 +100,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
id: "caribou".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "caribou".to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}),
show_in_picker: true,
supported_in_api: true,
@@ -121,7 +123,12 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
},
],
is_default: false,
upgrade: None,
upgrade: Some(ModelUpgrade {
id: "caribou".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "caribou".to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}),
show_in_picker: true,
supported_in_api: true,
},
@@ -154,6 +161,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
id: "caribou".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "caribou".to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}),
show_in_picker: true,
supported_in_api: true,
@@ -183,6 +191,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
id: "caribou".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "caribou".to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}),
show_in_picker: true,
supported_in_api: true,
@@ -213,6 +222,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
id: "caribou".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "caribou".to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}),
show_in_picker: false,
supported_in_api: true,
@@ -238,6 +248,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
id: "gpt-5.1-codex-mini".to_string(),
reasoning_effort_mapping: None,
migration_config_key: HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG.to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}),
show_in_picker: false,
supported_in_api: true,
@@ -271,6 +282,7 @@ static PRESETS: Lazy<Vec<ModelPreset>> = Lazy::new(|| {
id: "caribou".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "caribou".to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}),
show_in_picker: false,
supported_in_api: true,

View File

@@ -174,7 +174,7 @@ fn gpt_5_1_codex_mini() -> ModelPreset {
),
],
is_default: false,
upgrade: None,
upgrade: Some(caribou_upgrade()),
show_in_picker: true,
supported_in_api: true,
}
@@ -247,6 +247,7 @@ fn caribou_upgrade() -> codex_protocol::openai_models::ModelUpgrade {
id: "caribou".to_string(),
reasoning_effort_mapping: None,
migration_config_key: "caribou".to_string(),
model_link: Some("https://www.codex.com/models/caribou".to_string()),
}
}

View File

@@ -52,6 +52,7 @@ pub struct ModelUpgrade {
pub id: String,
pub reasoning_effort_mapping: Option<HashMap<ReasoningEffort, ReasoningEffort>>,
pub migration_config_key: String,
pub model_link: Option<String>,
}
/// Metadata describing a Codex-supported model.
@@ -216,6 +217,8 @@ impl From<ModelInfo> for ModelPreset {
&info.supported_reasoning_levels,
),
migration_config_key: info.slug.clone(),
// todo(aibrahim): add the model link here.
model_link: None,
}),
show_in_picker: info.visibility == ModelVisibility::List,
supported_in_api: info.supported_in_api,

View File

@@ -186,6 +186,7 @@ async fn handle_model_migration_prompt_if_needed(
id: target_model,
reasoning_effort_mapping,
migration_config_key,
model_link,
}) = upgrade
{
if migration_prompt_hidden(config, migration_config_key.as_str()) {
@@ -217,6 +218,7 @@ async fn handle_model_migration_prompt_if_needed(
let prompt_copy = migration_copy_for_models(
model,
&target_model,
model_link.clone(),
heading_label,
target_description,
can_opt_out,
@@ -1398,6 +1400,7 @@ mod tests {
id: "missing-target".to_string(),
reasoning_effort_mapping: None,
migration_config_key: HIDE_GPT5_1_MIGRATION_PROMPT_CONFIG.to_string(),
model_link: None,
});
available.retain(|preset| preset.model != "gpt-5-codex");
available.push(current.clone());

View File

@@ -58,6 +58,7 @@ impl MigrationMenuOption {
pub(crate) fn migration_copy_for_models(
current_model: &str,
target_model: &str,
model_link: Option<String>,
target_display_name: String,
target_description: Option<String>,
can_opt_out: bool,
@@ -77,10 +78,19 @@ pub(crate) fn migration_copy_for_models(
"We recommend switching from {current_model} to {target_model}."
)),
Line::from(""),
description_line,
Line::from(""),
];
if let Some(model_link) = model_link {
content.push(Line::from(vec![
format!("{description_line} Learn more about {target_display_name} at ").into(),
model_link.cyan().underlined(),
]));
content.push(Line::from(""));
} else {
content.push(description_line);
content.push(Line::from(""));
}
if can_opt_out {
content.push(Line::from(format!(
"You can continue using {current_model} if you prefer."
@@ -353,6 +363,7 @@ mod tests {
migration_copy_for_models(
"gpt-5.1-codex-mini",
"gpt-5.1-codex-max",
None,
"gpt-5.1-codex-max".to_string(),
Some("Latest Codex-optimized flagship for deep and fast reasoning.".to_string()),
true,
@@ -379,6 +390,7 @@ mod tests {
migration_copy_for_models(
"gpt-5",
"gpt-5.1",
Some("https://www.codex.com/models/gpt-5.1".to_string()),
"gpt-5.1".to_string(),
Some("Broad world knowledge with strong general reasoning.".to_string()),
false,
@@ -403,6 +415,7 @@ mod tests {
migration_copy_for_models(
"gpt-5-codex",
"gpt-5.1-codex-max",
Some("https://www.codex.com/models/gpt-5.1-codex-max".to_string()),
"gpt-5.1-codex-max".to_string(),
Some("Latest Codex-optimized flagship for deep and fast reasoning.".to_string()),
false,
@@ -427,6 +440,7 @@ mod tests {
migration_copy_for_models(
"gpt-5-codex-mini",
"gpt-5.1-codex-mini",
Some("https://www.codex.com/models/gpt-5.1-codex-mini".to_string()),
"gpt-5.1-codex-mini".to_string(),
Some("Optimized for codex. Cheaper, faster, but less capable.".to_string()),
false,
@@ -447,6 +461,7 @@ mod tests {
migration_copy_for_models(
"gpt-old",
"gpt-new",
Some("https://www.codex.com/models/gpt-new".to_string()),
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
true,
@@ -473,6 +488,7 @@ mod tests {
migration_copy_for_models(
"gpt-old",
"gpt-new",
Some("https://www.codex.com/models/gpt-new".to_string()),
"gpt-new".to_string(),
Some("Latest recommended model for better performance.".to_string()),
true,

View File

@@ -1,6 +1,5 @@
---
source: tui/src/model_migration.rs
assertion_line: 368
expression: terminal.backend()
---

View File

@@ -1,6 +1,5 @@
---
source: tui/src/model_migration.rs
assertion_line: 416
expression: terminal.backend()
---
@@ -10,6 +9,7 @@ expression: terminal.backend()
gpt-5.1-codex-max.
Latest Codex-optimized flagship for deep and fast
reasoning.
reasoning. Learn more about gpt-5.1-codex-max at
https://www.codex.com/models/gpt-5.1-codex-max
Press enter to continue

View File

@@ -1,6 +1,5 @@
---
source: tui/src/model_migration.rs
assertion_line: 440
expression: terminal.backend()
---
@@ -10,5 +9,7 @@ expression: terminal.backend()
gpt-5.1-codex-mini.
Optimized for codex. Cheaper, faster, but less capable.
Learn more about gpt-5.1-codex-mini at
https://www.codex.com/models/gpt-5.1-codex-mini
Press enter to continue

View File

@@ -1,6 +1,5 @@
---
source: tui/src/model_migration.rs
assertion_line: 392
expression: terminal.backend()
---
@@ -8,6 +7,7 @@ expression: terminal.backend()
We recommend switching from gpt-5 to gpt-5.1.
Broad world knowledge with strong general reasoning.
Broad world knowledge with strong general reasoning. Learn more
about gpt-5.1 at https://www.codex.com/models/gpt-5.1
Press enter to continue

View File

@@ -206,6 +206,7 @@ async fn handle_model_migration_prompt_if_needed(
id: target_model,
reasoning_effort_mapping,
migration_config_key,
model_link: _,
}) = upgrade
{
if migration_prompt_hidden(config, migration_config_key.as_str()) {