Migrate codex max (#7566)

- make codex max the default
- fix: we were doing some async work in sync function which caused tui
to panic
This commit is contained in:
Ahmed Ibrahim
2025-12-03 20:54:48 -08:00
committed by GitHub
parent edd98dd3b7
commit 67e67e054f
6 changed files with 29 additions and 33 deletions

View File

@@ -30,7 +30,12 @@ async fn list_models_returns_chatgpt_models() -> Result<()> {
}
/// Expected model presets when authenticating with an API key.
///
/// Order matters: per this commit ("make codex max the default"),
/// `gpt_5_1_codex_max` leads the list as the new default preset.
fn expected_models_for_api_key() -> Vec<ModelPreset> {
    vec![
        gpt_5_1_codex_max(),
        gpt_5_1_codex(),
        gpt_5_1_codex_mini(),
        gpt_5_1(),
    ]
}
fn expected_models_for_chatgpt() -> Vec<ModelPreset> {