This commit is contained in:
Ahmed Ibrahim
2025-12-22 14:09:20 -08:00
parent 8e900c210c
commit cc55cbb357
11 changed files with 380 additions and 67 deletions

View File

@@ -631,6 +631,36 @@ impl ConfigEditsBuilder {
self
}
/// Persists a "show on next run" model migration notice by writing the
/// `from_model`/`to_model` slugs under `<notices>.pending_model_migration`.
///
/// Returns `self` so edits can be chained; nothing is written until
/// `apply()` is called.
pub fn set_pending_model_migration_notice(mut self, from_model: &str, to_model: &str) -> Self {
    // Both edits target the same table; build the shared path once instead of
    // duplicating the segment vectors.
    let path = |leaf: &str| {
        vec![
            Notice::TABLE_KEY.to_string(),
            "pending_model_migration".to_string(),
            leaf.to_string(),
        ]
    };
    for (leaf, slug) in [("from_model", from_model), ("to_model", to_model)] {
        self.edits.push(ConfigEdit::SetPath {
            segments: path(leaf),
            // `toml_edit::value` accepts `&str` directly; no owned String needed.
            value: value(slug),
        });
    }
    self
}
/// Removes any persisted `<notices>.pending_model_migration` entry so the
/// notice is not shown again on the next run.
pub fn clear_pending_model_migration_notice(mut self) -> Self {
    let segments = vec![
        Notice::TABLE_KEY.to_string(),
        "pending_model_migration".to_string(),
    ];
    self.edits.push(ConfigEdit::ClearPath { segments });
    self
}
pub fn set_windows_wsl_setup_acknowledged(mut self, acknowledged: bool) -> Self {
self.edits
.push(ConfigEdit::SetWindowsWslSetupAcknowledged(acknowledged));

View File

@@ -514,6 +514,12 @@ const fn default_true() -> bool {
/// Settings for notices we display to users via the tui and app-server clients
/// (primarily the Codex IDE extension). NOTE: these are different from
/// notifications - notices are warnings, NUX screens, acknowledgements, etc.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct PendingModelMigrationNotice {
    /// Model slug the notice was recorded for; it is only surfaced while the
    /// session is still using this model.
    pub from_model: String,
    /// Recommended replacement model slug to suggest via `/model`.
    pub to_model: String,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default)]
pub struct Notice {
/// Tracks whether the user has acknowledged the full access warning prompt.
@@ -530,6 +536,8 @@ pub struct Notice {
/// Tracks acknowledged model migrations as old->new model slug mappings.
#[serde(default)]
pub model_migrations: BTreeMap<String, String>,
/// Persisted "show on next run" model migration notice.
pub pending_model_migration: Option<PendingModelMigrationNotice>,
}
impl Notice {

View File

@@ -10,6 +10,7 @@ use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::Mutex;
use tokio::sync::RwLock;
use tokio::sync::TryLockError;
use tracing::error;
@@ -29,6 +30,7 @@ use crate::models_manager::model_presets::builtin_model_presets;
const MODEL_CACHE_FILE: &str = "models_cache.json";
const DEFAULT_MODEL_CACHE_TTL: Duration = Duration::from_secs(300);
const MODELS_REFRESH_TIMEOUT: Duration = Duration::from_secs(5);
const OPENAI_DEFAULT_API_MODEL: &str = "gpt-5.1-codex-max";
const OPENAI_DEFAULT_CHATGPT_MODEL: &str = "gpt-5.2-codex";
const CODEX_AUTO_BALANCED_MODEL: &str = "codex-auto-balanced";
@@ -39,6 +41,7 @@ pub struct ModelsManager {
// todo(aibrahim) merge available_models and model family creation into one struct
local_models: Vec<ModelPreset>,
remote_models: RwLock<Vec<ModelInfo>>,
refresh_lock: Mutex<()>,
auth_manager: Arc<AuthManager>,
etag: RwLock<Option<String>>,
codex_home: PathBuf,
@@ -53,6 +56,7 @@ impl ModelsManager {
Self {
local_models: builtin_model_presets(auth_manager.get_auth_mode()),
remote_models: RwLock::new(Self::load_remote_models_from_file().unwrap_or_default()),
refresh_lock: Mutex::new(()),
auth_manager,
etag: RwLock::new(None),
codex_home,
@@ -68,6 +72,7 @@ impl ModelsManager {
Self {
local_models: builtin_model_presets(auth_manager.get_auth_mode()),
remote_models: RwLock::new(Self::load_remote_models_from_file().unwrap_or_default()),
refresh_lock: Mutex::new(()),
auth_manager,
etag: RwLock::new(None),
codex_home,
@@ -83,6 +88,11 @@ impl ModelsManager {
{
return Ok(());
}
// Prevent duplicate `/models` refreshes when multiple callers try to refresh
// concurrently during startup (or when multiple features request models).
let _guard = self.refresh_lock.lock().await;
if self.try_load_cache().await {
return Ok(());
}
@@ -94,10 +104,23 @@ impl ModelsManager {
let client = ModelsClient::new(transport, api_provider, api_auth);
let client_version = format_client_version_to_whole();
let ModelsResponse { models, etag } = client
.list_models(&client_version, HeaderMap::new())
.await
.map_err(map_api_error)?;
let response = tokio::time::timeout(
MODELS_REFRESH_TIMEOUT,
client.list_models(&client_version, HeaderMap::new()),
)
.await;
let ModelsResponse { models, etag } = match response {
Ok(response) => response.map_err(map_api_error)?,
Err(_) => {
error!(
"timed out refreshing /models after {}s",
MODELS_REFRESH_TIMEOUT.as_secs()
);
// Leave `remote_models` unchanged so the preloaded fallback remains available.
return Ok(());
}
};
let etag = (!etag.is_empty()).then_some(etag);

View File

@@ -84,6 +84,33 @@ pub struct ModelFamily {
}
impl ModelFamily {
/// Placeholder model family for UI startup before Codex has selected the real model.
///
/// This must not be treated as an actual model slug (the slug is empty); it
/// only exists so consumers (e.g. the TUI) can render while the session is
/// still starting.
pub fn placeholder(config: &Config) -> Self {
    // Start from a neutral baseline, then layer on the user's config
    // overrides exactly as a real model family would.
    Self {
        slug: String::new(),
        family: "unknown".to_string(),
        needs_special_apply_patch_instructions: false,
        context_window: None,
        auto_compact_token_limit: None,
        supports_reasoning_summaries: false,
        default_reasoning_effort: None,
        reasoning_summary_format: ReasoningSummaryFormat::None,
        supports_parallel_tool_calls: false,
        apply_patch_tool_type: None,
        base_instructions: BASE_INSTRUCTIONS.to_string(),
        experimental_supported_tools: Vec::new(),
        effective_context_window_percent: 95,
        support_verbosity: false,
        default_verbosity: None,
        shell_type: ConfigShellToolType::Default,
        truncation_policy: TruncationPolicy::Bytes(10_000),
    }
    .with_config_overrides(config)
}
pub(super) fn with_config_overrides(mut self, config: &Config) -> Self {
if let Some(supports_reasoning_summaries) = config.model_supports_reasoning_summaries {
self.supports_reasoning_summaries = supports_reasoning_summaries;

View File

@@ -327,6 +327,94 @@ impl App {
}
}
/// Runs model-migration notice handling once the session reports the model
/// actually in use: first surface any notice persisted by a previous run,
/// then schedule a new one for the next run if an upgrade applies.
async fn on_session_configured_model(&mut self, model: String) {
    self.maybe_emit_pending_model_migration_notice(&model).await;
    self.spawn_schedule_model_migration_notice(model);
}
/// Surfaces a model migration notice that was persisted on a previous run,
/// records it as seen, and clears the pending entry from the config file.
///
/// The notice is only rendered when the session is still using the model it
/// was recorded for; a stale notice is silently discarded (but still cleared).
async fn maybe_emit_pending_model_migration_notice(&mut self, used_model: &str) {
    let Some(pending) = self.config.notices.pending_model_migration.take() else {
        return;
    };
    let should_show = used_model == pending.from_model;
    if should_show {
        let message = format!(
            "Recommended model upgrade: switch from {} to {}. Run /model to change.",
            pending.from_model, pending.to_model
        );
        self.app_event_tx.send(AppEvent::InsertHistoryCell(Box::new(
            crate::history_cell::new_warning_event(message),
        )));
        // Remember in memory that this migration was surfaced so it is not
        // offered again within this process.
        self.config
            .notices
            .model_migrations
            .insert(pending.from_model.clone(), pending.to_model.clone());
    }
    let mut builder = ConfigEditsBuilder::new(&self.config.codex_home);
    if should_show {
        builder = builder.record_model_migration_seen(&pending.from_model, &pending.to_model);
    }
    let persisted = builder.clear_pending_model_migration_notice().apply().await;
    if let Err(err) = persisted {
        tracing::error!(
            error = %err,
            "failed to clear pending model migration notice"
        );
        self.chat_widget.add_error_message(format!(
            "Failed to persist model migration notice state: {err}"
        ));
    }
}
/// Decides in the background whether an upgrade notice should be shown on the
/// *next* run and, if so, persists it to the config file.
fn spawn_schedule_model_migration_notice(&self, used_model: String) {
    let config = self.config.clone();
    let models_manager = self.server.get_models_manager();
    tokio::spawn(async move {
        // Consult the current models list (remote if refreshed, otherwise the
        // startup fallback seeded in `ModelsManager`) for an upgrade mapping.
        let available_models = models_manager.list_models(&config).await;
        let Some(upgrade) = available_models
            .iter()
            .find(|preset| preset.model == used_model)
            .and_then(|preset| preset.upgrade.as_ref())
        else {
            return;
        };
        if migration_prompt_hidden(&config, upgrade.migration_config_key.as_str()) {
            return;
        }
        let target_model = upgrade.id.clone();
        let wants_prompt = should_show_model_migration_prompt(
            used_model.as_str(),
            target_model.as_str(),
            &config.notices.model_migrations,
            &available_models,
        );
        if !wants_prompt {
            return;
        }
        let persisted = ConfigEditsBuilder::new(&config.codex_home)
            .set_pending_model_migration_notice(used_model.as_str(), target_model.as_str())
            .apply()
            .await;
        if let Err(err) = persisted {
            tracing::error!(
                error = %err,
                "failed to persist pending model migration notice"
            );
        }
    });
}
#[allow(clippy::too_many_arguments)]
pub async fn run(
tui: &mut tui::Tui,
@@ -347,30 +435,10 @@ impl App {
auth_manager.clone(),
SessionSource::Cli,
));
let mut model = conversation_manager
.get_models_manager()
.get_model(&config.model, &config)
.await;
let exit_info = handle_model_migration_prompt_if_needed(
tui,
&mut config,
model.as_str(),
&app_event_tx,
conversation_manager.get_models_manager(),
)
.await;
if let Some(exit_info) = exit_info {
return Ok(exit_info);
}
if let Some(updated_model) = config.model.clone() {
model = updated_model;
}
let enhanced_keys_supported = tui.enhanced_keys_supported();
let model_family = conversation_manager
.get_models_manager()
.construct_model_family(model.as_str(), &config)
.await;
let model_family =
codex_core::models_manager::model_family::ModelFamily::placeholder(&config);
let mut chat_widget = match resume_selection {
ResumeSelection::StartFresh | ResumeSelection::Exit => {
let init = crate::chatwidget::ChatWidgetInit {
@@ -432,7 +500,7 @@ impl App {
chat_widget,
auth_manager: auth_manager.clone(),
config,
current_model: model.clone(),
current_model: String::new(),
active_profile,
file_search,
enhanced_keys_supported,
@@ -712,12 +780,21 @@ impl App {
self.suppress_shutdown_complete = false;
return Ok(true);
}
let configured_model = match &event.msg {
EventMsg::SessionConfigured(ev) => Some(ev.model.clone()),
_ => None,
};
if let EventMsg::ListSkillsResponse(response) = &event.msg {
let cwd = self.chat_widget.config_ref().cwd.clone();
let errors = errors_for_cwd(&cwd, response);
emit_skill_load_warnings(&self.app_event_tx, &errors);
}
self.chat_widget.handle_codex_event(event);
if let Some(model) = configured_model {
self.on_session_configured_model(model).await;
}
}
AppEvent::ConversationHistory(ev) => {
self.on_conversation_history_for_backtrack(tui, ev).await?;

View File

@@ -425,6 +425,9 @@ impl ChatWidget {
let initial_messages = event.initial_messages.clone();
let model_for_header = event.model.clone();
self.session_header.set_model(&model_for_header);
// Now that Codex has selected the actual model, update the model family used for UI.
self.app_event_tx
.send(AppEvent::UpdateModel(model_for_header.clone()));
self.add_to_history(history_cell::new_session_info(
&self.config,
&model_for_header,
@@ -440,9 +443,15 @@ impl ChatWidget {
cwds: Vec::new(),
force_reload: false,
});
let had_initial_message = self.initial_user_message.is_some();
if let Some(user_message) = self.initial_user_message.take() {
self.submit_user_message(user_message);
}
if !had_initial_message {
// If there are queued inputs from startup, begin the first turn now.
// Subsequent queued inputs are sent turn-by-turn via `maybe_send_next_queued_input`.
self.maybe_send_next_queued_input();
}
if !self.suppress_session_configured_redraw {
self.request_redraw();
}
@@ -1409,9 +1418,7 @@ impl ChatWidget {
is_first_run,
model_family,
} = common;
let model_slug = model_family.get_model_slug().to_string();
let mut config = config;
config.model = Some(model_slug.clone());
let config = config;
let mut rng = rand::rng();
let placeholder = EXAMPLE_PROMPTS[rng.random_range(0..EXAMPLE_PROMPTS.len())].to_string();
let codex_op_tx = spawn_agent(config.clone(), app_event_tx.clone(), conversation_manager);
@@ -1435,7 +1442,7 @@ impl ChatWidget {
model_family,
auth_manager,
models_manager,
session_header: SessionHeader::new(model_slug),
session_header: SessionHeader::new("Starting...".to_string()),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
initial_images,
@@ -1687,7 +1694,7 @@ impl ChatWidget {
return;
}
const INIT_PROMPT: &str = include_str!("../prompt_for_init_command.md");
self.submit_user_message(INIT_PROMPT.to_string().into());
self.queue_user_message(INIT_PROMPT.to_string().into());
}
SlashCommand::Compact => {
self.clear_token_usage();
@@ -1697,7 +1704,14 @@ impl ChatWidget {
self.open_review_popup();
}
SlashCommand::Model => {
self.open_model_popup();
if self.conversation_id.is_none() {
self.add_info_message(
"`/model` is unavailable until startup finishes.".to_string(),
None,
);
} else {
self.open_model_popup();
}
}
SlashCommand::Approvals => {
self.open_approvals_popup();
@@ -1868,7 +1882,7 @@ impl ChatWidget {
}
fn queue_user_message(&mut self, user_message: UserMessage) {
if self.bottom_pane.is_task_running() {
if self.conversation_id.is_none() || self.bottom_pane.is_task_running() {
self.queued_user_messages.push_back(user_message);
self.refresh_queued_user_messages();
} else {
@@ -2177,7 +2191,7 @@ impl ChatWidget {
// If idle and there are queued inputs, submit exactly one to start the next turn.
fn maybe_send_next_queued_input(&mut self) {
if self.bottom_pane.is_task_running() {
if self.conversation_id.is_none() || self.bottom_pane.is_task_running() {
return;
}
if let Some(user_message) = self.queued_user_messages.pop_front() {

View File

@@ -1022,6 +1022,27 @@ async fn alt_up_edits_most_recent_queued_message() {
async fn enqueueing_history_prompt_multiple_times_is_stable() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
// Ensure the session is configured so the first submission is sent (not queued),
// which seeds the prompt history for the subsequent Up-arrow recalls.
let conversation_id = ConversationId::new();
let rollout_file = NamedTempFile::new().unwrap();
chat.handle_codex_event(Event {
id: "configured".into(),
msg: EventMsg::SessionConfigured(codex_core::protocol::SessionConfiguredEvent {
session_id: conversation_id,
model: "test-model".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::ReadOnly,
cwd: PathBuf::from("/home/user/project"),
reasoning_effort: Some(ReasoningEffortConfig::default()),
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
rollout_path: rollout_file.path().to_path_buf(),
}),
});
// Submit an initial prompt to seed history.
chat.bottom_pane.set_composer_text("repeat me".to_string());
chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));

View File

@@ -390,6 +390,94 @@ impl App {
}
}
/// Runs model-migration notice handling once the session reports the model
/// actually in use: first surface any notice persisted by a previous run,
/// then schedule a new one for the next run if an upgrade applies.
async fn on_session_configured_model(&mut self, model: String) {
    self.maybe_emit_pending_model_migration_notice(&model).await;
    self.spawn_schedule_model_migration_notice(model);
}
/// Surfaces a model migration notice that was persisted on a previous run,
/// records it as seen, and clears the pending entry from the config file.
///
/// The notice is only rendered when the session is still using the model it
/// was recorded for; a stale notice is silently discarded (but still cleared).
async fn maybe_emit_pending_model_migration_notice(&mut self, used_model: &str) {
    let Some(pending) = self.config.notices.pending_model_migration.take() else {
        return;
    };
    let should_show = used_model == pending.from_model;
    if should_show {
        let message = format!(
            "Recommended model upgrade: switch from {} to {}. Run /model to change.",
            pending.from_model, pending.to_model
        );
        self.app_event_tx.send(AppEvent::InsertHistoryCell(Box::new(
            crate::history_cell::new_warning_event(message),
        )));
        // Remember in memory that this migration was surfaced so it is not
        // offered again within this process.
        self.config
            .notices
            .model_migrations
            .insert(pending.from_model.clone(), pending.to_model.clone());
    }
    let mut builder = ConfigEditsBuilder::new(&self.config.codex_home);
    if should_show {
        builder = builder.record_model_migration_seen(&pending.from_model, &pending.to_model);
    }
    let persisted = builder.clear_pending_model_migration_notice().apply().await;
    if let Err(err) = persisted {
        tracing::error!(
            error = %err,
            "failed to clear pending model migration notice"
        );
        self.chat_widget.add_error_message(format!(
            "Failed to persist model migration notice state: {err}"
        ));
    }
}
/// Decides in the background whether an upgrade notice should be shown on the
/// *next* run and, if so, persists it to the config file.
fn spawn_schedule_model_migration_notice(&self, used_model: String) {
    let config = self.config.clone();
    let models_manager = self.server.get_models_manager();
    tokio::spawn(async move {
        // Consult the current models list (remote if refreshed, otherwise the
        // startup fallback seeded in `ModelsManager`) for an upgrade mapping.
        let available_models = models_manager.list_models(&config).await;
        let Some(upgrade) = available_models
            .iter()
            .find(|preset| preset.model == used_model)
            .and_then(|preset| preset.upgrade.as_ref())
        else {
            return;
        };
        if migration_prompt_hidden(&config, upgrade.migration_config_key.as_str()) {
            return;
        }
        let target_model = upgrade.id.clone();
        let wants_prompt = should_show_model_migration_prompt(
            used_model.as_str(),
            target_model.as_str(),
            &config.notices.model_migrations,
            &available_models,
        );
        if !wants_prompt {
            return;
        }
        let persisted = ConfigEditsBuilder::new(&config.codex_home)
            .set_pending_model_migration_notice(used_model.as_str(), target_model.as_str())
            .apply()
            .await;
        if let Err(err) = persisted {
            tracing::error!(
                error = %err,
                "failed to persist pending model migration notice"
            );
        }
    });
}
#[allow(clippy::too_many_arguments)]
pub async fn run(
tui: &mut tui::Tui,
@@ -410,30 +498,10 @@ impl App {
auth_manager.clone(),
SessionSource::Cli,
));
let mut model = conversation_manager
.get_models_manager()
.get_model(&config.model, &config)
.await;
let exit_info = handle_model_migration_prompt_if_needed(
tui,
&mut config,
model.as_str(),
&app_event_tx,
conversation_manager.get_models_manager(),
)
.await;
if let Some(exit_info) = exit_info {
return Ok(exit_info);
}
if let Some(updated_model) = config.model.clone() {
model = updated_model;
}
let enhanced_keys_supported = tui.enhanced_keys_supported();
let model_family = conversation_manager
.get_models_manager()
.construct_model_family(model.as_str(), &config)
.await;
let model_family =
codex_core::models_manager::model_family::ModelFamily::placeholder(&config);
let mut chat_widget = match resume_selection {
ResumeSelection::StartFresh | ResumeSelection::Exit => {
let init = crate::chatwidget::ChatWidgetInit {
@@ -509,7 +577,7 @@ impl App {
chat_widget,
auth_manager: auth_manager.clone(),
config,
current_model: model.clone(),
current_model: String::new(),
active_profile,
file_search,
enhanced_keys_supported,
@@ -1715,12 +1783,21 @@ impl App {
self.suppress_shutdown_complete = false;
return Ok(true);
}
let configured_model = match &event.msg {
EventMsg::SessionConfigured(ev) => Some(ev.model.clone()),
_ => None,
};
if let EventMsg::ListSkillsResponse(response) = &event.msg {
let cwd = self.chat_widget.config_ref().cwd.clone();
let errors = errors_for_cwd(&cwd, response);
emit_skill_load_warnings(&self.app_event_tx, &errors);
}
self.chat_widget.handle_codex_event(event);
if let Some(model) = configured_model {
self.on_session_configured_model(model).await;
}
}
AppEvent::ConversationHistory(ev) => {
self.on_conversation_history_for_backtrack(tui, ev).await?;

View File

@@ -400,6 +400,9 @@ impl ChatWidget {
let initial_messages = event.initial_messages.clone();
let model_for_header = event.model.clone();
self.session_header.set_model(&model_for_header);
// Now that Codex has selected the actual model, update the model family used for UI.
self.app_event_tx
.send(AppEvent::UpdateModel(model_for_header.clone()));
self.add_to_history(history_cell::new_session_info(
&self.config,
&model_for_header,
@@ -415,9 +418,15 @@ impl ChatWidget {
cwds: Vec::new(),
force_reload: false,
});
let had_initial_message = self.initial_user_message.is_some();
if let Some(user_message) = self.initial_user_message.take() {
self.submit_user_message(user_message);
}
if !had_initial_message {
// If there are queued inputs from startup, begin the first turn now.
// Subsequent queued inputs are sent turn-by-turn via `maybe_send_next_queued_input`.
self.maybe_send_next_queued_input();
}
if !self.suppress_session_configured_redraw {
self.request_redraw();
}
@@ -1278,9 +1287,7 @@ impl ChatWidget {
is_first_run,
model_family,
} = common;
let model_slug = model_family.get_model_slug().to_string();
let mut config = config;
config.model = Some(model_slug.clone());
let config = config;
let mut rng = rand::rng();
let placeholder = EXAMPLE_PROMPTS[rng.random_range(0..EXAMPLE_PROMPTS.len())].to_string();
let codex_op_tx = spawn_agent(config.clone(), app_event_tx.clone(), conversation_manager);
@@ -1304,7 +1311,7 @@ impl ChatWidget {
model_family,
auth_manager,
models_manager,
session_header: SessionHeader::new(model_slug),
session_header: SessionHeader::new("Starting...".to_string()),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
initial_images,
@@ -1554,7 +1561,7 @@ impl ChatWidget {
return;
}
const INIT_PROMPT: &str = include_str!("../prompt_for_init_command.md");
self.submit_user_message(INIT_PROMPT.to_string().into());
self.queue_user_message(INIT_PROMPT.to_string().into());
}
SlashCommand::Compact => {
self.clear_token_usage();
@@ -1564,7 +1571,14 @@ impl ChatWidget {
self.open_review_popup();
}
SlashCommand::Model => {
self.open_model_popup();
if self.conversation_id.is_none() {
self.add_info_message(
"`/model` is unavailable until startup finishes.".to_string(),
None,
);
} else {
self.open_model_popup();
}
}
SlashCommand::Approvals => {
self.open_approvals_popup();
@@ -1707,7 +1721,7 @@ impl ChatWidget {
}
fn queue_user_message(&mut self, user_message: UserMessage) {
if self.bottom_pane.is_task_running() {
if self.conversation_id.is_none() || self.bottom_pane.is_task_running() {
self.queued_user_messages.push_back(user_message);
self.refresh_queued_user_messages();
} else {
@@ -2016,7 +2030,7 @@ impl ChatWidget {
// If idle and there are queued inputs, submit exactly one to start the next turn.
fn maybe_send_next_queued_input(&mut self) {
if self.bottom_pane.is_task_running() {
if self.conversation_id.is_none() || self.bottom_pane.is_task_running() {
return;
}
if let Some(user_message) = self.queued_user_messages.pop_front() {

View File

@@ -983,6 +983,27 @@ async fn alt_up_edits_most_recent_queued_message() {
async fn enqueueing_history_prompt_multiple_times_is_stable() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
// Ensure the session is configured so the first submission is sent (not queued),
// which seeds the prompt history for the subsequent Up-arrow recalls.
let conversation_id = ConversationId::new();
let rollout_file = NamedTempFile::new().unwrap();
chat.handle_codex_event(Event {
id: "configured".into(),
msg: EventMsg::SessionConfigured(codex_core::protocol::SessionConfiguredEvent {
session_id: conversation_id,
model: "test-model".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::ReadOnly,
cwd: PathBuf::from("/home/user/project"),
reasoning_effort: Some(ReasoningEffortConfig::default()),
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
rollout_path: rollout_file.path().to_path_buf(),
}),
});
// Submit an initial prompt to seed history.
chat.bottom_pane.set_composer_text("repeat me".to_string());
chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));

View File

@@ -25,7 +25,8 @@ pub struct VT100Backend {
impl VT100Backend {
/// Creates a new `TestBackend` with the specified width and height.
pub fn new(width: u16, height: u16) -> Self {
crossterm::style::Colored::set_ansi_color_disabled(false);
// Force ANSI color output even when the writer isn't a real TTY (e.g., vt100::Parser in tests).
crossterm::style::force_color_output(true);
Self {
crossterm_backend: CrosstermBackend::new(vt100::Parser::new(height, width, 0)),
}