Get model on session configured (#9191)

- Don't try to precompute model unless you know it from `config`
- Block `/model` on session configured
- Queue messages until session configured
- Show "loading" in status until session configured
This commit is contained in:
Ahmed Ibrahim
2026-01-14 10:20:41 -08:00
committed by GitHub
parent 02f67bace8
commit 8e937fbba9
8 changed files with 306 additions and 67 deletions

View File

@@ -418,7 +418,7 @@ impl App {
models_manager: thread_manager.get_models_manager(),
feedback: feedback.clone(),
is_first_run,
model: model.clone(),
model: config.model.clone(),
};
ChatWidget::new(init, thread_manager.clone())
}
@@ -441,7 +441,7 @@ impl App {
models_manager: thread_manager.get_models_manager(),
feedback: feedback.clone(),
is_first_run,
model: model.clone(),
model: config.model.clone(),
};
ChatWidget::new_from_existing(init, resumed.thread, resumed.session_configured)
}
@@ -464,7 +464,7 @@ impl App {
models_manager: thread_manager.get_models_manager(),
feedback: feedback.clone(),
is_first_run,
model: model.clone(),
model: config.model.clone(),
};
ChatWidget::new_from_existing(init, forked.thread, forked.session_configured)
}
@@ -641,7 +641,7 @@ impl App {
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
is_first_run: false,
model: self.current_model.clone(),
model: Some(self.current_model.clone()),
};
self.chat_widget = ChatWidget::new(init, self.server.clone());
self.current_model = model_info.slug.clone();
@@ -691,7 +691,7 @@ impl App {
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
is_first_run: false,
model: self.current_model.clone(),
model: Some(self.current_model.clone()),
};
self.chat_widget = ChatWidget::new_from_existing(
init,
@@ -760,7 +760,7 @@ impl App {
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
is_first_run: false,
model: self.current_model.clone(),
model: Some(self.current_model.clone()),
};
self.chat_widget = ChatWidget::new_from_existing(
init,

View File

@@ -28,6 +28,7 @@ use std::sync::Arc;
use std::time::Duration;
use std::time::Instant;
use crate::version::CODEX_CLI_VERSION;
use codex_app_server_protocol::AuthMode;
use codex_backend_client::Client as BackendClient;
use codex_core::config::Config;
@@ -100,6 +101,8 @@ use rand::Rng;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::style::Color;
use ratatui::style::Modifier;
use ratatui::style::Style;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::widgets::Paragraph;
@@ -108,6 +111,8 @@ use tokio::sync::mpsc::UnboundedSender;
use tokio::task::JoinHandle;
use tracing::debug;
const DEFAULT_MODEL_DISPLAY_NAME: &str = "loading";
use crate::app_event::AppEvent;
use crate::app_event::ExitMode;
#[cfg(target_os = "windows")]
@@ -344,7 +349,7 @@ pub(crate) struct ChatWidgetInit {
pub(crate) models_manager: Arc<ModelsManager>,
pub(crate) feedback: codex_feedback::CodexFeedback,
pub(crate) is_first_run: bool,
pub(crate) model: String,
pub(crate) model: Option<String>,
}
#[derive(Default)]
@@ -391,7 +396,7 @@ pub(crate) struct ChatWidget {
/// where the overlay may briefly treat new tail content as already cached.
active_cell_revision: u64,
config: Config,
model: String,
model: Option<String>,
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
session_header: SessionHeader,
@@ -586,13 +591,16 @@ impl ChatWidget {
self.current_rollout_path = Some(event.rollout_path.clone());
let initial_messages = event.initial_messages.clone();
let model_for_header = event.model.clone();
self.model = Some(model_for_header.clone());
self.session_header.set_model(&model_for_header);
self.add_to_history(history_cell::new_session_info(
let session_info_cell = history_cell::new_session_info(
&self.config,
&model_for_header,
event,
self.show_welcome_banner,
));
);
self.apply_session_info_cell(session_info_cell);
if let Some(messages) = initial_messages {
self.replay_initial_messages(messages);
}
@@ -836,7 +844,7 @@ impl ChatWidget {
if high_usage
&& !self.rate_limit_switch_prompt_hidden()
&& self.model != NUDGE_MODEL_SLUG
&& self.current_model() != Some(NUDGE_MODEL_SLUG)
&& !matches!(
self.rate_limit_switch_prompt,
RateLimitSwitchPromptState::Shown
@@ -1591,11 +1599,22 @@ impl ChatWidget {
model,
} = common;
let mut config = config;
config.model = Some(model.clone());
let model = model.filter(|m| !m.trim().is_empty());
config.model = model.clone();
let mut rng = rand::rng();
let placeholder = PLACEHOLDERS[rng.random_range(0..PLACEHOLDERS.len())].to_string();
let codex_op_tx = spawn_agent(config.clone(), app_event_tx.clone(), thread_manager);
let model_for_header = config
.model
.clone()
.unwrap_or_else(|| DEFAULT_MODEL_DISPLAY_NAME.to_string());
let active_cell = if model.is_none() {
Some(Self::placeholder_session_header_cell(&config))
} else {
None
};
let mut widget = Self {
app_event_tx: app_event_tx.clone(),
frame_requester: frame_requester.clone(),
@@ -1610,13 +1629,13 @@ impl ChatWidget {
animations_enabled: config.animations,
skills: None,
}),
active_cell: None,
active_cell,
active_cell_revision: 0,
config,
model: model.clone(),
model,
auth_manager,
models_manager,
session_header: SessionHeader::new(model),
session_header: SessionHeader::new(model_for_header),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
initial_images,
@@ -1684,9 +1703,12 @@ impl ChatWidget {
model,
..
} = common;
let model = model.filter(|m| !m.trim().is_empty());
let mut rng = rand::rng();
let placeholder = PLACEHOLDERS[rng.random_range(0..PLACEHOLDERS.len())].to_string();
let header_model = model.unwrap_or_else(|| session_configured.model.clone());
let codex_op_tx =
spawn_agent_from_existing(conversation, session_configured, app_event_tx.clone());
@@ -1707,10 +1729,10 @@ impl ChatWidget {
active_cell: None,
active_cell_revision: 0,
config,
model: model.clone(),
model: Some(header_model.clone()),
auth_manager,
models_manager,
session_header: SessionHeader::new(model),
session_header: SessionHeader::new(header_model),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
initial_images,
@@ -1828,7 +1850,11 @@ impl ChatWidget {
text,
image_paths: self.bottom_pane.take_recent_submission_images(),
};
self.submit_user_message(user_message);
if !self.is_session_configured() {
self.queue_user_message(user_message);
} else {
self.submit_user_message(user_message);
}
}
InputResult::Queued(text) => {
// Tab queues the message if a task is running, otherwise submits immediately
@@ -2180,7 +2206,15 @@ impl ChatWidget {
}
fn add_boxed_history(&mut self, cell: Box<dyn HistoryCell>) {
if !cell.display_lines(u16::MAX).is_empty() {
// Keep the placeholder session header as the active cell until real session info arrives,
// so we can merge headers instead of committing a duplicate box to history.
let keep_placeholder_header_active = !self.is_session_configured()
&& self
.active_cell
.as_ref()
.is_some_and(|c| c.as_any().is::<history_cell::SessionHeaderHistoryCell>());
if !keep_placeholder_header_active && !cell.display_lines(u16::MAX).is_empty() {
// Only break exec grouping if the cell renders visible lines.
self.flush_active_cell();
self.needs_final_message_separator = true;
@@ -2189,7 +2223,10 @@ impl ChatWidget {
}
fn queue_user_message(&mut self, user_message: UserMessage) {
if self.bottom_pane.is_task_running() || self.is_review_mode {
if !self.is_session_configured()
|| self.bottom_pane.is_task_running()
|| self.is_review_mode
{
self.queued_user_messages.push_back(user_message);
self.refresh_queued_user_messages();
} else {
@@ -2581,7 +2618,7 @@ impl ChatWidget {
self.rate_limit_snapshot.as_ref(),
self.plan_type,
Local::now(),
&self.model,
self.model_display_name(),
));
}
@@ -2735,6 +2772,14 @@ impl ChatWidget {
/// Open a popup to choose a quick auto model. Selecting "All models"
/// opens the full picker with every available preset.
pub(crate) fn open_model_popup(&mut self) {
if !self.is_session_configured() {
self.add_info_message(
"Model selection is disabled until startup completes.".to_string(),
None,
);
return;
}
let presets: Vec<ModelPreset> = match self.models_manager.try_list_models(&self.config) {
Ok(models) => models,
Err(_) => {
@@ -2793,11 +2838,12 @@ impl ChatWidget {
.filter(|preset| preset.show_in_picker)
.collect();
let current_model = self.current_model();
let current_label = presets
.iter()
.find(|preset| preset.model == self.model)
.find(|preset| Some(preset.model.as_str()) == current_model)
.map(|preset| preset.display_name.to_string())
.unwrap_or_else(|| self.model.clone());
.unwrap_or_else(|| self.model_display_name().to_string());
let (mut auto_presets, other_presets): (Vec<ModelPreset>, Vec<ModelPreset>) = presets
.into_iter()
@@ -2823,7 +2869,7 @@ impl ChatWidget {
SelectionItem {
name: preset.display_name.clone(),
description,
is_current: model == self.model,
is_current: Some(model.as_str()) == current_model,
is_default: preset.is_default,
actions,
dismiss_on_select: true,
@@ -2893,7 +2939,7 @@ impl ChatWidget {
for preset in presets.into_iter() {
let description =
(!preset.description.is_empty()).then_some(preset.description.to_string());
let is_current = preset.model == self.model;
let is_current = Some(preset.model.as_str()) == self.current_model();
let single_supported_effort = preset.supported_reasoning_efforts.len() == 1;
let preset_for_action = preset.clone();
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
@@ -3019,7 +3065,7 @@ impl ChatWidget {
.or(Some(default_effort));
let model_slug = preset.model.to_string();
let is_current_model = self.model == preset.model;
let is_current_model = self.current_model() == Some(preset.model.as_str());
let highlight_choice = if is_current_model {
self.config.model_reasoning_effort
} else {
@@ -3852,7 +3898,55 @@ impl ChatWidget {
/// Set the model in the widget's config copy.
pub(crate) fn set_model(&mut self, model: &str) {
self.session_header.set_model(model);
self.model = model.to_string();
self.model = Some(model.to_string());
}
/// Slug of the model currently in use, or `None` while the session is still
/// being configured and no model is known yet.
fn current_model(&self) -> Option<&str> {
self.model.as_deref()
}
/// Model name suitable for UI display; falls back to the "loading"
/// placeholder (`DEFAULT_MODEL_DISPLAY_NAME`) until a real model is known.
fn model_display_name(&self) -> &str {
self.model.as_deref().unwrap_or(DEFAULT_MODEL_DISPLAY_NAME)
}
/// Build a placeholder header cell while the session is configuring.
///
/// Renders the "loading" placeholder model name dimmed and italic so the user
/// can see startup is still in progress. `config` supplies the cwd shown in
/// the header; the reasoning-effort slot is left empty (`None`).
fn placeholder_session_header_cell(config: &Config) -> Box<dyn HistoryCell> {
let placeholder_style = Style::default().add_modifier(Modifier::DIM | Modifier::ITALIC);
Box::new(history_cell::SessionHeaderHistoryCell::new_with_style(
DEFAULT_MODEL_DISPLAY_NAME.to_string(),
placeholder_style,
None,
config.cwd.clone(),
CODEX_CLI_VERSION,
))
}
/// Merge the real session info cell with any placeholder header to avoid double boxes.
///
/// If the current active cell is the placeholder `SessionHeaderHistoryCell`,
/// the real session info replaces it in place; otherwise the active cell is
/// restored untouched and the session info is appended to history after the
/// active cell is flushed.
fn apply_session_info_cell(&mut self, cell: history_cell::SessionInfoCell) {
let mut session_info_cell = Some(Box::new(cell) as Box<dyn HistoryCell>);
// True once the placeholder header has been swapped for the real info cell.
let merged_header = if let Some(active) = self.active_cell.take() {
if active
.as_any()
.is::<history_cell::SessionHeaderHistoryCell>()
{
// Reuse the existing placeholder header to avoid rendering two boxes.
if let Some(cell) = session_info_cell.take() {
self.active_cell = Some(cell);
}
true
} else {
// Not the placeholder: put the active cell back exactly as it was.
self.active_cell = Some(active);
false
}
} else {
false
};
// Commit whatever is active; if nothing merged, append the info cell normally.
self.flush_active_cell();
if !merged_header && let Some(cell) = session_info_cell {
self.add_boxed_history(cell);
}
}
pub(crate) fn add_info_message(&mut self, message: String, hint: Option<String>) {
@@ -4196,6 +4290,10 @@ impl ChatWidget {
self.thread_id
}
/// True once the `SessionConfigured` event has been handled, i.e. a thread id
/// is known for this widget.
fn is_session_configured(&self) -> bool {
self.thread_id.is_some()
}
pub(crate) fn rollout_path(&self) -> Option<PathBuf> {
self.current_rollout_path.clone()
}

View File

@@ -355,7 +355,7 @@ async fn helpers_are_available_and_do_not_panic() {
models_manager: thread_manager.get_models_manager(),
feedback: codex_feedback::CodexFeedback::new(),
is_first_run: true,
model: resolved_model,
model: Some(resolved_model),
};
let mut w = ChatWidget::new(init, thread_manager);
// Basic construction sanity.
@@ -400,7 +400,7 @@ async fn make_chatwidget_manual(
active_cell: None,
active_cell_revision: 0,
config: cfg,
model: resolved_model.clone(),
model: Some(resolved_model.clone()),
auth_manager: auth_manager.clone(),
models_manager: Arc::new(ModelsManager::new(codex_home, auth_manager)),
session_header: SessionHeader::new(resolved_model),
@@ -1057,6 +1057,7 @@ async fn alt_up_edits_most_recent_queued_message() {
#[tokio::test]
async fn enqueueing_history_prompt_multiple_times_is_stable() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
chat.thread_id = Some(ThreadId::new());
// Submit an initial prompt to seed history.
chat.bottom_pane.set_composer_text("repeat me".to_string());
@@ -1083,6 +1084,7 @@ async fn enqueueing_history_prompt_multiple_times_is_stable() {
#[tokio::test]
async fn streaming_final_answer_keeps_task_running_state() {
let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
chat.thread_id = Some(ThreadId::new());
chat.on_task_started();
chat.on_agent_message_delta("Final answer line\n".to_string());
@@ -2060,6 +2062,7 @@ async fn experimental_features_toggle_saves_on_exit() {
#[tokio::test]
async fn model_selection_popup_snapshot() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5-codex")).await;
chat.thread_id = Some(ThreadId::new());
chat.open_model_popup();
let popup = render_bottom_popup(&chat, 80);
@@ -2069,6 +2072,7 @@ async fn model_selection_popup_snapshot() {
#[tokio::test]
async fn model_picker_hides_show_in_picker_false_models_from_cache() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("test-visible-model")).await;
chat.thread_id = Some(ThreadId::new());
let preset = |slug: &str, show_in_picker: bool| ModelPreset {
id: slug.to_string(),
model: slug.to_string(),
@@ -2342,6 +2346,7 @@ async fn feedback_upload_consent_popup_snapshot() {
#[tokio::test]
async fn reasoning_popup_escape_returns_to_model_popup() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1-codex-max")).await;
chat.thread_id = Some(ThreadId::new());
chat.open_model_popup();
let preset = get_available_model(&chat, "gpt-5.1-codex-max");
@@ -3888,6 +3893,7 @@ printf 'fenced within fenced\n'
#[tokio::test]
async fn chatwidget_tall() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
chat.thread_id = Some(ThreadId::new());
chat.handle_codex_event(Event {
id: "t1".into(),
msg: EventMsg::TurnStarted(TurnStartedEvent {
@@ -3913,6 +3919,7 @@ async fn chatwidget_tall() {
#[tokio::test]
async fn review_queues_user_messages_snapshot() {
let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(None).await;
chat.thread_id = Some(ThreadId::new());
chat.handle_codex_event(Event {
id: "review-1".into(),

View File

@@ -891,23 +891,41 @@ pub(crate) fn new_user_prompt(message: String) -> UserHistoryCell {
}
#[derive(Debug)]
struct SessionHeaderHistoryCell {
pub(crate) struct SessionHeaderHistoryCell {
version: &'static str,
model: String,
model_style: Style,
reasoning_effort: Option<ReasoningEffortConfig>,
directory: PathBuf,
}
impl SessionHeaderHistoryCell {
fn new(
pub(crate) fn new(
model: String,
reasoning_effort: Option<ReasoningEffortConfig>,
directory: PathBuf,
version: &'static str,
) -> Self {
Self::new_with_style(
model,
Style::default(),
reasoning_effort,
directory,
version,
)
}
pub(crate) fn new_with_style(
model: String,
model_style: Style,
reasoning_effort: Option<ReasoningEffortConfig>,
directory: PathBuf,
version: &'static str,
) -> Self {
Self {
version,
model,
model_style,
reasoning_effort,
directory,
}
@@ -980,7 +998,7 @@ impl HistoryCell for SessionHeaderHistoryCell {
let reasoning_label = self.reasoning_label();
let mut model_spans: Vec<Span<'static>> = vec![
Span::from(format!("{model_label} ")).dim(),
Span::from(self.model.clone()),
Span::styled(self.model.clone(), self.model_style),
];
if let Some(reasoning) = reasoning_label {
model_spans.push(Span::from(" "));

View File

@@ -486,7 +486,7 @@ impl App {
models_manager: thread_manager.get_models_manager(),
feedback: feedback.clone(),
is_first_run,
model: model.clone(),
model: config.model.clone(),
};
ChatWidget::new(init, thread_manager.clone())
}
@@ -509,7 +509,7 @@ impl App {
models_manager: thread_manager.get_models_manager(),
feedback: feedback.clone(),
is_first_run,
model: model.clone(),
model: config.model.clone(),
};
ChatWidget::new_from_existing(init, resumed.thread, resumed.session_configured)
}
@@ -532,7 +532,7 @@ impl App {
models_manager: thread_manager.get_models_manager(),
feedback: feedback.clone(),
is_first_run,
model: model.clone(),
model: config.model.clone(),
};
ChatWidget::new_from_existing(init, forked.thread, forked.session_configured)
}
@@ -1446,7 +1446,7 @@ impl App {
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
is_first_run: false,
model: self.current_model.clone(),
model: Some(self.current_model.clone()),
};
self.chat_widget = ChatWidget::new(init, self.server.clone());
if let Some(summary) = summary {
@@ -1495,7 +1495,7 @@ impl App {
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
is_first_run: false,
model: self.current_model.clone(),
model: Some(self.current_model.clone()),
};
self.chat_widget = ChatWidget::new_from_existing(
init,
@@ -1563,7 +1563,7 @@ impl App {
models_manager: self.server.get_models_manager(),
feedback: self.feedback.clone(),
is_first_run: false,
model: self.current_model.clone(),
model: Some(self.current_model.clone()),
};
self.chat_widget = ChatWidget::new_from_existing(
init,

View File

@@ -99,6 +99,8 @@ use rand::Rng;
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::style::Color;
use ratatui::style::Modifier;
use ratatui::style::Style;
use ratatui::style::Stylize;
use ratatui::text::Line;
use ratatui::widgets::Paragraph;
@@ -156,6 +158,7 @@ use self::agent::spawn_agent_from_existing;
mod session_header;
use self::session_header::SessionHeader;
use crate::streaming::controller::StreamController;
use crate::version::CODEX_CLI_VERSION;
use std::path::Path;
use chrono::Local;
@@ -198,6 +201,7 @@ impl UnifiedExecWaitState {
const RATE_LIMIT_WARNING_THRESHOLDS: [f64; 3] = [75.0, 90.0, 95.0];
const NUDGE_MODEL_SLUG: &str = "gpt-5.1-codex-mini";
const RATE_LIMIT_SWITCH_PROMPT_THRESHOLD: f64 = 90.0;
const DEFAULT_MODEL_DISPLAY_NAME: &str = "loading";
#[derive(Default)]
struct RateLimitWarningState {
@@ -298,7 +302,7 @@ pub(crate) struct ChatWidgetInit {
pub(crate) models_manager: Arc<ModelsManager>,
pub(crate) feedback: codex_feedback::CodexFeedback,
pub(crate) is_first_run: bool,
pub(crate) model: String,
pub(crate) model: Option<String>,
}
#[derive(Default)]
@@ -337,7 +341,7 @@ pub(crate) struct ChatWidget {
/// where the overlay may briefly treat new tail content as already cached.
active_cell_revision: u64,
config: Config,
model: String,
model: Option<String>,
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
session_header: SessionHeader,
@@ -509,13 +513,16 @@ impl ChatWidget {
self.current_rollout_path = Some(event.rollout_path.clone());
let initial_messages = event.initial_messages.clone();
let model_for_header = event.model.clone();
self.model = Some(model_for_header.clone());
self.session_header.set_model(&model_for_header);
self.add_to_history(history_cell::new_session_info(
let session_info_cell = history_cell::new_session_info(
&self.config,
&model_for_header,
event,
self.show_welcome_banner,
));
);
self.apply_session_info_cell(session_info_cell);
if let Some(messages) = initial_messages {
self.replay_initial_messages(messages);
}
@@ -751,7 +758,7 @@ impl ChatWidget {
if high_usage
&& !self.rate_limit_switch_prompt_hidden()
&& self.model != NUDGE_MODEL_SLUG
&& self.current_model() != Some(NUDGE_MODEL_SLUG)
&& !matches!(
self.rate_limit_switch_prompt,
RateLimitSwitchPromptState::Shown
@@ -1405,11 +1412,22 @@ impl ChatWidget {
model,
} = common;
let mut config = config;
config.model = Some(model.clone());
let model = model.filter(|m| !m.trim().is_empty());
config.model = model.clone();
let mut rng = rand::rng();
let placeholder = PLACEHOLDERS[rng.random_range(0..PLACEHOLDERS.len())].to_string();
let codex_op_tx = spawn_agent(config.clone(), app_event_tx.clone(), thread_manager);
let model_for_header = config
.model
.clone()
.unwrap_or_else(|| DEFAULT_MODEL_DISPLAY_NAME.to_string());
let active_cell = if model.is_none() {
Some(Self::placeholder_session_header_cell(&config))
} else {
None
};
let mut widget = Self {
app_event_tx: app_event_tx.clone(),
frame_requester: frame_requester.clone(),
@@ -1424,13 +1442,13 @@ impl ChatWidget {
animations_enabled: config.animations,
skills: None,
}),
active_cell: None,
active_cell,
active_cell_revision: 0,
config,
model: model.clone(),
model,
auth_manager,
models_manager,
session_header: SessionHeader::new(model),
session_header: SessionHeader::new(model_for_header),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
initial_images,
@@ -1483,7 +1501,7 @@ impl ChatWidget {
session_configured: codex_core::protocol::SessionConfiguredEvent,
) -> Self {
let ChatWidgetInit {
config,
mut config,
frame_requester,
app_event_tx,
initial_prompt,
@@ -1495,9 +1513,13 @@ impl ChatWidget {
model,
..
} = common;
let model = model.filter(|m| !m.trim().is_empty());
config.model = model.clone();
let mut rng = rand::rng();
let placeholder = PLACEHOLDERS[rng.random_range(0..PLACEHOLDERS.len())].to_string();
let header_model = model.unwrap_or_else(|| session_configured.model.clone());
let codex_op_tx =
spawn_agent_from_existing(conversation, session_configured, app_event_tx.clone());
@@ -1518,10 +1540,10 @@ impl ChatWidget {
active_cell: None,
active_cell_revision: 0,
config,
model: model.clone(),
model: Some(header_model.clone()),
auth_manager,
models_manager,
session_header: SessionHeader::new(model),
session_header: SessionHeader::new(header_model),
initial_user_message: create_initial_user_message(
initial_prompt.unwrap_or_default(),
initial_images,
@@ -1652,7 +1674,11 @@ impl ChatWidget {
text,
image_paths: self.bottom_pane.take_recent_submission_images(),
};
self.submit_user_message(user_message);
if !self.is_session_configured() {
self.queue_user_message(user_message);
} else {
self.submit_user_message(user_message);
}
}
InputResult::Queued(text) => {
// Tab queues the message if a task is running, otherwise submits immediately
@@ -1933,7 +1959,15 @@ impl ChatWidget {
}
fn add_boxed_history(&mut self, cell: Box<dyn HistoryCell>) {
if !cell.display_lines(u16::MAX).is_empty() {
// Keep the placeholder session header as the active cell until real session info arrives,
// so we can merge headers instead of committing a duplicate box to history.
let keep_placeholder_header_active = !self.is_session_configured()
&& self
.active_cell
.as_ref()
.is_some_and(|c| c.as_any().is::<history_cell::SessionHeaderHistoryCell>());
if !keep_placeholder_header_active && !cell.display_lines(u16::MAX).is_empty() {
// Only break exec grouping if the cell renders visible lines.
self.flush_active_cell();
self.needs_final_message_separator = true;
@@ -1943,7 +1977,7 @@ impl ChatWidget {
#[allow(dead_code)] // Used in tests
fn queue_user_message(&mut self, user_message: UserMessage) {
if self.bottom_pane.is_task_running() {
if !self.is_session_configured() || self.bottom_pane.is_task_running() {
self.queued_user_messages.push_back(user_message);
self.refresh_queued_user_messages();
} else {
@@ -2333,7 +2367,7 @@ impl ChatWidget {
self.rate_limit_snapshot.as_ref(),
self.plan_type,
Local::now(),
&self.model,
self.model_display_name(),
));
}
fn stop_rate_limit_poller(&mut self) {
@@ -2477,6 +2511,14 @@ impl ChatWidget {
/// Open a popup to choose a quick auto model. Selecting "All models"
/// opens the full picker with every available preset.
pub(crate) fn open_model_popup(&mut self) {
if !self.is_session_configured() {
self.add_info_message(
"Model selection is disabled until startup completes.".to_string(),
None,
);
return;
}
let presets: Vec<ModelPreset> = match self.models_manager.try_list_models(&self.config) {
Ok(models) => models,
Err(_) => {
@@ -2496,11 +2538,12 @@ impl ChatWidget {
.filter(|preset| preset.show_in_picker)
.collect();
let current_model = self.current_model();
let current_label = presets
.iter()
.find(|preset| preset.model == self.model)
.find(|preset| Some(preset.model.as_str()) == current_model)
.map(|preset| preset.display_name.to_string())
.unwrap_or_else(|| self.model.clone());
.unwrap_or_else(|| self.model_display_name().to_string());
let (mut auto_presets, other_presets): (Vec<ModelPreset>, Vec<ModelPreset>) = presets
.into_iter()
@@ -2526,7 +2569,7 @@ impl ChatWidget {
SelectionItem {
name: preset.display_name.clone(),
description,
is_current: model == self.model,
is_current: Some(model.as_str()) == current_model,
is_default: preset.is_default,
actions,
dismiss_on_select: true,
@@ -2593,7 +2636,7 @@ impl ChatWidget {
for preset in presets.into_iter() {
let description =
(!preset.description.is_empty()).then_some(preset.description.to_string());
let is_current = preset.model == self.model;
let is_current = Some(preset.model.as_str()) == self.current_model();
let single_supported_effort = preset.supported_reasoning_efforts.len() == 1;
let preset_for_action = preset.clone();
let actions: Vec<SelectionAction> = vec![Box::new(move |tx| {
@@ -2719,7 +2762,7 @@ impl ChatWidget {
.or(Some(default_effort));
let model_slug = preset.model.to_string();
let is_current_model = self.model == preset.model;
let is_current_model = self.current_model() == Some(preset.model.as_str());
let highlight_choice = if is_current_model {
self.config.model_reasoning_effort
} else {
@@ -3529,7 +3572,54 @@ impl ChatWidget {
/// Set the model in the widget's config copy.
pub(crate) fn set_model(&mut self, model: &str) {
self.session_header.set_model(model);
self.model = model.to_string();
self.model = Some(model.to_string());
}
/// Slug of the model currently in use, or `None` while the session is still
/// being configured and no model is known yet.
fn current_model(&self) -> Option<&str> {
self.model.as_deref()
}
/// Model name suitable for UI display; falls back to the "loading"
/// placeholder (`DEFAULT_MODEL_DISPLAY_NAME`) until a real model is known.
fn model_display_name(&self) -> &str {
self.model.as_deref().unwrap_or(DEFAULT_MODEL_DISPLAY_NAME)
}
/// Build a placeholder header cell while the session is configuring.
///
/// Renders the "loading" placeholder model name dimmed and italic so the user
/// can see startup is still in progress. `config` supplies the cwd shown in
/// the header; the reasoning-effort slot is left empty (`None`).
fn placeholder_session_header_cell(config: &Config) -> Box<dyn HistoryCell> {
let placeholder_style = Style::default().add_modifier(Modifier::DIM | Modifier::ITALIC);
Box::new(history_cell::SessionHeaderHistoryCell::new_with_style(
DEFAULT_MODEL_DISPLAY_NAME.to_string(),
placeholder_style,
None,
config.cwd.clone(),
CODEX_CLI_VERSION,
))
}
/// Merge the real session info cell with any placeholder header to avoid double boxes.
///
/// If the current active cell is the placeholder `SessionHeaderHistoryCell`,
/// the real session info replaces it in place; otherwise the active cell is
/// restored untouched and the session info is appended to history after the
/// active cell is flushed.
fn apply_session_info_cell(&mut self, cell: history_cell::SessionInfoCell) {
let mut session_info_cell = Some(Box::new(cell) as Box<dyn HistoryCell>);
// True once the placeholder header has been swapped for the real info cell.
let merged_header = if let Some(active) = self.active_cell.take() {
if active
.as_any()
.is::<history_cell::SessionHeaderHistoryCell>()
{
// Reuse the placeholder header slot so two header boxes never render.
if let Some(cell) = session_info_cell.take() {
self.active_cell = Some(cell);
}
true
} else {
// Not the placeholder: put the active cell back exactly as it was.
self.active_cell = Some(active);
false
}
} else {
false
};
// Commit whatever is active; if nothing merged, append the info cell normally.
self.flush_active_cell();
if !merged_header && let Some(cell) = session_info_cell {
self.add_boxed_history(cell);
}
}
pub(crate) fn add_info_message(&mut self, message: String, hint: Option<String>) {
@@ -3905,6 +3995,10 @@ impl ChatWidget {
self.current_rollout_path.clone()
}
/// True once the `SessionConfigured` event has been handled, i.e. a
/// conversation id is known for this widget.
fn is_session_configured(&self) -> bool {
self.conversation_id.is_some()
}
/// Returns a cache key describing the current in-flight active cell for the transcript overlay.
///
/// `Ctrl+T` renders committed transcript cells plus a render-only live tail derived from the

View File

@@ -327,14 +327,13 @@ async fn helpers_are_available_and_do_not_panic() {
let (tx_raw, _rx) = unbounded_channel::<AppEvent>();
let tx = AppEventSender::new(tx_raw);
let cfg = test_config().await;
let resolved_model = ModelsManager::get_model_offline(cfg.model.as_deref());
let thread_manager = Arc::new(ThreadManager::with_models_provider(
CodexAuth::from_api_key("test"),
cfg.model_provider.clone(),
));
let auth_manager = AuthManager::from_auth_for_testing(CodexAuth::from_api_key("test"));
let init = ChatWidgetInit {
config: cfg,
config: cfg.clone(),
frame_requester: FrameRequester::test_dummy(),
app_event_tx: tx,
initial_prompt: None,
@@ -344,7 +343,7 @@ async fn helpers_are_available_and_do_not_panic() {
models_manager: thread_manager.get_models_manager(),
feedback: codex_feedback::CodexFeedback::new(),
is_first_run: true,
model: resolved_model,
model: cfg.model,
};
let mut w = ChatWidget::new(init, thread_manager);
// Basic construction sanity.
@@ -389,7 +388,7 @@ async fn make_chatwidget_manual(
active_cell: None,
active_cell_revision: 0,
config: cfg,
model: resolved_model.clone(),
model: Some(resolved_model.clone()),
auth_manager: auth_manager.clone(),
models_manager: Arc::new(ModelsManager::new(codex_home, auth_manager)),
session_header: SessionHeader::new(resolved_model),
@@ -1007,6 +1006,8 @@ async fn alt_up_edits_most_recent_queued_message() {
#[tokio::test]
async fn enqueueing_history_prompt_multiple_times_is_stable() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
chat.conversation_id = Some(ThreadId::new());
assert!(!chat.bottom_pane.is_task_running());
// Submit an initial prompt to seed history.
chat.bottom_pane.set_composer_text("repeat me".to_string());
@@ -1014,6 +1015,7 @@ async fn enqueueing_history_prompt_multiple_times_is_stable() {
// Simulate an active task so further submissions are queued.
chat.bottom_pane.set_task_running(true);
assert!(chat.bottom_pane.is_task_running());
for _ in 0..3 {
// Recall the prompt from history and ensure it is what we expect.
@@ -1033,8 +1035,10 @@ async fn enqueueing_history_prompt_multiple_times_is_stable() {
#[tokio::test]
async fn streaming_final_answer_keeps_task_running_state() {
let (mut chat, _rx, mut op_rx) = make_chatwidget_manual(None).await;
chat.conversation_id = Some(ThreadId::new());
chat.on_task_started();
assert!(chat.bottom_pane.is_task_running());
chat.on_agent_message_delta("Final answer line\n".to_string());
chat.on_commit_tick();
@@ -1799,6 +1803,7 @@ fn render_bottom_popup(chat: &ChatWidget, width: u16) -> String {
#[tokio::test]
async fn model_selection_popup_snapshot() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5-codex")).await;
chat.conversation_id = Some(ThreadId::new());
chat.open_model_popup();
let popup = render_bottom_popup(&chat, 80);
@@ -2081,6 +2086,7 @@ async fn feedback_upload_consent_popup_snapshot() {
#[tokio::test]
async fn reasoning_popup_escape_returns_to_model_popup() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.1-codex-max")).await;
chat.conversation_id = Some(ThreadId::new());
chat.open_model_popup();
let preset = get_available_model(&chat, "gpt-5.1-codex-max");
@@ -3491,6 +3497,7 @@ printf 'fenced within fenced\n'
#[tokio::test]
async fn chatwidget_tall() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
chat.conversation_id = Some(ThreadId::new());
chat.handle_codex_event(Event {
id: "t1".into(),
msg: EventMsg::TurnStarted(TurnStartedEvent {

View File

@@ -894,6 +894,7 @@ pub(crate) fn new_session_info(
// Header box rendered as history (so it appears at the very top)
let header = SessionHeaderHistoryCell::new(
model.clone(),
Style::default(),
reasoning_effort,
config.cwd.clone(),
CODEX_CLI_VERSION,
@@ -959,16 +960,28 @@ pub(crate) fn new_user_prompt(message: String) -> UserHistoryCell {
}
#[derive(Debug)]
struct SessionHeaderHistoryCell {
pub(crate) struct SessionHeaderHistoryCell {
version: &'static str,
model: String,
model_style: Style,
reasoning_effort: Option<ReasoningEffortConfig>,
directory: PathBuf,
}
impl SessionHeaderHistoryCell {
fn new(
pub(crate) fn new(
model: String,
model_style: Style,
reasoning_effort: Option<ReasoningEffortConfig>,
directory: PathBuf,
version: &'static str,
) -> Self {
Self::new_with_style(model, model_style, reasoning_effort, directory, version)
}
pub(crate) fn new_with_style(
model: String,
model_style: Style,
reasoning_effort: Option<ReasoningEffortConfig>,
directory: PathBuf,
version: &'static str,
@@ -976,6 +989,7 @@ impl SessionHeaderHistoryCell {
Self {
version,
model,
model_style,
reasoning_effort,
directory,
}
@@ -1048,7 +1062,7 @@ impl HistoryCell for SessionHeaderHistoryCell {
let reasoning_label = self.reasoning_label();
let mut model_spans: Vec<Span<'static>> = vec![
Span::from(format!("{model_label} ")).dim(),
Span::from(self.model.clone()),
Span::styled(self.model.clone(), self.model_style),
];
if let Some(reasoning) = reasoning_label {
model_spans.push(Span::from(" "));
@@ -2297,6 +2311,7 @@ mod tests {
fn session_header_includes_reasoning_level_when_present() {
let cell = SessionHeaderHistoryCell::new(
"gpt-4o".to_string(),
Style::default(),
Some(ReasoningEffortConfig::High),
std::env::temp_dir(),
"test",