Compare commits

...

7 Commits

Author SHA1 Message Date
pash
8aaeae9312 Log service tier resume mismatches 2026-03-04 16:06:48 -08:00
pash
4f21df2bfe adjust fast header spacing 2026-03-04 15:52:06 -08:00
pash
7bd5190a27 style fast header status 2026-03-04 15:20:55 -08:00
pash
65e99c76ed show fast header status only for chatgpt auth 2026-03-04 15:03:48 -08:00
pash
859e94bad2 [tui] Lowercase speed labels
Co-authored-by: Codex <noreply@openai.com>
2026-03-04 01:17:18 -08:00
pash
462a5574b8 [tui] Align speed hint with model hint
Co-authored-by: Codex <noreply@openai.com>
2026-03-04 01:14:34 -08:00
pash
246c6dc8a1 [tui] Show speed in session header
Co-authored-by: Codex <noreply@openai.com>
2026-03-04 01:10:04 -08:00
7 changed files with 119 additions and 4 deletions

View File

@@ -6380,6 +6380,14 @@ fn collect_resume_override_mismatches(
config_snapshot.model_provider_id
));
}
if let Some(requested_service_tier) = request.service_tier.as_ref()
&& requested_service_tier != &config_snapshot.service_tier
{
mismatch_details.push(format!(
"service_tier requested={requested_service_tier:?} active={:?}",
config_snapshot.service_tier
));
}
if let Some(requested_cwd) = request.cwd.as_deref() {
let requested_cwd_path = std::path::PathBuf::from(requested_cwd);
if requested_cwd_path != config_snapshot.cwd {
@@ -7104,6 +7112,43 @@ mod tests {
validate_dynamic_tools(&tools).expect("valid schema");
}
#[test]
fn collect_resume_override_mismatches_includes_service_tier() {
    // Resume request that explicitly asks for the Fast service tier; every
    // other override is left unset so only the tier can mismatch.
    let resume_request = ThreadResumeParams {
        thread_id: "thread-1".to_string(),
        history: None,
        path: None,
        model: None,
        model_provider: None,
        service_tier: Some(Some(codex_protocol::config_types::ServiceTier::Fast)),
        cwd: None,
        approval_policy: None,
        sandbox: None,
        config: None,
        base_instructions: None,
        developer_instructions: None,
        personality: None,
        persist_extended_history: false,
    };
    // Active thread snapshot pinned to the Flex tier, so the requested tier
    // disagrees with the active one.
    let snapshot = ThreadConfigSnapshot {
        model: "gpt-5".to_string(),
        model_provider_id: "openai".to_string(),
        service_tier: Some(codex_protocol::config_types::ServiceTier::Flex),
        approval_policy: codex_protocol::protocol::AskForApproval::OnRequest,
        sandbox_policy: codex_protocol::protocol::SandboxPolicy::DangerFullAccess,
        cwd: PathBuf::from("/tmp"),
        ephemeral: false,
        reasoning_effort: None,
        personality: None,
        session_source: SessionSource::Cli,
    };

    let mismatches = collect_resume_override_mismatches(&resume_request, &snapshot);

    // Exactly one mismatch is reported, formatted as requested/active pair.
    assert_eq!(
        mismatches,
        vec!["service_tier requested=Some(Fast) active=Some(Flex)".to_string()]
    );
}
#[test]
fn extract_conversation_summary_prefers_plain_user_messages() -> Result<()> {
let conversation_id = ThreadId::from_string("3f941c35-29b3-493b-b0a4-e25800d9aeb0")?;

View File

@@ -801,6 +801,8 @@ impl App {
history_cell::SessionHeaderHistoryCell::new(
self.chat_widget.current_model().to_string(),
self.chat_widget.current_reasoning_effort(),
self.chat_widget
.should_show_fast_status(self.chat_widget.current_service_tier()),
self.config.cwd.clone(),
version,
)
@@ -4393,6 +4395,7 @@ mod tests {
is_first,
None,
None,
false,
)) as Arc<dyn HistoryCell>
};
@@ -5031,6 +5034,7 @@ mod tests {
is_first,
None,
None,
false,
)) as Arc<dyn HistoryCell>
};

View File

@@ -1189,6 +1189,7 @@ impl ChatWidget {
self.sync_fast_command_enabled();
self.sync_personality_command_enabled();
let startup_tooltip_override = self.startup_tooltip_override.take();
let show_fast_status = self.should_show_fast_status(event.service_tier);
let session_info_cell = history_cell::new_session_info(
&self.config,
&model_for_header,
@@ -1198,6 +1199,7 @@ impl ChatWidget {
self.auth_manager
.auth_cached()
.and_then(|auth| auth.account_plan_type()),
show_fast_status,
);
self.apply_session_info_cell(session_info_cell);
@@ -7060,6 +7062,15 @@ impl ChatWidget {
self.config.service_tier
}
/// Returns true when the header should render the "fast" status badge:
/// the given tier is `Fast` AND the cached auth is a ChatGPT login.
pub(crate) fn should_show_fast_status(&self, service_tier: Option<ServiceTier>) -> bool {
    // Only the Fast tier gets a header indicator at all.
    if !matches!(service_tier, Some(ServiceTier::Fast)) {
        return false;
    }
    // The indicator is reserved for ChatGPT-authenticated sessions;
    // no cached auth means no badge.
    match self.auth_manager.auth_cached() {
        Some(auth) => auth.is_chatgpt_auth(),
        None => false,
    }
}
/// Returns true when the `FastMode` feature flag is enabled in the config.
fn fast_mode_enabled(&self) -> bool {
self.config.features.enabled(Feature::FastMode)
}
@@ -7394,6 +7405,7 @@ impl ChatWidget {
DEFAULT_MODEL_DISPLAY_NAME.to_string(),
placeholder_style,
None,
false,
config.cwd.clone(),
CODEX_CLI_VERSION,
))

View File

@@ -7264,6 +7264,26 @@ async fn user_turn_carries_service_tier_after_fast_toggle() {
}
}
#[tokio::test]
async fn fast_status_indicator_requires_chatgpt_auth() {
    let (mut widget, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.3-codex")).await;

    // Fast tier alone is not enough: without ChatGPT auth the badge stays hidden.
    widget.set_service_tier(Some(ServiceTier::Fast));
    assert!(!widget.should_show_fast_status(widget.current_service_tier()));

    // Once ChatGPT auth is present, the indicator becomes visible.
    set_chatgpt_auth(&mut widget);
    assert!(widget.should_show_fast_status(widget.current_service_tier()));
}
#[tokio::test]
async fn fast_status_indicator_is_hidden_when_fast_mode_is_off() {
// NOTE(review): the body never toggles fast mode explicitly — it relies on
// the freshly created widget not having the Fast tier selected. Confirm the
// test name matches the intent (vs. e.g. "…when_fast_tier_not_selected").
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(Some("gpt-5.3-codex")).await;
set_chatgpt_auth(&mut chat);
// With ChatGPT auth but no Fast tier active, the indicator must stay hidden.
assert!(!chat.should_show_fast_status(chat.current_service_tier()));
}
#[tokio::test]
async fn approvals_popup_shows_disabled_presets() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;

View File

@@ -1046,6 +1046,7 @@ pub(crate) fn new_session_info(
is_first_event: bool,
tooltip_override: Option<String>,
auth_plan: Option<PlanType>,
show_fast_status: bool,
) -> SessionInfoCell {
let SessionConfiguredEvent {
model,
@@ -1056,6 +1057,7 @@ pub(crate) fn new_session_info(
let header = SessionHeaderHistoryCell::new(
model.clone(),
reasoning_effort,
show_fast_status,
config.cwd.clone(),
CODEX_CLI_VERSION,
);
@@ -1137,6 +1139,7 @@ pub(crate) struct SessionHeaderHistoryCell {
model: String,
model_style: Style,
reasoning_effort: Option<ReasoningEffortConfig>,
show_fast_status: bool,
directory: PathBuf,
}
@@ -1144,6 +1147,7 @@ impl SessionHeaderHistoryCell {
pub(crate) fn new(
model: String,
reasoning_effort: Option<ReasoningEffortConfig>,
show_fast_status: bool,
directory: PathBuf,
version: &'static str,
) -> Self {
@@ -1151,6 +1155,7 @@ impl SessionHeaderHistoryCell {
model,
Style::default(),
reasoning_effort,
show_fast_status,
directory,
version,
)
@@ -1160,6 +1165,7 @@ impl SessionHeaderHistoryCell {
model: String,
model_style: Style,
reasoning_effort: Option<ReasoningEffortConfig>,
show_fast_status: bool,
directory: PathBuf,
version: &'static str,
) -> Self {
@@ -1168,6 +1174,7 @@ impl SessionHeaderHistoryCell {
model,
model_style,
reasoning_effort,
show_fast_status,
directory,
}
}
@@ -1247,6 +1254,10 @@ impl HistoryCell for SessionHeaderHistoryCell {
spans.push(Span::from(" "));
spans.push(Span::from(reasoning));
}
if self.show_fast_status {
spans.push(" ".into());
spans.push(Span::styled("fast", self.model_style.magenta()));
}
spans.push(" ".dim());
spans.push(CHANGE_MODEL_HINT_COMMAND.cyan());
spans.push(CHANGE_MODEL_HINT_EXPLANATION.dim());
@@ -2591,6 +2602,7 @@ mod tests {
false,
Some("Model just became available".to_string()),
Some(PlanType::Free),
false,
);
let rendered = render_transcript(&cell).join("\n");
@@ -2608,6 +2620,7 @@ mod tests {
false,
Some("Model just became available".to_string()),
Some(PlanType::Free),
false,
);
let rendered = render_transcript(&cell).join("\n");
@@ -2624,6 +2637,7 @@ mod tests {
true,
Some("Model just became available".to_string()),
Some(PlanType::Free),
false,
);
let rendered = render_transcript(&cell).join("\n");
@@ -2642,6 +2656,7 @@ mod tests {
false,
Some("Model just became available".to_string()),
Some(PlanType::Free),
false,
);
let rendered = render_transcript(&cell).join("\n");
@@ -3274,18 +3289,39 @@ mod tests {
let cell = SessionHeaderHistoryCell::new(
"gpt-4o".to_string(),
Some(ReasoningEffortConfig::High),
true,
std::env::temp_dir(),
"test",
);
let lines = render_lines(&cell.display_lines(80));
let model_line = lines
.into_iter()
.iter()
.find(|line| line.contains("model:"))
.expect("model line");
assert!(model_line.contains("gpt-4o high fast"));
assert!(model_line.contains("/model to change"));
}
#[test]
fn session_header_hides_fast_status_when_disabled() {
    // Build a header with the fast-status flag turned off.
    let header = SessionHeaderHistoryCell::new(
        "gpt-4o".to_string(),
        Some(ReasoningEffortConfig::High),
        false,
        std::env::temp_dir(),
        "test",
    );

    let rendered = render_lines(&header.display_lines(80));
    let model_line = rendered
        .iter()
        .find(|line| line.contains("model:"))
        .expect("model line");

    // Model name, reasoning effort, and the change-model hint still render…
    assert!(model_line.contains("gpt-4o high"));
    assert!(model_line.contains("/model to change"));
    // …but the "fast" badge is absent when the flag is off.
    assert!(!model_line.contains("fast"));
}
#[test]

View File

@@ -1,6 +1,5 @@
---
source: tui/src/app.rs
assertion_line: 3452
expression: rendered
---
╭─────────────────────────────────────────────╮

View File

@@ -1,6 +1,5 @@
---
source: tui/src/history_cell.rs
assertion_line: 2608
expression: rendered
---
╭─────────────────────────────────────╮