feat: add resume logs when doing /new (#6660)

<img width="769" height="803" alt="Screenshot 2025-11-14 at 10 25 49"
src="https://github.com/user-attachments/assets/12fbc21e-cab9-4d0a-a484-1aeb60219f96"
/>
This commit is contained in:
jif-oai
2025-11-14 11:42:16 +01:00
committed by GitHub
parent 6c384eb9c6
commit 4788fb179a
2 changed files with 69 additions and 0 deletions

View File

@@ -24,6 +24,7 @@ use codex_core::ConversationManager;
use codex_core::config::Config;
use codex_core::config::edit::ConfigEditsBuilder;
use codex_core::model_family::find_family_for_model;
use codex_core::protocol::FinalOutput;
use codex_core::protocol::SessionSource;
use codex_core::protocol::TokenUsage;
use codex_core::protocol_config_types::ReasoningEffort as ReasoningEffortConfig;
@@ -54,6 +55,29 @@ pub struct AppExitInfo {
pub update_action: Option<UpdateAction>,
}
/// Builds a short end-of-session summary shown when the user starts a new
/// session (`/new`). Returns `None` when no tokens were consumed, so empty
/// sessions add nothing to the history.
fn session_summary(
    token_usage: TokenUsage,
    conversation_id: Option<ConversationId>,
) -> Option<SessionSummary> {
    if token_usage.is_zero() {
        return None;
    }
    // Only offer a resume hint when the conversation actually has an id.
    let resume_command = conversation_id.map(|id| format!("codex resume {id}"));
    Some(SessionSummary {
        // Render aggregate usage through the shared FinalOutput formatter.
        usage_line: FinalOutput::from(token_usage).to_string(),
        resume_command,
    })
}
/// Summary of a finished session, rendered into the history when a new
/// session replaces it.
#[derive(Debug, Clone, PartialEq, Eq)]
struct SessionSummary {
    // Human-readable token-usage line (formatted via `FinalOutput`).
    usage_line: String,
    // `codex resume <id>` hint; `None` when the session had no conversation id.
    resume_command: Option<String>,
}
fn should_show_model_migration_prompt(
current_model: &str,
target_model: &str,
@@ -365,6 +389,10 @@ impl App {
async fn handle_event(&mut self, tui: &mut tui::Tui, event: AppEvent) -> Result<bool> {
match event {
AppEvent::NewSession => {
let summary = session_summary(
self.chat_widget.token_usage(),
self.chat_widget.conversation_id(),
);
let init = crate::chatwidget::ChatWidgetInit {
config: self.config.clone(),
frame_requester: tui.frame_requester(),
@@ -376,6 +404,14 @@ impl App {
feedback: self.feedback.clone(),
};
self.chat_widget = ChatWidget::new(init, self.server.clone());
if let Some(summary) = summary {
let mut lines: Vec<Line<'static>> = vec![summary.usage_line.clone().into()];
if let Some(command) = summary.resume_command {
let spans = vec!["To continue this session, run ".into(), command.cyan()];
lines.push(spans.into());
}
self.chat_widget.add_plain_history_lines(lines);
}
tui.frame_requester().schedule_frame();
}
AppEvent::InsertHistoryCell(cell) => {
@@ -970,4 +1006,31 @@ mod tests {
assert_eq!(nth, 1);
assert_eq!(prefill, "follow-up (edited)");
}
#[test]
fn session_summary_skip_zero_usage() {
    // A session with zero token usage should produce no summary at all.
    let summary = session_summary(TokenUsage::default(), None);
    assert!(summary.is_none());
}
#[test]
fn session_summary_includes_resume_hint() {
    // Non-zero usage plus a known conversation id should yield both the
    // usage line and a `codex resume` hint with the same id.
    let usage = TokenUsage {
        input_tokens: 10,
        output_tokens: 2,
        total_tokens: 12,
        ..Default::default()
    };
    let id = ConversationId::from_string("123e4567-e89b-12d3-a456-426614174000").unwrap();

    let summary = session_summary(usage, Some(id)).expect("summary");

    assert_eq!(summary.usage_line, "Token usage: total=12 input=10 output=2");
    assert_eq!(
        summary.resume_command.as_deref(),
        Some("codex resume 123e4567-e89b-12d3-a456-426614174000")
    );
}
}

View File

@@ -85,6 +85,7 @@ use crate::history_cell;
use crate::history_cell::AgentMessageCell;
use crate::history_cell::HistoryCell;
use crate::history_cell::McpToolCallCell;
use crate::history_cell::PlainHistoryCell;
use crate::markdown::append_markdown;
#[cfg(target_os = "windows")]
use crate::onboarding::WSL_INSTRUCTIONS;
@@ -2462,6 +2463,11 @@ impl ChatWidget {
self.request_redraw();
}
/// Appends the given pre-rendered lines to the conversation history as a
/// plain (unstyled-container) cell and schedules a redraw.
pub(crate) fn add_plain_history_lines(&mut self, lines: Vec<Line<'static>>) {
    let cell = PlainHistoryCell::new(lines);
    self.add_boxed_history(Box::new(cell));
    self.request_redraw();
}
pub(crate) fn add_error_message(&mut self, message: String) {
self.add_to_history(history_cell::new_error_event(message));
self.request_redraw();