Compare commits

...

9 Commits

Author SHA1 Message Date
Tuan-Lung Wang
5f4a2b0fbb Add fast draft completion popup 2026-03-17 15:44:10 -07:00
Tuan-Lung Wang
877e28dde6 Add fast answer interleaving 2026-03-17 13:27:32 -07:00
Tuan-Lung Wang
5396defe86 Add transcript search navigation 2026-03-16 21:54:18 -07:00
Tuan-Lung Wang
21191e1707 Add transcript details toggle 2026-03-16 16:07:46 -07:00
Tuan-Lung Wang
06a22de9a2 Lighten transcript prompt highlight 2026-03-16 15:51:35 -07:00
Tuan-Lung Wang
cb10b24f6d Polish transcript browser navigation 2026-03-16 15:05:27 -07:00
Tuan-Lung Wang
be0d4e0505 Add transcript folding previews 2026-03-16 14:35:32 -07:00
Tuan-Lung Wang
0ffa9c8103 Add transcript anchor navigation pane 2026-03-16 13:51:58 -07:00
Tuan-Lung Wang
fa8ced9304 Add transcript browser side pane shell 2026-03-16 13:35:17 -07:00
22 changed files with 3869 additions and 105 deletions

View File

@@ -0,0 +1,608 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use std::time::Duration;
use codex_core::AuthManager;
use codex_core::ModelClient;
use codex_core::Prompt;
use codex_core::ResponseEvent;
use codex_core::config::Config;
use codex_core::default_client::originator;
use codex_core::models_manager::manager::ModelsManager;
use codex_otel::SessionTelemetry;
use codex_protocol::ThreadId;
use codex_protocol::models::BaseInstructions;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::SessionSource;
use ratatui::prelude::Stylize;
use ratatui::text::Line;
use tokio::time::timeout;
use tokio_stream::StreamExt;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
use crate::history_cell::HistoryCell;
use crate::markdown;
use crate::wrapping::RtOptions;
use crate::wrapping::adaptive_wrap_lines;
/// Fast model used for the answer-interleaving rewrite pass.
const ANSWER_INTERLEAVE_MODEL: &str = "gpt-5.1-codex-mini";
/// Upper bound on how long the rewrite request may run before falling back.
const ANSWER_INTERLEAVE_TIMEOUT: Duration = Duration::from_secs(8);
/// Status line shown in the live cell while a rewrite is pending.
const ANSWER_INTERLEAVE_STATUS: &str = "restructuring answer...";
/// Sub-agent label reported in telemetry/session source for this pass.
const ANSWER_INTERLEAVE_AGENT_LABEL: &str = "tui_answer_interleave";
/// Input for one answer-interleaving request: the originating user prompt and
/// the model's final answer to be restructured.
#[derive(Debug, Clone)]
pub(crate) struct AnswerInterleaveRequest {
// Correlates the eventual `AnswerInterleaveResult` event with this request.
pub(crate) request_id: u64,
pub(crate) user_prompt: String,
pub(crate) final_answer: String,
}
/// Mutable state shared between `LiveAnswerHandle` (writer) and
/// `LiveAnswerCell` (renderer).
#[derive(Debug)]
struct LiveAnswerState {
// Answer markdown accumulated so far.
markdown: String,
// When true, the renderer appends the "restructuring answer..." status line.
interleaving_pending: bool,
}
/// Writer half of a live answer: mutates the shared state that the paired
/// `LiveAnswerCell` renders.
#[derive(Debug, Clone)]
pub(crate) struct LiveAnswerHandle {
state: Arc<Mutex<LiveAnswerState>>,
}
impl LiveAnswerHandle {
/// Creates a linked writer/renderer pair that share one state value.
/// `cwd` is used by the renderer to resolve paths in markdown.
pub(crate) fn new(cwd: PathBuf) -> (Self, LiveAnswerCell) {
let state = Arc::new(Mutex::new(LiveAnswerState {
markdown: String::new(),
interleaving_pending: false,
}));
(
Self {
state: Arc::clone(&state),
},
LiveAnswerCell { state, cwd },
)
}
/// Appends a streamed markdown delta to the live answer.
pub(crate) fn push_delta(&self, delta: &str) {
#[expect(clippy::unwrap_used)]
let mut state = self.state.lock().unwrap();
state.markdown.push_str(delta);
}
/// Replaces the entire markdown body (e.g. with an interleaved rewrite).
pub(crate) fn set_markdown(&self, markdown: String) {
#[expect(clippy::unwrap_used)]
let mut state = self.state.lock().unwrap();
state.markdown = markdown;
}
/// Toggles the pending-status line shown by the renderer.
pub(crate) fn set_interleaving_pending(&self, interleaving_pending: bool) {
#[expect(clippy::unwrap_used)]
let mut state = self.state.lock().unwrap();
state.interleaving_pending = interleaving_pending;
}
/// Returns a snapshot of the current markdown.
pub(crate) fn markdown(&self) -> String {
#[expect(clippy::unwrap_used)]
let state = self.state.lock().unwrap();
state.markdown.clone()
}
}
/// Renderer half of a live answer; displayed as a history cell while the
/// paired `LiveAnswerHandle` keeps updating the shared markdown.
#[derive(Debug)]
pub(crate) struct LiveAnswerCell {
state: Arc<Mutex<LiveAnswerState>>,
// Working directory passed to the markdown renderer for path resolution.
cwd: PathBuf,
}
impl HistoryCell for LiveAnswerCell {
// Live display and transcript share the same rendering.
fn display_lines(&self, width: u16) -> Vec<Line<'static>> {
self.render(width)
}
fn transcript_lines(&self, width: u16) -> Vec<Line<'static>> {
self.render(width)
}
}
impl LiveAnswerCell {
/// Renders the current markdown, plus an optional pending-status line,
/// wrapped to `width`.
fn render(&self, width: u16) -> Vec<Line<'static>> {
#[expect(clippy::unwrap_used)]
let state = self.state.lock().unwrap();
let mut lines = Vec::new();
markdown::append_markdown(
&state.markdown,
Some(width as usize),
Some(self.cwd.as_path()),
&mut lines,
);
if state.interleaving_pending {
// Separate the dim status line from the answer body.
if !lines.is_empty() {
lines.push(Line::from(""));
}
lines.push(Line::from(ANSWER_INTERLEAVE_STATUS.dim()));
}
adaptive_wrap_lines(
&lines,
RtOptions::new(width as usize)
.initial_indent("".dim().into())
.subsequent_indent(" ".into()),
)
}
}
/// Owns the dependencies needed to spawn answer-interleave requests and
/// report their results back to the app event loop.
pub(crate) struct AnswerInterleaveManager {
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
app_tx: AppEventSender,
config: Config,
}
impl AnswerInterleaveManager {
pub(crate) fn new(
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
app_tx: AppEventSender,
config: Config,
) -> Self {
Self {
auth_manager,
models_manager,
app_tx,
config,
}
}
/// Spawns an async interleave pass and reports the outcome via
/// `AppEvent::AnswerInterleaveResult`.
pub(crate) fn start_request(&self, request: AnswerInterleaveRequest) {
let auth_manager = Arc::clone(&self.auth_manager);
let models_manager = Arc::clone(&self.models_manager);
let app_tx = self.app_tx.clone();
let config = self.config.clone();
tokio::spawn(async move {
// Bound the model call so a slow rewrite cannot stall the answer.
let result = timeout(
ANSWER_INTERLEAVE_TIMEOUT,
interleave_answer(
auth_manager,
models_manager,
config,
&request.user_prompt,
&request.final_answer,
),
)
.await;
// On model failure or timeout, try the local heuristic before
// surfacing the original error to the caller.
let result = match result {
Ok(result) => result,
Err(_) => Err("answer interleaving timed out".to_string()),
}
.or_else(|err| {
heuristic_interleave_answer(&request.user_prompt, &request.final_answer).ok_or(err)
});
app_tx.send(AppEvent::AnswerInterleaveResult {
request_id: request.request_id,
result,
});
});
}
}
/// Runs the fast-model rewrite that interleaves `user_prompt` questions with
/// `final_answer` content.
///
/// Returns the rewritten markdown, or an error string when the request fails
/// or yields an empty response.
async fn interleave_answer(
auth_manager: Arc<AuthManager>,
models_manager: Arc<ModelsManager>,
config: Config,
user_prompt: &str,
final_answer: &str,
) -> Result<String, String> {
let model_info = models_manager
.get_model_info(ANSWER_INTERLEAVE_MODEL, &config)
.await;
// Cached auth is read here only for telemetry attribution.
let auth = auth_manager.auth_cached();
let auth_mode = auth
.as_ref()
.map(codex_core::CodexAuth::auth_mode)
.map(codex_otel::TelemetryAuthMode::from);
let account_id = auth
.as_ref()
.and_then(codex_core::CodexAuth::get_account_id);
let account_email = auth
.as_ref()
.and_then(codex_core::CodexAuth::get_account_email);
// This pass is reported as its own sub-agent session in telemetry.
let telemetry = SessionTelemetry::new(
ThreadId::new(),
ANSWER_INTERLEAVE_MODEL,
ANSWER_INTERLEAVE_MODEL,
account_id,
account_email,
auth_mode,
originator().value,
config.otel.log_user_prompt,
codex_core::terminal::user_agent(),
SessionSource::SubAgent(codex_protocol::protocol::SubAgentSource::Other(
ANSWER_INTERLEAVE_AGENT_LABEL.to_string(),
)),
);
let model_client = ModelClient::new(
Some(auth_manager),
ThreadId::new(),
config.model_provider.clone(),
SessionSource::SubAgent(codex_protocol::protocol::SubAgentSource::Other(
ANSWER_INTERLEAVE_AGENT_LABEL.to_string(),
)),
config.model_verbosity,
false,
false,
false,
None,
);
let mut session = model_client.new_session();
let prompt = PromptBuilder::new(user_prompt, final_answer).build();
let mut stream = session
.stream(
&prompt,
&model_info,
&telemetry,
None,
model_info.default_reasoning_summary,
None,
None,
)
.await
.map_err(|err| err.to_string())?;
let mut output = String::new();
// Accumulate streamed text deltas; fall back to the completed item's text
// only when no deltas arrived first.
while let Some(event) = stream.next().await {
match event.map_err(|err| err.to_string())? {
ResponseEvent::OutputTextDelta(delta) => output.push_str(&delta),
ResponseEvent::OutputItemDone(item) if output.is_empty() => {
output.push_str(&response_item_text(&item));
}
ResponseEvent::OutputItemDone(_) => {}
ResponseEvent::Completed { .. } => break,
// Reasoning/metadata events carry no displayable answer text.
ResponseEvent::Created
| ResponseEvent::OutputItemAdded(_)
| ResponseEvent::ServerModel(_)
| ResponseEvent::ServerReasoningIncluded(_)
| ResponseEvent::ReasoningSummaryDelta { .. }
| ResponseEvent::ReasoningContentDelta { .. }
| ResponseEvent::ReasoningSummaryPartAdded { .. }
| ResponseEvent::RateLimits(_)
| ResponseEvent::ModelsEtag(_) => {}
}
}
let output = output.trim().to_string();
if output.is_empty() {
Err("answer interleaving returned an empty response".to_string())
} else {
// Post-processing may replace a raw-list model reply with the local
// heuristic interleave; otherwise keep the model output unchanged.
Ok(post_process_interleaved_answer(user_prompt, final_answer, &output).unwrap_or(output))
}
}
/// Extracts the concatenated text content of a `Message` response item;
/// non-message items and image content yield an empty string.
fn response_item_text(item: &ResponseItem) -> String {
    let ResponseItem::Message { content, .. } = item else {
        return String::new();
    };
    let mut text = String::new();
    for content_item in content {
        match content_item {
            ContentItem::InputText { text: part } | ContentItem::OutputText { text: part } => {
                text.push_str(part);
            }
            // Images contribute no text.
            ContentItem::InputImage { .. } => {}
        }
    }
    text
}
/// Builds the rewrite prompt from the user prompt and the final answer.
struct PromptBuilder<'a> {
user_prompt: &'a str,
final_answer: &'a str,
}
impl<'a> PromptBuilder<'a> {
fn new(user_prompt: &'a str, final_answer: &'a str) -> Self {
Self {
user_prompt,
final_answer,
}
}
/// Assembles a single-user-message prompt containing the rewrite rules and
/// the tagged prompt/answer payload.
fn build(self) -> Prompt {
let request = format!(
"You are formatting a Codex final answer for display in a CLI transcript.\n\
Rewrite the assistant answer so it interleaves the user's questions with the answer.\n\
Rules:\n\
- Preserve the original meaning and facts.\n\
- Do not invent information.\n\
- If the user prompt contains multiple questions, numbered items, or bullet points, you must rewrite it into interleaved question and answer pairs.\n\
- Keep the user's numbering or bullets when present.\n\
- For each question or sub-question, show the prompt text, then a line starting with `A:`.\n\
- Keep trailing sections like `Assumptions:` or warnings after the paired answers when they do not map cleanly to a single question.\n\
- Output markdown only. Do not add a preamble or code fence.\n\
- Only return the original answer unchanged when the prompt is clearly asking a single question.\n\n\
<user_prompt>\n{user_prompt}\n</user_prompt>\n\n\
<assistant_final_answer>\n{final_answer}\n</assistant_final_answer>\n",
user_prompt = self.user_prompt,
final_answer = self.final_answer,
);
let mut prompt = Prompt::default();
prompt.input = vec![ResponseItem::Message {
id: None,
role: "user".to_string(),
content: vec![ContentItem::InputText { text: request }],
end_turn: None,
phase: None,
}];
prompt.base_instructions = BaseInstructions::default();
prompt
}
}
/// Kind of list detected in a prompt or answer.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ListKind {
Numbered,
Bulleted,
}
/// One parsed list item: its marker (e.g. `1.` or `-`) and its text,
/// which may span multiple lines joined with `\n`.
#[derive(Debug, PartialEq, Eq)]
struct ListItem {
marker: String,
text: String,
}
/// A parsed leading list plus any trailing text that followed it.
#[derive(Debug, PartialEq, Eq)]
struct ParsedList {
items: Vec<ListItem>,
// Text after the list (trimmed), when present.
remainder: Option<String>,
}
/// Falls back to the heuristic interleave when the model output does not
/// meaningfully interleave a multi-part prompt.
///
/// Returns `None` to keep `model_output` as-is, or `Some` replacement text.
fn post_process_interleaved_answer(
    user_prompt: &str,
    final_answer: &str,
    model_output: &str,
) -> Option<String> {
    let needs_fallback = !is_meaningfully_interleaved(user_prompt, model_output);
    needs_fallback
        .then(|| heuristic_interleave_answer(user_prompt, final_answer))
        .flatten()
}
/// Whether `answer` already interleaves a multi-item `user_prompt`.
///
/// Single-question prompts always count as interleaved; multi-item prompts
/// must contain at least one line starting with `A:`.
fn is_meaningfully_interleaved(user_prompt: &str, answer: &str) -> bool {
    if has_multiple_prompt_items(user_prompt) {
        answer
            .lines()
            .any(|line| line.trim_start().starts_with("A:"))
    } else {
        true
    }
}
/// True when the prompt parses as a list with more than one item.
fn has_multiple_prompt_items(user_prompt: &str) -> bool {
parse_simple_list(user_prompt).is_some_and(|parsed| parsed.items.len() > 1)
}
/// Deterministic fallback: pairs a numbered/bulleted prompt list with a
/// same-shaped answer list, emitting `marker question` / `A: answer` lines.
///
/// Returns `None` unless the prompt has at least two items and the answer
/// list has exactly as many. Any trailing answer text after the list is
/// appended as a separate blank-line-delimited section.
fn heuristic_interleave_answer(user_prompt: &str, final_answer: &str) -> Option<String> {
let questions = parse_simple_list(user_prompt)?;
let answers = parse_simple_list(final_answer)?;
if questions.items.len() < 2 || questions.items.len() != answers.items.len() {
return None;
}
let mut lines = Vec::with_capacity(questions.items.len() * 2 + 2);
for (question, answer) in questions.items.iter().zip(answers.items.iter()) {
lines.push(format!("{} {}", question.marker, question.text));
lines.push(format!("A: {}", answer.text));
}
if let Some(remainder) = answers.remainder
&& !remainder.trim().is_empty()
{
lines.push(String::new());
lines.push(remainder);
}
Some(lines.join("\n"))
}
/// Parses the leading simple list (numbered or bulleted) from `text`.
///
/// Preamble lines before the first item are ignored. Non-item lines inside
/// the list are folded into the previous item as continuation text. The list
/// ends at the first blank line after it started, or at the first item of a
/// different list kind; the remaining non-empty text becomes `remainder`.
/// Returns `None` when no list item is found at all.
fn parse_simple_list(text: &str) -> Option<ParsedList> {
let mut items = Vec::new();
let mut kind = None;
let mut started = false;
let mut remainder_start = None;
let lines: Vec<&str> = text.lines().collect();
let mut idx = 0;
while idx < lines.len() {
let line = lines[idx];
let trimmed = line.trim();
if trimmed.is_empty() {
if started {
// A blank line after the list started terminates it; the
// remainder begins at the next non-empty line, if any.
remainder_start = lines[idx + 1..]
.iter()
.position(|candidate| !candidate.trim().is_empty())
.map(|offset| idx + 1 + offset);
break;
}
idx += 1;
continue;
}
if let Some((line_kind, marker, item_text)) = parse_list_item_line(trimmed) {
match kind {
// Switching list kinds mid-stream ends the list; everything
// from this line onward becomes the remainder.
Some(existing_kind) if existing_kind != line_kind => {
remainder_start = Some(idx);
break;
}
None => kind = Some(line_kind),
Some(_) => {}
}
started = true;
items.push(ListItem {
marker,
text: item_text.to_string(),
});
idx += 1;
continue;
}
if started {
// Non-item line inside the list: continuation of the last item.
if let Some(last_item) = items.last_mut() {
last_item.text.push('\n');
last_item.text.push_str(trimmed);
}
idx += 1;
continue;
}
// Preamble before the first list item is skipped.
idx += 1;
}
if items.is_empty() {
return None;
}
let remainder = remainder_start.map(|start| lines[start..].join("\n").trim().to_string());
Some(ParsedList { items, remainder })
}
/// Parses one trimmed line as a list item of either kind.
///
/// Returns the list kind, the marker to reuse when re-emitting the item
/// (e.g. `"1."` or `"-"`), and the item text.
fn parse_list_item_line(line: &str) -> Option<(ListKind, String, &str)> {
    if let Some((marker, text)) = parse_numbered_item_line(line) {
        return Some((ListKind::Numbered, marker, text));
    }
    for prefix in ["- ", "* "] {
        if let Some(rest) = line.strip_prefix(prefix) {
            // The marker is the bullet character itself (first byte).
            return Some((ListKind::Bulleted, prefix[..1].to_string(), rest.trim()));
        }
    }
    None
}
/// Parses a numbered list item such as `1. text` or `12) text`.
///
/// Returns the marker including its separator (e.g. `"1."`) and the item
/// text with leading whitespace removed. Returns `None` when the line does
/// not start with digits, the separator is not `.` or `)`, or the item text
/// is empty.
fn parse_numbered_item_line(line: &str) -> Option<(String, &str)> {
    // Length of the leading ASCII-digit run (each digit is one byte).
    let digit_len = line.bytes().take_while(u8::is_ascii_digit).count();
    if digit_len == 0 {
        return None;
    }
    let mut rest = line[digit_len..].chars();
    // The character right after the digits must be a list separator; a
    // missing or non-matching character fails the same way the original
    // checked split did.
    let separator = rest.next()?;
    if separator != '.' && separator != ')' {
        return None;
    }
    let text = rest.as_str().trim_start();
    if text.is_empty() {
        return None;
    }
    Some((format!("{}{separator}", &line[..digit_len]), text))
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;
use super::heuristic_interleave_answer;
use super::parse_simple_list;
use super::post_process_interleaved_answer;
// Paired numbered lists interleave 1:1; unmatched trailing answer text is
// kept as a separate section after the pairs.
#[test]
fn heuristic_interleave_pairs_numbered_questions_and_answers() {
let result = heuristic_interleave_answer(
"1. what's my name?\n2. what are we doing?",
"1. Your name is Tuan-Lung.\n2. We are testing interleaving.\n\nAssumptions:\n- This is inferred.",
);
assert_eq!(
result,
Some(
"1. what's my name?\n\
A: Your name is Tuan-Lung.\n\
2. what are we doing?\n\
A: We are testing interleaving.\n\n\
Assumptions:\n\
- This is inferred."
.to_string()
)
);
}
// A non-list answer cannot be paired with a two-item prompt.
#[test]
fn heuristic_interleave_returns_none_for_mismatched_shapes() {
let result = heuristic_interleave_answer(
"1. what's my name?\n2. what are we doing?",
"Your name is Tuan-Lung.",
);
assert_eq!(result, None);
}
// When the model echoes a raw list without `A:` lines, post-processing
// substitutes the heuristic interleave.
#[test]
fn post_process_uses_heuristic_when_model_returns_raw_list_answer() {
let result = post_process_interleaved_answer(
"1. what's my name?\n2. what are we doing?",
"1. Your name is Tuan-Lung.\n2. We are testing interleaving.",
"1. Your name is Tuan-Lung.\n2. We are testing interleaving.",
);
assert_eq!(
result,
Some(
"1. what's my name?\n\
A: Your name is Tuan-Lung.\n\
2. what are we doing?\n\
A: We are testing interleaving."
.to_string()
)
);
}
// Text after the blank line that terminates the list becomes `remainder`.
#[test]
fn parse_simple_list_preserves_remainder() {
let parsed = parse_simple_list("1. one\n2. two\n\nTrailing section");
assert_eq!(
parsed.map(|parsed| parsed.remainder),
Some(Some("Trailing section".to_string()))
);
}
// Preamble lines before the first list item are skipped when pairing.
#[test]
fn heuristic_interleave_supports_prompt_preamble_before_numbered_list() {
let result = heuristic_interleave_answer(
"For testing purpose, let me ask you a few questions:\n1. how are you?\n2. what's your favorite model?",
"1. I'm doing fine.\n2. I don't have a favorite.",
);
assert_eq!(
result,
Some(
"1. how are you?\n\
A: I'm doing fine.\n\
2. what's your favorite model?\n\
A: I don't have a favorite."
.to_string()
)
);
}
// Continuation lines (e.g. `Source:` links) stay attached to their item.
#[test]
fn heuristic_interleave_keeps_continuation_lines_with_numbered_answers() {
let result = heuristic_interleave_answer(
"1. how are you doing?\n2. what's the biggest city in the world?\n3. who's the ceo of openai?",
"1. I'm doing fine.\n2. Jakarta is the largest city by population.\nSource: https://example.com/jakarta\n3. Sam Altman is the CEO of OpenAI.\nSource: https://example.com/openai",
);
assert_eq!(
result,
Some(
"1. how are you doing?\n\
A: I'm doing fine.\n\
2. what's the biggest city in the world?\n\
A: Jakarta is the largest city by population.\n\
Source: https://example.com/jakarta\n\
3. who's the ceo of openai?\n\
A: Sam Altman is the CEO of OpenAI.\n\
Source: https://example.com/openai"
.to_string()
)
);
}
}

View File

@@ -1,3 +1,4 @@
use crate::answer_interleave::AnswerInterleaveManager;
use crate::app_backtrack::BacktrackState;
use crate::app_event::AppEvent;
use crate::app_event::ExitMode;
@@ -16,6 +17,7 @@ use crate::chatwidget::ExternalEditorState;
use crate::chatwidget::ThreadInputState;
use crate::cwd_prompt::CwdPromptAction;
use crate::diff_render::DiffSummary;
use crate::draft_completion::DraftCompletionManager;
use crate::exec_command::strip_bash_lc_and_escape;
use crate::external_editor;
use crate::file_search::FileSearchManager;
@@ -675,6 +677,8 @@ pub(crate) struct App {
runtime_sandbox_policy_override: Option<SandboxPolicy>,
pub(crate) file_search: FileSearchManager,
draft_completion: DraftCompletionManager,
answer_interleave: AnswerInterleaveManager,
pub(crate) transcript_cells: Vec<Arc<dyn HistoryCell>>,
@@ -2141,6 +2145,18 @@ impl App {
.maybe_prompt_windows_sandbox_enable(should_prompt_windows_sandbox_nux_at_startup);
let file_search = FileSearchManager::new(config.cwd.clone(), app_event_tx.clone());
let draft_completion = DraftCompletionManager::new(
auth_manager.clone(),
thread_manager.get_models_manager(),
app_event_tx.clone(),
config.clone(),
);
let answer_interleave = AnswerInterleaveManager::new(
auth_manager.clone(),
thread_manager.get_models_manager(),
app_event_tx.clone(),
config.clone(),
);
#[cfg(not(debug_assertions))]
let upgrade_version = crate::updates::get_upgrade_version(&config);
@@ -2157,6 +2173,8 @@ impl App {
runtime_approval_policy_override: None,
runtime_sandbox_policy_override: None,
file_search,
draft_completion,
answer_interleave,
enhanced_keys_supported,
transcript_cells: Vec::new(),
overlay: None,
@@ -2703,6 +2721,20 @@ impl App {
AppEvent::FileSearchResult { query, matches } => {
self.chat_widget.apply_file_search_result(query, matches);
}
AppEvent::StartDraftCompletion(request) => {
self.draft_completion.start_request(request);
}
AppEvent::DraftCompletionResult { request_id, result } => {
self.chat_widget
.on_draft_completion_result(request_id, result);
}
AppEvent::StartAnswerInterleave(request) => {
self.answer_interleave.start_request(request);
}
AppEvent::AnswerInterleaveResult { request_id, result } => {
self.chat_widget
.on_answer_interleave_result(request_id, result);
}
AppEvent::RateLimitSnapshotFetched(snapshot) => {
self.chat_widget.on_rate_limit_snapshot(Some(snapshot));
}
@@ -6342,6 +6374,18 @@ guardian_approval = true
CodexAuth::from_api_key("Test API Key"),
);
let file_search = FileSearchManager::new(config.cwd.clone(), app_event_tx.clone());
let draft_completion = DraftCompletionManager::new(
auth_manager.clone(),
server.get_models_manager(),
app_event_tx.clone(),
config.clone(),
);
let answer_interleave = AnswerInterleaveManager::new(
auth_manager.clone(),
server.get_models_manager(),
app_event_tx.clone(),
config.clone(),
);
let model = codex_core::test_support::get_model_offline(config.model.as_deref());
let session_telemetry = test_session_telemetry(&config, model.as_str());
@@ -6358,6 +6402,8 @@ guardian_approval = true
runtime_approval_policy_override: None,
runtime_sandbox_policy_override: None,
file_search,
draft_completion,
answer_interleave,
transcript_cells: Vec::new(),
overlay: None,
deferred_history_lines: Vec::new(),
@@ -6401,6 +6447,18 @@ guardian_approval = true
CodexAuth::from_api_key("Test API Key"),
);
let file_search = FileSearchManager::new(config.cwd.clone(), app_event_tx.clone());
let draft_completion = DraftCompletionManager::new(
auth_manager.clone(),
server.get_models_manager(),
app_event_tx.clone(),
config.clone(),
);
let answer_interleave = AnswerInterleaveManager::new(
auth_manager.clone(),
server.get_models_manager(),
app_event_tx.clone(),
config.clone(),
);
let model = codex_core::test_support::get_model_offline(config.model.as_deref());
let session_telemetry = test_session_telemetry(&config, model.as_str());
@@ -6418,6 +6476,8 @@ guardian_approval = true
runtime_approval_policy_override: None,
runtime_sandbox_policy_override: None,
file_search,
draft_completion,
answer_interleave,
transcript_cells: Vec::new(),
overlay: None,
deferred_history_lines: Vec::new(),

View File

@@ -156,8 +156,12 @@ impl App {
..
}) = event
{
// First Esc in transcript overlay: begin backtrack preview at latest user message.
self.begin_overlay_backtrack_preview(tui);
if self.overlay.as_ref().is_some_and(Overlay::consumes_escape) {
self.overlay_forward_event(tui, event)?;
} else {
// First Esc in transcript overlay: begin backtrack preview at latest user message.
self.begin_overlay_backtrack_preview(tui);
}
Ok(true)
} else {
// Not in backtrack mode: forward events to the overlay widget.

View File

@@ -18,8 +18,10 @@ use codex_protocol::protocol::Event;
use codex_protocol::protocol::RateLimitSnapshot;
use codex_utils_approval_presets::ApprovalPreset;
use crate::answer_interleave::AnswerInterleaveRequest;
use crate::bottom_pane::ApprovalRequest;
use crate::bottom_pane::StatusLineItem;
use crate::draft_completion::DraftCompletionRequest;
use crate::history_cell::HistoryCell;
use codex_core::config::types::ApprovalsReviewer;
@@ -128,6 +130,25 @@ pub(crate) enum AppEvent {
matches: Vec<FileMatch>,
},
/// Kick off an asynchronous fast-model pass that restructures a final answer into an
/// interleaved question/answer display.
StartAnswerInterleave(AnswerInterleaveRequest),
/// Result of a completed answer interleaving request.
AnswerInterleaveResult {
request_id: u64,
result: Result<String, String>,
},
/// Kick off an asynchronous fast-model draft completion request.
StartDraftCompletion(DraftCompletionRequest),
/// Result of a completed draft completion request.
DraftCompletionResult {
request_id: u64,
result: Result<Vec<String>, String>,
},
/// Result of refreshing rate limits
RateLimitSnapshotFetched(RateLimitSnapshot),

View File

@@ -153,6 +153,7 @@ use super::chat_composer_history::HistoryEntry;
use super::command_popup::CommandItem;
use super::command_popup::CommandPopup;
use super::command_popup::CommandPopupFlags;
use super::draft_completion_popup::DraftCompletionPopup;
use super::file_search_popup::FileSearchPopup;
use super::footer::CollaborationModeIndicator;
use super::footer::FooterMode;
@@ -258,6 +259,10 @@ pub enum InputResult {
},
Command(SlashCommand),
CommandWithArgs(SlashCommand, String, Vec<TextElement>),
RequestDraftCompletion {
request_id: u64,
draft: String,
},
None,
}
@@ -404,6 +409,7 @@ pub(crate) struct ChatComposer {
collaboration_mode_indicator: Option<CollaborationModeIndicator>,
connectors_enabled: bool,
fast_command_enabled: bool,
next_draft_completion_request_id: u64,
personality_command_enabled: bool,
realtime_conversation_enabled: bool,
audio_device_selection_enabled: bool,
@@ -430,6 +436,7 @@ struct ComposerMentionBinding {
enum ActivePopup {
None,
Command(CommandPopup),
Completion(DraftCompletionPopup),
File(FileSearchPopup),
Skill(SkillPopup),
}
@@ -526,6 +533,7 @@ impl ChatComposer {
collaboration_mode_indicator: None,
connectors_enabled: false,
fast_command_enabled: false,
next_draft_completion_request_id: 0,
personality_command_enabled: false,
realtime_conversation_enabled: false,
audio_device_selection_enabled: false,
@@ -666,6 +674,9 @@ impl ChatComposer {
ActivePopup::Command(popup) => {
Constraint::Max(popup.calculate_required_height(area.width))
}
ActivePopup::Completion(popup) => {
Constraint::Max(popup.calculate_required_height(area.width))
}
ActivePopup::File(popup) => Constraint::Max(popup.calculate_required_height()),
ActivePopup::Skill(popup) => {
Constraint::Max(popup.calculate_required_height(area.width))
@@ -1239,6 +1250,23 @@ impl ChatComposer {
}
}
pub(crate) fn on_draft_completion_result(
&mut self,
request_id: u64,
result: Result<Vec<String>, String>,
) {
let ActivePopup::Completion(popup) = &mut self.active_popup else {
return;
};
if popup.request_id() != request_id {
return;
}
match result {
Ok(suggestions) => popup.set_suggestions(suggestions),
Err(err) => popup.set_error_message(err),
}
}
/// Show the transient "press again to quit" hint for `key`.
///
/// The owner (`BottomPane`/`ChatWidget`) is responsible for scheduling a
@@ -1332,6 +1360,9 @@ impl ChatComposer {
let result = match &mut self.active_popup {
ActivePopup::Command(_) => self.handle_key_event_with_slash_popup(key_event),
ActivePopup::Completion(_) => {
self.handle_key_event_with_draft_completion_popup(key_event)
}
ActivePopup::File(_) => self.handle_key_event_with_file_popup(key_event),
ActivePopup::Skill(_) => self.handle_key_event_with_skill_popup(key_event),
ActivePopup::None => self.handle_key_event_without_popup(key_event),
@@ -1348,6 +1379,9 @@ impl ChatComposer {
/// Handle key event when the slash-command popup is visible.
fn handle_key_event_with_slash_popup(&mut self, key_event: KeyEvent) -> (InputResult, bool) {
if is_draft_completion_shortcut(key_event) {
return (InputResult::None, true);
}
if self.handle_shortcut_overlay_key(&key_event) {
return (InputResult::None, true);
}
@@ -1550,6 +1584,70 @@ impl ChatComposer {
p
}
/// Handle key events while the draft-completion popup is visible.
///
/// Up/Ctrl+P and Down/Ctrl+N move the selection, Esc dismisses, Enter
/// inserts the selected suggestion at the cursor, and any other key closes
/// the popup and is processed as regular composer input.
fn handle_key_event_with_draft_completion_popup(
&mut self,
key_event: KeyEvent,
) -> (InputResult, bool) {
// Swallow the Ctrl+O shortcut so it cannot re-trigger while open.
if is_draft_completion_shortcut(key_event) {
return (InputResult::None, true);
}
if self.handle_shortcut_overlay_key(&key_event) {
return (InputResult::None, true);
}
self.footer_mode = reset_mode_after_activity(self.footer_mode);
let ActivePopup::Completion(popup) = &mut self.active_popup else {
// Callers only dispatch here when the completion popup is active.
unreachable!();
};
match key_event {
KeyEvent {
code: KeyCode::Up, ..
}
| KeyEvent {
code: KeyCode::Char('p'),
modifiers: KeyModifiers::CONTROL,
..
} => {
popup.move_up();
(InputResult::None, true)
}
KeyEvent {
code: KeyCode::Down,
..
}
| KeyEvent {
code: KeyCode::Char('n'),
modifiers: KeyModifiers::CONTROL,
..
} => {
popup.move_down();
(InputResult::None, true)
}
KeyEvent {
code: KeyCode::Esc, ..
} => {
self.active_popup = ActivePopup::None;
(InputResult::None, true)
}
KeyEvent {
code: KeyCode::Enter,
modifiers: KeyModifiers::NONE,
..
} => {
// Take the suggestion before closing the popup, then insert it.
let selected = popup.selected_suggestion().map(str::to_string);
self.active_popup = ActivePopup::None;
if let Some(suggestion) = selected {
self.textarea.insert_str(&suggestion);
}
(InputResult::None, true)
}
input => {
// Any other key dismisses the popup and is handled normally.
self.active_popup = ActivePopup::None;
self.handle_key_event_without_popup(input)
}
}
}
/// Handle non-ASCII character input (often IME) while still supporting paste-burst detection.
///
/// This handler exists because non-ASCII input often comes from IMEs, where characters can
@@ -1636,6 +1734,9 @@ impl ChatComposer {
/// Handle key events when file search popup is visible.
fn handle_key_event_with_file_popup(&mut self, key_event: KeyEvent) -> (InputResult, bool) {
if is_draft_completion_shortcut(key_event) {
return (InputResult::None, true);
}
if self.handle_shortcut_overlay_key(&key_event) {
return (InputResult::None, true);
}
@@ -1759,6 +1860,9 @@ impl ChatComposer {
}
fn handle_key_event_with_skill_popup(&mut self, key_event: KeyEvent) -> (InputResult, bool) {
if is_draft_completion_shortcut(key_event) {
return (InputResult::None, true);
}
if self.handle_shortcut_overlay_key(&key_event) {
return (InputResult::None, true);
}
@@ -2778,6 +2882,25 @@ impl ChatComposer {
}
self.handle_input_basic(key_event)
}
KeyEvent {
code: KeyCode::Char('o'),
modifiers: KeyModifiers::CONTROL,
kind: KeyEventKind::Press,
..
} if self.popups_enabled()
&& self.textarea.cursor() == self.textarea.text().len()
&& !self.is_empty() =>
{
self.next_draft_completion_request_id =
self.next_draft_completion_request_id.wrapping_add(1);
let request_id = self.next_draft_completion_request_id;
let draft = self.current_text_with_pending();
self.active_popup = ActivePopup::Completion(DraftCompletionPopup::new(request_id));
(
InputResult::RequestDraftCompletion { request_id, draft },
true,
)
}
KeyEvent {
code: KeyCode::Tab,
modifiers: KeyModifiers::NONE,
@@ -3237,6 +3360,9 @@ impl ChatComposer {
}
pub(crate) fn sync_popups(&mut self) {
if matches!(self.active_popup, ActivePopup::Completion(_)) {
return;
}
self.sync_slash_command_elements();
if !self.popups_enabled() {
self.active_popup = ActivePopup::None;
@@ -4154,6 +4280,7 @@ impl Renderable for ChatComposer {
+ match &self.active_popup {
ActivePopup::None => footer_total_height,
ActivePopup::Command(c) => c.calculate_required_height(width),
ActivePopup::Completion(c) => c.calculate_required_height(width),
ActivePopup::File(c) => c.calculate_required_height(),
ActivePopup::Skill(c) => c.calculate_required_height(width),
}
@@ -4172,6 +4299,9 @@ impl ChatComposer {
ActivePopup::Command(popup) => {
popup.render_ref(popup_rect, buf);
}
ActivePopup::Completion(popup) => {
popup.render_ref(popup_rect, buf);
}
ActivePopup::File(popup) => {
popup.render_ref(popup_rect, buf);
}
@@ -4482,6 +4612,18 @@ fn prompt_selection_action(
}
}
/// True when `key_event` is the Ctrl+O press that triggers draft completion.
fn is_draft_completion_shortcut(key_event: KeyEvent) -> bool {
matches!(
key_event,
KeyEvent {
code: KeyCode::Char('o'),
modifiers: KeyModifiers::CONTROL,
kind: KeyEventKind::Press,
..
}
)
}
impl Drop for ChatComposer {
fn drop(&mut self) {
// Stop any running spinner tasks.
@@ -6537,6 +6679,160 @@ mod tests {
}
}
#[test]
fn draft_completion_popup_snapshot() {
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyModifiers;
snapshot_composer_state_with_width("draft_completion_popup", 72, false, |composer| {
composer.set_text_content("hell".to_string(), Vec::new(), Vec::new());
composer.move_cursor_to_end();
let (result, _needs_redraw) =
composer.handle_key_event(KeyEvent::new(KeyCode::Char('o'), KeyModifiers::CONTROL));
let InputResult::RequestDraftCompletion { request_id, .. } = result else {
panic!("expected draft completion request");
};
composer.on_draft_completion_result(
request_id,
Ok(vec![
"o world".to_string(),
" there".to_string(),
" everyone".to_string(),
]),
);
});
}
#[test]
fn draft_completion_popup_error_snapshot() {
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyModifiers;
snapshot_composer_state_with_width("draft_completion_popup_error", 72, false, |composer| {
composer.set_text_content("hell".to_string(), Vec::new(), Vec::new());
composer.move_cursor_to_end();
let (result, _needs_redraw) =
composer.handle_key_event(KeyEvent::new(KeyCode::Char('o'), KeyModifiers::CONTROL));
let InputResult::RequestDraftCompletion { request_id, .. } = result else {
panic!("expected draft completion request");
};
composer.on_draft_completion_result(
request_id,
Err("draft completion timed out".to_string()),
);
});
}
#[test]
fn draft_completion_enter_accepts_selected_suggestion() {
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
use crossterm::event::KeyModifiers;
let (tx, _rx) = unbounded_channel::<AppEvent>();
let sender = AppEventSender::new(tx);
let mut composer = ChatComposer::new(
true,
sender,
false,
"Ask Codex to do anything".to_string(),
false,
);
composer.set_text_content("hell".to_string(), Vec::new(), Vec::new());
composer.move_cursor_to_end();
let (result, _needs_redraw) =
composer.handle_key_event(KeyEvent::new(KeyCode::Char('o'), KeyModifiers::CONTROL));
let InputResult::RequestDraftCompletion { request_id, draft } = result else {
panic!("expected draft completion request");
};
assert_eq!(draft, "hell");
composer.on_draft_completion_result(
request_id,
Ok(vec![
"o world".to_string(),
" there".to_string(),
" everyone".to_string(),
]),
);
let _ = composer.handle_key_event(KeyEvent::new(KeyCode::Down, KeyModifiers::NONE));
let (result, _needs_redraw) =
composer.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
assert_eq!(result, InputResult::None);
assert_eq!(composer.current_text(), "hell there");
assert!(matches!(composer.active_popup, ActivePopup::None));
}
#[test]
fn draft_completion_popup_dismisses_on_edit() {
    use crossterm::event::KeyCode;
    use crossterm::event::KeyEvent;
    use crossterm::event::KeyModifiers;
    let (tx, _rx) = unbounded_channel::<AppEvent>();
    let sender = AppEventSender::new(tx);
    let mut composer = ChatComposer::new(
        true,
        sender,
        false,
        "Ask Codex to do anything".to_string(),
        false,
    );
    // Seed a partial draft and request completions via Ctrl+O.
    composer.set_text_content("hell".to_string(), Vec::new(), Vec::new());
    composer.move_cursor_to_end();
    let (result, _needs_redraw) =
        composer.handle_key_event(KeyEvent::new(KeyCode::Char('o'), KeyModifiers::CONTROL));
    let InputResult::RequestDraftCompletion { request_id, .. } = result else {
        panic!("expected draft completion request");
    };
    composer.on_draft_completion_result(
        request_id,
        Ok(vec![
            "o world".to_string(),
            " there".to_string(),
            " everyone".to_string(),
        ]),
    );
    // Typing a regular character edits the draft instead of interacting with
    // the popup; the popup must be dismissed without consuming the keystroke.
    let (result, _needs_redraw) =
        composer.handle_key_event(KeyEvent::new(KeyCode::Char('!'), KeyModifiers::NONE));
    // Let the paste-burst debounce settle so the typed char is committed.
    flush_after_paste_burst(&mut composer);
    assert_eq!(result, InputResult::None);
    assert_eq!(composer.current_text(), "hell!");
    assert!(matches!(composer.active_popup, ActivePopup::None));
}
#[test]
fn draft_completion_shortcut_is_ignored_while_other_popup_is_active() {
    use crossterm::event::KeyCode;
    use crossterm::event::KeyEvent;
    use crossterm::event::KeyModifiers;
    let (tx, _rx) = unbounded_channel::<AppEvent>();
    let sender = AppEventSender::new(tx);
    let mut composer = ChatComposer::new(
        true,
        sender,
        false,
        "Ask Codex to do anything".to_string(),
        false,
    );
    // Typing "/mo" opens the slash-command popup.
    type_chars_humanlike(&mut composer, &['/', 'm', 'o']);
    assert!(matches!(composer.active_popup, ActivePopup::Command(_)));
    // While the command popup is open, Ctrl+O must be a no-op: no draft
    // completion request, and the command popup stays active.
    let (result, _needs_redraw) =
        composer.handle_key_event(KeyEvent::new(KeyCode::Char('o'), KeyModifiers::CONTROL));
    assert_eq!(result, InputResult::None);
    assert!(matches!(composer.active_popup, ActivePopup::Command(_)));
}
fn flush_after_paste_burst(composer: &mut ChatComposer) -> bool {
std::thread::sleep(PasteBurst::recommended_active_flush_delay());
composer.flush_paste_burst_if_due()
@@ -6600,6 +6896,9 @@ mod tests {
InputResult::Queued { .. } => {
panic!("expected command dispatch, but composer queued literal text")
}
InputResult::RequestDraftCompletion { .. } => {
panic!("expected command dispatch, but composer requested draft completion")
}
InputResult::None => panic!("expected Command result for '/init'"),
}
assert!(composer.textarea.is_empty(), "composer should be cleared");
@@ -7005,6 +7304,9 @@ mod tests {
InputResult::Queued { .. } => {
panic!("expected command dispatch after Tab completion, got literal queue")
}
InputResult::RequestDraftCompletion { .. } => {
panic!("expected command dispatch after Tab completion, got draft completion")
}
InputResult::None => panic!("expected Command result for '/diff'"),
}
assert!(composer.textarea.is_empty());
@@ -7172,6 +7474,9 @@ mod tests {
InputResult::Queued { .. } => {
panic!("expected command dispatch, but composer queued literal text")
}
InputResult::RequestDraftCompletion { .. } => {
panic!("expected command dispatch, but composer requested draft completion")
}
InputResult::None => panic!("expected Command result for '/mention'"),
}
assert!(composer.textarea.is_empty(), "composer should be cleared");

View File

@@ -0,0 +1,165 @@
use ratatui::buffer::Buffer;
use ratatui::layout::Rect;
use ratatui::prelude::Stylize;
use ratatui::text::Line;
use ratatui::text::Span;
use ratatui::widgets::Widget;
use ratatui::widgets::WidgetRef;
use unicode_width::UnicodeWidthStr;
use crate::render::Insets;
use crate::render::RectExt;
use super::popup_consts::MAX_POPUP_ROWS;
use super::scroll_state::ScrollState;
use super::selection_popup_common::GenericDisplayRow;
use super::selection_popup_common::measure_rows_height;
use super::selection_popup_common::render_rows;
/// Popup listing model-generated continuations for the chat composer draft.
///
/// Created in the waiting state when a completion request is dispatched and
/// updated via `set_suggestions` / `set_error_message` once the async result
/// arrives.
pub(crate) struct DraftCompletionPopup {
    // Identifier of the request this popup belongs to; results carrying a
    // different id are stale and must be ignored by the owner.
    request_id: u64,
    // True until a result (suggestions or error) has been received.
    waiting: bool,
    // Candidate continuations; each entry is text to append to the draft.
    suggestions: Vec<String>,
    // When set, the popup renders this message instead of suggestion rows.
    error_message: Option<String>,
    // Selection and scroll bookkeeping for the suggestion list.
    state: ScrollState,
}
impl DraftCompletionPopup {
    /// Creates a popup in the waiting ("loading...") state for `request_id`,
    /// with the first row pre-selected so Enter accepts it as soon as
    /// suggestions arrive.
    pub(crate) fn new(request_id: u64) -> Self {
        let mut state = ScrollState::new();
        state.selected_idx = Some(0);
        Self {
            request_id,
            waiting: true,
            suggestions: Vec::new(),
            error_message: None,
            state,
        }
    }

    /// Id of the draft-completion request this popup was opened for.
    pub(crate) fn request_id(&self) -> u64 {
        self.request_id
    }

    /// Replaces the suggestion list, leaving the waiting/error states.
    pub(crate) fn set_suggestions(&mut self, suggestions: Vec<String>) {
        self.waiting = false;
        self.suggestions = suggestions;
        self.error_message = None;
        let len = self.suggestions.len();
        // Keep the selection in range and scroll it into the visible window
        // (at most MAX_POPUP_ROWS rows, at least one).
        self.state.clamp_selection(len);
        self.state.ensure_visible(len, len.clamp(1, MAX_POPUP_ROWS));
    }

    /// Switches the popup to its error state, dropping any suggestions and
    /// clearing the selection.
    pub(crate) fn set_error_message(&mut self, error_message: String) {
        self.waiting = false;
        self.suggestions.clear();
        self.error_message = Some(error_message);
        self.state.selected_idx = None;
        self.state.scroll_top = 0;
    }

    /// Moves the selection up, wrapping past the first row.
    pub(crate) fn move_up(&mut self) {
        let len = self.suggestions.len();
        self.state.move_up_wrap(len);
        self.state.ensure_visible(len, len.clamp(1, MAX_POPUP_ROWS));
    }

    /// Moves the selection down, wrapping past the last row.
    pub(crate) fn move_down(&mut self) {
        let len = self.suggestions.len();
        self.state.move_down_wrap(len);
        self.state.ensure_visible(len, len.clamp(1, MAX_POPUP_ROWS));
    }

    /// Currently highlighted suggestion, if any.
    pub(crate) fn selected_suggestion(&self) -> Option<&str> {
        self.state
            .selected_idx
            .and_then(|idx| self.suggestions.get(idx))
            .map(String::as_str)
    }

    /// Rows needed to render the popup at `width`: either the wrapped error
    /// message (min. one row) or the measured suggestion rows.
    pub(crate) fn calculate_required_height(&self, width: u16) -> u16 {
        if let Some(message) = self.error_message.as_deref() {
            // NOTE(review): height is measured at `width - 2`, but
            // `render_wrapped_message` wraps at the full render area width —
            // confirm the caller insets the render area by two columns,
            // otherwise tall messages may be measured one row too high/low.
            let wrapped_height =
                wrapped_message_lines(message, width.saturating_sub(2)).len() as u16;
            return wrapped_height.max(1);
        }
        let rows = self.rows();
        measure_rows_height(&rows, &self.state, MAX_POPUP_ROWS, width.saturating_sub(2))
    }

    /// Converts suggestions into generic display rows; leading whitespace is
    /// trimmed for display only (the stored suggestion keeps it).
    fn rows(&self) -> Vec<GenericDisplayRow> {
        self.suggestions
            .iter()
            .map(|suggestion| GenericDisplayRow {
                name: suggestion.trim_start().to_string(),
                name_prefix_spans: Vec::new(),
                match_indices: None,
                display_shortcut: None,
                description: None,
                category_tag: None,
                wrap_indent: None,
                is_disabled: false,
                disabled_reason: None,
            })
            .collect()
    }
}
impl WidgetRef for &DraftCompletionPopup {
    /// Renders the popup: error text takes priority, then the suggestion
    /// rows (inset by two columns), with a placeholder while waiting or when
    /// no suggestions were returned.
    fn render_ref(&self, area: Rect, buf: &mut Buffer) {
        if let Some(message) = self.error_message.as_deref() {
            render_wrapped_message(area, buf, message);
            return;
        }
        let rows = self.rows();
        let empty_message = if self.waiting {
            "loading..."
        } else {
            "no suggestions"
        };
        render_rows(
            area.inset(Insets::tlbr(0, 2, 0, 0)),
            buf,
            &rows,
            &self.state,
            MAX_POPUP_ROWS,
            empty_message,
        );
    }
}
/// Draws `message` into `area`, wrapped to the area's width, one dimmed
/// italic line per row; lines beyond the available height are dropped.
fn render_wrapped_message(area: Rect, buf: &mut Buffer, message: &str) {
    // Nothing can be drawn into a degenerate area.
    if area.width == 0 || area.height == 0 {
        return;
    }
    let max_rows = area.height as usize;
    let wrapped = wrapped_message_lines(message, area.width);
    for (row, text) in wrapped.into_iter().take(max_rows).enumerate() {
        let row_area = Rect {
            x: area.x,
            y: area.y + row as u16,
            width: area.width,
            height: 1,
        };
        let styled = Span::from(text).dim().italic();
        Line::from(vec![styled]).render(row_area, buf);
    }
}
/// Wraps `message` to at most `width` columns and returns the lines.
///
/// If a wrapped line still exceeds the target width (e.g. an unbreakable
/// token), the trimmed form is used instead — unless trimming would erase
/// the line entirely, in which case the original line is kept.
fn wrapped_message_lines(message: &str, width: u16) -> Vec<String> {
    // textwrap needs at least one column to work with.
    let target = usize::from(width.max(1));
    let mut lines = Vec::new();
    for wrapped in textwrap::wrap(message, target) {
        let text = wrapped.into_owned();
        if UnicodeWidthStr::width(text.as_str()) <= target {
            lines.push(text);
        } else {
            let trimmed = text.trim().to_string();
            lines.push(if trimmed.is_empty() { text } else { trimmed });
        }
    }
    lines
}

View File

@@ -76,6 +76,7 @@ mod chat_composer;
mod chat_composer_history;
mod command_popup;
pub mod custom_prompt_view;
mod draft_completion_popup;
mod experimental_features_view;
mod file_search_popup;
mod footer;
@@ -1099,6 +1100,15 @@ impl BottomPane {
self.request_redraw();
}
/// Forwards an async draft-completion result to the composer's popup and
/// repaints so the suggestions (or error message) become visible.
pub(crate) fn on_draft_completion_result(
    &mut self,
    request_id: u64,
    result: Result<Vec<String>, String>,
) {
    self.composer.on_draft_completion_result(request_id, result);
    self.request_redraw();
}
pub(crate) fn attach_image(&mut self, path: PathBuf) {
if self.view_stack.is_empty() {
self.composer.attach_image(path);

View File

@@ -0,0 +1,13 @@
---
source: tui/src/bottom_pane/chat_composer.rs
expression: terminal.backend()
---
" "
" hell "
" "
" "
" "
" "
" o world "
" there "
" everyone "

View File

@@ -0,0 +1,13 @@
---
source: tui/src/bottom_pane/chat_composer.rs
expression: terminal.backend()
---
" "
" hell "
" "
" "
" "
" "
" "
" "
"draft completion timed out "

View File

@@ -38,12 +38,16 @@ use std::time::Duration;
use std::time::Instant;
use self::realtime::PendingSteerCompareKey;
use crate::answer_interleave::AnswerInterleaveRequest;
use crate::answer_interleave::LiveAnswerCell;
use crate::answer_interleave::LiveAnswerHandle;
use crate::app_event::RealtimeAudioDeviceKind;
#[cfg(all(not(target_os = "linux"), feature = "voice-input"))]
use crate::audio_device::list_realtime_audio_device_names;
use crate::bottom_pane::StatusLineItem;
use crate::bottom_pane::StatusLinePreviewData;
use crate::bottom_pane::StatusLineSetupView;
use crate::draft_completion::DraftCompletionRequest;
use crate::status::RateLimitWindowDisplay;
use crate::status::format_directory_display;
use crate::status::format_tokens_compact;
@@ -90,6 +94,7 @@ use codex_protocol::config_types::Settings;
use codex_protocol::config_types::WindowsSandboxLevel;
use codex_protocol::items::AgentMessageContent;
use codex_protocol::items::AgentMessageItem;
use codex_protocol::items::TurnItem;
use codex_protocol::models::MessagePhase;
use codex_protocol::models::local_image_label_text;
use codex_protocol::parse_command::ParsedCommand;
@@ -118,6 +123,7 @@ use codex_protocol::protocol::GuardianAssessmentEvent;
use codex_protocol::protocol::GuardianAssessmentStatus;
use codex_protocol::protocol::ImageGenerationBeginEvent;
use codex_protocol::protocol::ImageGenerationEndEvent;
use codex_protocol::protocol::ItemStartedEvent;
use codex_protocol::protocol::ListCustomPromptsResponseEvent;
use codex_protocol::protocol::ListSkillsResponseEvent;
use codex_protocol::protocol::McpListToolsResponseEvent;
@@ -667,6 +673,11 @@ pub(crate) struct ChatWidget {
adaptive_chunking: AdaptiveChunkingPolicy,
// Stream lifecycle controller
stream_controller: Option<StreamController>,
agent_message_stream_kind: Option<AgentMessageStreamKind>,
live_answer_handle: Option<LiveAnswerHandle>,
pending_answer_interleave: Option<PendingAnswerInterleave>,
deferred_turn_complete: Option<DeferredTurnComplete>,
next_answer_interleave_request_id: u64,
// Stream lifecycle controller for proposed plan output.
plan_stream_controller: Option<PlanStreamController>,
// Latest completed user-visible Codex output that `/copy` should place on the clipboard.
@@ -907,6 +918,25 @@ struct PendingSteer {
compare_key: PendingSteerCompareKey,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum AgentMessageStreamKind {
Unknown,
Commentary,
FinalAnswer,
}
#[derive(Debug)]
struct PendingAnswerInterleave {
request_id: u64,
handle: LiveAnswerHandle,
raw_answer: String,
}
#[derive(Debug)]
struct DeferredTurnComplete {
from_replay: bool,
}
pub(crate) fn create_initial_user_message(
text: Option<String>,
local_image_paths: Vec<PathBuf>,
@@ -1560,6 +1590,156 @@ impl ChatWidget {
self.request_redraw();
}
/// Kind of the agent message currently streaming; `Unknown` before any
/// `ItemStarted` event recorded a phase for this turn.
fn current_agent_message_stream_kind(&self) -> AgentMessageStreamKind {
    match self.agent_message_stream_kind {
        Some(kind) => kind,
        None => AgentMessageStreamKind::Unknown,
    }
}
/// Trimmed text of the most recently rendered user message, or `None` when
/// no user message exists or it was only whitespace.
fn last_user_prompt_text(&self) -> Option<String> {
    let event = self.last_rendered_user_message_event.as_ref()?;
    let trimmed = event.message.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Working directory of the active session, falling back to the configured
/// cwd when the session has not reported one.
fn current_session_cwd(&self) -> &Path {
    match self.current_cwd.as_deref() {
        Some(cwd) => cwd,
        None => self.config.cwd.as_path(),
    }
}
/// Commits any in-flight exec/active cells and, when the previous turn did
/// visible work, inserts a final-message separator before the next
/// assistant output starts.
fn prepare_for_new_assistant_output(&mut self) {
    self.flush_unified_exec_wait_streak();
    self.flush_active_cell();
    if self.needs_final_message_separator && self.had_work_activity {
        // Capture elapsed time from the status widget (if shown) so the
        // separator can display how long the work took.
        let elapsed_seconds = self
            .bottom_pane
            .status_widget()
            .map(super::status_indicator_widget::StatusIndicatorWidget::elapsed_seconds)
            .map(|current| self.worked_elapsed_from(current));
        self.add_to_history(history_cell::FinalMessageSeparator::new(
            elapsed_seconds,
            None,
        ));
        self.needs_final_message_separator = false;
        self.had_work_activity = false;
    } else if self.needs_final_message_separator {
        // No work was done: skip the separator but still clear the flag so
        // it doesn't leak into a later turn.
        self.needs_final_message_separator = false;
    }
}
/// Returns the handle for the in-progress live answer cell, creating the
/// cell (and installing it as the active cell) on first use.
fn ensure_live_answer_handle(&mut self) -> LiveAnswerHandle {
    // Reuse the handle when the live cell is already streaming...
    if let Some(handle) = self.live_answer_handle.as_ref() {
        return handle.clone();
    }
    // ...or when an interleave rewrite is pending on the same cell.
    if let Some(pending) = self.pending_answer_interleave.as_ref() {
        return pending.handle.clone();
    }
    // A plain streamed message may still be open; commit it first so the
    // live answer starts in its own cell.
    if self.stream_controller.is_some() {
        self.flush_answer_stream_with_separator();
    }
    self.prepare_for_new_assistant_output();
    let (handle, cell) = LiveAnswerHandle::new(self.current_session_cwd().to_path_buf());
    self.live_answer_handle = Some(handle.clone());
    self.active_cell = Some(Box::new(cell));
    self.bump_active_cell_revision();
    handle
}
/// Appends a streamed final-answer chunk to the live answer cell and
/// schedules a repaint; empty chunks are ignored.
fn handle_final_answer_delta(&mut self, delta: String) {
    if !delta.is_empty() {
        self.ensure_live_answer_handle().push_delta(&delta);
        self.bump_active_cell_revision();
        self.request_redraw();
    }
}
/// Commits the live answer cell with `raw_answer` as-is, skipping the
/// interleave rewrite, and clears all answer-interleave state.
fn finalize_live_answer_without_interleave(&mut self, raw_answer: String) {
    let handle = self.ensure_live_answer_handle();
    handle.set_interleaving_pending(false);
    handle.set_markdown(raw_answer.clone());
    self.live_answer_handle = None;
    self.pending_answer_interleave = None;
    // `/copy` should yield the committed answer text.
    self.last_copyable_output = Some(raw_answer);
    self.bump_active_cell_revision();
    self.flush_active_cell();
    self.request_redraw();
}
/// Kicks off the background request that restructures `raw_answer` to
/// interleave with the user's prompt. Falls back to committing the raw
/// answer directly when there is no prompt to interleave against.
fn start_answer_interleave(&mut self, raw_answer: String) {
    let Some(user_prompt) = self.last_user_prompt_text() else {
        self.finalize_live_answer_without_interleave(raw_answer);
        return;
    };
    // Wrapping monotonic id lets `on_answer_interleave_result` discard
    // results from superseded requests.
    self.next_answer_interleave_request_id =
        self.next_answer_interleave_request_id.wrapping_add(1);
    let request_id = self.next_answer_interleave_request_id;
    let handle = self.ensure_live_answer_handle();
    // Show the raw answer immediately with a pending indicator while the
    // interleaved version is produced in the background.
    handle.set_markdown(raw_answer.clone());
    handle.set_interleaving_pending(true);
    self.pending_answer_interleave = Some(PendingAnswerInterleave {
        request_id,
        handle,
        raw_answer: raw_answer.clone(),
    });
    self.bump_active_cell_revision();
    self.app_event_tx
        .send(AppEvent::StartAnswerInterleave(AnswerInterleaveRequest {
            request_id,
            user_prompt,
            final_answer: raw_answer,
        }));
    self.request_redraw();
}
/// Records the phase of a newly started agent message so subsequent deltas
/// are routed to the right stream (commentary vs. final answer).
fn on_agent_message_item_started(&mut self, item: AgentMessageItem) {
    let kind = match item.phase {
        Some(MessagePhase::FinalAnswer) => AgentMessageStreamKind::FinalAnswer,
        Some(MessagePhase::Commentary) => AgentMessageStreamKind::Commentary,
        None => AgentMessageStreamKind::Unknown,
    };
    self.agent_message_stream_kind = Some(kind);
}
/// Handles the async interleave result: commits either the rewritten
/// answer or, on failure / empty rewrite, the original raw answer, and
/// finishes any turn completion that was deferred while waiting.
pub(crate) fn on_answer_interleave_result(
    &mut self,
    request_id: u64,
    result: Result<String, String>,
) {
    let Some(pending) = self.pending_answer_interleave.take() else {
        return;
    };
    // Result for a superseded request: restore the pending state untouched.
    if pending.request_id != request_id {
        self.pending_answer_interleave = Some(pending);
        return;
    }
    let final_answer = match result {
        Ok(answer) if !answer.trim().is_empty() => answer,
        // Empty rewrite: fall back to the raw answer.
        Ok(_) => pending.raw_answer.clone(),
        Err(err) => {
            tracing::warn!("answer interleave failed: {err}");
            pending.raw_answer.clone()
        }
    };
    pending.handle.set_interleaving_pending(false);
    pending.handle.set_markdown(final_answer.clone());
    self.live_answer_handle = None;
    self.last_copyable_output = Some(final_answer.clone());
    self.bump_active_cell_revision();
    self.flush_active_cell();
    // If on_task_complete arrived while we were waiting, finish it now with
    // the committed answer text.
    if let Some(deferred) = self.deferred_turn_complete.take() {
        self.finish_task_complete(Some(final_answer), deferred.from_replay);
    }
    self.request_redraw();
}
fn finalize_completed_assistant_message(&mut self, message: Option<&str>) {
// If we have a stream_controller, the finalized message payload is redundant because the
// visible content has already been accumulated through deltas.
@@ -1579,7 +1759,12 @@ impl ChatWidget {
}
fn on_agent_message_delta(&mut self, delta: String) {
self.handle_streaming_delta(delta);
match self.current_agent_message_stream_kind() {
AgentMessageStreamKind::Unknown | AgentMessageStreamKind::Commentary => {
self.handle_streaming_delta(delta)
}
AgentMessageStreamKind::FinalAnswer => self.handle_final_answer_delta(delta),
}
}
fn on_plan_delta(&mut self, delta: String) {
@@ -1699,6 +1884,10 @@ impl ChatWidget {
self.plan_item_active = false;
self.adaptive_chunking.reset();
self.plan_stream_controller = None;
self.agent_message_stream_kind = None;
self.live_answer_handle = None;
self.pending_answer_interleave = None;
self.deferred_turn_complete = None;
self.turn_runtime_metrics = RuntimeMetricsSummary::default();
self.session_telemetry.reset_runtime_metrics();
self.bottom_pane.clear_quit_shortcut_hint();
@@ -1715,6 +1904,15 @@ impl ChatWidget {
}
/// Turn-completion entry point. While a live interleave request is in
/// flight (never the case during replay), completion is deferred so the
/// rewritten answer lands before the turn is finalized.
fn on_task_complete(&mut self, last_agent_message: Option<String>, from_replay: bool) {
    if self.pending_answer_interleave.is_some() && !from_replay {
        // `from_replay` is known to be false on this branch; stored for
        // uniformity with the non-deferred path.
        self.deferred_turn_complete = Some(DeferredTurnComplete { from_replay });
        self.request_redraw();
        return;
    }
    self.finish_task_complete(last_agent_message, from_replay);
}
fn finish_task_complete(&mut self, last_agent_message: Option<String>, from_replay: bool) {
self.submit_pending_steers_after_interrupt = false;
if let Some(message) = last_agent_message.as_ref()
&& !message.trim().is_empty()
@@ -3000,16 +3198,61 @@ impl ChatWidget {
/// Commentary completion sets a deferred restore flag so the status row
/// returns once stream queues are idle. Final-answer completion (or absent
/// phase for legacy models) clears the flag to preserve historical behavior.
fn on_agent_message_item_completed(&mut self, item: AgentMessageItem) {
fn on_agent_message_item_completed(&mut self, item: AgentMessageItem, from_replay: bool) {
let mut message = String::new();
for content in &item.content {
match content {
AgentMessageContent::Text { text } => message.push_str(text),
}
}
self.finalize_completed_assistant_message(
(!message.is_empty()).then_some(message.as_str()),
);
let live_answer_markdown = self
.live_answer_handle
.as_ref()
.map(LiveAnswerHandle::markdown);
if from_replay || self.is_review_mode {
self.finalize_completed_assistant_message((!message.is_empty()).then_some(&message));
} else {
match item.phase {
Some(MessagePhase::Commentary) => {
if self.live_answer_handle.is_some() {
self.finalize_live_answer_without_interleave(message.clone());
} else {
self.finalize_completed_assistant_message(
(!message.is_empty()).then_some(message.as_str()),
);
}
}
Some(MessagePhase::FinalAnswer) => {
let final_answer = if message.is_empty() {
live_answer_markdown.unwrap_or_default()
} else {
message.clone()
};
if !final_answer.is_empty() && self.thread_id.is_some() {
self.start_answer_interleave(final_answer);
} else {
self.finalize_live_answer_without_interleave(final_answer);
}
}
None => {
if self.live_answer_handle.is_some() {
let final_answer = if message.is_empty() {
live_answer_markdown.unwrap_or_default()
} else {
message.clone()
};
self.finalize_live_answer_without_interleave(final_answer);
} else if !message.is_empty() && self.thread_id.is_some() {
self.start_answer_interleave(message.clone());
} else {
self.finalize_completed_assistant_message(
(!message.is_empty()).then_some(message.as_str()),
);
}
}
}
}
self.agent_message_stream_kind = None;
self.pending_status_indicator_restore = match item.phase {
// Models that don't support preambles only output AgentMessageItems on turn completion.
Some(MessagePhase::FinalAnswer) | None => false,
@@ -3097,29 +3340,8 @@ impl ChatWidget {
#[inline]
fn handle_streaming_delta(&mut self, delta: String) {
// Before streaming agent content, flush any active exec cell group.
self.flush_unified_exec_wait_streak();
self.flush_active_cell();
if self.stream_controller.is_none() {
// If the previous turn inserted non-stream history (exec output, patch status, MCP
// calls), render a separator before starting the next streamed assistant message.
if self.needs_final_message_separator && self.had_work_activity {
let elapsed_seconds = self
.bottom_pane
.status_widget()
.map(super::status_indicator_widget::StatusIndicatorWidget::elapsed_seconds)
.map(|current| self.worked_elapsed_from(current));
self.add_to_history(history_cell::FinalMessageSeparator::new(
elapsed_seconds,
None,
));
self.needs_final_message_separator = false;
self.had_work_activity = false;
} else if self.needs_final_message_separator {
// Reset the flag even if we don't show separator (no work was done)
self.needs_final_message_separator = false;
}
self.prepare_for_new_assistant_output();
self.stream_controller = Some(StreamController::new(
self.last_rendered_width.get().map(|w| w.saturating_sub(2)),
&self.config.cwd,
@@ -3585,6 +3807,11 @@ impl ChatWidget {
rate_limit_poller: None,
adaptive_chunking: AdaptiveChunkingPolicy::default(),
stream_controller: None,
agent_message_stream_kind: None,
live_answer_handle: None,
pending_answer_interleave: None,
deferred_turn_complete: None,
next_answer_interleave_request_id: 0,
plan_stream_controller: None,
last_copyable_output: None,
running_commands: HashMap::new(),
@@ -3771,6 +3998,11 @@ impl ChatWidget {
rate_limit_poller: None,
adaptive_chunking: AdaptiveChunkingPolicy::default(),
stream_controller: None,
agent_message_stream_kind: None,
live_answer_handle: None,
pending_answer_interleave: None,
deferred_turn_complete: None,
next_answer_interleave_request_id: 0,
plan_stream_controller: None,
last_copyable_output: None,
running_commands: HashMap::new(),
@@ -3949,6 +4181,11 @@ impl ChatWidget {
rate_limit_poller: None,
adaptive_chunking: AdaptiveChunkingPolicy::default(),
stream_controller: None,
agent_message_stream_kind: None,
live_answer_handle: None,
pending_answer_interleave: None,
deferred_turn_complete: None,
next_answer_interleave_request_id: 0,
plan_stream_controller: None,
last_copyable_output: None,
running_commands: HashMap::new(),
@@ -4214,6 +4451,16 @@ impl ChatWidget {
InputResult::CommandWithArgs(cmd, args, text_elements) => {
self.dispatch_command_with_args(cmd, args, text_elements);
}
InputResult::RequestDraftCompletion { request_id, draft } => {
self.app_event_tx.send(AppEvent::StartDraftCompletion(
DraftCompletionRequest {
request_id,
draft,
last_user_turn: self.last_user_prompt_text(),
last_assistant_turn: self.last_copyable_output.clone(),
},
));
}
InputResult::None => {}
},
}
@@ -4792,7 +5039,14 @@ impl ChatWidget {
}
fn flush_active_cell(&mut self) {
let flushed_live_answer = self
.active_cell
.as_ref()
.is_some_and(|cell| cell.as_any().is::<LiveAnswerCell>());
if let Some(active) = self.active_cell.take() {
if flushed_live_answer {
self.live_answer_handle = None;
}
self.needs_final_message_separator = true;
self.app_event_tx.send(AppEvent::InsertHistoryCell(active));
}
@@ -4810,8 +5064,16 @@ impl ChatWidget {
.active_cell
.as_ref()
.is_some_and(|c| c.as_any().is::<history_cell::SessionHeaderHistoryCell>());
let keep_interleaving_answer_active = self.live_answer_handle.is_some()
&& self
.active_cell
.as_ref()
.is_some_and(|c| c.as_any().is::<LiveAnswerCell>());
if !keep_placeholder_header_active && !cell.display_lines(u16::MAX).is_empty() {
if !keep_placeholder_header_active
&& !keep_interleaving_answer_active
&& !cell.display_lines(u16::MAX).is_empty()
{
// Only break exec grouping if the cell renders visible lines.
self.flush_active_cell();
self.needs_final_message_separator = true;
@@ -5222,6 +5484,15 @@ impl ChatWidget {
self.on_agent_message(message)
}
EventMsg::AgentMessage(AgentMessageEvent { .. }) => {}
EventMsg::ItemStarted(ItemStartedEvent { item, .. })
if !from_replay && matches!(item, TurnItem::AgentMessage(_)) =>
{
let TurnItem::AgentMessage(item) = item else {
unreachable!("guard ensures agent message item");
};
self.on_agent_message_item_started(item);
}
EventMsg::ItemStarted(_) => {}
EventMsg::AgentMessageDelta(AgentMessageDeltaEvent { delta }) => {
self.on_agent_message_delta(delta)
}
@@ -5397,7 +5668,6 @@ impl ChatWidget {
}
}
EventMsg::RawResponseItem(_)
| EventMsg::ItemStarted(_)
| EventMsg::AgentMessageContentDelta(_)
| EventMsg::ReasoningContentDelta(_)
| EventMsg::ReasoningRawContentDelta(_)
@@ -5462,7 +5732,7 @@ impl ChatWidget {
self.on_plan_item_completed(plan_item.text.clone());
}
if let codex_protocol::items::TurnItem::AgentMessage(item) = item {
self.on_agent_message_item_completed(item);
self.on_agent_message_item_completed(item, from_replay);
}
}
}
@@ -8586,6 +8856,15 @@ impl ChatWidget {
self.bottom_pane.on_file_search_result(query, matches);
}
/// Routes an async draft-completion result down to the bottom pane, which
/// owns the composer popup displaying the suggestions.
pub(crate) fn on_draft_completion_result(
    &mut self,
    request_id: u64,
    result: Result<Vec<String>, String>,
) {
    self.bottom_pane
        .on_draft_completion_result(request_id, result);
}
/// Handles a Ctrl+C press at the chat-widget layer.
///
/// The first press arms a time-bounded quit shortcut and shows a footer hint via the bottom

View File

@@ -0,0 +1,9 @@
---
source: tui/src/chatwidget/tests.rs
assertion_line: 4199
expression: rendered
---
• 1. what's my name?
A: Your name is Tuan-Lung.
2. what are we doing?
A: We are testing interleaving.

View File

@@ -0,0 +1,9 @@
---
source: tui/src/chatwidget/tests.rs
assertion_line: 4192
expression: pending
---
• 1. Your name is Tuan-Lung.
2. We are testing interleaving.
restructuring answer...

View File

@@ -5,6 +5,7 @@
//! changes show up as stable, reviewable diffs.
use super::*;
use crate::answer_interleave::AnswerInterleaveRequest;
use crate::app_event::AppEvent;
use crate::app_event::ExitMode;
#[cfg(all(not(target_os = "linux"), feature = "voice-input"))]
@@ -1864,6 +1865,11 @@ async fn make_chatwidget_manual(
rate_limit_poller: None,
adaptive_chunking: crate::streaming::chunking::AdaptiveChunkingPolicy::default(),
stream_controller: None,
agent_message_stream_kind: None,
live_answer_handle: None,
pending_answer_interleave: None,
deferred_turn_complete: None,
next_answer_interleave_request_id: 0,
plan_stream_controller: None,
pending_guardian_review_status: PendingGuardianReviewStatus::default(),
last_copyable_output: None,
@@ -2031,6 +2037,26 @@ fn drain_insert_history(
out
}
/// Test helper: drains `rx` until the `StartAnswerInterleave` event appears
/// and returns its payload. Panics if a history cell is inserted first (the
/// answer must not be committed before the interleave result), or if the
/// queue empties / disconnects without producing the request.
fn take_answer_interleave_request(
    rx: &mut tokio::sync::mpsc::UnboundedReceiver<AppEvent>,
) -> AnswerInterleaveRequest {
    loop {
        match rx.try_recv() {
            Ok(AppEvent::StartAnswerInterleave(request)) => return request,
            Ok(AppEvent::InsertHistoryCell(_)) => {
                panic!("unexpected history insert before answer interleave result")
            }
            // Other app events (redraws, etc.) are irrelevant here.
            Ok(_) => continue,
            Err(TryRecvError::Empty) => {
                panic!("expected answer interleave request but queue was empty")
            }
            Err(TryRecvError::Disconnected) => {
                panic!("expected answer interleave request but channel closed")
            }
        }
    }
}
fn lines_to_single_string(lines: &[ratatui::text::Line<'static>]) -> String {
let mut s = String::new();
for line in lines {
@@ -4133,6 +4159,79 @@ async fn live_legacy_agent_message_after_item_completed_does_not_duplicate_assis
assert!(drain_insert_history(&mut rx).is_empty());
}
#[tokio::test]
async fn explicit_final_answer_requests_interleaving_and_commits_result() {
    let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(None).await;
    // Interleaving requires a thread id; give the widget one.
    chat.thread_id = Some(ThreadId::new());
    complete_user_message(
        &mut chat,
        "user-1",
        "1. what's my name?\n2. what are we doing?",
    );
    let _ = drain_insert_history(&mut rx);
    // A FinalAnswer-phase message should open a live answer cell and
    // trigger an interleave request instead of committing immediately.
    complete_assistant_message(
        &mut chat,
        "msg-live",
        "1. Your name is Tuan-Lung.\n2. We are testing interleaving.",
        Some(MessagePhase::FinalAnswer),
    );
    // While the rewrite is pending, the active cell shows the raw answer
    // plus the "restructuring answer..." indicator.
    let pending = lines_to_single_string(
        &chat
            .active_cell
            .as_ref()
            .expect("live answer cell")
            .display_lines(80),
    );
    assert_snapshot!("final_answer_interleave_pending", pending);
    let request = take_answer_interleave_request(&mut rx);
    assert_eq!(
        request.user_prompt,
        "1. what's my name?\n2. what are we doing?"
    );
    // Nothing may be committed to history before the rewrite resolves.
    assert!(drain_insert_history(&mut rx).is_empty());
    chat.on_answer_interleave_result(
        request.request_id,
        Ok("1. what's my name?\nA: Your name is Tuan-Lung.\n2. what are we doing?\nA: We are testing interleaving.".into()),
    );
    // The rewritten answer is committed as exactly one history cell.
    let inserted = drain_insert_history(&mut rx);
    assert_eq!(inserted.len(), 1);
    let rendered = lines_to_single_string(&inserted[0]);
    assert_snapshot!("final_answer_interleave_committed", rendered);
}
#[tokio::test]
async fn legacy_final_answer_without_phase_still_requests_interleaving() {
    let (mut chat, mut rx, _op_rx) = make_chatwidget_manual(None).await;
    // Interleaving requires a thread id; give the widget one.
    chat.thread_id = Some(ThreadId::new());
    complete_user_message(
        &mut chat,
        "user-legacy",
        "1. what's my name?\n2. what are we doing?",
    );
    let _ = drain_insert_history(&mut rx);
    // Legacy models emit no message phase (`None`); the widget must still
    // treat the completed message as a final answer and request a rewrite.
    complete_assistant_message(
        &mut chat,
        "msg-legacy",
        "1. Your name is Tuan-Lung.\n2. We are testing interleaving.",
        None,
    );
    let request = take_answer_interleave_request(&mut rx);
    assert_eq!(
        request.user_prompt,
        "1. what's my name?\n2. what are we doing?"
    );
    assert_eq!(
        request.final_answer,
        "1. Your name is Tuan-Lung.\n2. We are testing interleaving."
    );
}
#[test]
fn rendered_user_message_event_from_inputs_matches_flattened_user_message_shape() {
let local_image = PathBuf::from("/tmp/local.png");

View File

@@ -0,0 +1,338 @@
use std::sync::Arc;
use std::time::Duration;
use codex_core::AuthManager;
use codex_core::ModelClient;
use codex_core::Prompt;
use codex_core::ResponseEvent;
use codex_core::config::Config;
use codex_core::default_client::originator;
use codex_core::models_manager::manager::ModelsManager;
use codex_otel::SessionTelemetry;
use codex_protocol::ThreadId;
use codex_protocol::models::BaseInstructions;
use codex_protocol::models::ContentItem;
use codex_protocol::models::ResponseItem;
use codex_protocol::protocol::SessionSource;
use tokio::time::timeout;
use tokio_stream::StreamExt;
use crate::app_event::AppEvent;
use crate::app_event_sender::AppEventSender;
const DRAFT_COMPLETION_MODEL: &str = "galapagos-nano-alpha";
const DRAFT_COMPLETION_TIMEOUT: Duration = Duration::from_secs(6);
const DRAFT_COMPLETION_AGENT_LABEL: &str = "tui_draft_completion";
const MAX_SUGGESTIONS: usize = 3;
/// Payload for a draft-completion request: the composer draft plus the most
/// recent conversation turns used as context for the model.
#[derive(Debug, Clone)]
pub(crate) struct DraftCompletionRequest {
    // Correlates the async result with the popup that requested it.
    pub(crate) request_id: u64,
    // Current composer text the model should continue.
    pub(crate) draft: String,
    // Last user message, if any, for conversational context.
    pub(crate) last_user_turn: Option<String>,
    // Last assistant message, if any, for conversational context.
    pub(crate) last_assistant_turn: Option<String>,
}
/// Owns the dependencies needed to run draft-completion model calls and
/// report their results back to the app event loop.
pub(crate) struct DraftCompletionManager {
    // Provides cached credentials for the model call.
    auth_manager: Arc<AuthManager>,
    // Resolves model metadata for the completion model.
    models_manager: Arc<ModelsManager>,
    // Channel used to deliver `DraftCompletionResult` events.
    app_tx: AppEventSender,
    // Cloned per request; supplies provider/otel settings.
    config: Config,
}
impl DraftCompletionManager {
    /// Builds a manager around the shared auth/model managers, the app event
    /// channel, and the session config.
    pub(crate) fn new(
        auth_manager: Arc<AuthManager>,
        models_manager: Arc<ModelsManager>,
        app_tx: AppEventSender,
        config: Config,
    ) -> Self {
        Self {
            auth_manager,
            models_manager,
            app_tx,
            config,
        }
    }

    /// Spawns a background task that runs the completion model call (bounded
    /// by `DRAFT_COMPLETION_TIMEOUT`) and posts a `DraftCompletionResult`
    /// event carrying either the suggestions or an error string.
    pub(crate) fn start_request(&self, request: DraftCompletionRequest) {
        let auth_manager = Arc::clone(&self.auth_manager);
        let models_manager = Arc::clone(&self.models_manager);
        let app_tx = self.app_tx.clone();
        // Config is cloned so the task owns its inputs independently of the
        // manager's lifetime.
        let config = self.config.clone();
        tokio::spawn(async move {
            let result = timeout(
                DRAFT_COMPLETION_TIMEOUT,
                complete_draft(
                    auth_manager,
                    models_manager,
                    config,
                    &request.draft,
                    request.last_user_turn.as_deref(),
                    request.last_assistant_turn.as_deref(),
                ),
            )
            .await;
            // Flatten timeout + model errors into one Result<_, String>,
            // logging failures so they don't vanish into the popup.
            let result = match result {
                Ok(Ok(suggestions)) => Ok(suggestions),
                Ok(Err(err)) => {
                    tracing::warn!("draft completion failed: {err}");
                    Err(err)
                }
                Err(_) => {
                    tracing::warn!("draft completion timed out");
                    Err("draft completion timed out".to_string())
                }
            };
            app_tx.send(AppEvent::DraftCompletionResult {
                request_id: request.request_id,
                result,
            });
        });
    }
}
/// Runs one completion-model call for the given draft and returns the
/// parsed suggestion list.
///
/// Builds a throwaway sub-agent session (fresh thread ids, telemetry tagged
/// with `DRAFT_COMPLETION_AGENT_LABEL`), streams the response, accumulates
/// the output text, and hands it to `parse_suggestions`.
///
/// # Errors
/// Returns the stringified model-client error if the stream cannot be
/// opened or yields an error event, or whatever `parse_suggestions`
/// reports for malformed output.
async fn complete_draft(
    auth_manager: Arc<AuthManager>,
    models_manager: Arc<ModelsManager>,
    config: Config,
    draft: &str,
    last_user_turn: Option<&str>,
    last_assistant_turn: Option<&str>,
) -> Result<Vec<String>, String> {
    let model_info = models_manager
        .get_model_info(DRAFT_COMPLETION_MODEL, &config)
        .await;
    // Pull identity details from the cached auth (if any) for telemetry.
    let auth = auth_manager.auth_cached();
    let auth_mode = auth
        .as_ref()
        .map(codex_core::CodexAuth::auth_mode)
        .map(codex_otel::TelemetryAuthMode::from);
    let account_id = auth
        .as_ref()
        .and_then(codex_core::CodexAuth::get_account_id);
    let account_email = auth
        .as_ref()
        .and_then(codex_core::CodexAuth::get_account_email);
    let telemetry = SessionTelemetry::new(
        ThreadId::new(),
        DRAFT_COMPLETION_MODEL,
        DRAFT_COMPLETION_MODEL,
        account_id,
        account_email,
        auth_mode,
        originator().value,
        config.otel.log_user_prompt,
        codex_core::terminal::user_agent(),
        SessionSource::SubAgent(codex_protocol::protocol::SubAgentSource::Other(
            DRAFT_COMPLETION_AGENT_LABEL.to_string(),
        )),
    );
    // NOTE(review): the three `false` flags and trailing `None` mirror
    // `ModelClient::new`'s positional signature — confirm their meanings
    // against that constructor before changing any of them.
    let model_client = ModelClient::new(
        Some(auth_manager),
        ThreadId::new(),
        config.model_provider.clone(),
        SessionSource::SubAgent(codex_protocol::protocol::SubAgentSource::Other(
            DRAFT_COMPLETION_AGENT_LABEL.to_string(),
        )),
        config.model_verbosity,
        false,
        false,
        false,
        None,
    );
    let mut session = model_client.new_session();
    let prompt = PromptBuilder::new(draft, last_user_turn, last_assistant_turn).build();
    let mut stream = session
        .stream(
            &prompt,
            &model_info,
            &telemetry,
            None,
            model_info.default_reasoning_summary,
            None,
            None,
        )
        .await
        .map_err(|err| err.to_string())?;
    // Prefer streamed text deltas; fall back to the completed item's text
    // only when no deltas were received at all.
    let mut output = String::new();
    while let Some(event) = stream.next().await {
        match event.map_err(|err| err.to_string())? {
            ResponseEvent::OutputTextDelta(delta) => output.push_str(&delta),
            ResponseEvent::OutputItemDone(item) if output.is_empty() => {
                output.push_str(&response_item_text(&item));
            }
            ResponseEvent::OutputItemDone(_) => {}
            ResponseEvent::Completed { .. } => break,
            // Reasoning/metadata events carry nothing we need here.
            ResponseEvent::Created
            | ResponseEvent::OutputItemAdded(_)
            | ResponseEvent::ServerModel(_)
            | ResponseEvent::ServerReasoningIncluded(_)
            | ResponseEvent::ReasoningSummaryDelta { .. }
            | ResponseEvent::ReasoningContentDelta { .. }
            | ResponseEvent::ReasoningSummaryPartAdded { .. }
            | ResponseEvent::RateLimits(_)
            | ResponseEvent::ModelsEtag(_) => {}
        }
    }
    parse_suggestions(draft, &output)
}
/// Concatenates the text content carried by a response item.
///
/// Only `Message` items contribute text; every other item kind produces an
/// empty string, and image content adds nothing.
fn response_item_text(item: &ResponseItem) -> String {
    let ResponseItem::Message { content, .. } = item else {
        return String::new();
    };
    let mut text = String::new();
    for part in content {
        match part {
            ContentItem::InputText { text: chunk } | ContentItem::OutputText { text: chunk } => {
                text.push_str(chunk);
            }
            ContentItem::InputImage { .. } => {}
        }
    }
    text
}
/// Assembles the single-turn prompt sent to the draft-completion model.
struct PromptBuilder<'a> {
    // Text currently in the composer that the model should continue.
    draft: &'a str,
    // Most recent user message, if any, supplied as conversational context.
    last_user_turn: Option<&'a str>,
    // Most recent assistant reply, if any, supplied as conversational context.
    last_assistant_turn: Option<&'a str>,
}
impl<'a> PromptBuilder<'a> {
    /// Captures the draft and its surrounding conversation context.
    fn new(
        draft: &'a str,
        last_user_turn: Option<&'a str>,
        last_assistant_turn: Option<&'a str>,
    ) -> Self {
        Self {
            draft,
            last_user_turn,
            last_assistant_turn,
        }
    }

    /// Renders the instruction text and wraps it in a single user message.
    fn build(self) -> Prompt {
        let last_user = self.last_user_turn.unwrap_or("");
        let last_assistant = self.last_assistant_turn.unwrap_or("");
        let request = format!(
            "You are helping complete a draft in Codex CLI.\n\
            Return exactly three candidate continuations for the current draft.\n\
            Rules:\n\
            - Each suggestion must be only the text to append to the draft.\n\
            - Do not repeat the existing draft.\n\
            - Prefer concise, plausible continuations.\n\
            - Preserve the apparent tone and topic of the conversation.\n\
            - Return strict JSON: an array of exactly 3 strings and nothing else.\n\n\
            <last_user_turn>\n{last_user}\n</last_user_turn>\n\n\
            <last_assistant_turn>\n{last_assistant}\n</last_assistant_turn>\n\n\
            <current_draft>\n{}\n</current_draft>\n",
            self.draft,
        );
        let user_message = ResponseItem::Message {
            id: None,
            role: "user".to_string(),
            content: vec![ContentItem::InputText { text: request }],
            end_turn: None,
            phase: None,
        };
        let mut prompt = Prompt::default();
        prompt.input = vec![user_message];
        prompt.base_instructions = BaseInstructions::default();
        prompt
    }
}
/// Turns the model's raw output into at most `MAX_SUGGESTIONS` distinct,
/// sanitized continuation strings.
///
/// Prefers strict-JSON output (an array of strings); otherwise falls back to
/// one suggestion per non-empty line, stripping list markers such as
/// `1.`, `-`, or `*`.
///
/// # Errors
/// Returns an error string when the output is empty or no suggestion
/// survives sanitization and deduplication.
fn parse_suggestions(draft: &str, output: &str) -> Result<Vec<String>, String> {
    let trimmed = output.trim();
    if trimmed.is_empty() {
        return Err("draft completion returned an empty response".to_string());
    }
    let candidates: Vec<String> = match serde_json::from_str::<Vec<String>>(trimmed) {
        Ok(parsed) => parsed,
        // Not valid JSON: treat each non-empty line as a candidate after
        // removing leading enumeration/bullet characters.
        Err(_) => trimmed
            .lines()
            .filter_map(|raw| {
                let line = raw.trim();
                if line.is_empty() {
                    return None;
                }
                let stripped = line
                    .trim_start_matches(|ch: char| {
                        ch.is_ascii_digit() || matches!(ch, '.' | ')' | '-' | '*' | ' ')
                    })
                    .trim();
                (!stripped.is_empty()).then(|| stripped.to_string())
            })
            .collect(),
    };
    // Sanitize, drop empties and duplicates, and cap the list size.
    let mut deduped: Vec<String> = Vec::new();
    for candidate in candidates {
        let cleaned = sanitize_suggestion(draft, &candidate);
        if cleaned.is_empty() || deduped.contains(&cleaned) {
            continue;
        }
        deduped.push(cleaned);
        if deduped.len() == MAX_SUGGESTIONS {
            break;
        }
    }
    if deduped.is_empty() {
        return Err("draft completion returned no usable suggestions".to_string());
    }
    Ok(deduped)
}
/// Normalizes one raw suggestion into the text to append after the draft.
///
/// Peels newlines, carriage returns, tabs, and backticks from both ends,
/// drops trailing whitespace, and — when the model echoed the draft back —
/// removes the leading copy of `draft`. Leading spaces of the remaining
/// suffix are preserved on purpose (e.g. `" world"` after `"hello"`).
fn sanitize_suggestion(draft: &str, suggestion: &str) -> String {
    let stripped = suggestion.trim_matches(|ch: char| matches!(ch, '\n' | '\r' | '\t' | '`'));
    let stripped = stripped.trim_end();
    let tail = match stripped.strip_prefix(draft) {
        Some(rest) => rest,
        None => stripped,
    };
    tail.trim_end().to_string()
}
#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;

    use super::parse_suggestions;

    /// JSON-array output: a suggestion that echoes the draft must have the
    /// draft prefix removed; the others pass through untouched.
    #[test]
    fn parse_suggestions_strips_repeated_draft_prefix() {
        let raw = r#"["hello world", " there", " everyone"]"#;
        let parsed = parse_suggestions("hello", raw).expect("parse suggestions");
        let expected: Vec<String> = [" world", " there", " everyone"]
            .iter()
            .map(|s| (*s).to_string())
            .collect();
        assert_eq!(parsed, expected);
    }

    /// Non-JSON output: numbered lines are split and their list markers
    /// stripped.
    #[test]
    fn parse_suggestions_falls_back_to_plain_lines() {
        let raw = "1. first suggestion\n2. second suggestion\n3. third suggestion";
        let parsed = parse_suggestions("draft", raw).expect("parse suggestions");
        let expected: Vec<String> = ["first suggestion", "second suggestion", "third suggestion"]
            .iter()
            .map(|s| (*s).to_string())
            .collect();
        assert_eq!(parsed, expected);
    }
}

View File

@@ -66,6 +66,7 @@ use tracing_subscriber::prelude::*;
use uuid::Uuid;
mod additional_dirs;
mod answer_interleave;
mod app;
mod app_backtrack;
mod app_event;
@@ -84,6 +85,7 @@ pub mod custom_terminal;
mod cwd_prompt;
mod debug_config;
mod diff_render;
mod draft_completion;
mod exec_cell;
mod exec_command;
mod external_editor;

File diff suppressed because it is too large Load Diff

View File

@@ -9,6 +9,6 @@ expression: term.backend()
"~ "
"~ "
"───────────────────────────────── 100% ─"
" ↑/↓ to scroll pgup/pgdn to page hom"
" q to quit "
" <↑>/<↓> to scroll <pgup>/<pgdn> to pa"
" <q> to quit "
" "

View File

@@ -1,6 +1,5 @@
---
source: tui/src/pager_overlay.rs
assertion_line: 798
expression: term.backend()
---
"/ S T A T I C / / / / / "
@@ -8,6 +7,6 @@ expression: term.backend()
"should wrap when "
"rendered within a narrow"
"─────────────────── 0% ─"
" ↑/↓ to scroll pgup/pg"
" q to quit "
" <↑>/<↓> to scroll <pg"
" <q> to quit "
" "

View File

@@ -2,14 +2,15 @@
source: tui/src/pager_overlay.rs
expression: snapshot
---
/ T R A N S C R I P T / / / / / / / / / / / / / / / / / / / / / / / / / / / / /
• Added foo.txt (+2 -0)
1 +hello
2 +world
• Added foo.txt (+2 -0)
1 +hello
2 +world
─────────────────────────────────────────────────────────────────────────── 0% ─
↑/↓ to scroll pgup/pgdn to page home/end to jump
q to quit esc to edit prev
/ T R A N S C R I P T / / / / / / / / / / / ┌User Prompts──────────────────────┐
• Added foo.txt (+2 -0) │No user prompts yet. │
1 +hello │ │
2 +world │Add another turn │
│and reopen Ctrl+T. │
• Added foo.txt (+2 -0) │ │
1 +hello │ │
2 +world │ │
─────────────────────────────────────── 0% ─└──────────────────────────────────
<↑>/<↓> to scroll <pgup>/<pgdn> to page <home>/<end> to jump
<q> to quit <a> hide side panel <e> expand all <d> show details <tab> s
</>/<?> to search

View File

@@ -9,6 +9,6 @@ expression: term.backend()
"~ "
"~ "
"───────────────────────────────── 100% ─"
" ↑/↓ to scroll pgup/pgdn to page hom"
" q to quit esc to edit prev "
" "
" <↑>/<↓> to scroll <pgup>/<pgdn> to pa"
" <q> to quit <a> hide side panel <e>"
"</>/<?> to search "

View File

@@ -0,0 +1,20 @@
---
source: tui/src/pager_overlay.rs
expression: term.backend()
---
"/ T R A N S C R I P T / / / / / / / / / / / / / / / / / / / / / / / / / "
" "
" Find the foo item "
" "
" "
" Details... "
" "
"• Final answer that also mentions foo. "
" "
" 2. Other prompt "
"• (no response yet) "
"~ "
"───────────────────────────────────────────────────────────────── 100% ─"
" <↑>/<↓> to scroll <pgup>/<pgdn> to page <home>/<end> to jump "
" <q> to quit <a> hide side panel <e> expand all <d> show details "
"Search /foo 1/3 Find the foo item <n>/<N> <esc> clear "

View File

@@ -9,6 +9,6 @@ expression: term.backend()
" "
"gamma "
"───────────────────────────────── 100% ─"
" ↑/↓ to scroll pgup/pgdn to page hom"
" q to quit esc to edit prev "
" "
" <↑>/<↓> to scroll <pgup>/<pgdn> to pa"
" <q> to quit <a> hide side panel <e>"
"</>/<?> to search "