Persist text elements through TUI input and history (#9393)

Continuation of the effort to split the following PR into smaller pieces:
https://github.com/openai/codex/pull/9116

## Summary
- Thread user text element ranges through TUI/TUI2 input, submission,
queueing, and history so placeholders survive resume/edit flows.
- Preserve local image attachments alongside text elements and rehydrate
placeholders when restoring drafts.
- Keep model-facing content shapes clean by attaching UI metadata only
to user input/events (no API content changes).

## Key Changes
- TUI/TUI2 composer now captures text element ranges, trims them with
text edits, and restores them when submission is suppressed.
- User history cells render styled spans for text elements and keep
local image paths for future rehydration.
- Initial chat widget bootstraps accept empty `initial_text_elements` to
keep initialization uniform.
- Protocol/core helpers updated to tolerate the new InputText field
shape without changing payloads sent to the API.
This commit is contained in:
charley-oai
2026-01-19 23:49:34 -08:00
committed by GitHub
parent 675f165c56
commit eb90e20c0b
28 changed files with 3081 additions and 494 deletions

View File

@@ -8,6 +8,8 @@ use super::*;
use crate::app_event::AppEvent;
use crate::app_event::ExitMode;
use crate::app_event_sender::AppEventSender;
use crate::bottom_pane::LocalImageAttachment;
use crate::history_cell::UserHistoryCell;
use crate::test_backend::VT100Backend;
use crate::tui::FrameRequester;
use assert_matches::assert_matches;
@@ -67,6 +69,8 @@ use codex_protocol::plan_tool::PlanItemArg;
use codex_protocol::plan_tool::StepStatus;
use codex_protocol::plan_tool::UpdatePlanArgs;
use codex_protocol::protocol::CodexErrorInfo;
use codex_protocol::user_input::TextElement;
use codex_protocol::user_input::UserInput;
use codex_utils_absolute_path::AbsolutePathBuf;
use crossterm::event::KeyCode;
use crossterm::event::KeyEvent;
@@ -182,6 +186,364 @@ async fn resumed_initial_messages_render_history() {
);
}
// A user message replayed from rollout (session resume) must keep its text
// element ranges and local image paths all the way to the rendered history
// cell, so image placeholders can be rehydrated when the draft is restored.
// NOTE(review): relies on project helpers (make_chatwidget_manual) defined
// elsewhere in this test module.
#[tokio::test]
async fn replayed_user_message_preserves_text_elements_and_local_images() {
let (mut chat, mut rx, _ops) = make_chatwidget_manual(None).await;
// Message text begins with an image placeholder covered by a TextElement
// whose byte range spans exactly the placeholder string.
let placeholder = "[Image #1]";
let message = format!("{placeholder} replayed");
let text_elements = vec![TextElement {
byte_range: (0..placeholder.len()).into(),
placeholder: Some(placeholder.to_string()),
}];
let local_images = vec![PathBuf::from("/tmp/replay.png")];
let conversation_id = ThreadId::new();
let rollout_file = NamedTempFile::new().unwrap();
// SessionConfigured carries the replayed user message as an initial
// message, mimicking what core sends when resuming a conversation.
let configured = codex_core::protocol::SessionConfiguredEvent {
session_id: conversation_id,
forked_from_id: None,
model: "test-model".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::ReadOnly,
cwd: PathBuf::from("/home/user/project"),
reasoning_effort: Some(ReasoningEffortConfig::default()),
history_log_id: 0,
history_entry_count: 0,
initial_messages: Some(vec![EventMsg::UserMessage(UserMessageEvent {
message: message.clone(),
images: None,
text_elements: text_elements.clone(),
local_images: local_images.clone(),
})]),
rollout_path: rollout_file.path().to_path_buf(),
};
chat.handle_codex_event(Event {
id: "initial".into(),
msg: EventMsg::SessionConfigured(configured),
});
// Scan emitted app events for the first UserHistoryCell and capture the
// fields we care about; stop at the first match.
let mut user_cell = None;
while let Ok(ev) = rx.try_recv() {
if let AppEvent::InsertHistoryCell(cell) = ev
&& let Some(cell) = cell.as_any().downcast_ref::<UserHistoryCell>()
{
user_cell = Some((
cell.message.clone(),
cell.text_elements.clone(),
cell.local_image_paths.clone(),
));
break;
}
}
// The rendered cell must round-trip message text, element ranges, and
// local image paths unchanged.
let (stored_message, stored_elements, stored_images) =
user_cell.expect("expected a replayed user history cell");
assert_eq!(stored_message, message);
assert_eq!(stored_elements, text_elements);
assert_eq!(stored_images, local_images);
}
// Submitting composer text with placeholders must forward both the text
// elements and the attached local images: the resulting Op::UserTurn should
// contain a LocalImage item followed by a Text item carrying the elements,
// and the inserted history cell must store the same metadata.
#[tokio::test]
async fn submission_preserves_text_elements_and_local_images() {
let (mut chat, mut rx, mut op_rx) = make_chatwidget_manual(None).await;
let conversation_id = ThreadId::new();
let rollout_file = NamedTempFile::new().unwrap();
// Configure a session with no initial messages so only the submission
// below produces history.
let configured = codex_core::protocol::SessionConfiguredEvent {
session_id: conversation_id,
forked_from_id: None,
model: "test-model".to_string(),
model_provider_id: "test-provider".to_string(),
approval_policy: AskForApproval::Never,
sandbox_policy: SandboxPolicy::ReadOnly,
cwd: PathBuf::from("/home/user/project"),
reasoning_effort: Some(ReasoningEffortConfig::default()),
history_log_id: 0,
history_entry_count: 0,
initial_messages: None,
rollout_path: rollout_file.path().to_path_buf(),
};
chat.handle_codex_event(Event {
id: "initial".into(),
msg: EventMsg::SessionConfigured(configured),
});
// Discard history emitted by session setup before asserting.
drain_insert_history(&mut rx);
let placeholder = "[Image #1]";
let text = format!("{placeholder} submit");
let text_elements = vec![TextElement {
byte_range: (0..placeholder.len()).into(),
placeholder: Some(placeholder.to_string()),
}];
let local_images = vec![PathBuf::from("/tmp/submitted.png")];
// Seed the composer, then submit with Enter.
chat.bottom_pane
.set_composer_text(text.clone(), text_elements.clone(), local_images.clone());
chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
let items = match next_submit_op(&mut op_rx) {
Op::UserTurn { items, .. } => items,
other => panic!("expected Op::UserTurn, got {other:?}"),
};
// Images precede the text item in the submitted turn.
assert_eq!(items.len(), 2);
assert_eq!(
items[0],
UserInput::LocalImage {
path: local_images[0].clone()
}
);
assert_eq!(
items[1],
UserInput::Text {
text: text.clone(),
text_elements: text_elements.clone(),
}
);
// Find the history cell inserted for the submission and capture its
// persisted fields.
let mut user_cell = None;
while let Ok(ev) = rx.try_recv() {
if let AppEvent::InsertHistoryCell(cell) = ev
&& let Some(cell) = cell.as_any().downcast_ref::<UserHistoryCell>()
{
user_cell = Some((
cell.message.clone(),
cell.text_elements.clone(),
cell.local_image_paths.clone(),
));
break;
}
}
let (stored_message, stored_elements, stored_images) =
user_cell.expect("expected submitted user history cell");
assert_eq!(stored_message, text);
assert_eq!(stored_elements, text_elements);
assert_eq!(stored_images, local_images);
}
// When a turn is interrupted, queued messages are merged back into the
// composer ahead of the existing draft. Each message originally used
// "[Image #1]" locally; after the merge the placeholders must be renumbered
// ([Image #1]..[Image #3]) and the element byte ranges recomputed against
// the combined text, with image paths concatenated in queue order.
#[tokio::test]
async fn interrupted_turn_restores_queued_messages_with_images_and_elements() {
let (mut chat, _rx, _op_rx) = make_chatwidget_manual(None).await;
// Three messages, each with one image placeholder at the start of its
// text. All three deliberately use the same "[Image #1]" label so the
// test exercises renumbering.
let first_placeholder = "[Image #1]";
let first_text = format!("{first_placeholder} first");
let first_elements = vec![TextElement {
byte_range: (0..first_placeholder.len()).into(),
placeholder: Some(first_placeholder.to_string()),
}];
let first_images = [PathBuf::from("/tmp/first.png")];
let second_placeholder = "[Image #1]";
let second_text = format!("{second_placeholder} second");
let second_elements = vec![TextElement {
byte_range: (0..second_placeholder.len()).into(),
placeholder: Some(second_placeholder.to_string()),
}];
let second_images = [PathBuf::from("/tmp/second.png")];
let existing_placeholder = "[Image #1]";
let existing_text = format!("{existing_placeholder} existing");
let existing_elements = vec![TextElement {
byte_range: (0..existing_placeholder.len()).into(),
placeholder: Some(existing_placeholder.to_string()),
}];
let existing_images = vec![PathBuf::from("/tmp/existing.png")];
// Two messages waiting in the queue...
chat.queued_user_messages.push_back(UserMessage {
text: first_text,
local_images: vec![LocalImageAttachment {
placeholder: first_placeholder.to_string(),
path: first_images[0].clone(),
}],
text_elements: first_elements,
});
chat.queued_user_messages.push_back(UserMessage {
text: second_text,
local_images: vec![LocalImageAttachment {
placeholder: second_placeholder.to_string(),
path: second_images[0].clone(),
}],
text_elements: second_elements,
});
chat.refresh_queued_user_messages();
// ...plus a draft already sitting in the composer.
chat.bottom_pane
.set_composer_text(existing_text, existing_elements, existing_images.clone());
// When interrupted, queued messages are merged into the composer; image placeholders
// must be renumbered to match the combined local image list.
chat.handle_codex_event(Event {
id: "interrupt".into(),
msg: EventMsg::TurnAborted(codex_core::protocol::TurnAbortedEvent {
reason: TurnAbortReason::Interrupted,
}),
});
// Queue order first, then the pre-existing draft last.
let first = "[Image #1] first".to_string();
let second = "[Image #2] second".to_string();
let third = "[Image #3] existing".to_string();
let expected_text = format!("{first}\n{second}\n{third}");
assert_eq!(chat.bottom_pane.composer_text(), expected_text);
// Byte offsets of each merged line within the combined text; the `+ 1`
// accounts for the joining newline.
let first_start = 0;
let second_start = first.len() + 1;
let third_start = second_start + second.len() + 1;
let expected_elements = vec![
TextElement {
byte_range: (first_start..first_start + "[Image #1]".len()).into(),
placeholder: Some("[Image #1]".to_string()),
},
TextElement {
byte_range: (second_start..second_start + "[Image #2]".len()).into(),
placeholder: Some("[Image #2]".to_string()),
},
TextElement {
byte_range: (third_start..third_start + "[Image #3]".len()).into(),
placeholder: Some("[Image #3]".to_string()),
},
];
assert_eq!(chat.bottom_pane.composer_text_elements(), expected_elements);
// Image paths follow the same order as the renumbered placeholders.
assert_eq!(
chat.bottom_pane.composer_local_image_paths(),
vec![
first_images[0].clone(),
second_images[0].clone(),
existing_images[0].clone(),
]
);
}
/// Remapping assigns fresh sequential labels in attachment order, matching
/// each text element to its attachment by the recorded placeholder label —
/// even when the text references the images out of order.
#[tokio::test]
async fn remap_placeholders_uses_attachment_labels() {
    let label_one = "[Image #1]";
    let label_two = "[Image #2]";
    let label_len = label_one.len();
    // The composer text mentions the second image before the first.
    let text = format!("{label_two} before {label_one}");
    let prefix = "[Image #2] before ";
    let elements = vec![
        TextElement {
            byte_range: (0..label_len).into(),
            placeholder: Some(label_two.to_string()),
        },
        TextElement {
            byte_range: (prefix.len()..prefix.len() + label_len).into(),
            placeholder: Some(label_one.to_string()),
        },
    ];
    // Attachments are listed in their original numeric order.
    let attachments = vec![
        LocalImageAttachment {
            placeholder: label_one.to_string(),
            path: PathBuf::from("/tmp/one.png"),
        },
        LocalImageAttachment {
            placeholder: label_two.to_string(),
            path: PathBuf::from("/tmp/two.png"),
        },
    ];
    let message = UserMessage {
        text,
        text_elements: elements,
        local_images: attachments,
    };

    // Labels continue from 3, so attachment one -> #3 and two -> #4.
    let mut next_label = 3usize;
    let remapped = remap_placeholders_for_message(message, &mut next_label);

    assert_eq!(remapped.text, "[Image #4] before [Image #3]");
    let new_prefix = "[Image #4] before ";
    assert_eq!(
        remapped.text_elements,
        vec![
            TextElement {
                byte_range: (0..label_len).into(),
                placeholder: Some("[Image #4]".to_string()),
            },
            TextElement {
                byte_range: (new_prefix.len()..new_prefix.len() + label_len).into(),
                placeholder: Some("[Image #3]".to_string()),
            },
        ]
    );
    assert_eq!(
        remapped.local_images,
        vec![
            LocalImageAttachment {
                placeholder: "[Image #3]".to_string(),
                path: PathBuf::from("/tmp/one.png"),
            },
            LocalImageAttachment {
                placeholder: "[Image #4]".to_string(),
                path: PathBuf::from("/tmp/two.png"),
            },
        ]
    );
}
/// When text elements carry no placeholder label, remapping falls back to
/// their byte ranges to locate the placeholder text, and still produces the
/// same renumbered output as the labeled case.
#[tokio::test]
async fn remap_placeholders_uses_byte_ranges_when_placeholder_missing() {
    let label_one = "[Image #1]";
    let label_two = "[Image #2]";
    let label_len = label_one.len();
    // The composer text mentions the second image before the first.
    let text = format!("{label_two} before {label_one}");
    let prefix = "[Image #2] before ";
    // Elements identify the placeholders purely by byte range — no label.
    let elements = vec![
        TextElement {
            byte_range: (0..label_len).into(),
            placeholder: None,
        },
        TextElement {
            byte_range: (prefix.len()..prefix.len() + label_len).into(),
            placeholder: None,
        },
    ];
    let attachments = vec![
        LocalImageAttachment {
            placeholder: label_one.to_string(),
            path: PathBuf::from("/tmp/one.png"),
        },
        LocalImageAttachment {
            placeholder: label_two.to_string(),
            path: PathBuf::from("/tmp/two.png"),
        },
    ];
    let message = UserMessage {
        text,
        text_elements: elements,
        local_images: attachments,
    };

    // Labels continue from 3, so attachment one -> #3 and two -> #4.
    let mut next_label = 3usize;
    let remapped = remap_placeholders_for_message(message, &mut next_label);

    assert_eq!(remapped.text, "[Image #4] before [Image #3]");
    // The remapped elements gain explicit labels even though the inputs
    // had none.
    let new_prefix = "[Image #4] before ";
    assert_eq!(
        remapped.text_elements,
        vec![
            TextElement {
                byte_range: (0..label_len).into(),
                placeholder: Some("[Image #4]".to_string()),
            },
            TextElement {
                byte_range: (new_prefix.len()..new_prefix.len() + label_len).into(),
                placeholder: Some("[Image #3]".to_string()),
            },
        ]
    );
    assert_eq!(
        remapped.local_images,
        vec![
            LocalImageAttachment {
                placeholder: "[Image #3]".to_string(),
                path: PathBuf::from("/tmp/one.png"),
            },
            LocalImageAttachment {
                placeholder: "[Image #4]".to_string(),
                path: PathBuf::from("/tmp/two.png"),
            },
        ]
    );
}
/// Entering review mode uses the hint provided by the review request.
#[tokio::test]
async fn entered_review_mode_uses_request_hint() {
@@ -352,8 +714,7 @@ async fn helpers_are_available_and_do_not_panic() {
config: cfg,
frame_requester: FrameRequester::test_dummy(),
app_event_tx: tx,
initial_prompt: None,
initial_images: Vec::new(),
initial_user_message: None,
enhanced_keys_supported: false,
auth_manager,
models_manager: thread_manager.get_models_manager(),
@@ -1080,7 +1441,8 @@ async fn enqueueing_history_prompt_multiple_times_is_stable() {
chat.thread_id = Some(ThreadId::new());
// Submit an initial prompt to seed history.
chat.bottom_pane.set_composer_text("repeat me".to_string());
chat.bottom_pane
.set_composer_text("repeat me".to_string(), Vec::new(), Vec::new());
chat.handle_key_event(KeyEvent::new(KeyCode::Enter, KeyModifiers::NONE));
// Simulate an active task so further submissions are queued.
@@ -1114,7 +1476,7 @@ async fn streaming_final_answer_keeps_task_running_state() {
assert!(chat.bottom_pane.status_widget().is_none());
chat.bottom_pane
.set_composer_text("queued submission".to_string());
.set_composer_text("queued submission".to_string(), Vec::new(), Vec::new());
chat.handle_key_event(KeyEvent::new(KeyCode::Tab, KeyModifiers::NONE));
assert_eq!(chat.queued_user_messages.len(), 1);
@@ -1581,7 +1943,8 @@ async fn collab_slash_command_sets_mode_and_next_submit_sends_user_turn() {
chat.dispatch_command_with_args(SlashCommand::Collab, "plan".to_string());
assert_eq!(chat.collaboration_mode, CollaborationModeSelection::Plan);
chat.bottom_pane.set_composer_text("hello".to_string());
chat.bottom_pane
.set_composer_text("hello".to_string(), Vec::new(), Vec::new());
chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
match next_submit_op(&mut op_rx) {
Op::UserTurn {
@@ -1591,7 +1954,8 @@ async fn collab_slash_command_sets_mode_and_next_submit_sends_user_turn() {
other => panic!("expected Op::UserTurn with plan collab mode, got {other:?}"),
}
chat.bottom_pane.set_composer_text("follow up".to_string());
chat.bottom_pane
.set_composer_text("follow up".to_string(), Vec::new(), Vec::new());
chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
match next_submit_op(&mut op_rx) {
Op::UserTurn {
@@ -1608,7 +1972,8 @@ async fn collab_mode_defaults_to_pair_programming_when_enabled() {
chat.thread_id = Some(ThreadId::new());
chat.set_feature_enabled(Feature::CollaborationModes, true);
chat.bottom_pane.set_composer_text("hello".to_string());
chat.bottom_pane
.set_composer_text("hello".to_string(), Vec::new(), Vec::new());
chat.handle_key_event(KeyEvent::from(KeyCode::Enter));
match next_submit_op(&mut op_rx) {
Op::UserTurn {
@@ -2863,7 +3228,7 @@ async fn interrupt_prepends_queued_messages_before_existing_composer_text() {
chat.bottom_pane.set_task_running(true);
chat.bottom_pane
.set_composer_text("current draft".to_string());
.set_composer_text("current draft".to_string(), Vec::new(), Vec::new());
chat.queued_user_messages
.push_back(UserMessage::from("first queued".to_string()));
@@ -3887,8 +4252,11 @@ async fn chatwidget_exec_and_status_layout_vt100_snapshot() {
delta: "**Investigating rendering code**".into(),
}),
});
chat.bottom_pane
.set_composer_text("Summarize recent commits".to_string());
chat.bottom_pane.set_composer_text(
"Summarize recent commits".to_string(),
Vec::new(),
Vec::new(),
);
let width: u16 = 80;
let ui_height: u16 = chat.desired_height(width);